Mirror of https://github.com/paperless-ngx/paperless-ngx.git, synced 2026-01-08 21:24:26 -06:00

Compare commits: dependabot...feature-be (4 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 9a29195792 |  |
|  | bdd00498a1 |  |
|  | 92deebddd4 |  |
|  | c7efcee3d6 |  |

104  .github/workflows/ci-backend.yml (vendored)
@@ -1,104 +0,0 @@
name: Backend Tests
on:
  push:
    branches-ignore:
      - 'translations**'
    paths:
      - 'src/**'
      - 'pyproject.toml'
      - 'uv.lock'
      - 'docker/compose/docker-compose.ci-test.yml'
      - '.github/workflows/ci-backend.yml'
  pull_request:
    branches-ignore:
      - 'translations**'
    paths:
      - 'src/**'
      - 'pyproject.toml'
      - 'uv.lock'
      - 'docker/compose/docker-compose.ci-test.yml'
      - '.github/workflows/ci-backend.yml'
  workflow_dispatch:
concurrency:
  group: backend-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
env:
  DEFAULT_UV_VERSION: "0.9.x"
  NLTK_DATA: "/usr/share/nltk_data"
jobs:
  test:
    name: "Python ${{ matrix.python-version }}"
    runs-on: ubuntu-24.04
    strategy:
      matrix:
        python-version: ['3.10', '3.11', '3.12']
      fail-fast: false
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Start containers
        run: |
          docker compose --file docker/compose/docker-compose.ci-test.yml pull --quiet
          docker compose --file docker/compose/docker-compose.ci-test.yml up --detach
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v6
        with:
          python-version: "${{ matrix.python-version }}"
      - name: Install uv
        uses: astral-sh/setup-uv@v7
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ steps.setup-python.outputs.python-version }}
      - name: Install system dependencies
        run: |
          sudo apt-get update -qq
          sudo apt-get install -qq --no-install-recommends \
            unpaper tesseract-ocr imagemagick ghostscript libzbar0 poppler-utils
      - name: Configure ImageMagick
        run: |
          sudo cp docker/rootfs/etc/ImageMagick-6/paperless-policy.xml /etc/ImageMagick-6/policy.xml
      - name: Install Python dependencies
        run: |
          uv sync \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --group testing \
            --frozen
      - name: List installed Python dependencies
        run: |
          uv pip list
      - name: Install NLTK data
        run: |
          uv run python -m nltk.downloader punkt punkt_tab snowball_data stopwords -d ${{ env.NLTK_DATA }}
      - name: Run tests
        env:
          NLTK_DATA: ${{ env.NLTK_DATA }}
          PAPERLESS_CI_TEST: 1
          PAPERLESS_MAIL_TEST_HOST: ${{ secrets.TEST_MAIL_HOST }}
          PAPERLESS_MAIL_TEST_USER: ${{ secrets.TEST_MAIL_USER }}
          PAPERLESS_MAIL_TEST_PASSWD: ${{ secrets.TEST_MAIL_PASSWD }}
        run: |
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --dev \
            --frozen \
            pytest
      - name: Upload test results to Codecov
        if: always()
        uses: codecov/codecov-action@v5
        with:
          flags: backend-python-${{ matrix.python-version }}
          files: junit.xml
          report_type: test_results
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v5
        with:
          flags: backend-python-${{ matrix.python-version }}
          files: coverage.xml
          report_type: coverage
      - name: Stop containers
        if: always()
        run: |
          docker compose --file docker/compose/docker-compose.ci-test.yml logs
          docker compose --file docker/compose/docker-compose.ci-test.yml down

233  .github/workflows/ci-docker.yml (vendored)
@@ -1,233 +0,0 @@
name: Docker Build
on:
  push:
    tags:
      - 'v[0-9]+.[0-9]+.[0-9]+'
      - 'v[0-9]+.[0-9]+.[0-9]+-beta.rc[0-9]+'
    branches:
      - dev
      - beta
  pull_request:
    branches:
      - dev
      - main
  workflow_dispatch:
concurrency:
  group: docker-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
env:
  REGISTRY: ghcr.io
jobs:
  build-arch:
    name: Build ${{ matrix.arch }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - runner: ubuntu-24.04
            arch: amd64
            platform: linux/amd64
          - runner: ubuntu-24.04-arm
            arch: arm64
            platform: linux/arm64
    runs-on: ${{ matrix.runner }}
    permissions:
      contents: read
      packages: write
    outputs:
      can-push: ${{ steps.check-push.outputs.can-push }}
      push-external: ${{ steps.check-push.outputs.push-external }}
      repository: ${{ steps.repo.outputs.name }}
      ref-name: ${{ steps.ref.outputs.name }}
    steps:
      - name: Checkout
        uses: actions/checkout@v6.0.1
      - name: Determine ref name
        id: ref
        run: |
          ref_name="${GITHUB_HEAD_REF:-$GITHUB_REF_NAME}"
          # Sanitize by replacing / with - for cache keys
          cache_ref="${ref_name//\//-}"

          echo "ref_name=${ref_name}"
          echo "cache_ref=${cache_ref}"

          echo "name=${ref_name}" >> $GITHUB_OUTPUT
          echo "cache-ref=${cache_ref}" >> $GITHUB_OUTPUT
      - name: Check push permissions
        id: check-push
        env:
          REF_NAME: ${{ steps.ref.outputs.name }}
        run: |
          # can-push: Can we push to GHCR?
          # True for: pushes, or PRs from the same repo (not forks)
          can_push=${{ github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository }}
          echo "can-push=${can_push}"
          echo "can-push=${can_push}" >> $GITHUB_OUTPUT

          # push-external: Should we also push to Docker Hub and Quay.io?
          # Only for main repo on dev/beta branches or version tags
          push_external="false"
          if [[ "${can_push}" == "true" && "${{ github.repository_owner }}" == "paperless-ngx" ]]; then
            case "${REF_NAME}" in
              dev|beta)
                push_external="true"
                ;;
            esac
            case "${{ github.ref }}" in
              refs/tags/v*|*beta.rc*)
                push_external="true"
                ;;
            esac
          fi
          echo "push-external=${push_external}"
          echo "push-external=${push_external}" >> $GITHUB_OUTPUT
      - name: Set repository name
        id: repo
        run: |
          repo_name="${{ github.repository }}"
          repo_name="${repo_name,,}"

          echo "repository=${repo_name}"
          echo "name=${repo_name}" >> $GITHUB_OUTPUT
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3.12.0
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3.6.0
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Docker metadata
        id: docker-meta
        uses: docker/metadata-action@v5.10.0
        with:
          images: |
            ${{ env.REGISTRY }}/${{ steps.repo.outputs.name }}
          tags: |
            type=ref,event=branch
            type=raw,value=${{ steps.ref.outputs.name }},enable=${{ github.event_name == 'pull_request' }}
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
      - name: Build and push by digest
        id: build
        uses: docker/build-push-action@v6.18.0
        with:
          context: .
          file: ./Dockerfile
          platforms: ${{ matrix.platform }}
          labels: ${{ steps.docker-meta.outputs.labels }}
          build-args: |
            PNGX_TAG_VERSION=${{ steps.docker-meta.outputs.version }}
          outputs: type=image,name=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }},push-by-digest=true,name-canonical=true,push=${{ steps.check-push.outputs.can-push }}
          cache-from: |
            type=registry,ref=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }}/cache/app:${{ steps.ref.outputs.cache-ref }}-${{ matrix.arch }}
            type=registry,ref=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }}/cache/app:dev-${{ matrix.arch }}
          cache-to: ${{ steps.check-push.outputs.can-push == 'true' && format('type=registry,mode=max,ref={0}/{1}/cache/app:{2}-{3}', env.REGISTRY, steps.repo.outputs.name, steps.ref.outputs.cache-ref, matrix.arch) || '' }}
      - name: Export digest
        if: steps.check-push.outputs.can-push == 'true'
        run: |
          mkdir -p /tmp/digests
          digest="${{ steps.build.outputs.digest }}"
          echo "digest=${digest}"
          touch "/tmp/digests/${digest#sha256:}"
      - name: Upload digest
        if: steps.check-push.outputs.can-push == 'true'
        uses: actions/upload-artifact@v6.0.0
        with:
          name: digests-${{ matrix.arch }}
          path: /tmp/digests/*
          if-no-files-found: error
          retention-days: 1
  merge-and-push:
    name: Merge and Push Manifest
    runs-on: ubuntu-24.04
    needs: build-arch
    if: needs.build-arch.outputs.can-push == 'true'
    permissions:
      contents: read
      packages: write
    steps:
      - name: Download digests
        uses: actions/download-artifact@v7.0.0
        with:
          path: /tmp/digests
          pattern: digests-*
          merge-multiple: true
      - name: List digests
        run: |
          echo "Downloaded digests:"
          ls -la /tmp/digests/
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3.12.0
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3.6.0
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Login to Docker Hub
        if: needs.build-arch.outputs.push-external == 'true'
        uses: docker/login-action@v3.6.0
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Login to Quay.io
        if: needs.build-arch.outputs.push-external == 'true'
        uses: docker/login-action@v3.6.0
        with:
          registry: quay.io
          username: ${{ secrets.QUAY_USERNAME }}
          password: ${{ secrets.QUAY_ROBOT_TOKEN }}
      - name: Docker metadata
        id: docker-meta
        uses: docker/metadata-action@v5.10.0
        with:
          images: |
            ${{ env.REGISTRY }}/${{ needs.build-arch.outputs.repository }}
          tags: |
            type=ref,event=branch
            type=raw,value=${{ needs.build-arch.outputs.ref-name }},enable=${{ github.event_name == 'pull_request' }}
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
      - name: Create manifest list and push
        working-directory: /tmp/digests
        env:
          REPOSITORY: ${{ needs.build-arch.outputs.repository }}
        run: |
          tags=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "${DOCKER_METADATA_OUTPUT_JSON}")

          digests=""
          for digest in *; do
            digests+="${{ env.REGISTRY }}/${REPOSITORY}@sha256:${digest} "
          done

          echo "Creating manifest with tags: ${tags}"
          echo "From digests: ${digests}"

          docker buildx imagetools create ${tags} ${digests}
      - name: Inspect image
        run: |
          docker buildx imagetools inspect ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
      - name: Copy to Docker Hub
        if: needs.build-arch.outputs.push-external == 'true'
        env:
          TAGS: ${{ steps.docker-meta.outputs.tags }}
          GHCR_REPO: ${{ env.REGISTRY }}/${{ needs.build-arch.outputs.repository }}
        run: |
          for tag in ${TAGS}; do
            dockerhub_tag="${tag/${GHCR_REPO}/docker.io/paperlessngx/paperless-ngx}"
            echo "Copying ${tag} to ${dockerhub_tag}"
            skopeo copy --all "docker://${tag}" "docker://${dockerhub_tag}"
          done
      - name: Copy to Quay.io
        if: needs.build-arch.outputs.push-external == 'true'
        env:
          TAGS: ${{ steps.docker-meta.outputs.tags }}
          GHCR_REPO: ${{ env.REGISTRY }}/${{ needs.build-arch.outputs.repository }}
        run: |
          for tag in ${TAGS}; do
            quay_tag="${tag/${GHCR_REPO}/quay.io/paperlessngx/paperless-ngx}"
            echo "Copying ${tag} to ${quay_tag}"
            skopeo copy --all "docker://${tag}" "docker://${quay_tag}"
          done

88  .github/workflows/ci-docs.yml (vendored)
@@ -1,88 +0,0 @@
name: Documentation
on:
  push:
    branches:
      - main
      - dev
    paths:
      - 'docs/**'
      - 'mkdocs.yml'
      - '.github/workflows/ci-docs.yml'
  pull_request:
    paths:
      - 'docs/**'
      - 'mkdocs.yml'
      - '.github/workflows/ci-docs.yml'
  workflow_dispatch:
concurrency:
  group: docs-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
env:
  DEFAULT_UV_VERSION: "0.9.x"
  DEFAULT_PYTHON_VERSION: "3.11"
jobs:
  build:
    name: Build Documentation
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v6
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install Python dependencies
        run: |
          uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
      - name: Build documentation
        run: |
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --dev \
            --frozen \
            mkdocs build --config-file ./mkdocs.yml
      - name: Upload artifact
        uses: actions/upload-artifact@v6
        with:
          name: documentation
          path: site/
          retention-days: 7
  deploy:
    name: Deploy Documentation
    needs: build
    if: github.event_name == 'push' && github.ref == 'refs/heads/main'
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v6
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install Python dependencies
        run: |
          uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
      - name: Deploy documentation
        run: |
          echo "docs.paperless-ngx.com" > "${{ github.workspace }}/docs/CNAME"
          git config --global user.name "${{ github.actor }}"
          git config --global user.email "${{ github.actor }}@users.noreply.github.com"
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --dev \
            --frozen \
            mkdocs gh-deploy --force --no-history

189  .github/workflows/ci-frontend.yml (vendored)
@@ -1,189 +0,0 @@
name: Frontend Tests
on:
  push:
    branches-ignore:
      - 'translations**'
    paths:
      - 'src-ui/**'
      - '.github/workflows/ci-frontend.yml'
  pull_request:
    branches-ignore:
      - 'translations**'
    paths:
      - 'src-ui/**'
      - '.github/workflows/ci-frontend.yml'
  workflow_dispatch:
concurrency:
  group: frontend-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
  install-dependencies:
    name: Install Dependencies
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 20
        uses: actions/setup-node@v6
        with:
          node-version: 20.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Cache frontend dependencies
        id: cache-frontend-deps
        uses: actions/cache@v5
        with:
          path: |
            ~/.pnpm-store
            ~/.cache
          key: ${{ runner.os }}-frontend-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
      - name: Install dependencies
        run: cd src-ui && pnpm install
  lint:
    name: Lint
    needs: install-dependencies
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 20
        uses: actions/setup-node@v6
        with:
          node-version: 20.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Cache frontend dependencies
        uses: actions/cache@v5
        with:
          path: |
            ~/.pnpm-store
            ~/.cache
          key: ${{ runner.os }}-frontend-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
      - name: Re-link Angular CLI
        run: cd src-ui && pnpm link @angular/cli
      - name: Run lint
        run: cd src-ui && pnpm run lint
  unit-tests:
    name: "Unit Tests (${{ matrix.shard-index }}/${{ matrix.shard-count }})"
    needs: install-dependencies
    runs-on: ubuntu-24.04
    strategy:
      fail-fast: false
      matrix:
        node-version: [20.x]
        shard-index: [1, 2, 3, 4]
        shard-count: [4]
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 20
        uses: actions/setup-node@v6
        with:
          node-version: 20.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Cache frontend dependencies
        uses: actions/cache@v5
        with:
          path: |
            ~/.pnpm-store
            ~/.cache
          key: ${{ runner.os }}-frontend-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
      - name: Re-link Angular CLI
        run: cd src-ui && pnpm link @angular/cli
      - name: Run Jest unit tests
        run: cd src-ui && pnpm run test --max-workers=2 --shard=${{ matrix.shard-index }}/${{ matrix.shard-count }}
      - name: Upload test results to Codecov
        if: always()
        uses: codecov/codecov-action@v5
        with:
          flags: frontend-node-${{ matrix.node-version }}
          directory: src-ui/
          report_type: test_results
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v5
        with:
          flags: frontend-node-${{ matrix.node-version }}
          directory: src-ui/coverage/
  e2e-tests:
    name: "E2E Tests (${{ matrix.shard-index }}/${{ matrix.shard-count }})"
    needs: install-dependencies
    runs-on: ubuntu-24.04
    container: mcr.microsoft.com/playwright:v1.57.0-noble
    env:
      PLAYWRIGHT_BROWSERS_PATH: /ms-playwright
      PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
    strategy:
      fail-fast: false
      matrix:
        node-version: [20.x]
        shard-index: [1, 2]
        shard-count: [2]
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 20
        uses: actions/setup-node@v6
        with:
          node-version: 20.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Cache frontend dependencies
        uses: actions/cache@v5
        with:
          path: |
            ~/.pnpm-store
            ~/.cache
          key: ${{ runner.os }}-frontend-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
      - name: Re-link Angular CLI
        run: cd src-ui && pnpm link @angular/cli
      - name: Install dependencies
        run: cd src-ui && pnpm install --no-frozen-lockfile
      - name: Run Playwright E2E tests
        run: cd src-ui && pnpm exec playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}
  bundle-analysis:
    name: Bundle Analysis
    needs: [unit-tests, e2e-tests]
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 20
        uses: actions/setup-node@v6
        with:
          node-version: 20.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Cache frontend dependencies
        uses: actions/cache@v5
        with:
          path: |
            ~/.pnpm-store
            ~/.cache
          key: ${{ runner.os }}-frontend-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
      - name: Re-link Angular CLI
        run: cd src-ui && pnpm link @angular/cli
      - name: Build and analyze
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
        run: cd src-ui && pnpm run build --configuration=production

24  .github/workflows/ci-lint.yml (vendored)
@@ -1,24 +0,0 @@
name: Lint
on:
  push:
    branches-ignore:
      - 'translations**'
  pull_request:
    branches-ignore:
      - 'translations**'
concurrency:
  group: lint-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
  pre-commit:
    name: Pre-commit Checks
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Install Python
        uses: actions/setup-python@v6
        with:
          python-version: "3.11"
      - name: Run pre-commit
        uses: pre-commit/action@v3.0.1

237  .github/workflows/ci-release.yml (vendored)
@@ -1,237 +0,0 @@
name: Release
on:
  push:
    tags:
      - 'v[0-9]+.[0-9]+.[0-9]+'
      - 'v[0-9]+.[0-9]+.[0-9]+-beta.rc[0-9]+'
concurrency:
  group: release-${{ github.ref }}
  cancel-in-progress: false
env:
  DEFAULT_UV_VERSION: "0.9.x"
  DEFAULT_PYTHON_VERSION: "3.11"
jobs:
  wait-for-docker:
    name: Wait for Docker Build
    runs-on: ubuntu-24.04
    steps:
      - name: Wait for Docker build
        uses: lewagon/wait-on-check-action@v1.4.1
        with:
          ref: ${{ github.sha }}
          check-name: 'Build Docker Image'
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          wait-interval: 60
  build-release:
    name: Build Release
    needs: wait-for-docker
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      # ---- Frontend Build ----
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 20
        uses: actions/setup-node@v6
        with:
          node-version: 20.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Install frontend dependencies
        run: cd src-ui && pnpm install
      - name: Build frontend
        run: cd src-ui && pnpm run build --configuration production
      # ---- Backend Setup ----
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v6
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ steps.setup-python.outputs.python-version }}
      - name: Install Python dependencies
        run: |
          uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
      - name: Install system dependencies
        run: |
          sudo apt-get update -qq
          sudo apt-get install -qq --no-install-recommends gettext liblept5
      # ---- Build Documentation ----
      - name: Build documentation
        run: |
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --dev \
            --frozen \
            mkdocs build --config-file ./mkdocs.yml
      # ---- Prepare Release ----
      - name: Generate requirements file
        run: |
          uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt
      - name: Compile messages
        run: |
          cd src/
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            manage.py compilemessages
      - name: Collect static files
        run: |
          cd src/
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            manage.py collectstatic --no-input --clear
      - name: Assemble release package
        run: |
          mkdir -p dist/paperless-ngx/scripts

          for file_name in .dockerignore \
            .env \
            Dockerfile \
            pyproject.toml \
            uv.lock \
            requirements.txt \
            LICENSE \
            README.md \
            paperless.conf.example
          do
            cp --verbose ${file_name} dist/paperless-ngx/
          done
          mv dist/paperless-ngx/paperless.conf.example dist/paperless-ngx/paperless.conf

          cp --recursive docker/ dist/paperless-ngx/docker
          cp scripts/*.service scripts/*.sh scripts/*.socket dist/paperless-ngx/scripts/
          cp --recursive src/ dist/paperless-ngx/src
          cp --recursive site/ dist/paperless-ngx/docs
          mv static dist/paperless-ngx/

          find dist/paperless-ngx -name "__pycache__" -type d -exec rm -rf {} +
      - name: Create release archive
        run: |
          cd dist
          sudo chown -R 1000:1000 paperless-ngx/
          tar -cJf paperless-ngx.tar.xz paperless-ngx/
      - name: Upload release artifact
        uses: actions/upload-artifact@v6
        with:
          name: release
          path: dist/paperless-ngx.tar.xz
          retention-days: 7
  publish-release:
    name: Publish Release
    needs: build-release
    runs-on: ubuntu-24.04
    outputs:
      prerelease: ${{ steps.get-version.outputs.prerelease }}
      changelog: ${{ steps.create-release.outputs.body }}
      version: ${{ steps.get-version.outputs.version }}
    steps:
      - name: Download release artifact
        uses: actions/download-artifact@v7
        with:
          name: release
          path: ./
      - name: Get version info
        id: get-version
        run: |
          echo "version=${{ github.ref_name }}" >> $GITHUB_OUTPUT
          if [[ "${{ github.ref_name }}" == *"-beta.rc"* ]]; then
            echo "prerelease=true" >> $GITHUB_OUTPUT
          else
            echo "prerelease=false" >> $GITHUB_OUTPUT
          fi
      - name: Create release and changelog
        id: create-release
        uses: release-drafter/release-drafter@v6
        with:
          name: Paperless-ngx ${{ steps.get-version.outputs.version }}
          tag: ${{ steps.get-version.outputs.version }}
          version: ${{ steps.get-version.outputs.version }}
          prerelease: ${{ steps.get-version.outputs.prerelease }}
          publish: true
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Upload release archive
        uses: shogo82148/actions-upload-release-asset@v1
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          upload_url: ${{ steps.create-release.outputs.upload_url }}
          asset_path: ./paperless-ngx.tar.xz
          asset_name: paperless-ngx-${{ steps.get-version.outputs.version }}.tar.xz
          asset_content_type: application/x-xz
  # ---------------------------------------------------------------------------
  # Append changelog to docs (only on non-prerelease)
  # ---------------------------------------------------------------------------
  append-changelog:
    name: Append Changelog
    needs: publish-release
    if: needs.publish-release.outputs.prerelease == 'false'
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v6
        with:
          ref: main
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v6
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Update changelog
        working-directory: docs
        run: |
          git branch ${{ needs.publish-release.outputs.version }}-changelog
          git checkout ${{ needs.publish-release.outputs.version }}-changelog

          echo -e "# Changelog\n\n${{ needs.publish-release.outputs.changelog }}\n" > changelog-new.md

          echo "Manually linking usernames"
          sed -i -r 's|@([a-zA-Z0-9_]+) \(\[#|[@\1](https://github.com/\1) ([#|g' changelog-new.md

          echo "Removing unneeded comment tags"
          sed -i -r 's|@<!---->|@|g' changelog-new.md

          CURRENT_CHANGELOG=$(tail --lines +2 changelog.md)
          echo -e "$CURRENT_CHANGELOG" >> changelog-new.md
          mv changelog-new.md changelog.md

          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --dev \
            pre-commit run --files changelog.md || true

          git config --global user.name "github-actions"
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git commit -am "Changelog ${{ needs.publish-release.outputs.version }} - GHA"
          git push origin ${{ needs.publish-release.outputs.version }}-changelog
      - name: Create pull request
        uses: actions/github-script@v8
        with:
          script: |
            const { repo, owner } = context.repo;
            const result = await github.rest.pulls.create({
              title: 'Documentation: Add ${{ needs.publish-release.outputs.version }} changelog',
              owner,
              repo,
              head: '${{ needs.publish-release.outputs.version }}-changelog',
              base: 'main',
              body: 'This PR is auto-generated by CI.'
            });
            github.rest.issues.addLabels({
              owner,
              repo,
              issue_number: result.data.number,
              labels: ['documentation', 'skip-changelog']
            });

693  .github/workflows/ci.yml (vendored, new file)
@@ -0,0 +1,693 @@
name: ci
on:
  push:
    tags:
      # https://semver.org/#spec-item-2
      - 'v[0-9]+.[0-9]+.[0-9]+'
      # https://semver.org/#spec-item-9
      - 'v[0-9]+.[0-9]+.[0-9]+-beta.rc[0-9]+'
    branches-ignore:
      - 'translations**'
  pull_request:
    branches-ignore:
      - 'translations**'
env:
  DEFAULT_UV_VERSION: "0.9.x"
  # This is the default version of Python to use in most steps which aren't specific
  DEFAULT_PYTHON_VERSION: "3.11"
  NLTK_DATA: "/usr/share/nltk_data"
jobs:
  detect-duplicate:
    name: Detect Duplicate Run
    runs-on: ubuntu-24.04
    outputs:
      should_run: ${{ steps.check.outputs.should_run }}
    steps:
      - name: Check if workflow should run
        id: check
        uses: actions/github-script@v8
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            if (context.eventName !== 'push') {
              core.info('Not a push event; running workflow.');
              core.setOutput('should_run', 'true');
              return;
            }

            const ref = context.ref || '';
            if (!ref.startsWith('refs/heads/')) {
              core.info('Push is not to a branch; running workflow.');
              core.setOutput('should_run', 'true');
              return;
            }

            const branch = ref.substring('refs/heads/'.length);
            const { owner, repo } = context.repo;
            const prs = await github.paginate(github.rest.pulls.list, {
              owner,
              repo,
              state: 'open',
              head: `${owner}:${branch}`,
              per_page: 100,
            });

            if (prs.length === 0) {
              core.info(`No open PR found for ${branch}; running workflow.`);
              core.setOutput('should_run', 'true');
            } else {
              core.info(`Found ${prs.length} open PR(s) for ${branch}; skipping duplicate push run.`);
              core.setOutput('should_run', 'false');
            }
  pre-commit:
    needs:
      - detect-duplicate
    if: needs.detect-duplicate.outputs.should_run == 'true'
    name: Linting Checks
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
      - name: Install python
        uses: actions/setup-python@v6
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Check files
        uses: pre-commit/action@v3.0.1
  documentation:
    name: "Build & Deploy Documentation"
    runs-on: ubuntu-24.04
    needs:
      - pre-commit
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v6
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install Python dependencies
        run: |
          uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
      - name: Make documentation
        run: |
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --dev \
            --frozen \
            mkdocs build --config-file ./mkdocs.yml
      - name: Deploy documentation
        if: github.event_name == 'push' && github.ref == 'refs/heads/main'
        run: |
          echo "docs.paperless-ngx.com" > "${{ github.workspace }}/docs/CNAME"
          git config --global user.name "${{ github.actor }}"
          git config --global user.email "${{ github.actor }}@users.noreply.github.com"
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --dev \
            --frozen \
            mkdocs gh-deploy --force --no-history
      - name: Upload artifact
        uses: actions/upload-artifact@v5
        with:
          name: documentation
          path: site/
          retention-days: 7
  tests-backend:
    name: "Backend Tests (Python ${{ matrix.python-version }})"
    runs-on: ubuntu-24.04
    needs:
      - pre-commit
    strategy:
      matrix:
        python-version: ['3.10', '3.11', '3.12']
      fail-fast: false
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Start containers
        run: |
          docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml pull --quiet
          docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml up --detach
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v6
        with:
          python-version: "${{ matrix.python-version }}"
      - name: Install uv
        uses: astral-sh/setup-uv@v7
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ steps.setup-python.outputs.python-version }}
      - name: Install system dependencies
        run: |
          sudo apt-get update -qq
          sudo apt-get install -qq --no-install-recommends unpaper tesseract-ocr imagemagick ghostscript libzbar0 poppler-utils
      - name: Configure ImageMagick
        run: |
          sudo cp docker/rootfs/etc/ImageMagick-6/paperless-policy.xml /etc/ImageMagick-6/policy.xml
      - name: Install Python dependencies
        run: |
          uv sync \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --group testing \
            --frozen
      - name: List installed Python dependencies
        run: |
          uv pip list
      - name: Install or update NLTK dependencies
        run: uv run python -m nltk.downloader punkt punkt_tab snowball_data stopwords -d ${{ env.NLTK_DATA }}
      - name: Tests
        env:
          NLTK_DATA: ${{ env.NLTK_DATA }}
          PAPERLESS_CI_TEST: 1
          # Enable paperless_mail testing against real server
          PAPERLESS_MAIL_TEST_HOST: ${{ secrets.TEST_MAIL_HOST }}
          PAPERLESS_MAIL_TEST_USER: ${{ secrets.TEST_MAIL_USER }}
          PAPERLESS_MAIL_TEST_PASSWD: ${{ secrets.TEST_MAIL_PASSWD }}
        run: |
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --dev \
            --frozen \
            pytest
      - name: Upload backend test results to Codecov
        if: always()
        uses: codecov/codecov-action@v5
        with:
          flags: backend-python-${{ matrix.python-version }}
          files: junit.xml
          report_type: test_results
      - name: Upload backend coverage to Codecov
        uses: codecov/codecov-action@v5
        with:
          flags: backend-python-${{ matrix.python-version }}
          files: coverage.xml
      - name: Stop containers
        if: always()
        run: |
          docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml logs
          docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml down
  install-frontend-dependencies:
    name: "Install Frontend Dependencies"
    runs-on: ubuntu-24.04
    needs:
      - pre-commit
    steps:
      - uses: actions/checkout@v6
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 20
        uses: actions/setup-node@v6
        with:
          node-version: 20.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Cache frontend dependencies
        id: cache-frontend-deps
        uses: actions/cache@v4
        with:
          path: |
            ~/.pnpm-store
            ~/.cache
          key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
      - name: Install dependencies
        run: cd src-ui && pnpm install
  tests-frontend:
    name: "Frontend Unit Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
    runs-on: ubuntu-24.04
    needs:
      - install-frontend-dependencies
    strategy:
      fail-fast: false
      matrix:
        node-version: [20.x]
        shard-index: [1, 2, 3, 4]
        shard-count: [4]
    steps:
      - uses: actions/checkout@v6
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 20
        uses: actions/setup-node@v6
        with:
          node-version: 20.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Cache frontend dependencies
        id: cache-frontend-deps
        uses: actions/cache@v4
        with:
          path: |
            ~/.pnpm-store
            ~/.cache
          key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
      - name: Re-link Angular cli
        run: cd src-ui && pnpm link @angular/cli
      - name: Linting checks
        run: cd src-ui && pnpm run lint
      - name: Run Jest unit tests
        run: cd src-ui && pnpm run test --max-workers=2 --shard=${{ matrix.shard-index }}/${{ matrix.shard-count }}
      - name: Upload frontend test results to Codecov
        if: always()
        uses: codecov/codecov-action@v5
        with:
          flags: frontend-node-${{ matrix.node-version }}
          directory: src-ui/
          report_type: test_results
      - name: Upload frontend coverage to Codecov
        uses: codecov/codecov-action@v5
        with:
          flags: frontend-node-${{ matrix.node-version }}
          directory: src-ui/coverage/
  tests-frontend-e2e:
    name: "Frontend E2E Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
    runs-on: ubuntu-24.04
    container: mcr.microsoft.com/playwright:v1.57.0-noble
    needs:
      - install-frontend-dependencies
    env:
      PLAYWRIGHT_BROWSERS_PATH: /ms-playwright
      PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
    strategy:
      fail-fast: false
      matrix:
        node-version: [20.x]
        shard-index: [1, 2]
        shard-count: [2]
    steps:
      - uses: actions/checkout@v6
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 20
        uses: actions/setup-node@v6
        with:
          node-version: 20.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Cache frontend dependencies
        id: cache-frontend-deps
        uses: actions/cache@v4
        with:
          path: |
            ~/.pnpm-store
            ~/.cache
          key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
      - name: Re-link Angular cli
        run: cd src-ui && pnpm link @angular/cli
      - name: Install dependencies
        run: cd src-ui && pnpm install --no-frozen-lockfile
      - name: Run Playwright e2e tests
        run: cd src-ui && pnpm exec playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}
  frontend-bundle-analysis:
    name: "Frontend Bundle Analysis"
    runs-on: ubuntu-24.04
    needs:
      - tests-frontend
      - tests-frontend-e2e
    steps:
      - uses: actions/checkout@v6
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 20
        uses: actions/setup-node@v6
        with:
          node-version: 20.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Cache frontend dependencies
        id: cache-frontend-deps
        uses: actions/cache@v4
        with:
          path: |
            ~/.pnpm-store
            ~/.cache
          key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/package-lock.json') }}
      - name: Re-link Angular cli
        run: cd src-ui && pnpm link @angular/cli
      - name: Build frontend and upload analysis
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
        run: cd src-ui && pnpm run build --configuration=production
  build-docker-image:
    name: Build Docker image for ${{ github.event_name == 'pull_request' && github.head_ref || github.ref_name }}
    runs-on: ubuntu-24.04
    if: (github.event_name == 'push' && (startsWith(github.ref, 'refs/heads/feature-') || startsWith(github.ref, 'refs/heads/fix-') || github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/beta' || contains(github.ref, 'beta.rc') || startsWith(github.ref, 'refs/tags/v') || startsWith(github.ref, 'refs/heads/l10n_'))) || (github.event_name == 'pull_request' && (startsWith(github.head_ref, 'feature-') || startsWith(github.head_ref, 'fix-') || github.head_ref == 'dev' || github.head_ref == 'beta' || contains(github.head_ref, 'beta.rc') || startsWith(github.head_ref, 'l10n_')))
    concurrency:
      group: ${{ github.workflow }}-build-docker-image-${{ github.ref_name }}
      cancel-in-progress: true
    needs:
      - tests-backend
      - tests-frontend
      - tests-frontend-e2e
    steps:
      - name: Prepare build variables
        id: build-vars
        uses: actions/github-script@v8
        with:
          result-encoding: string
          script: |
            const isPR = context.eventName === 'pull_request';
            const defaultRefName = context.ref.replace('refs/heads/', '');
            const headRef = isPR ? context.payload.pull_request.head.ref : defaultRefName;
            const buildRef = isPR ? `refs/heads/${headRef}` : context.ref;
            const buildCacheKey = headRef.split('/').join('-');
            const canPush = context.eventName === 'push' || (isPR && context.payload.pull_request.head.repo.full_name === `${context.repo.owner}/${context.repo.repo}`);

            core.setOutput('build-ref', buildRef);
            core.setOutput('build-ref-name', headRef);
            core.setOutput('build-cache-key', buildCacheKey);
            core.setOutput('can-push', canPush ? 'true' : 'false');
      - name: Check pushing to Docker Hub
        id: push-other-places
        # Only push to Dockerhub from the main repo AND the ref is either:
        #  main
        #  dev
        #  beta
        #  a tag
        # Otherwise forks would require a Docker Hub account and secrets setup
        env:
          BUILD_REF: ${{ steps.build-vars.outputs.build-ref }}
          BUILD_REF_NAME: ${{ steps.build-vars.outputs.build-ref-name }}
        run: |
          if [[ ${{ github.repository_owner }} == "paperless-ngx" && ( "$BUILD_REF_NAME" == "dev" || "$BUILD_REF_NAME" == "beta" || $BUILD_REF == refs/tags/v* || $BUILD_REF == *beta.rc* ) ]] ; then
            echo "Enabling DockerHub image push"
            echo "enable=true" >> $GITHUB_OUTPUT
          else
            echo "Not pushing to DockerHub"
            echo "enable=false" >> $GITHUB_OUTPUT
          fi
      - name: Set ghcr repository name
        id: set-ghcr-repository
        run: |
          ghcr_name=$(echo "${{ github.repository }}" | awk '{ print tolower($0) }')
          echo "Name is ${ghcr_name}"
          echo "ghcr-repository=${ghcr_name}" >> $GITHUB_OUTPUT
      - name: Gather Docker metadata
        id: docker-meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}
            name=paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
            name=quay.io/paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
          tags: |
            # Tag branches with branch name
            type=ref,event=branch
            # Pull requests need a sanitized branch tag for pushing images
            type=raw,value=${{ steps.build-vars.outputs.build-cache-key }},enable=${{ github.event_name == 'pull_request' }}
            # Process semver tags
            # For a tag x.y.z or vX.Y.Z, output an x.y.z and x.y image tag
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
      - name: Checkout
        uses: actions/checkout@v6
      # If https://github.com/docker/buildx/issues/1044 is resolved,
      # the append input with a native arm64 arch could be used to
      # significantly speed up building
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
        with:
          platforms: arm64
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        # Don't attempt to login if not pushing to Docker Hub
        if: steps.push-other-places.outputs.enable == 'true'
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Login to Quay.io
        uses: docker/login-action@v3
        # Don't attempt to login if not pushing to Quay.io
        if: steps.push-other-places.outputs.enable == 'true'
        with:
          registry: quay.io
          username: ${{ secrets.QUAY_USERNAME }}
          password: ${{ secrets.QUAY_ROBOT_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v6
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64,linux/arm64
          push: ${{ steps.build-vars.outputs.can-push == 'true' }}
          tags: ${{ steps.docker-meta.outputs.tags }}
          labels: ${{ steps.docker-meta.outputs.labels }}
          build-args: |
            PNGX_TAG_VERSION=${{ steps.docker-meta.outputs.version }}
          # Get cache layers from this branch, then dev
          # This allows new branches to get at least some cache benefits, generally from dev
          cache-from: |
            type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ steps.build-vars.outputs.build-cache-key }}
            type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:dev
          cache-to: ${{ steps.build-vars.outputs.can-push == 'true' && format('type=registry,mode=max,ref=ghcr.io/{0}/builder/cache/app:{1}', steps.set-ghcr-repository.outputs.ghcr-repository, steps.build-vars.outputs.build-cache-key) || '' }}
      - name: Inspect image
        if: steps.build-vars.outputs.can-push == 'true'
        run: |
          docker buildx imagetools inspect ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
      - name: Export frontend artifact from docker
        if: steps.build-vars.outputs.can-push == 'true'
        run: |
          docker create --name frontend-extract ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
          docker cp frontend-extract:/usr/src/paperless/src/documents/static/frontend src/documents/static/frontend/
      - name: Upload frontend artifact
        if: steps.build-vars.outputs.can-push == 'true'
        uses: actions/upload-artifact@v5
        with:
          name: frontend-compiled
          path: src/documents/static/frontend/
          retention-days: 7
  build-release:
    name: "Build Release"
    needs:
      - build-docker-image
      - documentation
    if: github.event_name == 'push'
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v6
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ steps.setup-python.outputs.python-version }}
      - name: Install Python dependencies
        run: |
          uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
      - name: Install system dependencies
        run: |
          sudo apt-get update -qq
          sudo apt-get install -qq --no-install-recommends gettext liblept5
      - name: Download frontend artifact
        uses: actions/download-artifact@v6
        with:
          name: frontend-compiled
          path: src/documents/static/frontend/
      - name: Download documentation artifact
        uses: actions/download-artifact@v6
        with:
          name: documentation
          path: docs/_build/html/
      - name: Generate requirements file
        run: |
          uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt
      - name: Compile messages
        run: |
          cd src/
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            manage.py compilemessages
      - name: Collect static files
        run: |
          cd src/
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            manage.py collectstatic --no-input
      - name: Move files
        run: |
          echo "Making dist folders"
          for directory in dist \
            dist/paperless-ngx \
            dist/paperless-ngx/scripts;
          do
            mkdir --verbose --parents ${directory}
          done

          echo "Copying basic files"
          for file_name in .dockerignore \
            .env \
            Dockerfile \
            pyproject.toml \
            uv.lock \
            requirements.txt \
            LICENSE \
            README.md \
            paperless.conf.example
          do
            cp --verbose ${file_name} dist/paperless-ngx/
          done
          mv --verbose dist/paperless-ngx/paperless.conf.example dist/paperless-ngx/paperless.conf

          echo "Copying Docker related files"
          cp --recursive docker/ dist/paperless-ngx/docker

          echo "Copying startup scripts"
          cp --verbose scripts/*.service scripts/*.sh scripts/*.socket dist/paperless-ngx/scripts/

          echo "Copying source files"
          cp --recursive src/ dist/paperless-ngx/src
          echo "Copying documentation"
          cp --recursive docs/_build/html/ dist/paperless-ngx/docs

          mv --verbose static dist/paperless-ngx
      - name: Make release package
        run: |
          echo "Creating release archive"
          cd dist
          sudo chown -R 1000:1000 paperless-ngx/
          tar -cJf paperless-ngx.tar.xz paperless-ngx/
      - name: Upload release artifact
        uses: actions/upload-artifact@v5
        with:
          name: release
          path: dist/paperless-ngx.tar.xz
          retention-days: 7
  publish-release:
    name: "Publish Release"
    runs-on: ubuntu-24.04
    outputs:
      prerelease: ${{ steps.get_version.outputs.prerelease }}
      changelog: ${{ steps.create-release.outputs.body }}
      version: ${{ steps.get_version.outputs.version }}
    needs:
      - build-release
    if: github.ref_type == 'tag' && (startsWith(github.ref_name, 'v') || contains(github.ref_name, '-beta.rc'))
    steps:
      - name: Download release artifact
        uses: actions/download-artifact@v6
        with:
          name: release
          path: ./
      - name: Get version
        id: get_version
        run: |
          echo "version=${{ github.ref_name }}" >> $GITHUB_OUTPUT
          if [[ ${{ contains(github.ref_name, '-beta.rc') }} == 'true' ]]; then
            echo "prerelease=true" >> $GITHUB_OUTPUT
          else
            echo "prerelease=false" >> $GITHUB_OUTPUT
          fi
      - name: Create Release and Changelog
        id: create-release
        uses: release-drafter/release-drafter@v6
        with:
          name: Paperless-ngx ${{ steps.get_version.outputs.version }}
          tag: ${{ steps.get_version.outputs.version }}
          version: ${{ steps.get_version.outputs.version }}
          prerelease: ${{ steps.get_version.outputs.prerelease }}
          publish: true # ensures release is not marked as draft
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Upload release archive
        id: upload-release-asset
        uses: shogo82148/actions-upload-release-asset@v1
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          upload_url: ${{ steps.create-release.outputs.upload_url }}
          asset_path: ./paperless-ngx.tar.xz
          asset_name: paperless-ngx-${{ steps.get_version.outputs.version }}.tar.xz
          asset_content_type: application/x-xz
  append-changelog:
    name: "Append Changelog"
    runs-on: ubuntu-24.04
    needs:
      - publish-release
    if: needs.publish-release.outputs.prerelease == 'false'
    steps:
      - name: Checkout
        uses: actions/checkout@v6
        with:
          ref: main
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v6
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install uv
        uses: astral-sh/setup-uv@v7
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Append Changelog to docs
        id: append-Changelog
        working-directory: docs
        run: |
          git branch ${{ needs.publish-release.outputs.version }}-changelog
          git checkout ${{ needs.publish-release.outputs.version }}-changelog
          echo -e "# Changelog\n\n${{ needs.publish-release.outputs.changelog }}\n" > changelog-new.md
          echo "Manually linking usernames"
          sed -i -r 's|@([a-zA-Z0-9_]+) \(\[#|[@\1](https://github.com/\1) ([#|g' changelog-new.md
          echo "Removing unneeded comment tags"
          sed -i -r 's|@<!---->|@|g' changelog-new.md
          CURRENT_CHANGELOG=`tail --lines +2 changelog.md`
          echo -e "$CURRENT_CHANGELOG" >> changelog-new.md
          mv changelog-new.md changelog.md
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --dev \
            pre-commit run --files changelog.md || true
          git config --global user.name "github-actions"
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git commit -am "Changelog ${{ needs.publish-release.outputs.version }} - GHA"
          git push origin ${{ needs.publish-release.outputs.version }}-changelog
      - name: Create Pull Request
        uses: actions/github-script@v8
        with:
          script: |
            const { repo, owner } = context.repo;
            const result = await github.rest.pulls.create({
              title: 'Documentation: Add ${{ needs.publish-release.outputs.version }} changelog',
              owner,
              repo,
              head: '${{ needs.publish-release.outputs.version }}-changelog',
              base: 'main',
              body: 'This PR is auto-generated by CI.'
            });
            github.rest.issues.addLabels({
              owner,
              repo,
              issue_number: result.data.number,
              labels: ['documentation', 'skip-changelog']
            });
2
.github/workflows/repo-maintenance.yml
vendored
@@ -37,7 +37,7 @@ jobs:
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-24.04
steps:
- uses: dessant/lock-threads@v6
- uses: dessant/lock-threads@v5
with:
issue-inactive-days: '30'
pr-inactive-days: '30'

2
.github/workflows/translate-strings.yml
vendored
@@ -47,7 +47,7 @@ jobs:
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
- name: Cache frontend dependencies
id: cache-frontend-deps
uses: actions/cache@v5
uses: actions/cache@v4
with:
path: |
~/.pnpm-store

@@ -294,13 +294,6 @@ The following methods are supported:
- `"delete_original": true` to delete the original documents after editing.
- `"update_document": true` to update the existing document with the edited PDF.
- `"include_metadata": true` to copy metadata from the original document to the edited document.
- `remove_password`
- Requires `parameters`:
- `"password": "PASSWORD_STRING"` The password to remove from the PDF documents.
- Optional `parameters`:
- `"update_document": true` to replace the existing document with the password-less PDF.
- `"delete_original": true` to delete the original document after editing.
- `"include_metadata": true` to copy metadata from the original document to the new password-less document.
- `merge`
- No additional `parameters` required.
- The ordering of the merged document is determined by the list of IDs.

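For orientation, here is a minimal sketch (not part of the changed docs) of calling the bulk edit endpoint with the new `remove_password` method from Python. The base URL, API token, and document ID are placeholders, not values from this diff.

```python
# Hedged sketch: remove the password from one document and replace the existing file.
# The URL, token, and document ID are placeholders (assumptions); adjust for your instance.
import requests

response = requests.post(
    "http://localhost:8000/api/documents/bulk_edit/",
    headers={"Authorization": "Token <your-api-token>"},
    json={
        "documents": [123],                  # IDs of the documents to edit
        "method": "remove_password",
        "parameters": {
            "password": "PASSWORD_STRING",   # the current password to strip
            "update_document": True,         # replace the existing document with the password-less PDF
        },
    },
    timeout=30,
)
response.raise_for_status()
```

The other methods listed above use the same request shape, differing only in the `method` name and the contents of `parameters`.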
@@ -1804,23 +1804,3 @@ password. All of these options come from their similarly-named [Django settings]
#### [`PAPERLESS_EMAIL_USE_SSL=<bool>`](#PAPERLESS_EMAIL_USE_SSL) {#PAPERLESS_EMAIL_USE_SSL}

: Defaults to false.

## Remote OCR

#### [`PAPERLESS_REMOTE_OCR_ENGINE=<str>`](#PAPERLESS_REMOTE_OCR_ENGINE) {#PAPERLESS_REMOTE_OCR_ENGINE}

: The remote OCR engine to use. Currently only Azure AI is supported, as `"azureai"`.

Defaults to None, which disables remote OCR.

#### [`PAPERLESS_REMOTE_OCR_API_KEY=<str>`](#PAPERLESS_REMOTE_OCR_API_KEY) {#PAPERLESS_REMOTE_OCR_API_KEY}

: The API key to use for the remote OCR engine.

Defaults to None.

#### [`PAPERLESS_REMOTE_OCR_ENDPOINT=<str>`](#PAPERLESS_REMOTE_OCR_ENDPOINT) {#PAPERLESS_REMOTE_OCR_ENDPOINT}

: The endpoint to use for the remote OCR engine. This is required for Azure AI.

Defaults to None.

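As a quick illustration (an assumption-laden sketch, not taken from the docs), the three settings above fit together like this; the key and endpoint values are placeholders:

```python
# Hedged sketch: the Remote OCR settings shown as environment variables,
# set from Python purely for illustration. Values are placeholders, not real credentials.
import os

os.environ["PAPERLESS_REMOTE_OCR_ENGINE"] = "azureai"
os.environ["PAPERLESS_REMOTE_OCR_API_KEY"] = "<document-intelligence-key>"
os.environ["PAPERLESS_REMOTE_OCR_ENDPOINT"] = "https://<your-resource>.cognitiveservices.azure.com/"
```

In a real deployment these would typically be set in the container environment (for example the compose env file) rather than from Python.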
@@ -25,10 +25,9 @@ physical documents into a searchable online archive so you can keep, well, _less
## Features

- **Organize and index** your scanned documents with tags, correspondents, types, and more.
- _Your_ data is stored locally on _your_ server and is never transmitted or shared in any way, unless you explicitly choose to do so.
- _Your_ data is stored locally on _your_ server and is never transmitted or shared in any way.
- Performs **OCR** on your documents, adding searchable and selectable text, even to documents scanned with only images.
- Utilizes the open-source Tesseract engine to recognize more than 100 languages.
- _New!_ Supports remote OCR with Azure AI (opt-in).
- Utilizes the open-source Tesseract engine to recognize more than 100 languages.
- Documents are saved as PDF/A format which is designed for long term storage, alongside the unaltered originals.
- Uses machine-learning to automatically add tags, correspondents and document types to your documents.
- Supports PDF documents, images, plain text files, Office documents (Word, Excel, PowerPoint, and LibreOffice equivalents)[^1] and more.

@@ -901,21 +901,6 @@ how regularly you intend to scan documents and use paperless.
performed the task associated with the document, move it to the
inbox.

## Remote OCR

!!! important

This feature is disabled by default and will always remain strictly "opt-in".

Paperless-ngx supports performing OCR on documents using remote services. At the moment, this is limited to
[Microsoft's Azure "Document Intelligence" service](https://azure.microsoft.com/en-us/products/ai-services/ai-document-intelligence).
This is a paid service (with a free tier) that requires an Azure account and subscription. Azure AI is not affiliated with
Paperless-ngx in any way. When enabled, Paperless-ngx will automatically send appropriate documents to Azure for OCR processing, bypassing
the local OCR engine. See the [configuration](configuration.md#PAPERLESS_REMOTE_OCR_ENGINE) options for more details.

Additionally, when using a commercial service with this feature, consider both the potential costs and any associated file size
or page limitations (e.g. with a free tier).

## Architecture

Paperless-ngx consists of the following components:

@@ -16,7 +16,6 @@ classifiers = [
# This will allow testing to not install a webserver, mysql, etc

dependencies = [
"azure-ai-documentintelligence>=1.0.2",
"babel>=2.17",
"bleach~=6.3.0",
"celery[redis]~=5.5.1",
@@ -46,14 +45,14 @@ dependencies = [
"drf-writable-nested~=0.7.1",
"filelock~=3.20.0",
"flower~=2.0.1",
"gotenberg-client~=0.13.1",
"gotenberg-client~=0.12.0",
"httpx-oauth~=0.16",
"imap-tools~=1.11.0",
"inotifyrecursive~=0.3",
"jinja2~=3.1.5",
"langdetect~=1.0.9",
"nltk~=3.9.1",
"ocrmypdf~=16.13.0",
"ocrmypdf~=16.12.0",
"pathvalidate~=3.3.1",
"pdf2image~=1.17.0",
"python-dateutil~=2.9.0",
@@ -254,7 +253,6 @@ testpaths = [
"src/paperless_tesseract/tests/",
"src/paperless_tika/tests",
"src/paperless_text/tests/",
"src/paperless_remote/tests/",
]
addopts = [
"--pythonwarnings=all",

@@ -328,23 +328,23 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.html</context>
|
||||
<context context-type="linenumber">70</context>
|
||||
<context context-type="linenumber">61</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
|
||||
<context context-type="linenumber">151</context>
|
||||
<context context-type="linenumber">139</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
|
||||
<context context-type="linenumber">151</context>
|
||||
<context context-type="linenumber">139</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
|
||||
<context context-type="linenumber">151</context>
|
||||
<context context-type="linenumber">139</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
|
||||
<context context-type="linenumber">151</context>
|
||||
<context context-type="linenumber">139</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="4930506384627295710" datatype="html">
|
||||
@@ -385,7 +385,7 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">119</context>
|
||||
<context context-type="linenumber">113</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="1241348629231510663" datatype="html">
|
||||
@@ -534,7 +534,7 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">380</context>
|
||||
<context context-type="linenumber">374</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="3768927257183755959" datatype="html">
|
||||
@@ -593,7 +593,7 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">373</context>
|
||||
<context context-type="linenumber">367</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/custom-fields-bulk-edit-dialog/custom-fields-bulk-edit-dialog.component.html</context>
|
||||
@@ -761,7 +761,7 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">393</context>
|
||||
<context context-type="linenumber">387</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-list/document-list.component.html</context>
|
||||
@@ -1234,7 +1234,7 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">349</context>
|
||||
<context context-type="linenumber">343</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.html</context>
|
||||
@@ -2164,7 +2164,7 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.html</context>
|
||||
<context context-type="linenumber">61</context>
|
||||
<context context-type="linenumber">55</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/mail/mail.component.html</context>
|
||||
@@ -2216,19 +2216,19 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
|
||||
<context context-type="linenumber">140</context>
|
||||
<context context-type="linenumber">133</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
|
||||
<context context-type="linenumber">140</context>
|
||||
<context context-type="linenumber">133</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
|
||||
<context context-type="linenumber">140</context>
|
||||
<context context-type="linenumber">133</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
|
||||
<context context-type="linenumber">140</context>
|
||||
<context context-type="linenumber">133</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.ts</context>
|
||||
@@ -2300,7 +2300,7 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.ts</context>
|
||||
<context context-type="linenumber">106</context>
|
||||
<context context-type="linenumber">104</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/mail/mail.component.ts</context>
|
||||
@@ -2483,7 +2483,7 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.html</context>
|
||||
<context context-type="linenumber">58</context>
|
||||
<context context-type="linenumber">52</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/mail/mail.component.html</context>
|
||||
@@ -2519,19 +2519,19 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
|
||||
<context context-type="linenumber">137</context>
|
||||
<context context-type="linenumber">130</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
|
||||
<context context-type="linenumber">137</context>
|
||||
<context context-type="linenumber">130</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
|
||||
<context context-type="linenumber">137</context>
|
||||
<context context-type="linenumber">130</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
|
||||
<context context-type="linenumber">137</context>
|
||||
<context context-type="linenumber">130</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/workflows/workflows.component.html</context>
|
||||
@@ -2607,11 +2607,11 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1030</context>
|
||||
<context context-type="linenumber">1028</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1395</context>
|
||||
<context context-type="linenumber">1393</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
|
||||
@@ -2627,7 +2627,7 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.ts</context>
|
||||
<context context-type="linenumber">108</context>
|
||||
<context context-type="linenumber">106</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/mail/mail.component.ts</context>
|
||||
@@ -3223,7 +3223,7 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">983</context>
|
||||
<context context-type="linenumber">981</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
|
||||
@@ -3292,67 +3292,6 @@
|
||||
<context context-type="linenumber">39</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="1309641780471803652" datatype="html">
|
||||
<source>Replace current document</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/confirm-dialog/password-removal-confirm-dialog/password-removal-confirm-dialog.component.html</context>
|
||||
<context context-type="linenumber">22</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="272894153768683842" datatype="html">
|
||||
<source>Create new document</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/confirm-dialog/password-removal-confirm-dialog/password-removal-confirm-dialog.component.html</context>
|
||||
<context context-type="linenumber">35</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="8543248035448303050" datatype="html">
|
||||
<source> Copy metadata </source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/confirm-dialog/password-removal-confirm-dialog/password-removal-confirm-dialog.component.html</context>
|
||||
<context context-type="linenumber">43,44</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="1489964994929867410" datatype="html">
|
||||
<source> Delete original</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/confirm-dialog/password-removal-confirm-dialog/password-removal-confirm-dialog.component.html</context>
|
||||
<context context-type="linenumber">48</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="8295618356624504599" datatype="html">
|
||||
<source>Remove password protection</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/confirm-dialog/password-removal-confirm-dialog/password-removal-confirm-dialog.component.ts</context>
|
||||
<context context-type="linenumber">18</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1446</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="6661109599266152398" datatype="html">
|
||||
<source>Create an unprotected copy or replace the existing file.</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/confirm-dialog/password-removal-confirm-dialog/password-removal-confirm-dialog.component.ts</context>
|
||||
<context context-type="linenumber">22</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1447</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="5162686434580248853" datatype="html">
|
||||
<source>Start</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/confirm-dialog/password-removal-confirm-dialog/password-removal-confirm-dialog.component.ts</context>
|
||||
<context context-type="linenumber">25</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1448</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="8157388568390631653" datatype="html">
|
||||
<source>Note that only PDFs will be rotated.</source>
|
||||
<context-group purpose="location">
|
||||
@@ -3401,7 +3340,7 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.ts</context>
|
||||
<context context-type="linenumber">87</context>
|
||||
<context context-type="linenumber">85</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="1841172489943868696" datatype="html">
|
||||
@@ -3412,7 +3351,7 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.ts</context>
|
||||
<context context-type="linenumber">96</context>
|
||||
<context context-type="linenumber">94</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="6048892649018070225" datatype="html">
|
||||
@@ -3474,7 +3413,7 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">113</context>
|
||||
<context context-type="linenumber">107</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/guards/dirty-saved-view.guard.ts</context>
|
||||
@@ -4422,7 +4361,7 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/storage-path-list/storage-path-list.component.ts</context>
|
||||
<context context-type="linenumber">51</context>
|
||||
<context context-type="linenumber">49</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="2816147949408898105" datatype="html">
|
||||
@@ -4440,7 +4379,7 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">315</context>
|
||||
<context context-type="linenumber">309</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="8057014866157903311" datatype="html">
|
||||
@@ -4497,7 +4436,7 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/tag-list/tag-list.component.ts</context>
|
||||
<context context-type="linenumber">51</context>
|
||||
<context context-type="linenumber">49</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="8621797738551294959" datatype="html">
|
||||
@@ -4551,7 +4490,7 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">98</context>
|
||||
<context context-type="linenumber">92</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.html</context>
|
||||
@@ -6271,7 +6210,7 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">94</context>
|
||||
<context context-type="linenumber">88</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="3429210839568770054" datatype="html">
|
||||
@@ -6867,42 +6806,35 @@
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1394</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="2336375155355449543" datatype="html">
|
||||
<source>Remove Password</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">71</context>
|
||||
<context context-type="linenumber">1392</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="6490688569532630280" datatype="html">
|
||||
<source>Send</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">90</context>
|
||||
<context context-type="linenumber">84</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="4452427314943113135" datatype="html">
|
||||
<source>Previous</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">116</context>
|
||||
<context context-type="linenumber">110</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="5028777105388019087" datatype="html">
|
||||
<source>Details</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">129</context>
|
||||
<context context-type="linenumber">123</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="5701618810648052610" datatype="html">
|
||||
<source>Title</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">132</context>
|
||||
<context context-type="linenumber">126</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-list/document-list.component.html</context>
|
||||
@@ -6925,21 +6857,21 @@
|
||||
<source>Archive serial number</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">133</context>
|
||||
<context context-type="linenumber">127</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="5114742157723900905" datatype="html">
|
||||
<source>Date created</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">134</context>
|
||||
<context context-type="linenumber">128</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="2691296884221415710" datatype="html">
|
||||
<source>Correspondent</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">136</context>
|
||||
<context context-type="linenumber">130</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.html</context>
|
||||
@@ -6966,7 +6898,7 @@
|
||||
<source>Document type</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">138</context>
|
||||
<context context-type="linenumber">132</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.html</context>
|
||||
@@ -6993,7 +6925,7 @@
|
||||
<source>Storage path</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">140</context>
|
||||
<context context-type="linenumber">134</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.html</context>
|
||||
@@ -7016,7 +6948,7 @@
|
||||
<source>Default</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">141</context>
|
||||
<context context-type="linenumber">135</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/saved-views/saved-views.component.html</context>
|
||||
@@ -7027,14 +6959,14 @@
|
||||
<source>Content</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">245</context>
|
||||
<context context-type="linenumber">239</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="218403386307979629" datatype="html">
|
||||
<source>Metadata</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">254</context>
|
||||
<context context-type="linenumber">248</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/metadata-collapse/metadata-collapse.component.ts</context>
|
||||
@@ -7045,175 +6977,175 @@
|
||||
<source>Date modified</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">261</context>
|
||||
<context context-type="linenumber">255</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="6392918669949841614" datatype="html">
|
||||
<source>Date added</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">265</context>
|
||||
<context context-type="linenumber">259</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="146828917013192897" datatype="html">
|
||||
<source>Media filename</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">269</context>
|
||||
<context context-type="linenumber">263</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="4500855521601039868" datatype="html">
|
||||
<source>Original filename</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">273</context>
|
||||
<context context-type="linenumber">267</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="7985558498848210210" datatype="html">
|
||||
<source>Original MD5 checksum</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">277</context>
|
||||
<context context-type="linenumber">271</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="5888243105821763422" datatype="html">
|
||||
<source>Original file size</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">281</context>
|
||||
<context context-type="linenumber">275</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="2696647325713149563" datatype="html">
|
||||
<source>Original mime type</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">285</context>
|
||||
<context context-type="linenumber">279</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="342875990758166588" datatype="html">
|
||||
<source>Archive MD5 checksum</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">290</context>
|
||||
<context context-type="linenumber">284</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="6033581412811562084" datatype="html">
|
||||
<source>Archive file size</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">296</context>
|
||||
<context context-type="linenumber">290</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="6992781481378431874" datatype="html">
|
||||
<source>Original document metadata</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">305</context>
|
||||
<context context-type="linenumber">299</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="2846565152091361585" datatype="html">
|
||||
<source>Archived document metadata</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">308</context>
|
||||
<context context-type="linenumber">302</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="7206723502037428235" datatype="html">
|
||||
<source>Notes <x id="START_BLOCK_IF" equiv-text="@if (document?.notes.length) {"/><x id="START_TAG_SPAN" ctype="x-span" equiv-text="<span class="badge text-bg-secondary ms-1">"/><x id="INTERPOLATION" equiv-text="ngth}}"/><x id="CLOSE_TAG_SPAN" ctype="x-span"/><x id="CLOSE_BLOCK_IF" equiv-text="}"/></source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">327,330</context>
|
||||
<context context-type="linenumber">321,324</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="186236568870281953" datatype="html">
|
||||
<source>History</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">338</context>
|
||||
<context context-type="linenumber">332</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="5129524307369213584" datatype="html">
|
||||
<source>Save & next</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">375</context>
|
||||
<context context-type="linenumber">369</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="4910102545766233758" datatype="html">
|
||||
<source>Save & close</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">378</context>
|
||||
<context context-type="linenumber">372</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="1309556917227148591" datatype="html">
|
||||
<source>Document loading...</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">388</context>
|
||||
<context context-type="linenumber">382</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="8191371354890763172" datatype="html">
|
||||
<source>Enter Password</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
|
||||
<context context-type="linenumber">442</context>
|
||||
<context context-type="linenumber">436</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="2218903673684131427" datatype="html">
|
||||
<source>An error occurred loading content: <x id="PH" equiv-text="err.message ?? err.toString()"/></source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">418,420</context>
|
||||
<context context-type="linenumber">416,418</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="3200733026060976258" datatype="html">
|
||||
<source>Document changes detected</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">452</context>
|
||||
<context context-type="linenumber">450</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="2887155916749964" datatype="html">
|
||||
<source>The version of this document in your browser session appears older than the existing version.</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">453</context>
|
||||
<context context-type="linenumber">451</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="237142428785956348" datatype="html">
|
||||
<source>Saving the document here may overwrite other changes that were made. To restore the existing version, discard your changes or close the document.</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">454</context>
|
||||
<context context-type="linenumber">452</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="8720977247725652816" datatype="html">
|
||||
<source>Ok</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">456</context>
|
||||
<context context-type="linenumber">454</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="6142395741265832184" datatype="html">
|
||||
<source>Next document</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">582</context>
|
||||
<context context-type="linenumber">580</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="651985345816518480" datatype="html">
|
||||
<source>Previous document</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">592</context>
|
||||
<context context-type="linenumber">590</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="2885986061416655600" datatype="html">
|
||||
<source>Close document</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">600</context>
|
||||
<context context-type="linenumber">598</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/services/open-documents.service.ts</context>
|
||||
@@ -7224,67 +7156,67 @@
|
||||
<source>Save document</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">607</context>
|
||||
<context context-type="linenumber">605</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="1784543155727940353" datatype="html">
|
||||
<source>Save and close / next</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">616</context>
|
||||
<context context-type="linenumber">614</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="5758784066858623886" datatype="html">
|
||||
<source>Error retrieving metadata</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">671</context>
|
||||
<context context-type="linenumber">669</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="3456881259945295697" datatype="html">
|
||||
<source>Error retrieving suggestions.</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">700</context>
|
||||
<context context-type="linenumber">698</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="2194092841814123758" datatype="html">
|
||||
<source>Document "<x id="PH" equiv-text="newValues.title"/>" saved successfully.</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">872</context>
|
||||
<context context-type="linenumber">870</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">896</context>
|
||||
<context context-type="linenumber">894</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="6626387786259219838" datatype="html">
|
||||
<source>Error saving document "<x id="PH" equiv-text="this.document.title"/>"</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">902</context>
|
||||
<context context-type="linenumber">900</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="448882439049417053" datatype="html">
|
||||
<source>Error saving document</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">952</context>
|
||||
<context context-type="linenumber">950</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="8410796510716511826" datatype="html">
|
||||
<source>Do you really want to move the document "<x id="PH" equiv-text="this.document.title"/>" to the trash?</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">984</context>
|
||||
<context context-type="linenumber">982</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="282586936710748252" datatype="html">
|
||||
<source>Documents can be restored prior to permanent deletion.</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">985</context>
|
||||
<context context-type="linenumber">983</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
|
||||
@@ -7295,7 +7227,7 @@
|
||||
<source>Move to trash</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">987</context>
|
||||
<context context-type="linenumber">985</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
|
||||
@@ -7306,14 +7238,14 @@
|
||||
<source>Error deleting document</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1006</context>
|
||||
<context context-type="linenumber">1004</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="619486176823357521" datatype="html">
|
||||
<source>Reprocess confirm</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1026</context>
|
||||
<context context-type="linenumber">1024</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
|
||||
@@ -7324,102 +7256,81 @@
|
||||
<source>This operation will permanently recreate the archive file for this document.</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1027</context>
|
||||
<context context-type="linenumber">1025</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="302054111564709516" datatype="html">
|
||||
<source>The archive file will be re-generated with the current settings.</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1028</context>
|
||||
<context context-type="linenumber">1026</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="8251197608401006898" datatype="html">
|
||||
<source>Reprocess operation for "<x id="PH" equiv-text="this.document.title"/>" will begin in the background. Close and re-open or reload this document after the operation has completed to see new content.</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1038</context>
|
||||
<context context-type="linenumber">1036</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="4409560272830824468" datatype="html">
|
||||
<source>Error executing operation</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1049</context>
|
||||
<context context-type="linenumber">1047</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="6030453331794586802" datatype="html">
|
||||
<source>Error downloading document</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1098</context>
|
||||
<context context-type="linenumber">1096</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="4458954481601077369" datatype="html">
|
||||
<source>Page Fit</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1175</context>
|
||||
<context context-type="linenumber">1173</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="4663705961777238777" datatype="html">
|
||||
<source>PDF edit operation for "<x id="PH" equiv-text="this.document.title"/>" will begin in the background.</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1413</context>
|
||||
<context context-type="linenumber">1411</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="9043972994040261999" datatype="html">
|
||||
<source>Error executing PDF edit operation</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1425</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="6172690334763056188" datatype="html">
|
||||
<source>Please enter the current password before attempting to remove it.</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1436</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="968660764814228922" datatype="html">
|
||||
<source>Password removal operation for "<x id="PH" equiv-text="this.document.title"/>" will begin in the background.</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1468</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="2282118435712883014" datatype="html">
|
||||
<source>Error executing password removal operation</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1482</context>
|
||||
<context context-type="linenumber">1423</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="3740891324955700797" datatype="html">
|
||||
<source>Print failed.</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1519</context>
|
||||
<context context-type="linenumber">1460</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="6457245677384603573" datatype="html">
|
||||
<source>Error loading document for printing.</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1531</context>
|
||||
<context context-type="linenumber">1472</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="6085793215710522488" datatype="html">
|
||||
<source>An error occurred loading tiff: <x id="PH" equiv-text="err.toString()"/></source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1596</context>
|
||||
<context context-type="linenumber">1537</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
|
||||
<context context-type="linenumber">1600</context>
|
||||
<context context-type="linenumber">1541</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="4958946940233632319" datatype="html">
|
||||
@@ -8589,28 +8500,28 @@
|
||||
<source>correspondent</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/correspondent-list/correspondent-list.component.ts</context>
|
||||
<context context-type="linenumber">49</context>
|
||||
<context context-type="linenumber">47</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="1612355304340685070" datatype="html">
|
||||
<source>correspondents</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/correspondent-list/correspondent-list.component.ts</context>
|
||||
<context context-type="linenumber">50</context>
|
||||
<context context-type="linenumber">48</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="6360600151505327572" datatype="html">
|
||||
<source>Last used</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/correspondent-list/correspondent-list.component.ts</context>
|
||||
<context context-type="linenumber">55</context>
|
||||
<context context-type="linenumber">53</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="7427874343955308724" datatype="html">
|
||||
<source>Do you really want to delete the correspondent "<x id="PH" equiv-text="object.name"/>"?</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/correspondent-list/correspondent-list.component.ts</context>
|
||||
<context context-type="linenumber">80</context>
|
||||
<context context-type="linenumber">78</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="8384138406252790442" datatype="html">
|
||||
@@ -8638,79 +8549,79 @@
|
||||
<source>Filter Documents (<x id="INTERPOLATION" equiv-text="{{ field.document_count }}"/>)</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.html</context>
|
||||
<context context-type="linenumber">50</context>
|
||||
<context context-type="linenumber">45</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
|
||||
<context context-type="linenumber">129</context>
|
||||
<context context-type="linenumber">123</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
|
||||
<context context-type="linenumber">129</context>
|
||||
<context context-type="linenumber">123</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
|
||||
<context context-type="linenumber">129</context>
|
||||
<context context-type="linenumber">123</context>
|
||||
</context-group>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
|
||||
<context context-type="linenumber">129</context>
|
||||
<context context-type="linenumber">123</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="651372623796033489" datatype="html">
|
||||
<source>No fields defined.</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.html</context>
|
||||
<context context-type="linenumber">80</context>
|
||||
<context context-type="linenumber">70</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="3032792139967609806" datatype="html">
|
||||
<source>Confirm delete field</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.ts</context>
|
||||
<context context-type="linenumber">104</context>
|
||||
<context context-type="linenumber">102</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="2939457975223185057" datatype="html">
|
||||
<source>This operation will permanently delete this field.</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.ts</context>
|
||||
<context context-type="linenumber">105</context>
|
||||
<context context-type="linenumber">103</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="4679555638382452936" datatype="html">
|
||||
<source>Deleted field "<x id="PH" equiv-text="field.name"/>"</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.ts</context>
|
||||
<context context-type="linenumber">114</context>
|
||||
<context context-type="linenumber">112</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="4704551499967874824" datatype="html">
|
||||
<source>Error deleting field "<x id="PH" equiv-text="field.name"/>".</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.ts</context>
|
||||
<context context-type="linenumber">123</context>
|
||||
<context context-type="linenumber">121</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="8084492669582894778" datatype="html">
|
||||
<source>document type</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/document-type-list/document-type-list.component.ts</context>
|
||||
<context context-type="linenumber">45</context>
|
||||
<context context-type="linenumber">43</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="2992451138146293104" datatype="html">
|
||||
<source>document types</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/document-type-list/document-type-list.component.ts</context>
|
||||
<context context-type="linenumber">46</context>
|
||||
<context context-type="linenumber">44</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="4990731724078522539" datatype="html">
|
||||
<source>Do you really want to delete the document type "<x id="PH" equiv-text="object.name"/>"?</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/document-type-list/document-type-list.component.ts</context>
|
||||
<context context-type="linenumber">51</context>
|
||||
<context context-type="linenumber">49</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="8957855217409261143" datatype="html">
|
||||
@@ -9250,42 +9161,42 @@
|
||||
<source>storage path</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/storage-path-list/storage-path-list.component.ts</context>
|
||||
<context context-type="linenumber">45</context>
|
||||
<context context-type="linenumber">43</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="22235115124223314" datatype="html">
|
||||
<source>storage paths</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/storage-path-list/storage-path-list.component.ts</context>
|
||||
<context context-type="linenumber">46</context>
|
||||
<context context-type="linenumber">44</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="1569070683025071137" datatype="html">
|
||||
<source>Do you really want to delete the storage path "<x id="PH" equiv-text="object.name"/>"?</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/storage-path-list/storage-path-list.component.ts</context>
|
||||
<context context-type="linenumber">62</context>
|
||||
<context context-type="linenumber">60</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="6402703264596649214" datatype="html">
|
||||
<source>tag</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/tag-list/tag-list.component.ts</context>
|
||||
<context context-type="linenumber">45</context>
|
||||
<context context-type="linenumber">43</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="4975748273657042999" datatype="html">
|
||||
<source>tags</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/tag-list/tag-list.component.ts</context>
|
||||
<context context-type="linenumber">46</context>
|
||||
<context context-type="linenumber">44</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="93754014749412887" datatype="html">
|
||||
<source>Do you really want to delete the tag "<x id="PH" equiv-text="object.name"/>"?</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/manage/tag-list/tag-list.component.ts</context>
|
||||
<context context-type="linenumber">62</context>
|
||||
<context context-type="linenumber">60</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="1229748338333965418" datatype="html">
|
||||
|
||||
@@ -1,75 +0,0 @@
|
||||
<div class="modal-header">
|
||||
<h4 class="modal-title" id="modal-basic-title">{{title}}</h4>
|
||||
<button type="button" class="btn-close" aria-label="Close" (click)="cancel()">
|
||||
</button>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
@if (message) {
|
||||
<p class="mb-3" [innerHTML]="message"></p>
|
||||
}
|
||||
<div class="btn-group mb-3" role="group">
|
||||
<input
|
||||
type="radio"
|
||||
class="btn-check"
|
||||
name="passwordRemoveMode"
|
||||
id="removeReplace"
|
||||
[(ngModel)]="updateDocument"
|
||||
[value]="true"
|
||||
(ngModelChange)="onUpdateDocumentChange($event)"
|
||||
/>
|
||||
<label class="btn btn-outline-primary btn-sm" for="removeReplace">
|
||||
<i-bs name="pencil"></i-bs>
|
||||
<span class="ms-2" i18n>Replace current document</span>
|
||||
</label>
|
||||
<input
|
||||
type="radio"
|
||||
class="btn-check"
|
||||
name="passwordRemoveMode"
|
||||
id="removeCreate"
|
||||
[(ngModel)]="updateDocument"
|
||||
[value]="false"
|
||||
(ngModelChange)="onUpdateDocumentChange($event)"
|
||||
/>
|
||||
<label class="btn btn-outline-primary btn-sm" for="removeCreate">
|
||||
<i-bs name="plus"></i-bs>
|
||||
<span class="ms-2" i18n>Create new document</span>
|
||||
</label>
|
||||
</div>
|
||||
@if (!updateDocument) {
|
||||
<div class="d-flex flex-column flex-md-row w-100 gap-3 align-items-center">
|
||||
<div class="form-group d-flex">
|
||||
<div class="form-check">
|
||||
<input class="form-check-input" type="checkbox" id="copyMetaRemove" [(ngModel)]="includeMetadata" />
|
||||
<label class="form-check-label" for="copyMetaRemove" i18n> Copy metadata
|
||||
</label>
|
||||
</div>
|
||||
<div class="form-check ms-3">
|
||||
<input class="form-check-input" type="checkbox" id="deleteOriginalRemove" [(ngModel)]="deleteOriginal" />
|
||||
<label class="form-check-label" for="deleteOriginalRemove" i18n> Delete original</label>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
</div>
|
||||
<div class="modal-footer flex-nowrap gap-2">
|
||||
<button
|
||||
type="button"
|
||||
class="btn"
|
||||
[class]="cancelBtnClass"
|
||||
(click)="cancel()"
|
||||
[disabled]="!buttonsEnabled"
|
||||
>
|
||||
<span class="d-inline-block" style="padding-bottom: 1px;">
|
||||
{{cancelBtnCaption}}
|
||||
</span>
|
||||
</button>
|
||||
<button
|
||||
type="button"
|
||||
class="btn"
|
||||
[class]="btnClass"
|
||||
(click)="confirm()"
|
||||
[disabled]="!confirmButtonEnabled || !buttonsEnabled"
|
||||
>
|
||||
{{btnCaption}}
|
||||
</button>
|
||||
</div>
|
||||
@@ -1,53 +0,0 @@
|
||||
import { ComponentFixture, TestBed } from '@angular/core/testing'
|
||||
import { By } from '@angular/platform-browser'
|
||||
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap'
|
||||
import { NgxBootstrapIconsModule, allIcons } from 'ngx-bootstrap-icons'
|
||||
import { PasswordRemovalConfirmDialogComponent } from './password-removal-confirm-dialog.component'
|
||||
|
||||
describe('PasswordRemovalConfirmDialogComponent', () => {
|
||||
let component: PasswordRemovalConfirmDialogComponent
|
||||
let fixture: ComponentFixture<PasswordRemovalConfirmDialogComponent>
|
||||
|
||||
beforeEach(async () => {
|
||||
await TestBed.configureTestingModule({
|
||||
providers: [NgbActiveModal],
|
||||
imports: [
|
||||
NgxBootstrapIconsModule.pick(allIcons),
|
||||
PasswordRemovalConfirmDialogComponent,
|
||||
],
|
||||
}).compileComponents()
|
||||
|
||||
fixture = TestBed.createComponent(PasswordRemovalConfirmDialogComponent)
|
||||
component = fixture.componentInstance
|
||||
fixture.detectChanges()
|
||||
})
|
||||
|
||||
it('should default to replacing the document', () => {
|
||||
expect(component.updateDocument).toBe(true)
|
||||
expect(
|
||||
fixture.debugElement.query(By.css('#removeReplace')).nativeElement.checked
|
||||
).toBe(true)
|
||||
})
|
||||
|
||||
it('should allow creating a new document with metadata and delete toggle', () => {
|
||||
component.onUpdateDocumentChange(false)
|
||||
fixture.detectChanges()
|
||||
|
||||
expect(component.updateDocument).toBe(false)
|
||||
expect(fixture.debugElement.query(By.css('#copyMetaRemove'))).not.toBeNull()
|
||||
|
||||
component.includeMetadata = false
|
||||
component.deleteOriginal = true
|
||||
component.onUpdateDocumentChange(true)
|
||||
expect(component.updateDocument).toBe(true)
|
||||
expect(component.includeMetadata).toBe(true)
|
||||
expect(component.deleteOriginal).toBe(false)
|
||||
})
|
||||
|
||||
it('should emit confirm when confirmed', () => {
|
||||
let confirmed = false
|
||||
component.confirmClicked.subscribe(() => (confirmed = true))
|
||||
component.confirm()
|
||||
expect(confirmed).toBe(true)
|
||||
})
|
||||
})
|
||||
@@ -1,38 +0,0 @@
|
||||
import { Component, Input } from '@angular/core'
|
||||
import { FormsModule } from '@angular/forms'
|
||||
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
|
||||
import { ConfirmDialogComponent } from '../confirm-dialog.component'
|
||||
|
||||
@Component({
|
||||
selector: 'pngx-password-removal-confirm-dialog',
|
||||
templateUrl: './password-removal-confirm-dialog.component.html',
|
||||
styleUrls: ['./password-removal-confirm-dialog.component.scss'],
|
||||
imports: [FormsModule, NgxBootstrapIconsModule],
|
||||
})
|
||||
export class PasswordRemovalConfirmDialogComponent extends ConfirmDialogComponent {
|
||||
updateDocument: boolean = true
|
||||
includeMetadata: boolean = true
|
||||
deleteOriginal: boolean = false
|
||||
|
||||
@Input()
|
||||
override title = $localize`Remove password protection`
|
||||
|
||||
@Input()
|
||||
override message =
|
||||
$localize`Create an unprotected copy or replace the existing file.`
|
||||
|
||||
@Input()
|
||||
override btnCaption = $localize`Start`
|
||||
|
||||
constructor() {
|
||||
super()
|
||||
}
|
||||
|
||||
onUpdateDocumentChange(updateDocument: boolean) {
|
||||
this.updateDocument = updateDocument
|
||||
if (this.updateDocument) {
|
||||
this.deleteOriginal = false
|
||||
this.includeMetadata = true
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -65,12 +65,6 @@
|
||||
<button ngbDropdownItem (click)="editPdf()" [disabled]="!userIsOwner || !userCanEdit || originalContentRenderType !== ContentRenderType.PDF">
|
||||
<i-bs name="pencil"></i-bs> <ng-container i18n>PDF Editor</ng-container>
|
||||
</button>
|
||||
|
||||
@if (userIsOwner && (requiresPassword || password)) {
|
||||
<button ngbDropdownItem (click)="removePassword()" [disabled]="!password">
|
||||
<i-bs name="unlock"></i-bs> <ng-container i18n>Remove Password</ng-container>
|
||||
</button>
|
||||
}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
@@ -66,7 +66,6 @@ import { SettingsService } from 'src/app/services/settings.service'
|
||||
import { ToastService } from 'src/app/services/toast.service'
|
||||
import { environment } from 'src/environments/environment'
|
||||
import { ConfirmDialogComponent } from '../common/confirm-dialog/confirm-dialog.component'
|
||||
import { PasswordRemovalConfirmDialogComponent } from '../common/confirm-dialog/password-removal-confirm-dialog/password-removal-confirm-dialog.component'
|
||||
import { CustomFieldsDropdownComponent } from '../common/custom-fields-dropdown/custom-fields-dropdown.component'
|
||||
import {
|
||||
DocumentDetailComponent,
|
||||
@@ -1210,88 +1209,6 @@ describe('DocumentDetailComponent', () => {
|
||||
expect(closeSpy).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should support removing password protection from pdfs', () => {
|
||||
let modal: NgbModalRef
|
||||
modalService.activeInstances.subscribe((m) => (modal = m[0]))
|
||||
initNormally()
|
||||
component.password = 'secret'
|
||||
component.removePassword()
|
||||
const dialog =
|
||||
modal.componentInstance as PasswordRemovalConfirmDialogComponent
|
||||
dialog.updateDocument = false
|
||||
dialog.includeMetadata = false
|
||||
dialog.deleteOriginal = true
|
||||
dialog.confirm()
|
||||
const req = httpTestingController.expectOne(
|
||||
`${environment.apiBaseUrl}documents/bulk_edit/`
|
||||
)
|
||||
expect(req.request.body).toEqual({
|
||||
documents: [doc.id],
|
||||
method: 'remove_password',
|
||||
parameters: {
|
||||
password: 'secret',
|
||||
update_document: false,
|
||||
include_metadata: false,
|
||||
delete_original: true,
|
||||
},
|
||||
})
|
||||
req.flush(true)
|
||||
})
|
||||
|
||||
it('should require the current password before removing it', () => {
|
||||
initNormally()
|
||||
const errorSpy = jest.spyOn(toastService, 'showError')
|
||||
component.requiresPassword = true
|
||||
component.password = ''
|
||||
|
||||
component.removePassword()
|
||||
|
||||
expect(errorSpy).toHaveBeenCalled()
|
||||
httpTestingController.expectNone(
|
||||
`${environment.apiBaseUrl}documents/bulk_edit/`
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle failures when removing password protection', () => {
|
||||
let modal: NgbModalRef
|
||||
modalService.activeInstances.subscribe((m) => (modal = m[0]))
|
||||
initNormally()
|
||||
const errorSpy = jest.spyOn(toastService, 'showError')
|
||||
component.password = 'secret'
|
||||
|
||||
component.removePassword()
|
||||
const dialog =
|
||||
modal.componentInstance as PasswordRemovalConfirmDialogComponent
|
||||
dialog.confirm()
|
||||
const req = httpTestingController.expectOne(
|
||||
`${environment.apiBaseUrl}documents/bulk_edit/`
|
||||
)
|
||||
req.error(new ErrorEvent('failed'))
|
||||
|
||||
expect(errorSpy).toHaveBeenCalled()
|
||||
expect(component.networkActive).toBe(false)
|
||||
expect(dialog.buttonsEnabled).toBe(true)
|
||||
})
|
||||
|
||||
it('should refresh the document when removing password in update mode', () => {
|
||||
let modal: NgbModalRef
|
||||
modalService.activeInstances.subscribe((m) => (modal = m[0]))
|
||||
const refreshSpy = jest.spyOn(openDocumentsService, 'refreshDocument')
|
||||
initNormally()
|
||||
component.password = 'secret'
|
||||
|
||||
component.removePassword()
|
||||
const dialog =
|
||||
modal.componentInstance as PasswordRemovalConfirmDialogComponent
|
||||
dialog.confirm()
|
||||
const req = httpTestingController.expectOne(
|
||||
`${environment.apiBaseUrl}documents/bulk_edit/`
|
||||
)
|
||||
req.flush(true)
|
||||
|
||||
expect(refreshSpy).toHaveBeenCalledWith(doc.id)
|
||||
})
|
||||
|
||||
it('should support keyboard shortcuts', () => {
|
||||
initNormally()
|
||||
|
||||
|
||||
@@ -83,7 +83,6 @@ import { getFilenameFromContentDisposition } from 'src/app/utils/http'
|
||||
import { ISODateAdapter } from 'src/app/utils/ngb-iso-date-adapter'
|
||||
import * as UTIF from 'utif'
|
||||
import { ConfirmDialogComponent } from '../common/confirm-dialog/confirm-dialog.component'
|
||||
import { PasswordRemovalConfirmDialogComponent } from '../common/confirm-dialog/password-removal-confirm-dialog/password-removal-confirm-dialog.component'
|
||||
import { CustomFieldsDropdownComponent } from '../common/custom-fields-dropdown/custom-fields-dropdown.component'
|
||||
import { CorrespondentEditDialogComponent } from '../common/edit-dialog/correspondent-edit-dialog/correspondent-edit-dialog.component'
|
||||
import { DocumentTypeEditDialogComponent } from '../common/edit-dialog/document-type-edit-dialog/document-type-edit-dialog.component'
|
||||
@@ -176,7 +175,6 @@ export enum ZoomSetting {
|
||||
NgxBootstrapIconsModule,
|
||||
PdfViewerModule,
|
||||
TextAreaComponent,
|
||||
PasswordRemovalConfirmDialogComponent,
|
||||
],
|
||||
})
|
||||
export class DocumentDetailComponent
|
||||
@@ -1430,63 +1428,6 @@ export class DocumentDetailComponent
|
||||
})
|
||||
}
|
||||
|
||||
removePassword() {
|
||||
if (this.requiresPassword || !this.password) {
|
||||
this.toastService.showError(
|
||||
$localize`Please enter the current password before attempting to remove it.`
|
||||
)
|
||||
return
|
||||
}
|
||||
const modal = this.modalService.open(
|
||||
PasswordRemovalConfirmDialogComponent,
|
||||
{
|
||||
backdrop: 'static',
|
||||
}
|
||||
)
|
||||
modal.componentInstance.title = $localize`Remove password protection`
|
||||
modal.componentInstance.message = $localize`Create an unprotected copy or replace the existing file.`
|
||||
modal.componentInstance.btnCaption = $localize`Start`
|
||||
|
||||
modal.componentInstance.confirmClicked
|
||||
.pipe(takeUntil(this.unsubscribeNotifier))
|
||||
.subscribe(() => {
|
||||
const dialog =
|
||||
modal.componentInstance as PasswordRemovalConfirmDialogComponent
|
||||
dialog.buttonsEnabled = false
|
||||
this.networkActive = true
|
||||
this.documentsService
|
||||
.bulkEdit([this.document.id], 'remove_password', {
|
||||
password: this.password,
|
||||
update_document: dialog.updateDocument,
|
||||
include_metadata: dialog.includeMetadata,
|
||||
delete_original: dialog.deleteOriginal,
|
||||
})
|
||||
.pipe(first(), takeUntil(this.unsubscribeNotifier))
|
||||
.subscribe({
|
||||
next: () => {
|
||||
this.toastService.showInfo(
|
||||
$localize`Password removal operation for "${this.document.title}" will begin in the background.`
|
||||
)
|
||||
this.networkActive = false
|
||||
modal.close()
|
||||
if (!dialog.updateDocument && dialog.deleteOriginal) {
|
||||
this.openDocumentService.closeDocument(this.document)
|
||||
} else if (dialog.updateDocument) {
|
||||
this.openDocumentService.refreshDocument(this.documentId)
|
||||
}
|
||||
},
|
||||
error: (error) => {
|
||||
dialog.buttonsEnabled = true
|
||||
this.networkActive = false
|
||||
this.toastService.showError(
|
||||
$localize`Error executing password removal operation`,
|
||||
error
|
||||
)
|
||||
},
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
printDocument() {
|
||||
const printUrl = this.documentsService.getDownloadUrl(
|
||||
this.document.id,
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
|
||||
import { Component, inject } from '@angular/core'
|
||||
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
|
||||
import { RouterModule } from '@angular/router'
|
||||
import {
|
||||
NgbDropdownModule,
|
||||
NgbPaginationModule,
|
||||
@@ -30,7 +29,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
|
||||
TitleCasePipe,
|
||||
FormsModule,
|
||||
ReactiveFormsModule,
|
||||
RouterModule,
|
||||
NgClass,
|
||||
NgTemplateOutlet,
|
||||
NgbDropdownModule,
|
||||
|
||||
@@ -42,13 +42,7 @@
|
||||
<button (click)="editField(field)" *pngxIfPermissions="{ action: PermissionAction.Change, type: PermissionType.CustomField }" ngbDropdownItem i18n>Edit</button>
|
||||
<button class="text-danger" (click)="deleteField(field)" *pngxIfPermissions="{ action: PermissionAction.Delete, type: PermissionType.CustomField }" ngbDropdownItem i18n>Delete</button>
|
||||
@if (field.document_count > 0) {
|
||||
<a
|
||||
*pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }"
|
||||
ngbDropdownItem
|
||||
[routerLink]="getDocumentFilterUrl(field)"
|
||||
i18n
|
||||
>Filter Documents ({{ field.document_count }})</a
|
||||
>
|
||||
<button (click)="filterDocuments(field)" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }" ngbDropdownItem i18n>Filter Documents ({{ field.document_count }})</button>
|
||||
}
|
||||
</div>
|
||||
</div>
|
||||
@@ -63,13 +57,9 @@
|
||||
</div>
|
||||
@if (field.document_count > 0) {
|
||||
<div class="btn-group d-none d-sm-inline-block ms-2">
|
||||
<a
|
||||
class="btn btn-sm btn-outline-secondary"
|
||||
[routerLink]="getDocumentFilterUrl(field)"
|
||||
>
|
||||
<i-bs width="1em" height="1em" name="filter"></i-bs> <ng-container i18n>Documents</ng-container
|
||||
><span class="badge bg-light text-secondary ms-2">{{ field.document_count }}</span>
|
||||
</a>
|
||||
<button class="btn btn-sm btn-outline-secondary" type="button" (click)="filterDocuments(field)">
|
||||
<i-bs width="1em" height="1em" name="filter"></i-bs> <ng-container i18n>Documents</ng-container><span class="badge bg-light text-secondary ms-2">{{ field.document_count }}</span>
|
||||
</button>
|
||||
</div>
|
||||
}
|
||||
</div>
|
||||
|
||||
@@ -4,7 +4,6 @@ import { provideHttpClient, withInterceptorsFromDi } from '@angular/common/http'
|
||||
import { provideHttpClientTesting } from '@angular/common/http/testing'
|
||||
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
|
||||
import { By } from '@angular/platform-browser'
|
||||
import { RouterTestingModule } from '@angular/router/testing'
|
||||
import {
|
||||
NgbModal,
|
||||
NgbModalModule,
|
||||
@@ -62,7 +61,6 @@ describe('CustomFieldsComponent', () => {
|
||||
NgbModalModule,
|
||||
NgbPopoverModule,
|
||||
NgxBootstrapIconsModule.pick(allIcons),
|
||||
RouterTestingModule,
|
||||
CustomFieldsComponent,
|
||||
IfPermissionsDirective,
|
||||
PageHeaderComponent,
|
||||
@@ -110,9 +108,7 @@ describe('CustomFieldsComponent', () => {
|
||||
const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
|
||||
const reloadSpy = jest.spyOn(component, 'reload')
|
||||
|
||||
const createButton = fixture.debugElement
|
||||
.queryAll(By.css('button'))
|
||||
.find((btn) => btn.nativeElement.textContent.trim().includes('Add Field'))
|
||||
const createButton = fixture.debugElement.queryAll(By.css('button'))[1]
|
||||
createButton.triggerEventHandler('click')
|
||||
|
||||
expect(modal).not.toBeUndefined()
|
||||
@@ -137,11 +133,7 @@ describe('CustomFieldsComponent', () => {
|
||||
const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
|
||||
const reloadSpy = jest.spyOn(component, 'reload')
|
||||
|
||||
const editButton = fixture.debugElement
|
||||
.queryAll(By.css('button'))
|
||||
.find((btn) =>
|
||||
btn.nativeElement.textContent.trim().includes(fields[0].name)
|
||||
)
|
||||
const editButton = fixture.debugElement.queryAll(By.css('button'))[2]
|
||||
editButton.triggerEventHandler('click')
|
||||
|
||||
expect(modal).not.toBeUndefined()
|
||||
@@ -166,9 +158,7 @@ describe('CustomFieldsComponent', () => {
|
||||
const deleteSpy = jest.spyOn(customFieldsService, 'delete')
|
||||
const reloadSpy = jest.spyOn(component, 'reload')
|
||||
|
||||
const deleteButton = fixture.debugElement
|
||||
.queryAll(By.css('button'))
|
||||
.find((btn) => btn.nativeElement.textContent.trim().includes('Delete'))
|
||||
const deleteButton = fixture.debugElement.queryAll(By.css('button'))[5]
|
||||
deleteButton.triggerEventHandler('click')
|
||||
|
||||
expect(modal).not.toBeUndefined()
|
||||
@@ -186,10 +176,10 @@ describe('CustomFieldsComponent', () => {
|
||||
expect(reloadSpy).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should provide document filter url', () => {
|
||||
const urlSpy = jest.spyOn(listViewService, 'getQuickFilterUrl')
|
||||
component.getDocumentFilterUrl(fields[0])
|
||||
expect(urlSpy).toHaveBeenCalledWith([
|
||||
it('should support filter documents', () => {
|
||||
const filterSpy = jest.spyOn(listViewService, 'quickFilter')
|
||||
component.filterDocuments(fields[0])
|
||||
expect(filterSpy).toHaveBeenCalledWith([
|
||||
{
|
||||
rule_type: FILTER_CUSTOM_FIELDS_QUERY,
|
||||
value: JSON.stringify([
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { Component, OnInit, inject } from '@angular/core'
|
||||
import { RouterModule } from '@angular/router'
|
||||
import {
|
||||
NgbDropdownModule,
|
||||
NgbModal,
|
||||
@@ -37,7 +36,6 @@ import { LoadingComponentWithPermissions } from '../../loading-component/loading
|
||||
NgbDropdownModule,
|
||||
NgbPaginationModule,
|
||||
NgxBootstrapIconsModule,
|
||||
RouterModule,
|
||||
],
|
||||
})
|
||||
export class CustomFieldsComponent
|
||||
@@ -132,8 +130,8 @@ export class CustomFieldsComponent
|
||||
return DATA_TYPE_LABELS.find((l) => l.id === field.data_type).name
|
||||
}
|
||||
|
||||
getDocumentFilterUrl(field: CustomField) {
|
||||
return this.documentListViewService.getQuickFilterUrl([
|
||||
filterDocuments(field: CustomField) {
|
||||
this.documentListViewService.quickFilter([
|
||||
{
|
||||
rule_type: FILTER_CUSTOM_FIELDS_QUERY,
|
||||
value: JSON.stringify([
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
|
||||
import { Component, inject } from '@angular/core'
|
||||
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
|
||||
import { RouterModule } from '@angular/router'
|
||||
import {
|
||||
NgbDropdownModule,
|
||||
NgbPaginationModule,
|
||||
@@ -28,7 +27,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
|
||||
IfPermissionsDirective,
|
||||
FormsModule,
|
||||
ReactiveFormsModule,
|
||||
RouterModule,
|
||||
NgClass,
|
||||
NgTemplateOutlet,
|
||||
NgbDropdownModule,
|
||||
|
||||
@@ -120,14 +120,7 @@
|
||||
<button (click)="openEditDialog(object)" *pngxIfPermissions="{ action: PermissionAction.Change, type: permissionType }" ngbDropdownItem i18n>Edit</button>
|
||||
<button class="text-danger" (click)="openDeleteDialog(object)" *pngxIfPermissions="{ action: PermissionAction.Delete, type: permissionType }" ngbDropdownItem i18n>Delete</button>
|
||||
@if (getDocumentCount(object) > 0) {
|
||||
<a
|
||||
*pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }"
|
||||
ngbDropdownItem
|
||||
[routerLink]="getDocumentFilterUrl(object)"
|
||||
(click)="$event?.stopPropagation()"
|
||||
i18n
|
||||
>Filter Documents ({{ getDocumentCount(object) }})</a
|
||||
>
|
||||
<button (click)="filterDocuments(object)" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }" ngbDropdownItem i18n>Filter Documents ({{ getDocumentCount(object) }})</button>
|
||||
}
|
||||
</div>
|
||||
</div>
|
||||
@@ -142,15 +135,9 @@
|
||||
</div>
|
||||
@if (getDocumentCount(object) > 0) {
|
||||
<div class="btn-group d-none d-sm-inline-block">
|
||||
<a
|
||||
class="btn btn-sm btn-outline-secondary"
|
||||
*pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }"
|
||||
[routerLink]="getDocumentFilterUrl(object)"
|
||||
(click)="$event?.stopPropagation()"
|
||||
>
|
||||
<i-bs width="1em" height="1em" name="filter"></i-bs> <ng-container i18n>Documents</ng-container
|
||||
><span class="badge bg-light text-secondary ms-2">{{ getDocumentCount(object) }}</span>
|
||||
</a>
|
||||
<button class="btn btn-sm btn-outline-secondary" (click)="filterDocuments(object); $event.stopPropagation();" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }">
|
||||
<i-bs width="1em" height="1em" name="filter"></i-bs> <ng-container i18n>Documents</ng-container><span class="badge bg-light text-secondary ms-2">{{ getDocumentCount(object) }}</span>
|
||||
</button>
|
||||
</div>
|
||||
}
|
||||
</div>
|
||||
|
||||
@@ -13,7 +13,6 @@ import {
|
||||
} from '@angular/core/testing'
|
||||
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
|
||||
import { By } from '@angular/platform-browser'
|
||||
import { RouterLinkWithHref } from '@angular/router'
|
||||
import { RouterTestingModule } from '@angular/router/testing'
|
||||
import {
|
||||
NgbModal,
|
||||
@@ -231,15 +230,12 @@ describe('ManagementListComponent', () => {
|
||||
})
|
||||
|
||||
it('should support quick filter for objects', () => {
|
||||
const expectedUrl = documentListViewService.getQuickFilterUrl([
|
||||
const qfSpy = jest.spyOn(documentListViewService, 'quickFilter')
|
||||
const filterButton = fixture.debugElement.queryAll(By.css('button'))[9]
|
||||
filterButton.triggerEventHandler('click')
|
||||
expect(qfSpy).toHaveBeenCalledWith([
|
||||
{ rule_type: FILTER_HAS_TAGS_ALL, value: tags[0].id.toString() },
|
||||
])
|
||||
const filterLink = fixture.debugElement.query(
|
||||
By.css('a.btn-outline-secondary')
|
||||
)
|
||||
expect(filterLink).toBeTruthy()
|
||||
const routerLink = filterLink.injector.get(RouterLinkWithHref)
|
||||
expect(routerLink.urlTree).toEqual(expectedUrl)
|
||||
]) // subclasses set the filter rule type
|
||||
})
|
||||
|
||||
it('should reload on sort', () => {
|
||||
|
||||
@@ -230,8 +230,8 @@ export abstract class ManagementListComponent<T extends MatchingModel>
|
||||
|
||||
abstract getDeleteMessage(object: T)
|
||||
|
||||
getDocumentFilterUrl(object: MatchingModel) {
|
||||
return this.documentListViewService.getQuickFilterUrl([
|
||||
filterDocuments(object: MatchingModel) {
|
||||
this.documentListViewService.quickFilter([
|
||||
{ rule_type: this.filterRuleType, value: object.id.toString() },
|
||||
])
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
|
||||
import { Component, inject } from '@angular/core'
|
||||
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
|
||||
import { RouterModule } from '@angular/router'
|
||||
import {
|
||||
NgbDropdownModule,
|
||||
NgbPaginationModule,
|
||||
@@ -28,7 +27,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
|
||||
IfPermissionsDirective,
|
||||
FormsModule,
|
||||
ReactiveFormsModule,
|
||||
RouterModule,
|
||||
NgClass,
|
||||
NgTemplateOutlet,
|
||||
NgbDropdownModule,
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
|
||||
import { Component, inject } from '@angular/core'
|
||||
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
|
||||
import { RouterModule } from '@angular/router'
|
||||
import {
|
||||
NgbDropdownModule,
|
||||
NgbPaginationModule,
|
||||
@@ -28,7 +27,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
|
||||
IfPermissionsDirective,
|
||||
FormsModule,
|
||||
ReactiveFormsModule,
|
||||
RouterModule,
|
||||
NgClass,
|
||||
NgTemplateOutlet,
|
||||
NgbDropdownModule,
|
||||
|
||||
@@ -651,25 +651,4 @@ describe('DocumentListViewService', () => {
|
||||
documentListViewService.displayFields = customFields as any
|
||||
expect(documentListViewService.displayFields).toEqual(['custom_field_1'])
|
||||
})
|
||||
|
||||
it('should generate quick filter URL with filter rules', () => {
|
||||
const routerSpy = jest.spyOn(router, 'createUrlTree')
|
||||
const urlTree = documentListViewService.getQuickFilterUrl(filterRules)
|
||||
expect(routerSpy).toHaveBeenCalledWith(['/documents'], {
|
||||
queryParams: expect.objectContaining({
|
||||
tags__id__all: tags__id__all,
|
||||
}),
|
||||
})
|
||||
expect(urlTree).toBeDefined()
|
||||
})
|
||||
|
||||
it('should generate quick filter URL preserving default state', () => {
|
||||
documentListViewService.reload()
|
||||
httpTestingController.expectOne(
|
||||
`${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
|
||||
)
|
||||
const urlTree = documentListViewService.getQuickFilterUrl(filterRules)
|
||||
expect(urlTree).toBeDefined()
|
||||
expect(router.createUrlTree).toBeDefined()
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Injectable, inject } from '@angular/core'
|
||||
import { ParamMap, Router, UrlTree } from '@angular/router'
|
||||
import { ParamMap, Router } from '@angular/router'
|
||||
import { Observable, Subject, first, takeUntil } from 'rxjs'
|
||||
import {
|
||||
DEFAULT_DISPLAY_FIELDS,
|
||||
@@ -483,18 +483,6 @@ export class DocumentListViewService {
|
||||
this.router.navigate(['documents'])
|
||||
}
|
||||
|
||||
getQuickFilterUrl(filterRules: FilterRule[]): UrlTree {
|
||||
const defaultState = {
|
||||
...this.defaultListViewState(),
|
||||
...this.listViewStates.get(null),
|
||||
filterRules,
|
||||
}
|
||||
const params = paramsFromViewState(defaultState)
|
||||
return this.router.createUrlTree(['/documents'], {
|
||||
queryParams: params,
|
||||
})
|
||||
}
|
||||
|
||||
getLastPage(): number {
|
||||
return Math.ceil(this.collectionSize / this.pageSize)
|
||||
}
|
||||
|
||||
@@ -132,7 +132,6 @@ import {
|
||||
threeDotsVertical,
|
||||
trash,
|
||||
uiRadios,
|
||||
unlock,
|
||||
upcScan,
|
||||
windowStack,
|
||||
x,
|
||||
@@ -349,7 +348,6 @@ const icons = {
|
||||
threeDotsVertical,
|
||||
trash,
|
||||
uiRadios,
|
||||
unlock,
|
||||
upcScan,
|
||||
windowStack,
|
||||
x,
|
||||
|
||||
@@ -16,6 +16,7 @@ from pikepdf import Pdf
|
||||
from documents.converters import convert_from_tiff_to_pdf
|
||||
from documents.data_models import ConsumableDocument
|
||||
from documents.data_models import DocumentMetadataOverrides
|
||||
from documents.models import Document
|
||||
from documents.models import Tag
|
||||
from documents.plugins.base import ConsumeTaskPlugin
|
||||
from documents.plugins.base import StopConsumeTaskError
|
||||
@@ -115,6 +116,24 @@ class BarcodePlugin(ConsumeTaskPlugin):
|
||||
self._tiff_conversion_done = False
|
||||
self.barcodes: list[Barcode] = []
|
||||
|
||||
def _apply_detected_asn(self, detected_asn: int) -> None:
|
||||
"""
|
||||
Apply a detected ASN to metadata if allowed.
|
||||
"""
|
||||
if (
|
||||
self.metadata.skip_asn_if_exists
|
||||
and Document.global_objects.filter(
|
||||
archive_serial_number=detected_asn,
|
||||
).exists()
|
||||
):
|
||||
logger.info(
|
||||
f"Found ASN in barcode {detected_asn} but skipping because it already exists.",
|
||||
)
|
||||
return
|
||||
|
||||
logger.info(f"Found ASN in barcode: {detected_asn}")
|
||||
self.metadata.asn = detected_asn
|
||||
|
||||
def run(self) -> None:
|
||||
# Some operations may use PIL, override pixel setting if needed
|
||||
maybe_override_pixel_limit()
|
||||
@@ -186,13 +205,8 @@ class BarcodePlugin(ConsumeTaskPlugin):
|
||||
|
||||
# Update/overwrite an ASN if possible
|
||||
# After splitting, as otherwise each split document gets the same ASN
|
||||
if (
|
||||
self.settings.barcode_enable_asn
|
||||
and not self.metadata.skip_asn
|
||||
and (located_asn := self.asn) is not None
|
||||
):
|
||||
logger.info(f"Found ASN in barcode: {located_asn}")
|
||||
self.metadata.asn = located_asn
|
||||
if self.settings.barcode_enable_asn and (located_asn := self.asn) is not None:
|
||||
self._apply_detected_asn(located_asn)
|
||||
|
||||
def cleanup(self) -> None:
|
||||
self.temp_dir.cleanup()
|
||||
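A minimal stand-alone sketch of the gating that _apply_detected_asn performs above, with OverridesSketch and asn_exists as illustrative stand-ins for DocumentMetadataOverrides and the ORM lookup (an illustration, not code from the change):
# The detected ASN is only applied when skip_asn_if_exists is unset
# or no existing document already holds that ASN.
from dataclasses import dataclass
from typing import Callable, Optional

@dataclass
class OverridesSketch:                     # stand-in for DocumentMetadataOverrides
    skip_asn_if_exists: bool = False
    asn: Optional[int] = None

def apply_detected_asn(
    overrides: OverridesSketch,
    detected_asn: int,
    asn_exists: Callable[[int], bool],     # stand-in for the Document.global_objects lookup
) -> None:
    if overrides.skip_asn_if_exists and asn_exists(detected_asn):
        return                             # keep the existing document's ASN untouched
    overrides.asn = detected_asn

taken = {42}
o = OverridesSketch(skip_asn_if_exists=True)
apply_detected_asn(o, 42, lambda n: n in taken)   # skipped: ASN 42 already exists
apply_detected_asn(o, 43, lambda n: n in taken)   # applied: o.asn is now 43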
|
||||
@@ -7,7 +7,6 @@ from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Literal
|
||||
|
||||
from celery import chain
|
||||
from celery import chord
|
||||
from celery import group
|
||||
from celery import shared_task
|
||||
@@ -38,6 +37,42 @@ if TYPE_CHECKING:
|
||||
logger: logging.Logger = logging.getLogger("paperless.bulk_edit")
|
||||
|
||||
|
||||
@shared_task(bind=True)
|
||||
def restore_archive_serial_numbers_task(
|
||||
self,
|
||||
backup: dict[int, int],
|
||||
*args,
|
||||
**kwargs,
|
||||
) -> None:
|
||||
restore_archive_serial_numbers(backup)
|
||||
|
||||
|
||||
def release_archive_serial_numbers(doc_ids: list[int]) -> dict[int, int]:
|
||||
"""
|
||||
Clears ASNs on documents that are about to be replaced so new documents
|
||||
can be assigned ASNs without uniqueness collisions. Returns a backup map
|
||||
of doc_id -> previous ASN for potential restoration.
|
||||
"""
|
||||
qs = Document.objects.filter(
|
||||
id__in=doc_ids,
|
||||
archive_serial_number__isnull=False,
|
||||
).only("pk", "archive_serial_number")
|
||||
backup = dict(qs.values_list("pk", "archive_serial_number"))
|
||||
qs.update(archive_serial_number=None)
|
||||
logger.info(f"Released archive serial numbers for documents {list(backup.keys())}")
|
||||
return backup
|
||||
|
||||
|
||||
def restore_archive_serial_numbers(backup: dict[int, int]) -> None:
|
||||
"""
|
||||
Restores ASNs using the provided backup map, intended for
|
||||
rollback when replacement consumption fails.
|
||||
"""
|
||||
for doc_id, asn in backup.items():
|
||||
Document.objects.filter(pk=doc_id).update(archive_serial_number=asn)
|
||||
logger.info(f"Restored archive serial numbers for documents {list(backup.keys())}")
|
||||
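These two helpers are used as a release/rollback pair around the queued consume-and-delete work further down: ASNs are released before queuing, then restored either immediately if queuing itself raises, or via restore_archive_serial_numbers_task wired as link_error for failures inside the workers. A minimal sketch of that pairing with in-memory stand-ins for the ORM (illustration only):
def queue_with_asn_rollback(doc_ids, release, restore, enqueue):
    backup = release(doc_ids)      # dict of doc_id -> previous ASN
    try:
        enqueue(backup)            # e.g. sig.apply_async(link_error=restore_task.s(backup))
    except Exception:
        restore(backup)            # immediate rollback when queuing fails
        raise

asns = {1: 101, 2: None}           # fake document table: id -> ASN

def release(ids):
    backup = {i: asns[i] for i in ids if asns[i] is not None}
    for i in backup:
        asns[i] = None
    return backup

def restore(backup):
    asns.update(backup)

def failing_enqueue(backup):
    raise RuntimeError("queue down")

try:
    queue_with_asn_rollback([1, 2], release, restore, failing_enqueue)
except RuntimeError:
    pass
assert asns[1] == 101              # ASN restored after the failed enqueue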
|
||||
|
||||
def set_correspondent(
|
||||
doc_ids: list[int],
|
||||
correspondent: Correspondent,
|
||||
@@ -386,6 +421,7 @@ def merge(
|
||||
|
||||
merged_pdf = pikepdf.new()
|
||||
version: str = merged_pdf.pdf_version
|
||||
handoff_asn: int | None = None
|
||||
# use doc_ids to preserve order
|
||||
for doc_id in doc_ids:
|
||||
doc = qs.get(id=doc_id)
|
||||
@@ -401,6 +437,8 @@ def merge(
|
||||
version = max(version, pdf.pdf_version)
|
||||
merged_pdf.pages.extend(pdf.pages)
|
||||
affected_docs.append(doc.id)
|
||||
if handoff_asn is None and doc.archive_serial_number is not None:
|
||||
handoff_asn = doc.archive_serial_number
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
f"Error merging document {doc.id}, it will not be included in the merge: {e}",
|
||||
@@ -426,6 +464,8 @@ def merge(
|
||||
DocumentMetadataOverrides.from_document(metadata_document)
|
||||
)
|
||||
overrides.title = metadata_document.title + " (merged)"
|
||||
if metadata_document.archive_serial_number is not None:
|
||||
handoff_asn = metadata_document.archive_serial_number
|
||||
else:
|
||||
overrides = DocumentMetadataOverrides()
|
||||
else:
|
||||
@@ -433,8 +473,11 @@ def merge(
|
||||
|
||||
if user is not None:
|
||||
overrides.owner_id = user.id
|
||||
# Avoid copying or detecting ASN from merged PDFs to prevent collision
|
||||
overrides.skip_asn = True
|
||||
if not delete_originals:
|
||||
overrides.skip_asn_if_exists = True
|
||||
|
||||
if delete_originals and handoff_asn is not None:
|
||||
overrides.asn = handoff_asn
|
||||
|
||||
logger.info("Adding merged document to the task queue.")
|
||||
|
||||
@@ -447,12 +490,20 @@ def merge(
|
||||
)
|
||||
|
||||
if delete_originals:
|
||||
backup = release_archive_serial_numbers(affected_docs)
|
||||
logger.info(
|
||||
"Queueing removal of original documents after consumption of merged document",
|
||||
)
|
||||
chain(consume_task, delete.si(affected_docs)).delay()
|
||||
else:
|
||||
consume_task.delay()
|
||||
try:
|
||||
consume_task.apply_async(
|
||||
link=[delete.si(affected_docs)],
|
||||
link_error=[restore_archive_serial_numbers_task.s(backup)],
|
||||
)
|
||||
except Exception:
|
||||
restore_archive_serial_numbers(backup)
|
||||
raise
|
||||
else:
|
||||
consume_task.delay()
|
||||
|
||||
return "OK"
|
||||
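The ASN handoff rule in merge() above: when the originals are deleted, the merged document inherits the metadata document's ASN if one was given and set, otherwise the ASN of the first original that has one; when the originals are kept, nothing is handed off and skip_asn_if_exists prevents a barcode-detected ASN from colliding with a still-existing original. A small sketch of that selection, matching the expectations in the tests further down (illustration only):
def choose_merged_asn(original_asns, metadata_asn, delete_originals):
    """original_asns: ASNs of the source documents in merge order (None when unset)."""
    if not delete_originals:
        return None                 # originals keep their ASNs; rely on skip_asn_if_exists
    if metadata_asn is not None:
        return metadata_asn
    return next((asn for asn in original_asns if asn is not None), None)

assert choose_merged_asn([101, 102, 103], None, True) == 101
assert choose_merged_asn([101, 202], 202, True) == 202
assert choose_merged_asn([101, 102], None, False) is None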
|
||||
@@ -494,6 +545,8 @@ def split(
|
||||
overrides.title = f"{doc.title} (split {idx + 1})"
|
||||
if user is not None:
|
||||
overrides.owner_id = user.id
|
||||
if not delete_originals:
|
||||
overrides.skip_asn_if_exists = True
|
||||
logger.info(
|
||||
f"Adding split document with pages {split_doc} to the task queue.",
|
||||
)
|
||||
@@ -508,10 +561,20 @@ def split(
|
||||
)
|
||||
|
||||
if delete_originals:
|
||||
backup = release_archive_serial_numbers([doc.id])
|
||||
logger.info(
|
||||
"Queueing removal of original document after consumption of the split documents",
|
||||
)
|
||||
chord(header=consume_tasks, body=delete.si([doc.id])).delay()
|
||||
try:
|
||||
chord(
|
||||
header=consume_tasks,
|
||||
body=delete.si([doc.id]),
|
||||
).apply_async(
|
||||
link_error=[restore_archive_serial_numbers_task.s(backup)],
|
||||
)
|
||||
except Exception:
|
||||
restore_archive_serial_numbers(backup)
|
||||
raise
|
||||
else:
|
||||
group(consume_tasks).delay()
|
||||
|
||||
@@ -614,7 +677,10 @@ def edit_pdf(
|
||||
)
|
||||
if user is not None:
|
||||
overrides.owner_id = user.id
|
||||
|
||||
if not delete_original:
|
||||
overrides.skip_asn_if_exists = True
|
||||
if delete_original and len(pdf_docs) == 1:
|
||||
overrides.asn = doc.archive_serial_number
|
||||
for idx, pdf in enumerate(pdf_docs, start=1):
|
||||
filepath: Path = (
|
||||
Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
|
||||
@@ -633,7 +699,17 @@ def edit_pdf(
|
||||
)
|
||||
|
||||
if delete_original:
|
||||
chord(header=consume_tasks, body=delete.si([doc.id])).delay()
|
||||
backup = release_archive_serial_numbers([doc.id])
|
||||
try:
|
||||
chord(
|
||||
header=consume_tasks,
|
||||
body=delete.si([doc.id]),
|
||||
).apply_async(
|
||||
link_error=[restore_archive_serial_numbers_task.s(backup)],
|
||||
)
|
||||
except Exception:
|
||||
restore_archive_serial_numbers(backup)
|
||||
raise
|
||||
else:
|
||||
group(consume_tasks).delay()
|
||||
|
||||
@@ -646,77 +722,6 @@ def edit_pdf(
|
||||
return "OK"
|
||||
|
||||
|
||||
def remove_password(
|
||||
doc_ids: list[int],
|
||||
password: str,
|
||||
*,
|
||||
update_document: bool = False,
|
||||
delete_original: bool = False,
|
||||
include_metadata: bool = True,
|
||||
user: User | None = None,
|
||||
) -> Literal["OK"]:
|
||||
"""
|
||||
Remove password protection from PDF documents.
|
||||
"""
|
||||
import pikepdf
|
||||
|
||||
for doc_id in doc_ids:
|
||||
doc = Document.objects.get(id=doc_id)
|
||||
try:
|
||||
logger.info(
|
||||
f"Attempting password removal from document {doc_ids[0]}",
|
||||
)
|
||||
with pikepdf.open(doc.source_path, password=password) as pdf:
|
||||
temp_path = doc.source_path.with_suffix(".tmp.pdf")
|
||||
pdf.remove_unreferenced_resources()
|
||||
pdf.save(temp_path)
|
||||
|
||||
if update_document:
|
||||
# replace the original document with the unprotected one
|
||||
temp_path.replace(doc.source_path)
|
||||
doc.checksum = hashlib.md5(doc.source_path.read_bytes()).hexdigest()
|
||||
doc.page_count = len(pdf.pages)
|
||||
doc.save()
|
||||
update_document_content_maybe_archive_file.delay(document_id=doc.id)
|
||||
else:
|
||||
consume_tasks = []
|
||||
overrides = (
|
||||
DocumentMetadataOverrides().from_document(doc)
|
||||
if include_metadata
|
||||
else DocumentMetadataOverrides()
|
||||
)
|
||||
if user is not None:
|
||||
overrides.owner_id = user.id
|
||||
|
||||
filepath: Path = (
|
||||
Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
|
||||
/ f"{doc.id}_unprotected.pdf"
|
||||
)
|
||||
temp_path.replace(filepath)
|
||||
consume_tasks.append(
|
||||
consume_file.s(
|
||||
ConsumableDocument(
|
||||
source=DocumentSource.ConsumeFolder,
|
||||
original_file=filepath,
|
||||
),
|
||||
overrides,
|
||||
),
|
||||
)
|
||||
|
||||
if delete_original:
|
||||
chord(header=consume_tasks, body=delete.si([doc.id])).delay()
|
||||
else:
|
||||
group(consume_tasks).delay()
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(f"Error removing password from document {doc.id}: {e}")
|
||||
raise ValueError(
|
||||
f"An error occurred while removing the password: {e}",
|
||||
) from e
|
||||
|
||||
return "OK"
|
||||
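remove_password() is reached through the documents bulk-edit endpoint; the request body used by the frontend dialog and the API tests further down has roughly this shape. A hedged sketch with requests: the path and parameter names come from the diff, while the host, document id, and auth header are placeholders:
import requests

payload = {
    "documents": [123],                 # placeholder document id
    "method": "remove_password",
    "parameters": {
        "password": "secret",
        "update_document": False,       # False: consume an unprotected copy instead of replacing
        "include_metadata": True,
        "delete_original": False,
    },
}
resp = requests.post(
    "http://localhost:8000/api/documents/bulk_edit/",  # placeholder host, path from the tests
    json=payload,
    headers={"Authorization": "Token <api-token>"},     # placeholder auth
    timeout=30,
)
resp.raise_for_status()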
|
||||
|
||||
def reflect_doclinks(
|
||||
document: Document,
|
||||
field: CustomField,
|
||||
|
||||
@@ -696,7 +696,7 @@ class ConsumerPlugin(
|
||||
pk=self.metadata.storage_path_id,
|
||||
)
|
||||
|
||||
if self.metadata.asn is not None and not self.metadata.skip_asn:
|
||||
if self.metadata.asn is not None:
|
||||
document.archive_serial_number = self.metadata.asn
|
||||
|
||||
if self.metadata.owner_id:
|
||||
@@ -812,8 +812,8 @@ class ConsumerPreflightPlugin(
|
||||
"""
|
||||
Check that if override_asn is given, it is unique and within a valid range
|
||||
"""
|
||||
if self.metadata.skip_asn or self.metadata.asn is None:
|
||||
# if skip is set or ASN is None
|
||||
if self.metadata.asn is None:
|
||||
# if ASN is None
|
||||
return
|
||||
# Validate the range is above zero and less than uint32_t max
|
||||
# otherwise, Whoosh can't handle it in the index
|
||||
|
||||
@@ -30,7 +30,7 @@ class DocumentMetadataOverrides:
|
||||
change_users: list[int] | None = None
|
||||
change_groups: list[int] | None = None
|
||||
custom_fields: dict | None = None
|
||||
skip_asn: bool = False
|
||||
skip_asn_if_exists: bool = False
|
||||
|
||||
def update(self, other: "DocumentMetadataOverrides") -> "DocumentMetadataOverrides":
|
||||
"""
|
||||
@@ -50,8 +50,8 @@ class DocumentMetadataOverrides:
|
||||
self.storage_path_id = other.storage_path_id
|
||||
if other.owner_id is not None:
|
||||
self.owner_id = other.owner_id
|
||||
if other.skip_asn:
|
||||
self.skip_asn = True
|
||||
if other.skip_asn_if_exists:
|
||||
self.skip_asn_if_exists = True
|
||||
|
||||
# merge
|
||||
if self.tag_ids is None:
|
||||
|
||||
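The skip_asn_if_exists flag merges sticky-True in DocumentMetadataOverrides.update() above: once either side sets it, the merged overrides keep it. A minimal stand-alone illustration of that semantics (not code from the change):
from dataclasses import dataclass

@dataclass
class FlagsSketch:                      # stand-in for the relevant part of the overrides
    skip_asn_if_exists: bool = False

    def update(self, other: "FlagsSketch") -> "FlagsSketch":
        if other.skip_asn_if_exists:    # True is sticky across merges
            self.skip_asn_if_exists = True
        return self

assert FlagsSketch().update(FlagsSketch(skip_asn_if_exists=True)).skip_asn_if_exists
assert FlagsSketch(skip_asn_if_exists=True).update(FlagsSketch()).skip_asn_if_exists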
@@ -10,7 +10,6 @@ from datetime import time
|
||||
from datetime import timedelta
|
||||
from datetime import timezone
|
||||
from shutil import rmtree
|
||||
from time import sleep
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Literal
|
||||
|
||||
@@ -33,7 +32,6 @@ from whoosh.highlight import HtmlFormatter
|
||||
from whoosh.idsets import BitSet
|
||||
from whoosh.idsets import DocIdSet
|
||||
from whoosh.index import FileIndex
|
||||
from whoosh.index import LockError
|
||||
from whoosh.index import create_in
|
||||
from whoosh.index import exists_in
|
||||
from whoosh.index import open_dir
|
||||
@@ -99,33 +97,11 @@ def get_schema() -> Schema:
|
||||
|
||||
|
||||
def open_index(*, recreate=False) -> FileIndex:
|
||||
transient_exceptions = (FileNotFoundError, LockError)
|
||||
max_retries = 3
|
||||
retry_delay = 0.1
|
||||
|
||||
for attempt in range(max_retries + 1):
|
||||
try:
|
||||
if exists_in(settings.INDEX_DIR) and not recreate:
|
||||
return open_dir(settings.INDEX_DIR, schema=get_schema())
|
||||
break
|
||||
except transient_exceptions as exc:
|
||||
is_last_attempt = attempt == max_retries or recreate
|
||||
if is_last_attempt:
|
||||
logger.exception(
|
||||
"Error while opening the index after retries, recreating.",
|
||||
)
|
||||
break
|
||||
|
||||
logger.warning(
|
||||
"Transient error while opening the index (attempt %s/%s): %s. Retrying.",
|
||||
attempt + 1,
|
||||
max_retries + 1,
|
||||
exc,
|
||||
)
|
||||
sleep(retry_delay)
|
||||
except Exception:
|
||||
logger.exception("Error while opening the index, recreating.")
|
||||
break
|
||||
try:
|
||||
if exists_in(settings.INDEX_DIR) and not recreate:
|
||||
return open_dir(settings.INDEX_DIR, schema=get_schema())
|
||||
except Exception:
|
||||
logger.exception("Error while opening the index, recreating.")
|
||||
|
||||
# create_in doesn't handle corrupted indexes very well, remove the directory entirely first
|
||||
if settings.INDEX_DIR.is_dir():
|
||||
|
||||
@@ -1430,7 +1430,6 @@ class BulkEditSerializer(
|
||||
"split",
|
||||
"delete_pages",
|
||||
"edit_pdf",
|
||||
"remove_password",
|
||||
],
|
||||
label="Method",
|
||||
write_only=True,
|
||||
@@ -1506,8 +1505,6 @@ class BulkEditSerializer(
|
||||
return bulk_edit.delete_pages
|
||||
elif method == "edit_pdf":
|
||||
return bulk_edit.edit_pdf
|
||||
elif method == "remove_password":
|
||||
return bulk_edit.remove_password
|
||||
else: # pragma: no cover
|
||||
# This will never happen as it is handled by the ChoiceField
|
||||
raise serializers.ValidationError("Unsupported method.")
|
||||
@@ -1704,12 +1701,6 @@ class BulkEditSerializer(
|
||||
f"Page {op['page']} is out of bounds for document with {doc.page_count} pages.",
|
||||
)
|
||||
|
||||
def validate_parameters_remove_password(self, parameters):
|
||||
if "password" not in parameters:
|
||||
raise serializers.ValidationError("password not specified")
|
||||
if not isinstance(parameters["password"], str):
|
||||
raise serializers.ValidationError("password must be a string")
|
||||
|
||||
def validate(self, attrs):
|
||||
method = attrs["method"]
|
||||
parameters = attrs["parameters"]
|
||||
@@ -1750,8 +1741,6 @@ class BulkEditSerializer(
|
||||
"Edit PDF method only supports one document",
|
||||
)
|
||||
self._validate_parameters_edit_pdf(parameters, attrs["documents"][0])
|
||||
elif method == bulk_edit.remove_password:
|
||||
self.validate_parameters_remove_password(parameters)
|
||||
|
||||
return attrs
|
||||
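validate_parameters_remove_password only requires that a password is present and is a string; the remaining parameters (update_document, include_metadata, delete_original) are passed through and otherwise use the defaults shown in remove_password() above. A stand-alone sketch of the accepted and rejected shapes, mirroring the API tests further down (illustration only):
def validate_remove_password_params(parameters: dict) -> None:
    if "password" not in parameters:
        raise ValueError("password not specified")
    if not isinstance(parameters["password"], str):
        raise ValueError("password must be a string")

validate_remove_password_params({"password": "secret", "update_document": True})  # accepted
for bad in ({}, {"password": 123}):
    try:
        validate_remove_password_params(bad)
    except ValueError as err:
        print(err)    # "password not specified", then "password must be a string"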
|
||||
|
||||
@@ -493,7 +493,7 @@ def check_scheduled_workflows():
|
||||
trigger.schedule_is_recurring
|
||||
and workflow_runs.exists()
|
||||
and (
|
||||
workflow_runs.first().run_at
|
||||
workflow_runs.last().run_at
|
||||
> now
|
||||
- datetime.timedelta(
|
||||
days=trigger.schedule_recurring_interval_days,
|
||||
|
||||
@@ -1582,58 +1582,6 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
||||
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
self.assertIn(b"out of bounds", response.content)
|
||||
|
||||
@mock.patch("documents.serialisers.bulk_edit.remove_password")
|
||||
def test_remove_password(self, m):
|
||||
self.setup_mock(m, "remove_password")
|
||||
response = self.client.post(
|
||||
"/api/documents/bulk_edit/",
|
||||
json.dumps(
|
||||
{
|
||||
"documents": [self.doc2.id],
|
||||
"method": "remove_password",
|
||||
"parameters": {"password": "secret", "update_document": True},
|
||||
},
|
||||
),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
m.assert_called_once()
|
||||
args, kwargs = m.call_args
|
||||
self.assertCountEqual(args[0], [self.doc2.id])
|
||||
self.assertEqual(kwargs["password"], "secret")
|
||||
self.assertTrue(kwargs["update_document"])
|
||||
self.assertEqual(kwargs["user"], self.user)
|
||||
|
||||
def test_remove_password_invalid_params(self):
|
||||
response = self.client.post(
|
||||
"/api/documents/bulk_edit/",
|
||||
json.dumps(
|
||||
{
|
||||
"documents": [self.doc2.id],
|
||||
"method": "remove_password",
|
||||
"parameters": {},
|
||||
},
|
||||
),
|
||||
content_type="application/json",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
self.assertIn(b"password not specified", response.content)
|
||||
|
||||
response = self.client.post(
|
||||
"/api/documents/bulk_edit/",
|
||||
json.dumps(
|
||||
{
|
||||
"documents": [self.doc2.id],
|
||||
"method": "remove_password",
|
||||
"parameters": {"password": 123},
|
||||
},
|
||||
),
|
||||
content_type="application/json",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
self.assertIn(b"password must be a string", response.content)
|
||||
|
||||
@override_settings(AUDIT_LOG_ENABLED=True)
|
||||
def test_bulk_edit_audit_log_enabled_simple_field(self):
|
||||
"""
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import hashlib
|
||||
import shutil
|
||||
from datetime import date
|
||||
from pathlib import Path
|
||||
@@ -603,23 +602,21 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
expected_filename,
|
||||
)
|
||||
self.assertEqual(consume_file_args[1].title, None)
|
||||
self.assertTrue(consume_file_args[1].skip_asn)
|
||||
# No metadata_document_id, delete_originals False, so ASN should be None
|
||||
self.assertIsNone(consume_file_args[1].asn)
|
||||
|
||||
# With metadata_document_id overrides
|
||||
result = bulk_edit.merge(doc_ids, metadata_document_id=metadata_document_id)
|
||||
consume_file_args, _ = mock_consume_file.call_args
|
||||
self.assertEqual(consume_file_args[1].title, "B (merged)")
|
||||
self.assertEqual(consume_file_args[1].created, self.doc2.created)
|
||||
self.assertTrue(consume_file_args[1].skip_asn)
|
||||
|
||||
self.assertEqual(result, "OK")
|
||||
|
||||
@mock.patch("documents.bulk_edit.delete.si")
|
||||
@mock.patch("documents.tasks.consume_file.s")
|
||||
@mock.patch("documents.bulk_edit.chain")
|
||||
def test_merge_and_delete_originals(
|
||||
self,
|
||||
mock_chain,
|
||||
mock_consume_file,
|
||||
mock_delete_documents,
|
||||
):
|
||||
@@ -633,6 +630,12 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
- Document deletion task should be called
|
||||
"""
|
||||
doc_ids = [self.doc1.id, self.doc2.id, self.doc3.id]
|
||||
self.doc1.archive_serial_number = 101
|
||||
self.doc2.archive_serial_number = 102
|
||||
self.doc3.archive_serial_number = 103
|
||||
self.doc1.save()
|
||||
self.doc2.save()
|
||||
self.doc3.save()
|
||||
|
||||
result = bulk_edit.merge(doc_ids, delete_originals=True)
|
||||
self.assertEqual(result, "OK")
|
||||
@@ -643,7 +646,8 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
|
||||
mock_consume_file.assert_called()
|
||||
mock_delete_documents.assert_called()
|
||||
mock_chain.assert_called_once()
|
||||
consume_sig = mock_consume_file.return_value
|
||||
consume_sig.apply_async.assert_called_once()
|
||||
|
||||
consume_file_args, _ = mock_consume_file.call_args
|
||||
self.assertEqual(
|
||||
@@ -651,7 +655,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
expected_filename,
|
||||
)
|
||||
self.assertEqual(consume_file_args[1].title, None)
|
||||
self.assertTrue(consume_file_args[1].skip_asn)
|
||||
self.assertEqual(consume_file_args[1].asn, 101)
|
||||
|
||||
delete_documents_args, _ = mock_delete_documents.call_args
|
||||
self.assertEqual(
|
||||
@@ -659,6 +663,92 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
doc_ids,
|
||||
)
|
||||
|
||||
self.doc1.refresh_from_db()
|
||||
self.doc2.refresh_from_db()
|
||||
self.doc3.refresh_from_db()
|
||||
self.assertIsNone(self.doc1.archive_serial_number)
|
||||
self.assertIsNone(self.doc2.archive_serial_number)
|
||||
self.assertIsNone(self.doc3.archive_serial_number)
|
||||
|
||||
@mock.patch("documents.bulk_edit.delete.si")
|
||||
@mock.patch("documents.tasks.consume_file.s")
|
||||
def test_merge_and_delete_originals_restore_on_failure(
|
||||
self,
|
||||
mock_consume_file,
|
||||
mock_delete_documents,
|
||||
):
|
||||
"""
|
||||
GIVEN:
|
||||
- Existing documents
|
||||
WHEN:
|
||||
- Merge action with deleting documents is called with 1 document
|
||||
- Error occurs when queuing consume file task
|
||||
THEN:
|
||||
- Archive serial numbers are restored
|
||||
"""
|
||||
doc_ids = [self.doc1.id]
|
||||
self.doc1.archive_serial_number = 111
|
||||
self.doc1.save()
|
||||
sig = mock.Mock()
|
||||
sig.apply_async.side_effect = Exception("boom")
|
||||
mock_consume_file.return_value = sig
|
||||
|
||||
with self.assertRaises(Exception):
|
||||
bulk_edit.merge(doc_ids, delete_originals=True)
|
||||
|
||||
self.doc1.refresh_from_db()
|
||||
self.assertEqual(self.doc1.archive_serial_number, 111)
|
||||
|
||||
@mock.patch("documents.bulk_edit.delete.si")
|
||||
@mock.patch("documents.tasks.consume_file.s")
|
||||
def test_merge_and_delete_originals_metadata_handoff(
|
||||
self,
|
||||
mock_consume_file,
|
||||
mock_delete_documents,
|
||||
):
|
||||
"""
|
||||
GIVEN:
|
||||
- Existing documents with ASNs
|
||||
WHEN:
|
||||
- Merge with delete_originals=True and metadata_document_id set
|
||||
THEN:
|
||||
- Handoff ASN uses metadata document ASN
|
||||
"""
|
||||
doc_ids = [self.doc1.id, self.doc2.id]
|
||||
self.doc1.archive_serial_number = 101
|
||||
self.doc2.archive_serial_number = 202
|
||||
self.doc1.save()
|
||||
self.doc2.save()
|
||||
|
||||
result = bulk_edit.merge(
|
||||
doc_ids,
|
||||
metadata_document_id=self.doc2.id,
|
||||
delete_originals=True,
|
||||
)
|
||||
self.assertEqual(result, "OK")
|
||||
|
||||
consume_file_args, _ = mock_consume_file.call_args
|
||||
self.assertEqual(consume_file_args[1].asn, 202)
|
||||
|
||||
def test_restore_archive_serial_numbers_task(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Existing document with no archive serial number
|
||||
WHEN:
|
||||
- Restore archive serial number task is called with backup data
|
||||
THEN:
|
||||
- Document archive serial number is restored
|
||||
"""
|
||||
self.doc1.archive_serial_number = 444
|
||||
self.doc1.save()
|
||||
Document.objects.filter(pk=self.doc1.id).update(archive_serial_number=None)
|
||||
|
||||
backup = {self.doc1.id: 444}
|
||||
bulk_edit.restore_archive_serial_numbers_task(backup)
|
||||
|
||||
self.doc1.refresh_from_db()
|
||||
self.assertEqual(self.doc1.archive_serial_number, 444)
|
||||
|
||||
@mock.patch("documents.tasks.consume_file.s")
|
||||
def test_merge_with_archive_fallback(self, mock_consume_file):
|
||||
"""
|
||||
@@ -727,6 +817,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
self.assertEqual(mock_consume_file.call_count, 2)
|
||||
consume_file_args, _ = mock_consume_file.call_args
|
||||
self.assertEqual(consume_file_args[1].title, "B (split 2)")
|
||||
self.assertIsNone(consume_file_args[1].asn)
|
||||
|
||||
self.assertEqual(result, "OK")
|
||||
|
||||
@@ -751,6 +842,8 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
"""
|
||||
doc_ids = [self.doc2.id]
|
||||
pages = [[1, 2], [3]]
|
||||
self.doc2.archive_serial_number = 200
|
||||
self.doc2.save()
|
||||
|
||||
result = bulk_edit.split(doc_ids, pages, delete_originals=True)
|
||||
self.assertEqual(result, "OK")
|
||||
@@ -768,6 +861,42 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
doc_ids,
|
||||
)
|
||||
|
||||
self.doc2.refresh_from_db()
|
||||
self.assertIsNone(self.doc2.archive_serial_number)
|
||||
|
||||
@mock.patch("documents.bulk_edit.delete.si")
|
||||
@mock.patch("documents.tasks.consume_file.s")
|
||||
@mock.patch("documents.bulk_edit.chord")
|
||||
def test_split_restore_on_failure(
|
||||
self,
|
||||
mock_chord,
|
||||
mock_consume_file,
|
||||
mock_delete_documents,
|
||||
):
|
||||
"""
|
||||
GIVEN:
|
||||
- Existing documents
|
||||
WHEN:
|
||||
- Split action with deleting documents is called with 1 document and 2 page groups
|
||||
- Error occurs when queuing chord task
|
||||
THEN:
|
||||
- Archive serial numbers are restored
|
||||
"""
|
||||
doc_ids = [self.doc2.id]
|
||||
pages = [[1, 2]]
|
||||
self.doc2.archive_serial_number = 222
|
||||
self.doc2.save()
|
||||
|
||||
sig = mock.Mock()
|
||||
sig.apply_async.side_effect = Exception("boom")
|
||||
mock_chord.return_value = sig
|
||||
|
||||
result = bulk_edit.split(doc_ids, pages, delete_originals=True)
|
||||
self.assertEqual(result, "OK")
|
||||
|
||||
self.doc2.refresh_from_db()
|
||||
self.assertEqual(self.doc2.archive_serial_number, 222)
|
||||
|
||||
@mock.patch("documents.tasks.consume_file.delay")
|
||||
@mock.patch("pikepdf.Pdf.save")
|
||||
def test_split_with_errors(self, mock_save_pdf, mock_consume_file):
|
||||
@@ -968,10 +1097,49 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
mock_chord.return_value.delay.return_value = None
|
||||
doc_ids = [self.doc2.id]
|
||||
operations = [{"page": 1}, {"page": 2}]
|
||||
self.doc2.archive_serial_number = 250
|
||||
self.doc2.save()
|
||||
|
||||
result = bulk_edit.edit_pdf(doc_ids, operations, delete_original=True)
|
||||
self.assertEqual(result, "OK")
|
||||
mock_chord.assert_called_once()
|
||||
consume_file_args, _ = mock_consume_file.call_args
|
||||
self.assertEqual(consume_file_args[1].asn, 250)
|
||||
self.doc2.refresh_from_db()
|
||||
self.assertIsNone(self.doc2.archive_serial_number)
|
||||
|
||||
@mock.patch("documents.bulk_edit.delete.si")
|
||||
@mock.patch("documents.tasks.consume_file.s")
|
||||
@mock.patch("documents.bulk_edit.chord")
|
||||
def test_edit_pdf_restore_on_failure(
|
||||
self,
|
||||
mock_chord,
|
||||
mock_consume_file,
|
||||
mock_delete_documents,
|
||||
):
|
||||
"""
|
||||
GIVEN:
|
||||
- Existing document
|
||||
WHEN:
|
||||
- edit_pdf is called with delete_original=True
|
||||
- Error occurs when queuing chord task
|
||||
THEN:
|
||||
- Archive serial numbers are restored
|
||||
"""
|
||||
doc_ids = [self.doc2.id]
|
||||
operations = [{"page": 1}]
|
||||
self.doc2.archive_serial_number = 333
|
||||
self.doc2.save()
|
||||
|
||||
sig = mock.Mock()
|
||||
sig.apply_async.side_effect = Exception("boom")
|
||||
mock_chord.return_value = sig
|
||||
|
||||
with self.assertRaises(Exception):
|
||||
bulk_edit.edit_pdf(doc_ids, operations, delete_original=True)
|
||||
|
||||
self.doc2.refresh_from_db()
|
||||
self.assertEqual(self.doc2.archive_serial_number, 333)
|
||||
|
||||
@mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
|
||||
def test_edit_pdf_with_update_document(self, mock_update_document):
|
||||
@@ -1067,147 +1235,3 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
bulk_edit.edit_pdf(doc_ids, operations, update_document=True)
|
||||
mock_group.assert_not_called()
|
||||
mock_consume_file.assert_not_called()
|
||||
|
||||
@mock.patch("documents.bulk_edit.update_document_content_maybe_archive_file.delay")
|
||||
@mock.patch("pikepdf.open")
|
||||
def test_remove_password_update_document(self, mock_open, mock_update_document):
|
||||
doc = self.doc1
|
||||
original_checksum = doc.checksum
|
||||
|
||||
fake_pdf = mock.MagicMock()
|
||||
fake_pdf.pages = [mock.Mock(), mock.Mock(), mock.Mock()]
|
||||
|
||||
def save_side_effect(target_path):
|
||||
Path(target_path).write_bytes(b"new pdf content")
|
||||
|
||||
fake_pdf.save.side_effect = save_side_effect
|
||||
mock_open.return_value.__enter__.return_value = fake_pdf
|
||||
|
||||
result = bulk_edit.remove_password(
|
||||
[doc.id],
|
||||
password="secret",
|
||||
update_document=True,
|
||||
)
|
||||
|
||||
self.assertEqual(result, "OK")
|
||||
mock_open.assert_called_once_with(doc.source_path, password="secret")
|
||||
fake_pdf.remove_unreferenced_resources.assert_called_once()
|
||||
doc.refresh_from_db()
|
||||
self.assertNotEqual(doc.checksum, original_checksum)
|
||||
expected_checksum = hashlib.md5(doc.source_path.read_bytes()).hexdigest()
|
||||
self.assertEqual(doc.checksum, expected_checksum)
|
||||
self.assertEqual(doc.page_count, len(fake_pdf.pages))
|
||||
mock_update_document.assert_called_once_with(document_id=doc.id)
|
||||
|
||||
@mock.patch("documents.bulk_edit.chord")
|
||||
@mock.patch("documents.bulk_edit.group")
|
||||
@mock.patch("documents.tasks.consume_file.s")
|
||||
@mock.patch("documents.bulk_edit.tempfile.mkdtemp")
|
||||
@mock.patch("pikepdf.open")
|
||||
def test_remove_password_creates_consumable_document(
|
||||
self,
|
||||
mock_open,
|
||||
mock_mkdtemp,
|
||||
mock_consume_file,
|
||||
mock_group,
|
||||
mock_chord,
|
||||
):
|
||||
doc = self.doc2
|
||||
temp_dir = self.dirs.scratch_dir / "remove-password"
|
||||
temp_dir.mkdir(parents=True, exist_ok=True)
|
||||
mock_mkdtemp.return_value = str(temp_dir)
|
||||
|
||||
fake_pdf = mock.MagicMock()
|
||||
fake_pdf.pages = [mock.Mock(), mock.Mock()]
|
||||
|
||||
def save_side_effect(target_path):
|
||||
Path(target_path).write_bytes(b"password removed")
|
||||
|
||||
fake_pdf.save.side_effect = save_side_effect
|
||||
mock_open.return_value.__enter__.return_value = fake_pdf
|
||||
mock_group.return_value.delay.return_value = None
|
||||
|
||||
user = User.objects.create(username="owner")
|
||||
|
||||
result = bulk_edit.remove_password(
|
||||
[doc.id],
|
||||
password="secret",
|
||||
include_metadata=False,
|
||||
update_document=False,
|
||||
delete_original=False,
|
||||
user=user,
|
||||
)
|
||||
|
||||
self.assertEqual(result, "OK")
|
||||
mock_open.assert_called_once_with(doc.source_path, password="secret")
|
||||
mock_consume_file.assert_called_once()
|
||||
consume_args, _ = mock_consume_file.call_args
|
||||
consumable_document = consume_args[0]
|
||||
overrides = consume_args[1]
|
||||
expected_path = temp_dir / f"{doc.id}_unprotected.pdf"
|
||||
self.assertTrue(expected_path.exists())
|
||||
self.assertEqual(
|
||||
Path(consumable_document.original_file).resolve(),
|
||||
expected_path.resolve(),
|
||||
)
|
||||
self.assertEqual(overrides.owner_id, user.id)
|
||||
mock_group.assert_called_once_with([mock_consume_file.return_value])
|
||||
mock_group.return_value.delay.assert_called_once()
|
||||
mock_chord.assert_not_called()
|
||||
|
||||
@mock.patch("documents.bulk_edit.delete")
|
||||
@mock.patch("documents.bulk_edit.chord")
|
||||
@mock.patch("documents.bulk_edit.group")
|
||||
@mock.patch("documents.tasks.consume_file.s")
|
||||
@mock.patch("documents.bulk_edit.tempfile.mkdtemp")
|
||||
@mock.patch("pikepdf.open")
|
||||
def test_remove_password_deletes_original(
|
||||
self,
|
||||
mock_open,
|
||||
mock_mkdtemp,
|
||||
mock_consume_file,
|
||||
mock_group,
|
||||
mock_chord,
|
||||
mock_delete,
|
||||
):
|
||||
doc = self.doc2
|
||||
temp_dir = self.dirs.scratch_dir / "remove-password-delete"
|
||||
temp_dir.mkdir(parents=True, exist_ok=True)
|
||||
mock_mkdtemp.return_value = str(temp_dir)
|
||||
|
||||
fake_pdf = mock.MagicMock()
|
||||
fake_pdf.pages = [mock.Mock(), mock.Mock()]
|
||||
|
||||
def save_side_effect(target_path):
|
||||
Path(target_path).write_bytes(b"password removed")
|
||||
|
||||
fake_pdf.save.side_effect = save_side_effect
|
||||
mock_open.return_value.__enter__.return_value = fake_pdf
|
||||
mock_chord.return_value.delay.return_value = None
|
||||
|
||||
result = bulk_edit.remove_password(
|
||||
[doc.id],
|
||||
password="secret",
|
||||
include_metadata=False,
|
||||
update_document=False,
|
||||
delete_original=True,
|
||||
)
|
||||
|
||||
self.assertEqual(result, "OK")
|
||||
mock_open.assert_called_once_with(doc.source_path, password="secret")
|
||||
mock_consume_file.assert_called_once()
|
||||
mock_group.assert_not_called()
|
||||
mock_chord.assert_called_once()
|
||||
mock_chord.return_value.delay.assert_called_once()
|
||||
mock_delete.si.assert_called_once_with([doc.id])
|
||||
|
||||
@mock.patch("pikepdf.open")
|
||||
def test_remove_password_open_failure(self, mock_open):
|
||||
mock_open.side_effect = RuntimeError("wrong password")
|
||||
|
||||
with self.assertLogs("paperless.bulk_edit", level="ERROR") as cm:
|
||||
with self.assertRaises(ValueError) as exc:
|
||||
bulk_edit.remove_password([self.doc1.id], password="secret")
|
||||
|
||||
self.assertIn("wrong password", str(exc.exception))
|
||||
self.assertIn("Error removing password from document", cm.output[0])
|
||||
|
||||
@@ -14,6 +14,7 @@ from django.test import override_settings
from django.utils import timezone
from guardian.core import ObjectPermissionChecker

from documents.barcodes import BarcodePlugin
from documents.consumer import ConsumerError
from documents.data_models import DocumentMetadataOverrides
from documents.data_models import DocumentSource
@@ -412,14 +413,6 @@ class TestConsumer(
self.assertEqual(document.archive_serial_number, 123)
self._assert_first_last_send_progress()

def testMetadataOverridesSkipAsnPropagation(self):
overrides = DocumentMetadataOverrides()
incoming = DocumentMetadataOverrides(skip_asn=True)

overrides.update(incoming)

self.assertTrue(overrides.skip_asn)

def testOverrideTitlePlaceholders(self):
c = Correspondent.objects.create(name="Correspondent Name")
dt = DocumentType.objects.create(name="DocType Name")
@@ -1240,3 +1233,46 @@ class PostConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):
r"sample\.pdf: Error while executing post-consume script: Command '\[.*\]' returned non-zero exit status \d+\.",
):
consumer.run_post_consume_script(doc)


class TestMetadataOverrides(TestCase):
def test_update_skip_asn_if_exists(self):
base = DocumentMetadataOverrides()
incoming = DocumentMetadataOverrides(skip_asn_if_exists=True)
base.update(incoming)
self.assertTrue(base.skip_asn_if_exists)


class TestBarcodeApplyDetectedASN(TestCase):
"""
GIVEN:
- Existing Documents with ASN 123
WHEN:
- A BarcodePlugin which detected an ASN
THEN:
- If skip_asn_if_exists is set, and ASN exists, do not set ASN
- If skip_asn_if_exists is set, and ASN does not exist, set ASN
"""

def test_apply_detected_asn_skips_existing_when_flag_set(self):
doc = Document.objects.create(
checksum="X1",
title="D1",
archive_serial_number=123,
)
metadata = DocumentMetadataOverrides(skip_asn_if_exists=True)
plugin = BarcodePlugin(
input_doc=mock.Mock(),
metadata=metadata,
status_mgr=mock.Mock(),
base_tmp_dir=tempfile.gettempdir(),
task_id="test-task",
)

plugin._apply_detected_asn(123)
self.assertIsNone(plugin.metadata.asn)

doc.hard_delete()

plugin._apply_detected_asn(123)
self.assertEqual(plugin.metadata.asn, 123)

@@ -1,7 +1,6 @@
|
||||
from datetime import datetime
|
||||
from unittest import mock
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
from django.test import SimpleTestCase
|
||||
from django.test import TestCase
|
||||
@@ -252,120 +251,3 @@ class TestRewriteNaturalDateKeywords(SimpleTestCase):
|
||||
result = self._rewrite_with_now("added:today", fixed_now)
|
||||
# Should convert to UTC properly
|
||||
self.assertIn("added:[20250719", result)
|
||||
|
||||
|
||||
class TestIndexResilience(DirectoriesMixin, SimpleTestCase):
|
||||
def _assert_recreate_called(self, mock_create_in):
|
||||
mock_create_in.assert_called_once()
|
||||
path_arg, schema_arg = mock_create_in.call_args.args
|
||||
self.assertEqual(path_arg, settings.INDEX_DIR)
|
||||
self.assertEqual(schema_arg.__class__.__name__, "Schema")
|
||||
|
||||
def test_transient_missing_segment_does_not_force_recreate(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Index directory exists
|
||||
WHEN:
|
||||
- open_index is called
|
||||
- Opening the index raises FileNotFoundError once due to a
|
||||
transient missing segment
|
||||
THEN:
|
||||
- Index is opened successfully on retry
|
||||
- Index is not recreated
|
||||
"""
|
||||
file_marker = settings.INDEX_DIR / "file_marker.txt"
|
||||
file_marker.write_text("keep")
|
||||
expected_index = object()
|
||||
|
||||
with (
|
||||
mock.patch("documents.index.exists_in", return_value=True),
|
||||
mock.patch(
|
||||
"documents.index.open_dir",
|
||||
side_effect=[FileNotFoundError("missing"), expected_index],
|
||||
) as mock_open_dir,
|
||||
mock.patch(
|
||||
"documents.index.create_in",
|
||||
) as mock_create_in,
|
||||
mock.patch(
|
||||
"documents.index.rmtree",
|
||||
) as mock_rmtree,
|
||||
):
|
||||
ix = index.open_index()
|
||||
|
||||
self.assertIs(ix, expected_index)
|
||||
self.assertGreaterEqual(mock_open_dir.call_count, 2)
|
||||
mock_rmtree.assert_not_called()
|
||||
mock_create_in.assert_not_called()
|
||||
self.assertEqual(file_marker.read_text(), "keep")
|
||||
|
||||
def test_transient_errors_exhaust_retries_and_recreate(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Index directory exists
|
||||
WHEN:
|
||||
- open_index is called
|
||||
- Opening the index raises FileNotFoundError multiple times due to
|
||||
transient missing segments
|
||||
THEN:
|
||||
- Index is recreated after retries are exhausted
|
||||
"""
|
||||
recreated_index = object()
|
||||
|
||||
with (
|
||||
self.assertLogs("paperless.index", level="ERROR") as cm,
|
||||
mock.patch("documents.index.exists_in", return_value=True),
|
||||
mock.patch(
|
||||
"documents.index.open_dir",
|
||||
side_effect=FileNotFoundError("missing"),
|
||||
) as mock_open_dir,
|
||||
mock.patch("documents.index.rmtree") as mock_rmtree,
|
||||
mock.patch(
|
||||
"documents.index.create_in",
|
||||
return_value=recreated_index,
|
||||
) as mock_create_in,
|
||||
):
|
||||
ix = index.open_index()
|
||||
|
||||
self.assertIs(ix, recreated_index)
|
||||
self.assertEqual(mock_open_dir.call_count, 4)
|
||||
mock_rmtree.assert_called_once_with(settings.INDEX_DIR)
|
||||
self._assert_recreate_called(mock_create_in)
|
||||
self.assertIn(
|
||||
"Error while opening the index after retries, recreating.",
|
||||
cm.output[0],
|
||||
)
|
||||
|
||||
def test_non_transient_error_recreates_index(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Index directory exists
|
||||
WHEN:
|
||||
- open_index is called
|
||||
- Opening the index raises a "non-transient" error
|
||||
THEN:
|
||||
- Index is recreated
|
||||
"""
|
||||
recreated_index = object()
|
||||
|
||||
with (
|
||||
self.assertLogs("paperless.index", level="ERROR") as cm,
|
||||
mock.patch("documents.index.exists_in", return_value=True),
|
||||
mock.patch(
|
||||
"documents.index.open_dir",
|
||||
side_effect=RuntimeError("boom"),
|
||||
),
|
||||
mock.patch("documents.index.rmtree") as mock_rmtree,
|
||||
mock.patch(
|
||||
"documents.index.create_in",
|
||||
return_value=recreated_index,
|
||||
) as mock_create_in,
|
||||
):
|
||||
ix = index.open_index()
|
||||
|
||||
self.assertIs(ix, recreated_index)
|
||||
mock_rmtree.assert_called_once_with(settings.INDEX_DIR)
|
||||
self._assert_recreate_called(mock_create_in)
|
||||
self.assertIn(
|
||||
"Error while opening the index, recreating.",
|
||||
cm.output[0],
|
||||
)
|
||||
|
||||
@@ -2094,68 +2094,6 @@ class TestWorkflows(
|
||||
doc.refresh_from_db()
|
||||
self.assertIsNone(doc.owner)
|
||||
|
||||
def test_workflow_scheduled_recurring_respects_latest_run(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Scheduled workflow marked as recurring with a 1-day interval
|
||||
- Document that matches the trigger
|
||||
- Two prior runs exist: one 2 days ago and one 1 hour ago
|
||||
WHEN:
|
||||
- Scheduled workflows are checked again
|
||||
THEN:
|
||||
- Workflow does not run because the most recent run is inside the interval
|
||||
"""
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
|
||||
schedule_date_field=WorkflowTrigger.ScheduleDateField.CREATED,
|
||||
schedule_is_recurring=True,
|
||||
schedule_recurring_interval_days=1,
|
||||
)
|
||||
action = WorkflowAction.objects.create(
|
||||
assign_title="Doc assign owner",
|
||||
assign_owner=self.user2,
|
||||
)
|
||||
w = Workflow.objects.create(
|
||||
name="Workflow 1",
|
||||
order=0,
|
||||
)
|
||||
w.triggers.add(trigger)
|
||||
w.actions.add(action)
|
||||
w.save()
|
||||
|
||||
doc = Document.objects.create(
|
||||
title="sample test",
|
||||
correspondent=self.c,
|
||||
original_filename="sample.pdf",
|
||||
created=timezone.now().date() - timedelta(days=3),
|
||||
)
|
||||
|
||||
WorkflowRun.objects.create(
|
||||
workflow=w,
|
||||
document=doc,
|
||||
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
|
||||
run_at=timezone.now() - timedelta(days=2),
|
||||
)
|
||||
WorkflowRun.objects.create(
|
||||
workflow=w,
|
||||
document=doc,
|
||||
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
|
||||
run_at=timezone.now() - timedelta(hours=1),
|
||||
)
|
||||
|
||||
tasks.check_scheduled_workflows()
|
||||
|
||||
doc.refresh_from_db()
|
||||
self.assertIsNone(doc.owner)
|
||||
self.assertEqual(
|
||||
WorkflowRun.objects.filter(
|
||||
workflow=w,
|
||||
document=doc,
|
||||
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
|
||||
).count(),
|
||||
2,
|
||||
)
|
||||
|
||||
def test_workflow_scheduled_trigger_negative_offset_customfield(self):
|
||||
"""
|
||||
GIVEN:
|
||||
|
||||
@@ -1504,7 +1504,6 @@ class BulkEditView(PassUserMixin):
"merge": None,
"edit_pdf": "checksum",
"reprocess": "checksum",
"remove_password": "checksum",
}

permission_classes = (IsAuthenticated,)
@@ -1523,7 +1522,6 @@ class BulkEditView(PassUserMixin):
bulk_edit.split,
bulk_edit.merge,
bulk_edit.edit_pdf,
bulk_edit.remove_password,
]:
parameters["user"] = user

@@ -1552,7 +1550,6 @@ class BulkEditView(PassUserMixin):
bulk_edit.rotate,
bulk_edit.delete_pages,
bulk_edit.edit_pdf,
bulk_edit.remove_password,
]
)
or (
@@ -1569,7 +1566,7 @@ class BulkEditView(PassUserMixin):
and (
method in [bulk_edit.split, bulk_edit.merge]
or (
method in [bulk_edit.edit_pdf, bulk_edit.remove_password]
method == bulk_edit.edit_pdf
and not parameters["update_document"]
)
)

@@ -2,7 +2,7 @@ msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: paperless-ngx\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2026-01-08 21:37+0000\n"
|
||||
"POT-Creation-Date: 2025-12-29 14:49+0000\n"
|
||||
"PO-Revision-Date: 2022-02-17 04:17\n"
|
||||
"Last-Translator: \n"
|
||||
"Language-Team: English\n"
|
||||
@@ -1223,31 +1223,31 @@ msgstr ""
|
||||
msgid "Invalid color."
|
||||
msgstr ""
|
||||
|
||||
#: documents/serialisers.py:1846
|
||||
#: documents/serialisers.py:1835
|
||||
#, python-format
|
||||
msgid "File type %(type)s not supported"
|
||||
msgstr ""
|
||||
|
||||
#: documents/serialisers.py:1890
|
||||
#: documents/serialisers.py:1879
|
||||
#, python-format
|
||||
msgid "Custom field id must be an integer: %(id)s"
|
||||
msgstr ""
|
||||
|
||||
#: documents/serialisers.py:1897
|
||||
#: documents/serialisers.py:1886
|
||||
#, python-format
|
||||
msgid "Custom field with id %(id)s does not exist"
|
||||
msgstr ""
|
||||
|
||||
#: documents/serialisers.py:1914 documents/serialisers.py:1924
|
||||
#: documents/serialisers.py:1903 documents/serialisers.py:1913
|
||||
msgid ""
|
||||
"Custom fields must be a list of integers or an object mapping ids to values."
|
||||
msgstr ""
|
||||
|
||||
#: documents/serialisers.py:1919
|
||||
#: documents/serialisers.py:1908
|
||||
msgid "Some custom fields don't exist or were specified twice."
|
||||
msgstr ""
|
||||
|
||||
#: documents/serialisers.py:2034
|
||||
#: documents/serialisers.py:2023
|
||||
msgid "Invalid variable detected."
|
||||
msgstr ""
|
||||
|
||||
@@ -1702,151 +1702,151 @@ msgstr ""
|
||||
msgid "paperless application settings"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:767
|
||||
#: paperless/settings.py:773
|
||||
msgid "English (US)"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:768
|
||||
#: paperless/settings.py:774
|
||||
msgid "Arabic"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:769
|
||||
#: paperless/settings.py:775
|
||||
msgid "Afrikaans"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:770
|
||||
#: paperless/settings.py:776
|
||||
msgid "Belarusian"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:771
|
||||
#: paperless/settings.py:777
|
||||
msgid "Bulgarian"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:772
|
||||
#: paperless/settings.py:778
|
||||
msgid "Catalan"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:773
|
||||
#: paperless/settings.py:779
|
||||
msgid "Czech"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:774
|
||||
#: paperless/settings.py:780
|
||||
msgid "Danish"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:775
|
||||
#: paperless/settings.py:781
|
||||
msgid "German"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:776
|
||||
#: paperless/settings.py:782
|
||||
msgid "Greek"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:777
|
||||
#: paperless/settings.py:783
|
||||
msgid "English (GB)"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:778
|
||||
#: paperless/settings.py:784
|
||||
msgid "Spanish"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:779
|
||||
#: paperless/settings.py:785
|
||||
msgid "Persian"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:780
|
||||
#: paperless/settings.py:786
|
||||
msgid "Finnish"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:781
|
||||
#: paperless/settings.py:787
|
||||
msgid "French"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:782
|
||||
#: paperless/settings.py:788
|
||||
msgid "Hungarian"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:783
|
||||
#: paperless/settings.py:789
|
||||
msgid "Indonesian"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:784
|
||||
#: paperless/settings.py:790
|
||||
msgid "Italian"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:785
|
||||
#: paperless/settings.py:791
|
||||
msgid "Japanese"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:786
|
||||
#: paperless/settings.py:792
|
||||
msgid "Korean"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:787
|
||||
#: paperless/settings.py:793
|
||||
msgid "Luxembourgish"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:788
|
||||
#: paperless/settings.py:794
|
||||
msgid "Norwegian"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:789
|
||||
#: paperless/settings.py:795
|
||||
msgid "Dutch"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:790
|
||||
#: paperless/settings.py:796
|
||||
msgid "Polish"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:791
|
||||
#: paperless/settings.py:797
|
||||
msgid "Portuguese (Brazil)"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:792
|
||||
#: paperless/settings.py:798
|
||||
msgid "Portuguese"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:793
|
||||
#: paperless/settings.py:799
|
||||
msgid "Romanian"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:794
|
||||
#: paperless/settings.py:800
|
||||
msgid "Russian"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:795
|
||||
#: paperless/settings.py:801
|
||||
msgid "Slovak"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:796
|
||||
#: paperless/settings.py:802
|
||||
msgid "Slovenian"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:797
|
||||
#: paperless/settings.py:803
|
||||
msgid "Serbian"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:798
|
||||
#: paperless/settings.py:804
|
||||
msgid "Swedish"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:799
|
||||
#: paperless/settings.py:805
|
||||
msgid "Turkish"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:800
|
||||
#: paperless/settings.py:806
|
||||
msgid "Ukrainian"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:801
|
||||
#: paperless/settings.py:807
|
||||
msgid "Vietnamese"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:802
|
||||
#: paperless/settings.py:808
|
||||
msgid "Chinese Simplified"
|
||||
msgstr ""
|
||||
|
||||
#: paperless/settings.py:803
|
||||
#: paperless/settings.py:809
|
||||
msgid "Chinese Traditional"
|
||||
msgstr ""
|
||||
|
||||
|
||||
@@ -8,6 +8,7 @@ import os
import tempfile
from os import PathLike
from pathlib import Path
from platform import machine
from typing import Final
from urllib.parse import urlparse

@@ -321,7 +322,6 @@ INSTALLED_APPS = [
"paperless_tesseract.apps.PaperlessTesseractConfig",
"paperless_text.apps.PaperlessTextConfig",
"paperless_mail.apps.PaperlessMailConfig",
"paperless_remote.apps.PaperlessRemoteParserConfig",
"django.contrib.admin",
"rest_framework",
"rest_framework.authtoken",
@@ -423,9 +423,14 @@ ASGI_APPLICATION = "paperless.asgi.application"
STATIC_URL = os.getenv("PAPERLESS_STATIC_URL", BASE_URL + "static/")
WHITENOISE_STATIC_PREFIX = "/static/"

if machine().lower() == "aarch64": # pragma: no cover
_static_backend = "django.contrib.staticfiles.storage.StaticFilesStorage"
else:
_static_backend = "whitenoise.storage.CompressedStaticFilesStorage"

STORAGES = {
"staticfiles": {
"BACKEND": "whitenoise.storage.CompressedStaticFilesStorage",
"BACKEND": _static_backend,
},
"default": {"BACKEND": "django.core.files.storage.FileSystemStorage"},
}
@@ -1397,10 +1402,3 @@ WEBHOOKS_ALLOW_INTERNAL_REQUESTS = __get_boolean(
"PAPERLESS_WEBHOOKS_ALLOW_INTERNAL_REQUESTS",
"true",
)

###############################################################################
# Remote Parser #
###############################################################################
REMOTE_OCR_ENGINE = os.getenv("PAPERLESS_REMOTE_OCR_ENGINE")
REMOTE_OCR_API_KEY = os.getenv("PAPERLESS_REMOTE_OCR_API_KEY")
REMOTE_OCR_ENDPOINT = os.getenv("PAPERLESS_REMOTE_OCR_ENDPOINT")

@@ -1,4 +0,0 @@
# this is here so that django finds the checks.
from paperless_remote.checks import check_remote_parser_configured

__all__ = ["check_remote_parser_configured"]
@@ -1,14 +0,0 @@
from django.apps import AppConfig

from paperless_remote.signals import remote_consumer_declaration


class PaperlessRemoteParserConfig(AppConfig):
name = "paperless_remote"

def ready(self):
from documents.signals import document_consumer_declaration

document_consumer_declaration.connect(remote_consumer_declaration)

AppConfig.ready(self)
@@ -1,17 +0,0 @@
from django.conf import settings
from django.core.checks import Error
from django.core.checks import register


@register()
def check_remote_parser_configured(app_configs, **kwargs):
if settings.REMOTE_OCR_ENGINE == "azureai" and not (
settings.REMOTE_OCR_ENDPOINT and settings.REMOTE_OCR_API_KEY
):
return [
Error(
"Azure AI remote parser requires endpoint and API key to be configured.",
),
]

return []

@@ -1,118 +0,0 @@
from pathlib import Path

from django.conf import settings

from paperless_tesseract.parsers import RasterisedDocumentParser


class RemoteEngineConfig:
def __init__(
self,
engine: str,
api_key: str | None = None,
endpoint: str | None = None,
):
self.engine = engine
self.api_key = api_key
self.endpoint = endpoint

def engine_is_valid(self):
valid = self.engine in ["azureai"] and self.api_key is not None
if self.engine == "azureai":
valid = valid and self.endpoint is not None
return valid


class RemoteDocumentParser(RasterisedDocumentParser):
"""
This parser uses a remote OCR engine to parse documents. Currently, it supports Azure AI Vision
as this is the only service that provides a remote OCR API with text-embedded PDF output.
"""

logging_name = "paperless.parsing.remote"

def get_settings(self) -> RemoteEngineConfig:
"""
Returns the configuration for the remote OCR engine, loaded from Django settings.
"""
return RemoteEngineConfig(
engine=settings.REMOTE_OCR_ENGINE,
api_key=settings.REMOTE_OCR_API_KEY,
endpoint=settings.REMOTE_OCR_ENDPOINT,
)

def supported_mime_types(self):
if self.settings.engine_is_valid():
return {
"application/pdf": ".pdf",
"image/png": ".png",
"image/jpeg": ".jpg",
"image/tiff": ".tiff",
"image/bmp": ".bmp",
"image/gif": ".gif",
"image/webp": ".webp",
}
else:
return {}

def azure_ai_vision_parse(
self,
file: Path,
) -> str | None:
"""
Uses Azure AI Vision to parse the document and return the text content.
It requests a searchable PDF output with embedded text.
The PDF is saved to the archive_path attribute.
Returns the text content extracted from the document.
If the parsing fails, it returns None.
"""
from azure.ai.documentintelligence import DocumentIntelligenceClient
from azure.ai.documentintelligence.models import AnalyzeDocumentRequest
from azure.ai.documentintelligence.models import AnalyzeOutputOption
from azure.ai.documentintelligence.models import DocumentContentFormat
from azure.core.credentials import AzureKeyCredential

client = DocumentIntelligenceClient(
endpoint=self.settings.endpoint,
credential=AzureKeyCredential(self.settings.api_key),
)

try:
with file.open("rb") as f:
analyze_request = AnalyzeDocumentRequest(bytes_source=f.read())
poller = client.begin_analyze_document(
model_id="prebuilt-read",
body=analyze_request,
output_content_format=DocumentContentFormat.TEXT,
output=[AnalyzeOutputOption.PDF], # request searchable PDF output
content_type="application/json",
)

poller.wait()
result_id = poller.details["operation_id"]
result = poller.result()

# Download the PDF with embedded text
self.archive_path = self.tempdir / "archive.pdf"
with self.archive_path.open("wb") as f:
for chunk in client.get_analyze_result_pdf(
model_id="prebuilt-read",
result_id=result_id,
):
f.write(chunk)
return result.content
except Exception as e:
self.log.error(f"Azure AI Vision parsing failed: {e}")
finally:
client.close()

return None

def parse(self, document_path: Path, mime_type, file_name=None):
if not self.settings.engine_is_valid():
self.log.warning(
"No valid remote parser engine is configured, content will be empty.",
)
self.text = ""
elif self.settings.engine == "azureai":
self.text = self.azure_ai_vision_parse(document_path)
@@ -1,18 +0,0 @@
def get_parser(*args, **kwargs):
from paperless_remote.parsers import RemoteDocumentParser

return RemoteDocumentParser(*args, **kwargs)


def get_supported_mime_types():
from paperless_remote.parsers import RemoteDocumentParser

return RemoteDocumentParser(None).supported_mime_types()


def remote_consumer_declaration(sender, **kwargs):
return {
"parser": get_parser,
"weight": 5,
"mime_types": get_supported_mime_types(),
}
Binary file not shown.
@@ -1,24 +0,0 @@
|
||||
from unittest import TestCase
|
||||
|
||||
from django.test import override_settings
|
||||
|
||||
from paperless_remote import check_remote_parser_configured
|
||||
|
||||
|
||||
class TestChecks(TestCase):
|
||||
@override_settings(REMOTE_OCR_ENGINE=None)
|
||||
def test_no_engine(self):
|
||||
msgs = check_remote_parser_configured(None)
|
||||
self.assertEqual(len(msgs), 0)
|
||||
|
||||
@override_settings(REMOTE_OCR_ENGINE="azureai")
|
||||
@override_settings(REMOTE_OCR_API_KEY="somekey")
|
||||
@override_settings(REMOTE_OCR_ENDPOINT=None)
|
||||
def test_azure_no_endpoint(self):
|
||||
msgs = check_remote_parser_configured(None)
|
||||
self.assertEqual(len(msgs), 1)
|
||||
self.assertTrue(
|
||||
msgs[0].msg.startswith(
|
||||
"Azure AI remote parser requires endpoint and API key to be configured.",
|
||||
),
|
||||
)
|
||||
@@ -1,128 +0,0 @@
|
||||
import uuid
|
||||
from pathlib import Path
|
||||
from unittest import mock
|
||||
|
||||
from django.test import TestCase
|
||||
from django.test import override_settings
|
||||
|
||||
from documents.tests.utils import DirectoriesMixin
|
||||
from documents.tests.utils import FileSystemAssertsMixin
|
||||
from paperless_remote.parsers import RemoteDocumentParser
|
||||
from paperless_remote.signals import get_parser
|
||||
|
||||
|
||||
class TestParser(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
|
||||
SAMPLE_FILES = Path(__file__).resolve().parent / "samples"
|
||||
|
||||
def assertContainsStrings(self, content: str, strings: list[str]):
|
||||
# Asserts that all strings appear in content, in the given order.
|
||||
indices = []
|
||||
for s in strings:
|
||||
if s in content:
|
||||
indices.append(content.index(s))
|
||||
else:
|
||||
self.fail(f"'{s}' is not in '{content}'")
|
||||
self.assertListEqual(indices, sorted(indices))
|
||||
|
||||
@mock.patch("paperless_tesseract.parsers.run_subprocess")
|
||||
@mock.patch("azure.ai.documentintelligence.DocumentIntelligenceClient")
|
||||
def test_get_text_with_azure(self, mock_client_cls, mock_subprocess):
|
||||
# Arrange mock Azure client
|
||||
mock_client = mock.Mock()
|
||||
mock_client_cls.return_value = mock_client
|
||||
|
||||
# Simulate poller result and its `.details`
|
||||
mock_poller = mock.Mock()
|
||||
mock_poller.wait.return_value = None
|
||||
mock_poller.details = {"operation_id": "fake-op-id"}
|
||||
mock_client.begin_analyze_document.return_value = mock_poller
|
||||
mock_poller.result.return_value.content = "This is a test document."
|
||||
|
||||
# Return dummy PDF bytes
|
||||
mock_client.get_analyze_result_pdf.return_value = [
|
||||
b"%PDF-",
|
||||
b"1.7 ",
|
||||
b"FAKEPDF",
|
||||
]
|
||||
|
||||
# Simulate pdftotext by writing dummy text to sidecar file
|
||||
def fake_run(cmd, *args, **kwargs):
|
||||
with Path(cmd[-1]).open("w", encoding="utf-8") as f:
|
||||
f.write("This is a test document.")
|
||||
|
||||
mock_subprocess.side_effect = fake_run
|
||||
|
||||
with override_settings(
|
||||
REMOTE_OCR_ENGINE="azureai",
|
||||
REMOTE_OCR_API_KEY="somekey",
|
||||
REMOTE_OCR_ENDPOINT="https://endpoint.cognitiveservices.azure.com",
|
||||
):
|
||||
parser = get_parser(uuid.uuid4())
|
||||
parser.parse(
|
||||
self.SAMPLE_FILES / "simple-digital.pdf",
|
||||
"application/pdf",
|
||||
)
|
||||
|
||||
self.assertContainsStrings(
|
||||
parser.text.strip(),
|
||||
["This is a test document."],
|
||||
)
|
||||
|
||||
@mock.patch("azure.ai.documentintelligence.DocumentIntelligenceClient")
|
||||
def test_get_text_with_azure_error_logged_and_returns_none(self, mock_client_cls):
|
||||
mock_client = mock.Mock()
|
||||
mock_client.begin_analyze_document.side_effect = RuntimeError("fail")
|
||||
mock_client_cls.return_value = mock_client
|
||||
|
||||
with override_settings(
|
||||
REMOTE_OCR_ENGINE="azureai",
|
||||
REMOTE_OCR_API_KEY="somekey",
|
||||
REMOTE_OCR_ENDPOINT="https://endpoint.cognitiveservices.azure.com",
|
||||
):
|
||||
parser = get_parser(uuid.uuid4())
|
||||
with mock.patch.object(parser.log, "error") as mock_log_error:
|
||||
parser.parse(
|
||||
self.SAMPLE_FILES / "simple-digital.pdf",
|
||||
"application/pdf",
|
||||
)
|
||||
|
||||
self.assertIsNone(parser.text)
|
||||
mock_client.begin_analyze_document.assert_called_once()
|
||||
mock_client.close.assert_called_once()
|
||||
mock_log_error.assert_called_once()
|
||||
self.assertIn(
|
||||
"Azure AI Vision parsing failed",
|
||||
mock_log_error.call_args[0][0],
|
||||
)
|
||||
|
||||
@override_settings(
|
||||
REMOTE_OCR_ENGINE="azureai",
|
||||
REMOTE_OCR_API_KEY="key",
|
||||
REMOTE_OCR_ENDPOINT="https://endpoint.cognitiveservices.azure.com",
|
||||
)
|
||||
def test_supported_mime_types_valid_config(self):
|
||||
parser = RemoteDocumentParser(uuid.uuid4())
|
||||
expected_types = {
|
||||
"application/pdf": ".pdf",
|
||||
"image/png": ".png",
|
||||
"image/jpeg": ".jpg",
|
||||
"image/tiff": ".tiff",
|
||||
"image/bmp": ".bmp",
|
||||
"image/gif": ".gif",
|
||||
"image/webp": ".webp",
|
||||
}
|
||||
self.assertEqual(parser.supported_mime_types(), expected_types)
|
||||
|
||||
def test_supported_mime_types_invalid_config(self):
|
||||
parser = get_parser(uuid.uuid4())
|
||||
self.assertEqual(parser.supported_mime_types(), {})
|
||||
|
||||
@override_settings(
|
||||
REMOTE_OCR_ENGINE=None,
|
||||
REMOTE_OCR_API_KEY=None,
|
||||
REMOTE_OCR_ENDPOINT=None,
|
||||
)
|
||||
def test_parse_with_invalid_config(self):
|
||||
parser = get_parser(uuid.uuid4())
|
||||
parser.parse(self.SAMPLE_FILES / "simple-digital.pdf", "application/pdf")
|
||||
self.assertEqual(parser.text, "")
|
||||
61
uv.lock
generated
@@ -95,34 +95,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/02/ff/1175b0b7371e46244032d43a56862d0af455823b5280a50c63d99cc50f18/automat-25.4.16-py3-none-any.whl", hash = "sha256:04e9bce696a8d5671ee698005af6e5a9fa15354140a87f4870744604dcdd3ba1", size = 42842, upload-time = "2025-04-16T20:12:14.447Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "azure-ai-documentintelligence"
|
||||
version = "1.0.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "azure-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "isodate", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/44/7b/8115cd713e2caa5e44def85f2b7ebd02a74ae74d7113ba20bdd41fd6dd80/azure_ai_documentintelligence-1.0.2.tar.gz", hash = "sha256:4d75a2513f2839365ebabc0e0e1772f5601b3a8c9a71e75da12440da13b63484", size = 170940, upload-time = "2025-03-27T02:46:20.606Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/75/c9ec040f23082f54ffb1977ff8f364c2d21c79a640a13d1c1809e7fd6b1a/azure_ai_documentintelligence-1.0.2-py3-none-any.whl", hash = "sha256:e1fb446abbdeccc9759d897898a0fe13141ed29f9ad11fc705f951925822ed59", size = 106005, upload-time = "2025-03-27T02:46:22.356Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "azure-core"
|
||||
version = "1.33.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "six", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/75/aa/7c9db8edd626f1a7d99d09ef7926f6f4fb34d5f9fa00dc394afdfe8e2a80/azure_core-1.33.0.tar.gz", hash = "sha256:f367aa07b5e3005fec2c1e184b882b0b039910733907d001c20fb08ebb8c0eb9", size = 295633, upload-time = "2025-04-03T23:51:02.058Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/07/b7/76b7e144aa53bd206bf1ce34fa75350472c3f69bf30e5c8c18bc9881035d/azure_core-1.33.0-py3-none-any.whl", hash = "sha256:9b5b6d0223a1d38c37500e6971118c1e0f13f54951e6893968b38910bc9cda8f", size = 207071, upload-time = "2025-04-03T23:51:03.806Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "babel"
|
||||
version = "2.17.0"
|
||||
@@ -1101,15 +1073,15 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "gotenberg-client"
|
||||
version = "0.13.1"
|
||||
version = "0.12.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "httpx", extra = ["http2"], marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux')" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e4/6c/aaadd6657ca42fbd148b1c00604b98c1ead5a22552f4e5365ce5f0632430/gotenberg_client-0.13.1.tar.gz", hash = "sha256:cdd6bbb535cd739b87446cd1b4f6347ed7f9af6a0d4b19baf7c064b75528ee54", size = 1211143, upload-time = "2025-12-04T20:45:24.151Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/61/6d/07ea213c146bbe91dffebff2d8f4dc61e7076d3dd34d4fd1467f9163e752/gotenberg_client-0.12.0.tar.gz", hash = "sha256:1ab50878024469fc003c414ee9810ceeb00d4d7d7c36bd2fb75318fbff139e9b", size = 1210884, upload-time = "2025-10-15T15:32:37.669Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/79/f6/7a6e6785295332d2538f729ae19516cef712273a5ab8b90d015f08e37a45/gotenberg_client-0.13.1-py3-none-any.whl", hash = "sha256:613f7083a5e8a81699dd8d715c97e5806a424ac48920aad25d7c11b600cdfaf3", size = 51058, upload-time = "2025-12-04T20:45:22.603Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/12/39/fcb24ff053b1be7e5124f56c3d358706a23a328f685c6db33bc9dbc5472d/gotenberg_client-0.12.0-py3-none-any.whl", hash = "sha256:a540b35ac518e902c2860a88fbe448c15fe5a56fe8ec8604e6a2c8c2228fd0cb", size = 51051, upload-time = "2025-10-15T15:32:36.32Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1479,15 +1451,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/fc/4e5a141c3f7c7bed550ac1f69e599e92b6be449dd4677ec09f325cad0955/inotifyrecursive-0.3.5-py3-none-any.whl", hash = "sha256:7e5f4a2e1dc2bef0efa3b5f6b339c41fb4599055a2b54909d020e9e932cc8d2f", size = 8009, upload-time = "2020-11-20T12:38:46.981Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "isodate"
|
||||
version = "0.7.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/54/4d/e940025e2ce31a8ce1202635910747e5a87cc3a6a6bb2d00973375014749/isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6", size = 29705, upload-time = "2024-10-08T23:04:11.5Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320, upload-time = "2024-10-08T23:04:09.501Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jinja2"
|
||||
version = "3.1.6"
|
||||
@@ -2114,7 +2077,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "ocrmypdf"
|
||||
version = "16.13.0"
|
||||
version = "16.12.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "deprecation", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
@@ -2127,9 +2090,9 @@ dependencies = [
|
||||
{ name = "pluggy", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "rich", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/8c/52/be1aaece0703a736757d8957c0d4f19c37561054169b501eb0e7132f15e5/ocrmypdf-16.13.0.tar.gz", hash = "sha256:29d37e915234ce717374863a9cc5dd32d29e063dfe60c51380dda71254c88248", size = 7042247, upload-time = "2025-12-24T07:58:35.86Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/2b/ed/dacc0f189e4fcefc52d709e9961929e3f622a85efa5ae47c9d9663d75cab/ocrmypdf-16.12.0.tar.gz", hash = "sha256:a0f6509e7780b286391f8847fae1811d2b157b14283ad74a2431d6755c5c0ed0", size = 7037326, upload-time = "2025-11-11T22:30:14.223Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/41/b1/e2e7ad98de0d3ee05b44dbc3f78ccb158a620f3add82d00c85490120e7f2/ocrmypdf-16.13.0-py3-none-any.whl", hash = "sha256:fad8a6f7cc52cdc6225095c401a1766c778c47efe9f1e854ae4dc64a550a3d37", size = 165377, upload-time = "2025-12-24T07:58:33.925Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/34/d9d04420e6f7a71e2135b41599dae273e4ef36e2ce79b065b65fb2471636/ocrmypdf-16.12.0-py3-none-any.whl", hash = "sha256:0ea5c42027db9cf3bd12b0d0b4190689027ef813fdad3377106ea66bba0012c3", size = 163415, upload-time = "2025-11-11T22:30:11.56Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2155,7 +2118,6 @@ name = "paperless-ngx"
|
||||
version = "2.20.3"
|
||||
source = { virtual = "." }
|
||||
dependencies = [
|
||||
{ name = "azure-ai-documentintelligence", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "babel", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "bleach", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "celery", extra = ["redis"], marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
@@ -2293,7 +2255,6 @@ typing = [
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "azure-ai-documentintelligence", specifier = ">=1.0.2" },
|
||||
{ name = "babel", specifier = ">=2.17" },
|
||||
{ name = "bleach", specifier = "~=6.3.0" },
|
||||
{ name = "celery", extras = ["redis"], specifier = "~=5.5.1" },
|
||||
@@ -2321,7 +2282,7 @@ requires-dist = [
|
||||
{ name = "drf-writable-nested", specifier = "~=0.7.1" },
|
||||
{ name = "filelock", specifier = "~=3.20.0" },
|
||||
{ name = "flower", specifier = "~=2.0.1" },
|
||||
{ name = "gotenberg-client", specifier = "~=0.13.1" },
|
||||
{ name = "gotenberg-client", specifier = "~=0.12.0" },
|
||||
{ name = "granian", extras = ["uvloop"], marker = "extra == 'webserver'", specifier = "~=2.5.1" },
|
||||
{ name = "httpx-oauth", specifier = "~=0.16" },
|
||||
{ name = "imap-tools", specifier = "~=1.11.0" },
|
||||
@@ -2330,7 +2291,7 @@ requires-dist = [
|
||||
{ name = "langdetect", specifier = "~=1.0.9" },
|
||||
{ name = "mysqlclient", marker = "extra == 'mariadb'", specifier = "~=2.2.7" },
|
||||
{ name = "nltk", specifier = "~=3.9.1" },
|
||||
{ name = "ocrmypdf", specifier = "~=16.13.0" },
|
||||
{ name = "ocrmypdf", specifier = "~=16.12.0" },
|
||||
{ name = "pathvalidate", specifier = "~=3.3.1" },
|
||||
{ name = "pdf2image", specifier = "~=1.17.0" },
|
||||
{ name = "psycopg", extras = ["c", "pool"], marker = "extra == 'postgres'", specifier = "==3.2.12" },
|
||||
@@ -3004,11 +2965,11 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "python-gnupg"
|
||||
version = "0.5.6"
|
||||
version = "0.5.5"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/98/2c/6cd2c7cff4bdbb434be5429ef6b8e96ee6b50155551361f30a1bb2ea3c1d/python_gnupg-0.5.6.tar.gz", hash = "sha256:5743e96212d38923fc19083812dc127907e44dbd3bcf0db4d657e291d3c21eac", size = 66825, upload-time = "2025-12-31T17:19:33.19Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/42/d0/72a14a79f26c6119b281f6ccc475a787432ef155560278e60df97ce68a86/python-gnupg-0.5.5.tar.gz", hash = "sha256:3fdcaf76f60a1b948ff8e37dc398d03cf9ce7427065d583082b92da7a4ff5a63", size = 66467, upload-time = "2025-08-04T19:26:55.778Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/ab/0ea9de971caf3cd2e268d2b05dfe9883b21cfe686a59249bd2dccb4bae33/python_gnupg-0.5.6-py2.py3-none-any.whl", hash = "sha256:b5050a55663d8ab9fcc8d97556d229af337a87a3ebebd7054cbd8b7e2043394a", size = 22082, upload-time = "2025-12-31T17:16:22.743Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/19/c147f78cc18c8788f54d4a16a22f6c05deba85ead5672d3ddf6dcba5a5fe/python_gnupg-0.5.5-py2.py3-none-any.whl", hash = "sha256:51fa7b8831ff0914bc73d74c59b99c613de7247b91294323c39733bb85ac3fc1", size = 21916, upload-time = "2025-08-04T19:26:54.307Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||