Compare commits

2 Commits

Author   SHA1         Message                                                  Date
shamoon  c968505f64   Fix: run pre-flight ASN check after barcode detection    2025-12-30 10:02:32 -08:00
shamoon  fbb5864757   Add test to reproduce asn in trash error                 2025-12-30 10:02:31 -08:00
31 changed files with 868 additions and 1278 deletions

@@ -1,104 +0,0 @@
name: Backend Tests
on:
push:
branches-ignore:
- 'translations**'
paths:
- 'src/**'
- 'pyproject.toml'
- 'uv.lock'
- 'docker/compose/docker-compose.ci-test.yml'
- '.github/workflows/ci-backend.yml'
pull_request:
branches-ignore:
- 'translations**'
paths:
- 'src/**'
- 'pyproject.toml'
- 'uv.lock'
- 'docker/compose/docker-compose.ci-test.yml'
- '.github/workflows/ci-backend.yml'
workflow_dispatch:
concurrency:
group: backend-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
env:
DEFAULT_UV_VERSION: "0.9.x"
NLTK_DATA: "/usr/share/nltk_data"
jobs:
test:
name: "Python ${{ matrix.python-version }}"
runs-on: ubuntu-24.04
strategy:
matrix:
python-version: ['3.10', '3.11', '3.12']
fail-fast: false
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Start containers
run: |
docker compose --file docker/compose/docker-compose.ci-test.yml pull --quiet
docker compose --file docker/compose/docker-compose.ci-test.yml up --detach
- name: Set up Python
id: setup-python
uses: actions/setup-python@v6
with:
python-version: "${{ matrix.python-version }}"
- name: Install uv
uses: astral-sh/setup-uv@v7
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
python-version: ${{ steps.setup-python.outputs.python-version }}
- name: Install system dependencies
run: |
sudo apt-get update -qq
sudo apt-get install -qq --no-install-recommends \
unpaper tesseract-ocr imagemagick ghostscript libzbar0 poppler-utils
- name: Configure ImageMagick
run: |
sudo cp docker/rootfs/etc/ImageMagick-6/paperless-policy.xml /etc/ImageMagick-6/policy.xml
- name: Install Python dependencies
run: |
uv sync \
--python ${{ steps.setup-python.outputs.python-version }} \
--group testing \
--frozen
- name: List installed Python dependencies
run: |
uv pip list
- name: Install NLTK data
run: |
uv run python -m nltk.downloader punkt punkt_tab snowball_data stopwords -d ${{ env.NLTK_DATA }}
- name: Run tests
env:
NLTK_DATA: ${{ env.NLTK_DATA }}
PAPERLESS_CI_TEST: 1
PAPERLESS_MAIL_TEST_HOST: ${{ secrets.TEST_MAIL_HOST }}
PAPERLESS_MAIL_TEST_USER: ${{ secrets.TEST_MAIL_USER }}
PAPERLESS_MAIL_TEST_PASSWD: ${{ secrets.TEST_MAIL_PASSWD }}
run: |
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
--dev \
--frozen \
pytest
- name: Upload test results to Codecov
if: always()
uses: codecov/codecov-action@v5
with:
flags: backend-python-${{ matrix.python-version }}
files: junit.xml
report_type: test_results
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5
with:
flags: backend-python-${{ matrix.python-version }}
files: coverage.xml
report_type: coverage
- name: Stop containers
if: always()
run: |
docker compose --file docker/compose/docker-compose.ci-test.yml logs
docker compose --file docker/compose/docker-compose.ci-test.yml down

@@ -1,233 +0,0 @@
name: Docker Build
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-beta.rc[0-9]+'
branches:
- dev
- beta
pull_request:
branches:
- dev
- main
workflow_dispatch:
concurrency:
group: docker-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
jobs:
build-arch:
name: Build ${{ matrix.arch }}
strategy:
fail-fast: false
matrix:
include:
- runner: ubuntu-24.04
arch: amd64
platform: linux/amd64
- runner: ubuntu-24.04-arm
arch: arm64
platform: linux/arm64
runs-on: ${{ matrix.runner }}
permissions:
contents: read
packages: write
outputs:
can-push: ${{ steps.check-push.outputs.can-push }}
push-external: ${{ steps.check-push.outputs.push-external }}
repository: ${{ steps.repo.outputs.name }}
ref-name: ${{ steps.ref.outputs.name }}
steps:
- name: Checkout
uses: actions/checkout@v6.0.1
- name: Determine ref name
id: ref
run: |
ref_name="${GITHUB_HEAD_REF:-$GITHUB_REF_NAME}"
# Sanitize by replacing / with - for cache keys
cache_ref="${ref_name//\//-}"
echo "ref_name=${ref_name}"
echo "cache_ref=${cache_ref}"
echo "name=${ref_name}" >> $GITHUB_OUTPUT
echo "cache-ref=${cache_ref}" >> $GITHUB_OUTPUT
- name: Check push permissions
id: check-push
env:
REF_NAME: ${{ steps.ref.outputs.name }}
run: |
# can-push: Can we push to GHCR?
# True for: pushes, or PRs from the same repo (not forks)
can_push=${{ github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository }}
echo "can-push=${can_push}"
echo "can-push=${can_push}" >> $GITHUB_OUTPUT
# push-external: Should we also push to Docker Hub and Quay.io?
# Only for main repo on dev/beta branches or version tags
push_external="false"
if [[ "${can_push}" == "true" && "${{ github.repository_owner }}" == "paperless-ngx" ]]; then
case "${REF_NAME}" in
dev|beta)
push_external="true"
;;
esac
case "${{ github.ref }}" in
refs/tags/v*|*beta.rc*)
push_external="true"
;;
esac
fi
echo "push-external=${push_external}"
echo "push-external=${push_external}" >> $GITHUB_OUTPUT
- name: Set repository name
id: repo
run: |
repo_name="${{ github.repository }}"
repo_name="${repo_name,,}"
echo "repository=${repo_name}"
echo "name=${repo_name}" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3.12.0
- name: Login to GitHub Container Registry
uses: docker/login-action@v3.6.0
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Docker metadata
id: docker-meta
uses: docker/metadata-action@v5.10.0
with:
images: |
${{ env.REGISTRY }}/${{ steps.repo.outputs.name }}
tags: |
type=ref,event=branch
type=raw,value=${{ steps.ref.outputs.name }},enable=${{ github.event_name == 'pull_request' }}
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
- name: Build and push by digest
id: build
uses: docker/build-push-action@v6.18.0
with:
context: .
file: ./Dockerfile
platforms: ${{ matrix.platform }}
labels: ${{ steps.docker-meta.outputs.labels }}
build-args: |
PNGX_TAG_VERSION=${{ steps.docker-meta.outputs.version }}
outputs: type=image,name=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }},push-by-digest=true,name-canonical=true,push=${{ steps.check-push.outputs.can-push }}
cache-from: |
type=registry,ref=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }}/cache/app:${{ steps.ref.outputs.cache-ref }}-${{ matrix.arch }}
type=registry,ref=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }}/cache/app:dev-${{ matrix.arch }}
cache-to: ${{ steps.check-push.outputs.can-push == 'true' && format('type=registry,mode=max,ref={0}/{1}/cache/app:{2}-{3}', env.REGISTRY, steps.repo.outputs.name, steps.ref.outputs.cache-ref, matrix.arch) || '' }}
- name: Export digest
if: steps.check-push.outputs.can-push == 'true'
run: |
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
echo "digest=${digest}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
if: steps.check-push.outputs.can-push == 'true'
uses: actions/upload-artifact@v6.0.0
with:
name: digests-${{ matrix.arch }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
merge-and-push:
name: Merge and Push Manifest
runs-on: ubuntu-24.04
needs: build-arch
if: needs.build-arch.outputs.can-push == 'true'
permissions:
contents: read
packages: write
steps:
- name: Download digests
uses: actions/download-artifact@v7.0.0
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true
- name: List digests
run: |
echo "Downloaded digests:"
ls -la /tmp/digests/
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3.12.0
- name: Login to GitHub Container Registry
uses: docker/login-action@v3.6.0
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Docker Hub
if: needs.build-arch.outputs.push-external == 'true'
uses: docker/login-action@v3.6.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to Quay.io
if: needs.build-arch.outputs.push-external == 'true'
uses: docker/login-action@v3.6.0
with:
registry: quay.io
username: ${{ secrets.QUAY_USERNAME }}
password: ${{ secrets.QUAY_ROBOT_TOKEN }}
- name: Docker metadata
id: docker-meta
uses: docker/metadata-action@v5.10.0
with:
images: |
${{ env.REGISTRY }}/${{ needs.build-arch.outputs.repository }}
tags: |
type=ref,event=branch
type=raw,value=${{ needs.build-arch.outputs.ref-name }},enable=${{ github.event_name == 'pull_request' }}
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
- name: Create manifest list and push
working-directory: /tmp/digests
env:
REPOSITORY: ${{ needs.build-arch.outputs.repository }}
run: |
tags=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "${DOCKER_METADATA_OUTPUT_JSON}")
digests=""
for digest in *; do
digests+="${{ env.REGISTRY }}/${REPOSITORY}@sha256:${digest} "
done
echo "Creating manifest with tags: ${tags}"
echo "From digests: ${digests}"
docker buildx imagetools create ${tags} ${digests}
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
- name: Copy to Docker Hub
if: needs.build-arch.outputs.push-external == 'true'
env:
TAGS: ${{ steps.docker-meta.outputs.tags }}
GHCR_REPO: ${{ env.REGISTRY }}/${{ needs.build-arch.outputs.repository }}
run: |
for tag in ${TAGS}; do
dockerhub_tag="${tag/${GHCR_REPO}/docker.io/paperlessngx/paperless-ngx}"
echo "Copying ${tag} to ${dockerhub_tag}"
skopeo copy --all "docker://${tag}" "docker://${dockerhub_tag}"
done
- name: Copy to Quay.io
if: needs.build-arch.outputs.push-external == 'true'
env:
TAGS: ${{ steps.docker-meta.outputs.tags }}
GHCR_REPO: ${{ env.REGISTRY }}/${{ needs.build-arch.outputs.repository }}
run: |
for tag in ${TAGS}; do
quay_tag="${tag/${GHCR_REPO}/quay.io/paperlessngx/paperless-ngx}"
echo "Copying ${tag} to ${quay_tag}"
skopeo copy --all "docker://${tag}" "docker://${quay_tag}"
done

@@ -1,88 +0,0 @@
name: Documentation
on:
push:
branches:
- main
- dev
paths:
- 'docs/**'
- 'mkdocs.yml'
- '.github/workflows/ci-docs.yml'
pull_request:
paths:
- 'docs/**'
- 'mkdocs.yml'
- '.github/workflows/ci-docs.yml'
workflow_dispatch:
concurrency:
group: docs-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
env:
DEFAULT_UV_VERSION: "0.9.x"
DEFAULT_PYTHON_VERSION: "3.11"
jobs:
build:
name: Build Documentation
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Set up Python
id: setup-python
uses: actions/setup-python@v6
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v7
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install Python dependencies
run: |
uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
- name: Build documentation
run: |
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
--dev \
--frozen \
mkdocs build --config-file ./mkdocs.yml
- name: Upload artifact
uses: actions/upload-artifact@v6
with:
name: documentation
path: site/
retention-days: 7
deploy:
name: Deploy Documentation
needs: build
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Set up Python
id: setup-python
uses: actions/setup-python@v6
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v7
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install Python dependencies
run: |
uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
- name: Deploy documentation
run: |
echo "docs.paperless-ngx.com" > "${{ github.workspace }}/docs/CNAME"
git config --global user.name "${{ github.actor }}"
git config --global user.email "${{ github.actor }}@users.noreply.github.com"
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
--dev \
--frozen \
mkdocs gh-deploy --force --no-history

@@ -1,189 +0,0 @@
name: Frontend Tests
on:
push:
branches-ignore:
- 'translations**'
paths:
- 'src-ui/**'
- '.github/workflows/ci-frontend.yml'
pull_request:
branches-ignore:
- 'translations**'
paths:
- 'src-ui/**'
- '.github/workflows/ci-frontend.yml'
workflow_dispatch:
concurrency:
group: frontend-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
install-dependencies:
name: Install Dependencies
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 10
- name: Use Node.js 20
uses: actions/setup-node@v6
with:
node-version: 20.x
cache: 'pnpm'
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
- name: Cache frontend dependencies
id: cache-frontend-deps
uses: actions/cache@v5
with:
path: |
~/.pnpm-store
~/.cache
key: ${{ runner.os }}-frontend-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
- name: Install dependencies
run: cd src-ui && pnpm install
lint:
name: Lint
needs: install-dependencies
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 10
- name: Use Node.js 20
uses: actions/setup-node@v6
with:
node-version: 20.x
cache: 'pnpm'
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
- name: Cache frontend dependencies
uses: actions/cache@v5
with:
path: |
~/.pnpm-store
~/.cache
key: ${{ runner.os }}-frontend-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
- name: Re-link Angular CLI
run: cd src-ui && pnpm link @angular/cli
- name: Run lint
run: cd src-ui && pnpm run lint
unit-tests:
name: "Unit Tests (${{ matrix.shard-index }}/${{ matrix.shard-count }})"
needs: install-dependencies
runs-on: ubuntu-24.04
strategy:
fail-fast: false
matrix:
node-version: [20.x]
shard-index: [1, 2, 3, 4]
shard-count: [4]
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 10
- name: Use Node.js 20
uses: actions/setup-node@v6
with:
node-version: 20.x
cache: 'pnpm'
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
- name: Cache frontend dependencies
uses: actions/cache@v5
with:
path: |
~/.pnpm-store
~/.cache
key: ${{ runner.os }}-frontend-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
- name: Re-link Angular CLI
run: cd src-ui && pnpm link @angular/cli
- name: Run Jest unit tests
run: cd src-ui && pnpm run test --max-workers=2 --shard=${{ matrix.shard-index }}/${{ matrix.shard-count }}
- name: Upload test results to Codecov
if: always()
uses: codecov/codecov-action@v5
with:
flags: frontend-node-${{ matrix.node-version }}
directory: src-ui/
report_type: test_results
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5
with:
flags: frontend-node-${{ matrix.node-version }}
directory: src-ui/coverage/
e2e-tests:
name: "E2E Tests (${{ matrix.shard-index }}/${{ matrix.shard-count }})"
needs: install-dependencies
runs-on: ubuntu-24.04
container: mcr.microsoft.com/playwright:v1.57.0-noble
env:
PLAYWRIGHT_BROWSERS_PATH: /ms-playwright
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
strategy:
fail-fast: false
matrix:
node-version: [20.x]
shard-index: [1, 2]
shard-count: [2]
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 10
- name: Use Node.js 20
uses: actions/setup-node@v6
with:
node-version: 20.x
cache: 'pnpm'
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
- name: Cache frontend dependencies
uses: actions/cache@v5
with:
path: |
~/.pnpm-store
~/.cache
key: ${{ runner.os }}-frontend-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
- name: Re-link Angular CLI
run: cd src-ui && pnpm link @angular/cli
- name: Install dependencies
run: cd src-ui && pnpm install --no-frozen-lockfile
- name: Run Playwright E2E tests
run: cd src-ui && pnpm exec playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}
bundle-analysis:
name: Bundle Analysis
needs: [unit-tests, e2e-tests]
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 10
- name: Use Node.js 20
uses: actions/setup-node@v6
with:
node-version: 20.x
cache: 'pnpm'
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
- name: Cache frontend dependencies
uses: actions/cache@v5
with:
path: |
~/.pnpm-store
~/.cache
key: ${{ runner.os }}-frontend-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
- name: Re-link Angular CLI
run: cd src-ui && pnpm link @angular/cli
- name: Build and analyze
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
run: cd src-ui && pnpm run build --configuration=production

@@ -1,24 +0,0 @@
name: Lint
on:
push:
branches-ignore:
- 'translations**'
pull_request:
branches-ignore:
- 'translations**'
concurrency:
group: lint-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
pre-commit:
name: Pre-commit Checks
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Install Python
uses: actions/setup-python@v6
with:
python-version: "3.11"
- name: Run pre-commit
uses: pre-commit/action@v3.0.1

@@ -1,237 +0,0 @@
name: Release
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-beta.rc[0-9]+'
concurrency:
group: release-${{ github.ref }}
cancel-in-progress: false
env:
DEFAULT_UV_VERSION: "0.9.x"
DEFAULT_PYTHON_VERSION: "3.11"
jobs:
wait-for-docker:
name: Wait for Docker Build
runs-on: ubuntu-24.04
steps:
- name: Wait for Docker build
uses: lewagon/wait-on-check-action@v1.4.1
with:
ref: ${{ github.sha }}
check-name: 'Build Docker Image'
repo-token: ${{ secrets.GITHUB_TOKEN }}
wait-interval: 60
build-release:
name: Build Release
needs: wait-for-docker
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v6
# ---- Frontend Build ----
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 10
- name: Use Node.js 20
uses: actions/setup-node@v6
with:
node-version: 20.x
cache: 'pnpm'
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
- name: Install frontend dependencies
run: cd src-ui && pnpm install
- name: Build frontend
run: cd src-ui && pnpm run build --configuration production
# ---- Backend Setup ----
- name: Set up Python
id: setup-python
uses: actions/setup-python@v6
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v7
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
python-version: ${{ steps.setup-python.outputs.python-version }}
- name: Install Python dependencies
run: |
uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
- name: Install system dependencies
run: |
sudo apt-get update -qq
sudo apt-get install -qq --no-install-recommends gettext liblept5
# ---- Build Documentation ----
- name: Build documentation
run: |
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
--dev \
--frozen \
mkdocs build --config-file ./mkdocs.yml
# ---- Prepare Release ----
- name: Generate requirements file
run: |
uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt
- name: Compile messages
run: |
cd src/
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
manage.py compilemessages
- name: Collect static files
run: |
cd src/
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
manage.py collectstatic --no-input --clear
- name: Assemble release package
run: |
mkdir -p dist/paperless-ngx/scripts
for file_name in .dockerignore \
.env \
Dockerfile \
pyproject.toml \
uv.lock \
requirements.txt \
LICENSE \
README.md \
paperless.conf.example
do
cp --verbose ${file_name} dist/paperless-ngx/
done
mv dist/paperless-ngx/paperless.conf.example dist/paperless-ngx/paperless.conf
cp --recursive docker/ dist/paperless-ngx/docker
cp scripts/*.service scripts/*.sh scripts/*.socket dist/paperless-ngx/scripts/
cp --recursive src/ dist/paperless-ngx/src
cp --recursive site/ dist/paperless-ngx/docs
mv static dist/paperless-ngx/
find dist/paperless-ngx -name "__pycache__" -type d -exec rm -rf {} +
- name: Create release archive
run: |
cd dist
sudo chown -R 1000:1000 paperless-ngx/
tar -cJf paperless-ngx.tar.xz paperless-ngx/
- name: Upload release artifact
uses: actions/upload-artifact@v6
with:
name: release
path: dist/paperless-ngx.tar.xz
retention-days: 7
publish-release:
name: Publish Release
needs: build-release
runs-on: ubuntu-24.04
outputs:
prerelease: ${{ steps.get-version.outputs.prerelease }}
changelog: ${{ steps.create-release.outputs.body }}
version: ${{ steps.get-version.outputs.version }}
steps:
- name: Download release artifact
uses: actions/download-artifact@v7
with:
name: release
path: ./
- name: Get version info
id: get-version
run: |
echo "version=${{ github.ref_name }}" >> $GITHUB_OUTPUT
if [[ "${{ github.ref_name }}" == *"-beta.rc"* ]]; then
echo "prerelease=true" >> $GITHUB_OUTPUT
else
echo "prerelease=false" >> $GITHUB_OUTPUT
fi
- name: Create release and changelog
id: create-release
uses: release-drafter/release-drafter@v6
with:
name: Paperless-ngx ${{ steps.get-version.outputs.version }}
tag: ${{ steps.get-version.outputs.version }}
version: ${{ steps.get-version.outputs.version }}
prerelease: ${{ steps.get-version.outputs.prerelease }}
publish: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload release archive
uses: shogo82148/actions-upload-release-asset@v1
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
upload_url: ${{ steps.create-release.outputs.upload_url }}
asset_path: ./paperless-ngx.tar.xz
asset_name: paperless-ngx-${{ steps.get-version.outputs.version }}.tar.xz
asset_content_type: application/x-xz
# ---------------------------------------------------------------------------
# Append changelog to docs (only on non-prerelease)
# ---------------------------------------------------------------------------
append-changelog:
name: Append Changelog
needs: publish-release
if: needs.publish-release.outputs.prerelease == 'false'
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v6
with:
ref: main
- name: Set up Python
id: setup-python
uses: actions/setup-python@v6
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v7
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Update changelog
working-directory: docs
run: |
git branch ${{ needs.publish-release.outputs.version }}-changelog
git checkout ${{ needs.publish-release.outputs.version }}-changelog
echo -e "# Changelog\n\n${{ needs.publish-release.outputs.changelog }}\n" > changelog-new.md
echo "Manually linking usernames"
sed -i -r 's|@([a-zA-Z0-9_]+) \(\[#|[@\1](https://github.com/\1) ([#|g' changelog-new.md
echo "Removing unneeded comment tags"
sed -i -r 's|@<!---->|@|g' changelog-new.md
CURRENT_CHANGELOG=$(tail --lines +2 changelog.md)
echo -e "$CURRENT_CHANGELOG" >> changelog-new.md
mv changelog-new.md changelog.md
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
--dev \
pre-commit run --files changelog.md || true
git config --global user.name "github-actions"
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git commit -am "Changelog ${{ needs.publish-release.outputs.version }} - GHA"
git push origin ${{ needs.publish-release.outputs.version }}-changelog
- name: Create pull request
uses: actions/github-script@v8
with:
script: |
const { repo, owner } = context.repo;
const result = await github.rest.pulls.create({
title: 'Documentation: Add ${{ needs.publish-release.outputs.version }} changelog',
owner,
repo,
head: '${{ needs.publish-release.outputs.version }}-changelog',
base: 'main',
body: 'This PR is auto-generated by CI.'
});
github.rest.issues.addLabels({
owner,
repo,
issue_number: result.data.number,
labels: ['documentation', 'skip-changelog']
});

.github/workflows/ci.yml (new file, 693 lines)
@@ -0,0 +1,693 @@
name: ci
on:
push:
tags:
# https://semver.org/#spec-item-2
- 'v[0-9]+.[0-9]+.[0-9]+'
# https://semver.org/#spec-item-9
- 'v[0-9]+.[0-9]+.[0-9]+-beta.rc[0-9]+'
branches-ignore:
- 'translations**'
pull_request:
branches-ignore:
- 'translations**'
env:
DEFAULT_UV_VERSION: "0.9.x"
# This is the default version of Python to use in most steps which aren't specific
DEFAULT_PYTHON_VERSION: "3.11"
NLTK_DATA: "/usr/share/nltk_data"
jobs:
detect-duplicate:
name: Detect Duplicate Run
runs-on: ubuntu-24.04
outputs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- name: Check if workflow should run
id: check
uses: actions/github-script@v8
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
if (context.eventName !== 'push') {
core.info('Not a push event; running workflow.');
core.setOutput('should_run', 'true');
return;
}
const ref = context.ref || '';
if (!ref.startsWith('refs/heads/')) {
core.info('Push is not to a branch; running workflow.');
core.setOutput('should_run', 'true');
return;
}
const branch = ref.substring('refs/heads/'.length);
const { owner, repo } = context.repo;
const prs = await github.paginate(github.rest.pulls.list, {
owner,
repo,
state: 'open',
head: `${owner}:${branch}`,
per_page: 100,
});
if (prs.length === 0) {
core.info(`No open PR found for ${branch}; running workflow.`);
core.setOutput('should_run', 'true');
} else {
core.info(`Found ${prs.length} open PR(s) for ${branch}; skipping duplicate push run.`);
core.setOutput('should_run', 'false');
}
pre-commit:
needs:
- detect-duplicate
if: needs.detect-duplicate.outputs.should_run == 'true'
name: Linting Checks
runs-on: ubuntu-24.04
steps:
- name: Checkout repository
uses: actions/checkout@v6
- name: Install python
uses: actions/setup-python@v6
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Check files
uses: pre-commit/action@v3.0.1
documentation:
name: "Build & Deploy Documentation"
runs-on: ubuntu-24.04
needs:
- pre-commit
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Set up Python
id: setup-python
uses: actions/setup-python@v6
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v7
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install Python dependencies
run: |
uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
- name: Make documentation
run: |
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
--dev \
--frozen \
mkdocs build --config-file ./mkdocs.yml
- name: Deploy documentation
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
run: |
echo "docs.paperless-ngx.com" > "${{ github.workspace }}/docs/CNAME"
git config --global user.name "${{ github.actor }}"
git config --global user.email "${{ github.actor }}@users.noreply.github.com"
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
--dev \
--frozen \
mkdocs gh-deploy --force --no-history
- name: Upload artifact
uses: actions/upload-artifact@v5
with:
name: documentation
path: site/
retention-days: 7
tests-backend:
name: "Backend Tests (Python ${{ matrix.python-version }})"
runs-on: ubuntu-24.04
needs:
- pre-commit
strategy:
matrix:
python-version: ['3.10', '3.11', '3.12']
fail-fast: false
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Start containers
run: |
docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml pull --quiet
docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml up --detach
- name: Set up Python
id: setup-python
uses: actions/setup-python@v6
with:
python-version: "${{ matrix.python-version }}"
- name: Install uv
uses: astral-sh/setup-uv@v7
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
python-version: ${{ steps.setup-python.outputs.python-version }}
- name: Install system dependencies
run: |
sudo apt-get update -qq
sudo apt-get install -qq --no-install-recommends unpaper tesseract-ocr imagemagick ghostscript libzbar0 poppler-utils
- name: Configure ImageMagick
run: |
sudo cp docker/rootfs/etc/ImageMagick-6/paperless-policy.xml /etc/ImageMagick-6/policy.xml
- name: Install Python dependencies
run: |
uv sync \
--python ${{ steps.setup-python.outputs.python-version }} \
--group testing \
--frozen
- name: List installed Python dependencies
run: |
uv pip list
- name: Install or update NLTK dependencies
run: uv run python -m nltk.downloader punkt punkt_tab snowball_data stopwords -d ${{ env.NLTK_DATA }}
- name: Tests
env:
NLTK_DATA: ${{ env.NLTK_DATA }}
PAPERLESS_CI_TEST: 1
# Enable paperless_mail testing against real server
PAPERLESS_MAIL_TEST_HOST: ${{ secrets.TEST_MAIL_HOST }}
PAPERLESS_MAIL_TEST_USER: ${{ secrets.TEST_MAIL_USER }}
PAPERLESS_MAIL_TEST_PASSWD: ${{ secrets.TEST_MAIL_PASSWD }}
run: |
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
--dev \
--frozen \
pytest
- name: Upload backend test results to Codecov
if: always()
uses: codecov/codecov-action@v5
with:
flags: backend-python-${{ matrix.python-version }}
files: junit.xml
report_type: test_results
- name: Upload backend coverage to Codecov
uses: codecov/codecov-action@v5
with:
flags: backend-python-${{ matrix.python-version }}
files: coverage.xml
- name: Stop containers
if: always()
run: |
docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml logs
docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml down
install-frontend-dependencies:
name: "Install Frontend Dependencies"
runs-on: ubuntu-24.04
needs:
- pre-commit
steps:
- uses: actions/checkout@v6
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 10
- name: Use Node.js 20
uses: actions/setup-node@v6
with:
node-version: 20.x
cache: 'pnpm'
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
- name: Cache frontend dependencies
id: cache-frontend-deps
uses: actions/cache@v4
with:
path: |
~/.pnpm-store
~/.cache
key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
- name: Install dependencies
run: cd src-ui && pnpm install
tests-frontend:
name: "Frontend Unit Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
runs-on: ubuntu-24.04
needs:
- install-frontend-dependencies
strategy:
fail-fast: false
matrix:
node-version: [20.x]
shard-index: [1, 2, 3, 4]
shard-count: [4]
steps:
- uses: actions/checkout@v6
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 10
- name: Use Node.js 20
uses: actions/setup-node@v6
with:
node-version: 20.x
cache: 'pnpm'
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
- name: Cache frontend dependencies
id: cache-frontend-deps
uses: actions/cache@v4
with:
path: |
~/.pnpm-store
~/.cache
key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
- name: Re-link Angular cli
run: cd src-ui && pnpm link @angular/cli
- name: Linting checks
run: cd src-ui && pnpm run lint
- name: Run Jest unit tests
run: cd src-ui && pnpm run test --max-workers=2 --shard=${{ matrix.shard-index }}/${{ matrix.shard-count }}
- name: Upload frontend test results to Codecov
if: always()
uses: codecov/codecov-action@v5
with:
flags: frontend-node-${{ matrix.node-version }}
directory: src-ui/
report_type: test_results
- name: Upload frontend coverage to Codecov
uses: codecov/codecov-action@v5
with:
flags: frontend-node-${{ matrix.node-version }}
directory: src-ui/coverage/
tests-frontend-e2e:
name: "Frontend E2E Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
runs-on: ubuntu-24.04
container: mcr.microsoft.com/playwright:v1.57.0-noble
needs:
- install-frontend-dependencies
env:
PLAYWRIGHT_BROWSERS_PATH: /ms-playwright
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
strategy:
fail-fast: false
matrix:
node-version: [20.x]
shard-index: [1, 2]
shard-count: [2]
steps:
- uses: actions/checkout@v6
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 10
- name: Use Node.js 20
uses: actions/setup-node@v6
with:
node-version: 20.x
cache: 'pnpm'
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
- name: Cache frontend dependencies
id: cache-frontend-deps
uses: actions/cache@v4
with:
path: |
~/.pnpm-store
~/.cache
key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
- name: Re-link Angular cli
run: cd src-ui && pnpm link @angular/cli
- name: Install dependencies
run: cd src-ui && pnpm install --no-frozen-lockfile
- name: Run Playwright e2e tests
run: cd src-ui && pnpm exec playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}
frontend-bundle-analysis:
name: "Frontend Bundle Analysis"
runs-on: ubuntu-24.04
needs:
- tests-frontend
- tests-frontend-e2e
steps:
- uses: actions/checkout@v6
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 10
- name: Use Node.js 20
uses: actions/setup-node@v6
with:
node-version: 20.x
cache: 'pnpm'
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
- name: Cache frontend dependencies
id: cache-frontend-deps
uses: actions/cache@v4
with:
path: |
~/.pnpm-store
~/.cache
key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/package-lock.json') }}
- name: Re-link Angular cli
run: cd src-ui && pnpm link @angular/cli
- name: Build frontend and upload analysis
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
run: cd src-ui && pnpm run build --configuration=production
build-docker-image:
name: Build Docker image for ${{ github.event_name == 'pull_request' && github.head_ref || github.ref_name }}
runs-on: ubuntu-24.04
if: (github.event_name == 'push' && (startsWith(github.ref, 'refs/heads/feature-') || startsWith(github.ref, 'refs/heads/fix-') || github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/beta' || contains(github.ref, 'beta.rc') || startsWith(github.ref, 'refs/tags/v') || startsWith(github.ref, 'refs/heads/l10n_'))) || (github.event_name == 'pull_request' && (startsWith(github.head_ref, 'feature-') || startsWith(github.head_ref, 'fix-') || github.head_ref == 'dev' || github.head_ref == 'beta' || contains(github.head_ref, 'beta.rc') || startsWith(github.head_ref, 'l10n_')))
concurrency:
group: ${{ github.workflow }}-build-docker-image-${{ github.ref_name }}
cancel-in-progress: true
needs:
- tests-backend
- tests-frontend
- tests-frontend-e2e
steps:
- name: Prepare build variables
id: build-vars
uses: actions/github-script@v8
with:
result-encoding: string
script: |
const isPR = context.eventName === 'pull_request';
const defaultRefName = context.ref.replace('refs/heads/', '');
const headRef = isPR ? context.payload.pull_request.head.ref : defaultRefName;
const buildRef = isPR ? `refs/heads/${headRef}` : context.ref;
const buildCacheKey = headRef.split('/').join('-');
const canPush = context.eventName === 'push' || (isPR && context.payload.pull_request.head.repo.full_name === `${context.repo.owner}/${context.repo.repo}`);
core.setOutput('build-ref', buildRef);
core.setOutput('build-ref-name', headRef);
core.setOutput('build-cache-key', buildCacheKey);
core.setOutput('can-push', canPush ? 'true' : 'false');
- name: Check pushing to Docker Hub
id: push-other-places
# Only push to Dockerhub from the main repo AND the ref is either:
# main
# dev
# beta
# a tag
# Otherwise forks would require a Docker Hub account and secrets setup
env:
BUILD_REF: ${{ steps.build-vars.outputs.build-ref }}
BUILD_REF_NAME: ${{ steps.build-vars.outputs.build-ref-name }}
run: |
if [[ ${{ github.repository_owner }} == "paperless-ngx" && ( "$BUILD_REF_NAME" == "dev" || "$BUILD_REF_NAME" == "beta" || $BUILD_REF == refs/tags/v* || $BUILD_REF == *beta.rc* ) ]] ; then
echo "Enabling DockerHub image push"
echo "enable=true" >> $GITHUB_OUTPUT
else
echo "Not pushing to DockerHub"
echo "enable=false" >> $GITHUB_OUTPUT
fi
- name: Set ghcr repository name
id: set-ghcr-repository
run: |
ghcr_name=$(echo "${{ github.repository }}" | awk '{ print tolower($0) }')
echo "Name is ${ghcr_name}"
echo "ghcr-repository=${ghcr_name}" >> $GITHUB_OUTPUT
- name: Gather Docker metadata
id: docker-meta
uses: docker/metadata-action@v5
with:
images: |
ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}
name=paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
name=quay.io/paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
tags: |
# Tag branches with branch name
type=ref,event=branch
# Pull requests need a sanitized branch tag for pushing images
type=raw,value=${{ steps.build-vars.outputs.build-cache-key }},enable=${{ github.event_name == 'pull_request' }}
# Process semver tags
# For a tag x.y.z or vX.Y.Z, output an x.y.z and x.y image tag
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
- name: Checkout
uses: actions/checkout@v6
# If https://github.com/docker/buildx/issues/1044 is resolved,
# the append input with a native arm64 arch could be used to
# significantly speed up building
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
platforms: arm64
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Docker Hub
uses: docker/login-action@v3
# Don't attempt to login if not pushing to Docker Hub
if: steps.push-other-places.outputs.enable == 'true'
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to Quay.io
uses: docker/login-action@v3
# Don't attempt to login if not pushing to Quay.io
if: steps.push-other-places.outputs.enable == 'true'
with:
registry: quay.io
username: ${{ secrets.QUAY_USERNAME }}
password: ${{ secrets.QUAY_ROBOT_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v6
with:
context: .
file: ./Dockerfile
platforms: linux/amd64,linux/arm64
push: ${{ steps.build-vars.outputs.can-push == 'true' }}
tags: ${{ steps.docker-meta.outputs.tags }}
labels: ${{ steps.docker-meta.outputs.labels }}
build-args: |
PNGX_TAG_VERSION=${{ steps.docker-meta.outputs.version }}
# Get cache layers from this branch, then dev
# This allows new branches to get at least some cache benefits, generally from dev
cache-from: |
type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ steps.build-vars.outputs.build-cache-key }}
type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:dev
cache-to: ${{ steps.build-vars.outputs.can-push == 'true' && format('type=registry,mode=max,ref=ghcr.io/{0}/builder/cache/app:{1}', steps.set-ghcr-repository.outputs.ghcr-repository, steps.build-vars.outputs.build-cache-key) || '' }}
- name: Inspect image
if: steps.build-vars.outputs.can-push == 'true'
run: |
docker buildx imagetools inspect ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
- name: Export frontend artifact from docker
if: steps.build-vars.outputs.can-push == 'true'
run: |
docker create --name frontend-extract ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
docker cp frontend-extract:/usr/src/paperless/src/documents/static/frontend src/documents/static/frontend/
- name: Upload frontend artifact
if: steps.build-vars.outputs.can-push == 'true'
uses: actions/upload-artifact@v5
with:
name: frontend-compiled
path: src/documents/static/frontend/
retention-days: 7
build-release:
name: "Build Release"
needs:
- build-docker-image
- documentation
if: github.event_name == 'push'
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Set up Python
id: setup-python
uses: actions/setup-python@v6
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v7
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
python-version: ${{ steps.setup-python.outputs.python-version }}
- name: Install Python dependencies
run: |
uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
- name: Install system dependencies
run: |
sudo apt-get update -qq
sudo apt-get install -qq --no-install-recommends gettext liblept5
- name: Download frontend artifact
uses: actions/download-artifact@v6
with:
name: frontend-compiled
path: src/documents/static/frontend/
- name: Download documentation artifact
uses: actions/download-artifact@v6
with:
name: documentation
path: docs/_build/html/
- name: Generate requirements file
run: |
uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt
- name: Compile messages
run: |
cd src/
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
manage.py compilemessages
- name: Collect static files
run: |
cd src/
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
manage.py collectstatic --no-input
- name: Move files
run: |
echo "Making dist folders"
for directory in dist \
dist/paperless-ngx \
dist/paperless-ngx/scripts;
do
mkdir --verbose --parents ${directory}
done
echo "Copying basic files"
for file_name in .dockerignore \
.env \
Dockerfile \
pyproject.toml \
uv.lock \
requirements.txt \
LICENSE \
README.md \
paperless.conf.example
do
cp --verbose ${file_name} dist/paperless-ngx/
done
mv --verbose dist/paperless-ngx/paperless.conf.example dist/paperless-ngx/paperless.conf
echo "Copying Docker related files"
cp --recursive docker/ dist/paperless-ngx/docker
echo "Copying startup scripts"
cp --verbose scripts/*.service scripts/*.sh scripts/*.socket dist/paperless-ngx/scripts/
echo "Copying source files"
cp --recursive src/ dist/paperless-ngx/src
echo "Copying documentation"
cp --recursive docs/_build/html/ dist/paperless-ngx/docs
mv --verbose static dist/paperless-ngx
- name: Make release package
run: |
echo "Creating release archive"
cd dist
sudo chown -R 1000:1000 paperless-ngx/
tar -cJf paperless-ngx.tar.xz paperless-ngx/
- name: Upload release artifact
uses: actions/upload-artifact@v5
with:
name: release
path: dist/paperless-ngx.tar.xz
retention-days: 7
publish-release:
name: "Publish Release"
runs-on: ubuntu-24.04
outputs:
prerelease: ${{ steps.get_version.outputs.prerelease }}
changelog: ${{ steps.create-release.outputs.body }}
version: ${{ steps.get_version.outputs.version }}
needs:
- build-release
if: github.ref_type == 'tag' && (startsWith(github.ref_name, 'v') || contains(github.ref_name, '-beta.rc'))
steps:
- name: Download release artifact
uses: actions/download-artifact@v6
with:
name: release
path: ./
- name: Get version
id: get_version
run: |
echo "version=${{ github.ref_name }}" >> $GITHUB_OUTPUT
if [[ ${{ contains(github.ref_name, '-beta.rc') }} == 'true' ]]; then
echo "prerelease=true" >> $GITHUB_OUTPUT
else
echo "prerelease=false" >> $GITHUB_OUTPUT
fi
- name: Create Release and Changelog
id: create-release
uses: release-drafter/release-drafter@v6
with:
name: Paperless-ngx ${{ steps.get_version.outputs.version }}
tag: ${{ steps.get_version.outputs.version }}
version: ${{ steps.get_version.outputs.version }}
prerelease: ${{ steps.get_version.outputs.prerelease }}
publish: true # ensures release is not marked as draft
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload release archive
id: upload-release-asset
uses: shogo82148/actions-upload-release-asset@v1
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
upload_url: ${{ steps.create-release.outputs.upload_url }}
asset_path: ./paperless-ngx.tar.xz
asset_name: paperless-ngx-${{ steps.get_version.outputs.version }}.tar.xz
asset_content_type: application/x-xz
append-changelog:
name: "Append Changelog"
runs-on: ubuntu-24.04
needs:
- publish-release
if: needs.publish-release.outputs.prerelease == 'false'
steps:
- name: Checkout
uses: actions/checkout@v6
with:
ref: main
- name: Set up Python
id: setup-python
uses: actions/setup-python@v6
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v7
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Append Changelog to docs
id: append-Changelog
working-directory: docs
run: |
git branch ${{ needs.publish-release.outputs.version }}-changelog
git checkout ${{ needs.publish-release.outputs.version }}-changelog
echo -e "# Changelog\n\n${{ needs.publish-release.outputs.changelog }}\n" > changelog-new.md
echo "Manually linking usernames"
sed -i -r 's|@([a-zA-Z0-9_]+) \(\[#|[@\1](https://github.com/\1) ([#|g' changelog-new.md
echo "Removing unneeded comment tags"
sed -i -r 's|@<!---->|@|g' changelog-new.md
CURRENT_CHANGELOG=`tail --lines +2 changelog.md`
echo -e "$CURRENT_CHANGELOG" >> changelog-new.md
mv changelog-new.md changelog.md
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
--dev \
pre-commit run --files changelog.md || true
git config --global user.name "github-actions"
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git commit -am "Changelog ${{ needs.publish-release.outputs.version }} - GHA"
git push origin ${{ needs.publish-release.outputs.version }}-changelog
- name: Create Pull Request
uses: actions/github-script@v8
with:
script: |
const { repo, owner } = context.repo;
const result = await github.rest.pulls.create({
title: 'Documentation: Add ${{ needs.publish-release.outputs.version }} changelog',
owner,
repo,
head: '${{ needs.publish-release.outputs.version }}-changelog',
base: 'main',
body: 'This PR is auto-generated by CI.'
});
github.rest.issues.addLabels({
owner,
repo,
issue_number: result.data.number,
labels: ['documentation', 'skip-changelog']
});

@@ -37,7 +37,7 @@ jobs:
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-24.04
steps:
- uses: dessant/lock-threads@v6
- uses: dessant/lock-threads@v5
with:
issue-inactive-days: '30'
pr-inactive-days: '30'

@@ -47,7 +47,7 @@ jobs:
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
- name: Cache frontend dependencies
id: cache-frontend-deps
uses: actions/cache@v5
uses: actions/cache@v4
with:
path: |
~/.pnpm-store

@@ -11,6 +11,8 @@ COPY ./src-ui /src/src-ui
WORKDIR /src/src-ui
RUN set -eux \
&& npm update -g pnpm \
&& npm install -g corepack@latest \
&& corepack enable \
&& pnpm install
@@ -108,7 +110,8 @@ ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONWARNINGS="ignore:::django.http.response:517" \
PNGX_CONTAINERIZED=1 \
# https://docs.astral.sh/uv/reference/settings/#link-mode
UV_LINK_MODE=copy
UV_LINK_MODE=copy \
UV_CACHE_DIR=/cache/uv/
#
# Begin installation and configuration
@@ -190,13 +193,14 @@ ARG BUILD_PACKAGES="\
pkg-config"
# hadolint ignore=DL3042
RUN set -eux \
RUN --mount=type=cache,target=${UV_CACHE_DIR},id=python-cache \
set -eux \
&& echo "Installing build system packages" \
&& apt-get update \
&& apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
&& echo "Installing Python requirements" \
&& uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt \
&& uv pip install --no-cache --system --no-python-downloads --python-preference system --requirements requirements.txt \
&& uv pip install --system --no-python-downloads --python-preference system --requirements requirements.txt \
&& echo "Installing NLTK data" \
&& python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" snowball_data \
&& python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" stopwords \

@@ -328,23 +328,23 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.html</context>
<context context-type="linenumber">70</context>
<context context-type="linenumber">61</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
<context context-type="linenumber">151</context>
<context context-type="linenumber">139</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
<context context-type="linenumber">151</context>
<context context-type="linenumber">139</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
<context context-type="linenumber">151</context>
<context context-type="linenumber">139</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
<context context-type="linenumber">151</context>
<context context-type="linenumber">139</context>
</context-group>
</trans-unit>
<trans-unit id="4930506384627295710" datatype="html">
@@ -2164,7 +2164,7 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.html</context>
<context context-type="linenumber">61</context>
<context context-type="linenumber">55</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/mail/mail.component.html</context>
@@ -2216,19 +2216,19 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
<context context-type="linenumber">140</context>
<context context-type="linenumber">133</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
<context context-type="linenumber">140</context>
<context context-type="linenumber">133</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
<context context-type="linenumber">140</context>
<context context-type="linenumber">133</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
<context context-type="linenumber">140</context>
<context context-type="linenumber">133</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.ts</context>
@@ -2300,7 +2300,7 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.ts</context>
<context context-type="linenumber">106</context>
<context context-type="linenumber">104</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/mail/mail.component.ts</context>
@@ -2483,7 +2483,7 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.html</context>
<context context-type="linenumber">58</context>
<context context-type="linenumber">52</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/mail/mail.component.html</context>
@@ -2519,19 +2519,19 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
<context context-type="linenumber">137</context>
<context context-type="linenumber">130</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
<context context-type="linenumber">137</context>
<context context-type="linenumber">130</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
<context context-type="linenumber">137</context>
<context context-type="linenumber">130</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
<context context-type="linenumber">137</context>
<context context-type="linenumber">130</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/workflows/workflows.component.html</context>
@@ -2627,7 +2627,7 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.ts</context>
<context context-type="linenumber">108</context>
<context context-type="linenumber">106</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/mail/mail.component.ts</context>
@@ -3340,7 +3340,7 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.ts</context>
<context context-type="linenumber">87</context>
<context context-type="linenumber">85</context>
</context-group>
</trans-unit>
<trans-unit id="1841172489943868696" datatype="html">
@@ -3351,7 +3351,7 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.ts</context>
<context context-type="linenumber">96</context>
<context context-type="linenumber">94</context>
</context-group>
</trans-unit>
<trans-unit id="6048892649018070225" datatype="html">
@@ -4361,7 +4361,7 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/storage-path-list/storage-path-list.component.ts</context>
<context context-type="linenumber">51</context>
<context context-type="linenumber">49</context>
</context-group>
</trans-unit>
<trans-unit id="2816147949408898105" datatype="html">
@@ -4436,7 +4436,7 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/tag-list/tag-list.component.ts</context>
<context context-type="linenumber">51</context>
<context context-type="linenumber">49</context>
</context-group>
</trans-unit>
<trans-unit id="8621797738551294959" datatype="html">
@@ -8500,28 +8500,28 @@
<source>correspondent</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/correspondent-list/correspondent-list.component.ts</context>
<context context-type="linenumber">49</context>
<context context-type="linenumber">47</context>
</context-group>
</trans-unit>
<trans-unit id="1612355304340685070" datatype="html">
<source>correspondents</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/correspondent-list/correspondent-list.component.ts</context>
<context context-type="linenumber">50</context>
<context context-type="linenumber">48</context>
</context-group>
</trans-unit>
<trans-unit id="6360600151505327572" datatype="html">
<source>Last used</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/correspondent-list/correspondent-list.component.ts</context>
<context context-type="linenumber">55</context>
<context context-type="linenumber">53</context>
</context-group>
</trans-unit>
<trans-unit id="7427874343955308724" datatype="html">
<source>Do you really want to delete the correspondent &quot;<x id="PH" equiv-text="object.name"/>&quot;?</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/correspondent-list/correspondent-list.component.ts</context>
<context context-type="linenumber">80</context>
<context context-type="linenumber">78</context>
</context-group>
</trans-unit>
<trans-unit id="8384138406252790442" datatype="html">
@@ -8549,79 +8549,79 @@
<source>Filter Documents (<x id="INTERPOLATION" equiv-text="{{ field.document_count }}"/>)</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.html</context>
<context context-type="linenumber">50</context>
<context context-type="linenumber">45</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
<context context-type="linenumber">129</context>
<context context-type="linenumber">123</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
<context context-type="linenumber">129</context>
<context context-type="linenumber">123</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
<context context-type="linenumber">129</context>
<context context-type="linenumber">123</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/management-list/management-list.component.html</context>
<context context-type="linenumber">129</context>
<context context-type="linenumber">123</context>
</context-group>
</trans-unit>
<trans-unit id="651372623796033489" datatype="html">
<source>No fields defined.</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.html</context>
<context context-type="linenumber">80</context>
<context context-type="linenumber">70</context>
</context-group>
</trans-unit>
<trans-unit id="3032792139967609806" datatype="html">
<source>Confirm delete field</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.ts</context>
<context context-type="linenumber">104</context>
<context context-type="linenumber">102</context>
</context-group>
</trans-unit>
<trans-unit id="2939457975223185057" datatype="html">
<source>This operation will permanently delete this field.</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.ts</context>
<context context-type="linenumber">105</context>
<context context-type="linenumber">103</context>
</context-group>
</trans-unit>
<trans-unit id="4679555638382452936" datatype="html">
<source>Deleted field &quot;<x id="PH" equiv-text="field.name"/>&quot;</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.ts</context>
<context context-type="linenumber">114</context>
<context context-type="linenumber">112</context>
</context-group>
</trans-unit>
<trans-unit id="4704551499967874824" datatype="html">
<source>Error deleting field &quot;<x id="PH" equiv-text="field.name"/>&quot;.</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/custom-fields/custom-fields.component.ts</context>
<context context-type="linenumber">123</context>
<context context-type="linenumber">121</context>
</context-group>
</trans-unit>
<trans-unit id="8084492669582894778" datatype="html">
<source>document type</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/document-type-list/document-type-list.component.ts</context>
<context context-type="linenumber">45</context>
<context context-type="linenumber">43</context>
</context-group>
</trans-unit>
<trans-unit id="2992451138146293104" datatype="html">
<source>document types</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/document-type-list/document-type-list.component.ts</context>
<context context-type="linenumber">46</context>
<context context-type="linenumber">44</context>
</context-group>
</trans-unit>
<trans-unit id="4990731724078522539" datatype="html">
<source>Do you really want to delete the document type &quot;<x id="PH" equiv-text="object.name"/>&quot;?</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/document-type-list/document-type-list.component.ts</context>
<context context-type="linenumber">51</context>
<context context-type="linenumber">49</context>
</context-group>
</trans-unit>
<trans-unit id="8957855217409261143" datatype="html">
@@ -9161,42 +9161,42 @@
<source>storage path</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/storage-path-list/storage-path-list.component.ts</context>
<context context-type="linenumber">45</context>
<context context-type="linenumber">43</context>
</context-group>
</trans-unit>
<trans-unit id="22235115124223314" datatype="html">
<source>storage paths</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/storage-path-list/storage-path-list.component.ts</context>
<context context-type="linenumber">46</context>
<context context-type="linenumber">44</context>
</context-group>
</trans-unit>
<trans-unit id="1569070683025071137" datatype="html">
<source>Do you really want to delete the storage path &quot;<x id="PH" equiv-text="object.name"/>&quot;?</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/storage-path-list/storage-path-list.component.ts</context>
<context context-type="linenumber">62</context>
<context context-type="linenumber">60</context>
</context-group>
</trans-unit>
<trans-unit id="6402703264596649214" datatype="html">
<source>tag</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/tag-list/tag-list.component.ts</context>
<context context-type="linenumber">45</context>
<context context-type="linenumber">43</context>
</context-group>
</trans-unit>
<trans-unit id="4975748273657042999" datatype="html">
<source>tags</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/tag-list/tag-list.component.ts</context>
<context context-type="linenumber">46</context>
<context context-type="linenumber">44</context>
</context-group>
</trans-unit>
<trans-unit id="93754014749412887" datatype="html">
<source>Do you really want to delete the tag &quot;<x id="PH" equiv-text="object.name"/>&quot;?</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/tag-list/tag-list.component.ts</context>
<context context-type="linenumber">62</context>
<context context-type="linenumber">60</context>
</context-group>
</trans-unit>
<trans-unit id="1229748338333965418" datatype="html">

View File

@@ -1,7 +1,6 @@
import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
import { Component, inject } from '@angular/core'
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
import { RouterModule } from '@angular/router'
import {
NgbDropdownModule,
NgbPaginationModule,
@@ -30,7 +29,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
TitleCasePipe,
FormsModule,
ReactiveFormsModule,
RouterModule,
NgClass,
NgTemplateOutlet,
NgbDropdownModule,

View File

@@ -42,13 +42,7 @@
<button (click)="editField(field)" *pngxIfPermissions="{ action: PermissionAction.Change, type: PermissionType.CustomField }" ngbDropdownItem i18n>Edit</button>
<button class="text-danger" (click)="deleteField(field)" *pngxIfPermissions="{ action: PermissionAction.Delete, type: PermissionType.CustomField }" ngbDropdownItem i18n>Delete</button>
@if (field.document_count > 0) {
<a
*pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }"
ngbDropdownItem
[routerLink]="getDocumentFilterUrl(field)"
i18n
>Filter Documents ({{ field.document_count }})</a
>
<button (click)="filterDocuments(field)" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }" ngbDropdownItem i18n>Filter Documents ({{ field.document_count }})</button>
}
</div>
</div>
@@ -63,13 +57,9 @@
</div>
@if (field.document_count > 0) {
<div class="btn-group d-none d-sm-inline-block ms-2">
<a
class="btn btn-sm btn-outline-secondary"
[routerLink]="getDocumentFilterUrl(field)"
>
<i-bs width="1em" height="1em" name="filter"></i-bs>&nbsp;<ng-container i18n>Documents</ng-container
><span class="badge bg-light text-secondary ms-2">{{ field.document_count }}</span>
</a>
<button class="btn btn-sm btn-outline-secondary" type="button" (click)="filterDocuments(field)">
<i-bs width="1em" height="1em" name="filter"></i-bs>&nbsp;<ng-container i18n>Documents</ng-container><span class="badge bg-light text-secondary ms-2">{{ field.document_count }}</span>
</button>
</div>
}
</div>

View File

@@ -4,7 +4,6 @@ import { provideHttpClient, withInterceptorsFromDi } from '@angular/common/http'
import { provideHttpClientTesting } from '@angular/common/http/testing'
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
import { By } from '@angular/platform-browser'
import { RouterTestingModule } from '@angular/router/testing'
import {
NgbModal,
NgbModalModule,
@@ -62,7 +61,6 @@ describe('CustomFieldsComponent', () => {
NgbModalModule,
NgbPopoverModule,
NgxBootstrapIconsModule.pick(allIcons),
RouterTestingModule,
CustomFieldsComponent,
IfPermissionsDirective,
PageHeaderComponent,
@@ -110,9 +108,7 @@ describe('CustomFieldsComponent', () => {
const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
const reloadSpy = jest.spyOn(component, 'reload')
const createButton = fixture.debugElement
.queryAll(By.css('button'))
.find((btn) => btn.nativeElement.textContent.trim().includes('Add Field'))
const createButton = fixture.debugElement.queryAll(By.css('button'))[1]
createButton.triggerEventHandler('click')
expect(modal).not.toBeUndefined()
@@ -137,11 +133,7 @@ describe('CustomFieldsComponent', () => {
const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
const reloadSpy = jest.spyOn(component, 'reload')
const editButton = fixture.debugElement
.queryAll(By.css('button'))
.find((btn) =>
btn.nativeElement.textContent.trim().includes(fields[0].name)
)
const editButton = fixture.debugElement.queryAll(By.css('button'))[2]
editButton.triggerEventHandler('click')
expect(modal).not.toBeUndefined()
@@ -166,9 +158,7 @@ describe('CustomFieldsComponent', () => {
const deleteSpy = jest.spyOn(customFieldsService, 'delete')
const reloadSpy = jest.spyOn(component, 'reload')
const deleteButton = fixture.debugElement
.queryAll(By.css('button'))
.find((btn) => btn.nativeElement.textContent.trim().includes('Delete'))
const deleteButton = fixture.debugElement.queryAll(By.css('button'))[5]
deleteButton.triggerEventHandler('click')
expect(modal).not.toBeUndefined()
@@ -186,10 +176,10 @@ describe('CustomFieldsComponent', () => {
expect(reloadSpy).toHaveBeenCalled()
})
it('should provide document filter url', () => {
const urlSpy = jest.spyOn(listViewService, 'getQuickFilterUrl')
component.getDocumentFilterUrl(fields[0])
expect(urlSpy).toHaveBeenCalledWith([
it('should support filter documents', () => {
const filterSpy = jest.spyOn(listViewService, 'quickFilter')
component.filterDocuments(fields[0])
expect(filterSpy).toHaveBeenCalledWith([
{
rule_type: FILTER_CUSTOM_FIELDS_QUERY,
value: JSON.stringify([

View File

@@ -1,5 +1,4 @@
import { Component, OnInit, inject } from '@angular/core'
import { RouterModule } from '@angular/router'
import {
NgbDropdownModule,
NgbModal,
@@ -37,7 +36,6 @@ import { LoadingComponentWithPermissions } from '../../loading-component/loading
NgbDropdownModule,
NgbPaginationModule,
NgxBootstrapIconsModule,
RouterModule,
],
})
export class CustomFieldsComponent
@@ -132,8 +130,8 @@ export class CustomFieldsComponent
return DATA_TYPE_LABELS.find((l) => l.id === field.data_type).name
}
getDocumentFilterUrl(field: CustomField) {
return this.documentListViewService.getQuickFilterUrl([
filterDocuments(field: CustomField) {
this.documentListViewService.quickFilter([
{
rule_type: FILTER_CUSTOM_FIELDS_QUERY,
value: JSON.stringify([

View File

@@ -1,7 +1,6 @@
import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
import { Component, inject } from '@angular/core'
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
import { RouterModule } from '@angular/router'
import {
NgbDropdownModule,
NgbPaginationModule,
@@ -28,7 +27,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
IfPermissionsDirective,
FormsModule,
ReactiveFormsModule,
RouterModule,
NgClass,
NgTemplateOutlet,
NgbDropdownModule,

View File

@@ -120,14 +120,7 @@
<button (click)="openEditDialog(object)" *pngxIfPermissions="{ action: PermissionAction.Change, type: permissionType }" ngbDropdownItem i18n>Edit</button>
<button class="text-danger" (click)="openDeleteDialog(object)" *pngxIfPermissions="{ action: PermissionAction.Delete, type: permissionType }" ngbDropdownItem i18n>Delete</button>
@if (getDocumentCount(object) > 0) {
<a
*pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }"
ngbDropdownItem
[routerLink]="getDocumentFilterUrl(object)"
(click)="$event?.stopPropagation()"
i18n
>Filter Documents ({{ getDocumentCount(object) }})</a
>
<button (click)="filterDocuments(object)" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }" ngbDropdownItem i18n>Filter Documents ({{ getDocumentCount(object) }})</button>
}
</div>
</div>
@@ -142,15 +135,9 @@
</div>
@if (getDocumentCount(object) > 0) {
<div class="btn-group d-none d-sm-inline-block">
<a
class="btn btn-sm btn-outline-secondary"
*pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }"
[routerLink]="getDocumentFilterUrl(object)"
(click)="$event?.stopPropagation()"
>
<i-bs width="1em" height="1em" name="filter"></i-bs>&nbsp;<ng-container i18n>Documents</ng-container
><span class="badge bg-light text-secondary ms-2">{{ getDocumentCount(object) }}</span>
</a>
<button class="btn btn-sm btn-outline-secondary" (click)="filterDocuments(object); $event.stopPropagation();" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }">
<i-bs width="1em" height="1em" name="filter"></i-bs>&nbsp;<ng-container i18n>Documents</ng-container><span class="badge bg-light text-secondary ms-2">{{ getDocumentCount(object) }}</span>
</button>
</div>
}
</div>

View File

@@ -13,7 +13,6 @@ import {
} from '@angular/core/testing'
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
import { By } from '@angular/platform-browser'
import { RouterLinkWithHref } from '@angular/router'
import { RouterTestingModule } from '@angular/router/testing'
import {
NgbModal,
@@ -231,15 +230,12 @@ describe('ManagementListComponent', () => {
})
it('should support quick filter for objects', () => {
const expectedUrl = documentListViewService.getQuickFilterUrl([
const qfSpy = jest.spyOn(documentListViewService, 'quickFilter')
const filterButton = fixture.debugElement.queryAll(By.css('button'))[9]
filterButton.triggerEventHandler('click')
expect(qfSpy).toHaveBeenCalledWith([
{ rule_type: FILTER_HAS_TAGS_ALL, value: tags[0].id.toString() },
])
const filterLink = fixture.debugElement.query(
By.css('a.btn-outline-secondary')
)
expect(filterLink).toBeTruthy()
const routerLink = filterLink.injector.get(RouterLinkWithHref)
expect(routerLink.urlTree).toEqual(expectedUrl)
]) // subclasses set the filter rule type
})
it('should reload on sort', () => {

View File

@@ -230,8 +230,8 @@ export abstract class ManagementListComponent<T extends MatchingModel>
abstract getDeleteMessage(object: T)
getDocumentFilterUrl(object: MatchingModel) {
return this.documentListViewService.getQuickFilterUrl([
filterDocuments(object: MatchingModel) {
this.documentListViewService.quickFilter([
{ rule_type: this.filterRuleType, value: object.id.toString() },
])
}

View File

@@ -1,7 +1,6 @@
import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
import { Component, inject } from '@angular/core'
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
import { RouterModule } from '@angular/router'
import {
NgbDropdownModule,
NgbPaginationModule,
@@ -28,7 +27,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
IfPermissionsDirective,
FormsModule,
ReactiveFormsModule,
RouterModule,
NgClass,
NgTemplateOutlet,
NgbDropdownModule,

View File

@@ -1,7 +1,6 @@
import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
import { Component, inject } from '@angular/core'
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
import { RouterModule } from '@angular/router'
import {
NgbDropdownModule,
NgbPaginationModule,
@@ -28,7 +27,6 @@ import { ManagementListComponent } from '../management-list/management-list.comp
IfPermissionsDirective,
FormsModule,
ReactiveFormsModule,
RouterModule,
NgClass,
NgTemplateOutlet,
NgbDropdownModule,

View File

@@ -651,25 +651,4 @@ describe('DocumentListViewService', () => {
documentListViewService.displayFields = customFields as any
expect(documentListViewService.displayFields).toEqual(['custom_field_1'])
})
it('should generate quick filter URL with filter rules', () => {
const routerSpy = jest.spyOn(router, 'createUrlTree')
const urlTree = documentListViewService.getQuickFilterUrl(filterRules)
expect(routerSpy).toHaveBeenCalledWith(['/documents'], {
queryParams: expect.objectContaining({
tags__id__all: tags__id__all,
}),
})
expect(urlTree).toBeDefined()
})
it('should generate quick filter URL preserving default state', () => {
documentListViewService.reload()
httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
)
const urlTree = documentListViewService.getQuickFilterUrl(filterRules)
expect(urlTree).toBeDefined()
expect(router.createUrlTree).toBeDefined()
})
})

View File

@@ -1,5 +1,5 @@
import { Injectable, inject } from '@angular/core'
import { ParamMap, Router, UrlTree } from '@angular/router'
import { ParamMap, Router } from '@angular/router'
import { Observable, Subject, first, takeUntil } from 'rxjs'
import {
DEFAULT_DISPLAY_FIELDS,
@@ -483,18 +483,6 @@ export class DocumentListViewService {
this.router.navigate(['documents'])
}
getQuickFilterUrl(filterRules: FilterRule[]): UrlTree {
const defaultState = {
...this.defaultListViewState(),
...this.listViewStates.get(null),
filterRules,
}
const params = paramsFromViewState(defaultState)
return this.router.createUrlTree(['/documents'], {
queryParams: params,
})
}
getLastPage(): number {
return Math.ceil(this.collectionSize / this.pageSize)
}

View File

@@ -13,6 +13,7 @@ from pikepdf import Page
from pikepdf import PasswordError
from pikepdf import Pdf
from documents.consumer import ConsumerPreflightPlugin
from documents.converters import convert_from_tiff_to_pdf
from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
@@ -193,6 +194,15 @@ class BarcodePlugin(ConsumeTaskPlugin):
):
logger.info(f"Found ASN in barcode: {located_asn}")
self.metadata.asn = located_asn
# (Re-)run the preflight ASN check
preflight_plugin = ConsumerPreflightPlugin(
input_doc=self.input_doc,
metadata=self.metadata,
status_mgr=self.status_mgr,
base_tmp_dir=self.base_tmp_dir,
task_id=self.task_id,
)
preflight_plugin.pre_check_asn_value()
def cleanup(self) -> None:
self.temp_dir.cleanup()
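The hunk above instantiates `ConsumerPreflightPlugin` inside the barcode plugin and calls `pre_check_asn_value()` again once an ASN has been decoded from a barcode, so a duplicate ASN (including one held by a document in the trash, per the test further down) fails consumption instead of slipping through. As a rough illustration of the idea rather than the actual paperless-ngx implementation, such a check might look like the sketch below; `DocumentStore`, `StoredDoc`, and `all_including_trash()` are hypothetical names.

```python
from dataclasses import dataclass, field


class ConsumerError(Exception):
    """Raised when a document must not be consumed (mirrors the error the test expects)."""


@dataclass
class StoredDoc:
    asn: int
    in_trash: bool = False


@dataclass
class DocumentStore:
    """Hypothetical stand-in for the Document model/manager."""

    docs: list[StoredDoc] = field(default_factory=list)

    def all_including_trash(self) -> list[StoredDoc]:
        # A real implementation would query active and soft-deleted rows.
        return self.docs


def pre_check_asn_value(store: DocumentStore, asn: int | None) -> None:
    """Sketch of an ASN pre-flight check that also considers trashed documents."""
    if asn is None:
        return
    for doc in store.all_including_trash():
        if doc.asn == asn:
            where = "trash" if doc.in_trash else "archive"
            raise ConsumerError(f"ASN {asn} already exists in the {where}")


# The barcode step first decodes the ASN, then re-runs the check with it:
store = DocumentStore([StoredDoc(asn=123, in_trash=True)])
located_asn = 123  # value read from the barcode
try:
    pre_check_asn_value(store, located_asn)
except ConsumerError as exc:
    print(exc)  # -> ASN 123 already exists in the trash
```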

View File

@@ -10,7 +10,6 @@ from datetime import time
from datetime import timedelta
from datetime import timezone
from shutil import rmtree
from time import sleep
from typing import TYPE_CHECKING
from typing import Literal
@@ -33,7 +32,6 @@ from whoosh.highlight import HtmlFormatter
from whoosh.idsets import BitSet
from whoosh.idsets import DocIdSet
from whoosh.index import FileIndex
from whoosh.index import LockError
from whoosh.index import create_in
from whoosh.index import exists_in
from whoosh.index import open_dir
@@ -99,33 +97,11 @@ def get_schema() -> Schema:
def open_index(*, recreate=False) -> FileIndex:
transient_exceptions = (FileNotFoundError, LockError)
max_retries = 3
retry_delay = 0.1
for attempt in range(max_retries + 1):
try:
if exists_in(settings.INDEX_DIR) and not recreate:
return open_dir(settings.INDEX_DIR, schema=get_schema())
break
except transient_exceptions as exc:
is_last_attempt = attempt == max_retries or recreate
if is_last_attempt:
logger.exception(
"Error while opening the index after retries, recreating.",
)
break
logger.warning(
"Transient error while opening the index (attempt %s/%s): %s. Retrying.",
attempt + 1,
max_retries + 1,
exc,
)
sleep(retry_delay)
except Exception:
logger.exception("Error while opening the index, recreating.")
break
try:
if exists_in(settings.INDEX_DIR) and not recreate:
return open_dir(settings.INDEX_DIR, schema=get_schema())
except Exception:
logger.exception("Error while opening the index, recreating.")
# create_in doesn't handle corrupted indexes very well, remove the directory entirely first
if settings.INDEX_DIR.is_dir():
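For readers comparing the two variants of `open_index` shown above: the longer one retries transient errors (`FileNotFoundError`, `LockError`) a few times before falling back to recreating the index, while the shorter one recreates on any failure. A generic version of that retry-then-fallback pattern, independent of Whoosh and of this codebase, could be sketched as follows; `open_fn` and `recreate_fn` are placeholders for the real open/recreate calls.

```python
import logging
from time import sleep
from typing import Callable, TypeVar

T = TypeVar("T")
logger = logging.getLogger("example.retry")


def open_with_retry(
    open_fn: Callable[[], T],
    recreate_fn: Callable[[], T],
    *,
    transient: tuple[type[BaseException], ...] = (FileNotFoundError,),
    max_retries: int = 3,
    retry_delay: float = 0.1,
) -> T:
    """Retry transient failures of open_fn, then fall back to recreate_fn."""
    for attempt in range(max_retries + 1):
        try:
            return open_fn()
        except transient as exc:
            if attempt == max_retries:
                logger.exception("Giving up after retries, recreating.")
                break
            logger.warning(
                "Transient error (%s/%s): %s", attempt + 1, max_retries + 1, exc
            )
            sleep(retry_delay)
        except Exception:
            logger.exception("Non-transient error, recreating.")
            break
    return recreate_fn()


# Usage sketch: the second attempt succeeds, so nothing is recreated.
attempts = iter([FileNotFoundError("missing segment"), "opened-index"])


def flaky_open() -> str:
    result = next(attempts)
    if isinstance(result, Exception):
        raise result
    return result


print(open_with_retry(flaky_open, lambda: "recreated-index"))  # -> opened-index
```

Whether the retries are worth keeping depends on how often the index hits transient filesystem races; since a search index can always be rebuilt, the simpler recreate-on-error path is also a defensible trade-off.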

View File

@@ -493,7 +493,7 @@ def check_scheduled_workflows():
trigger.schedule_is_recurring
and workflow_runs.exists()
and (
workflow_runs.first().run_at
workflow_runs.last().run_at
> now
- datetime.timedelta(
days=trigger.schedule_recurring_interval_days,
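The hunk above changes which prior run is compared against the recurrence interval: `first()` picks the oldest `WorkflowRun`, `last()` the most recent (assuming an ascending `run_at` ordering on the queryset, which is an assumption here). Only the most recent run tells you whether the workflow already executed inside the interval. A plain-Python sketch of why the distinction matters, mirroring the two prior runs in the `test_workflow_scheduled_recurring_respects_latest_run` test further down:

```python
from datetime import datetime, timedelta, timezone

now = datetime(2025, 12, 30, 12, 0, tzinfo=timezone.utc)
interval = timedelta(days=1)

# Prior runs, ordered ascending by run_at: one 2 days ago, one 1 hour ago.
run_ats = [now - timedelta(days=2), now - timedelta(hours=1)]

threshold = now - interval
print(run_ats[0] > threshold)   # oldest run: False -> would wrongly allow another run
print(run_ats[-1] > threshold)  # latest run: True  -> correctly skips, ran 1 hour ago
```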

View File

@@ -11,6 +11,7 @@ from django.test import override_settings
from documents import tasks
from documents.barcodes import BarcodePlugin
from documents.consumer import ConsumerError
from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
from documents.data_models import DocumentSource
@@ -93,6 +94,41 @@ class TestBarcode(
self.assertDictEqual(separator_page_numbers, {1: False})
@override_settings(CONSUMER_ENABLE_ASN_BARCODE=True)
def test_asn_barcode_duplicate_in_trash_fails(self):
"""
GIVEN:
- A document with ASN barcode 123 is in the trash
WHEN:
- A file with the same barcode ASN is consumed
THEN:
- The ASN check is re-run and consumption fails
"""
test_file = self.BARCODE_SAMPLE_DIR / "barcode-39-asn-123.pdf"
first_doc = Document.objects.create(
title="First ASN 123",
content="",
checksum="asn123first",
mime_type="application/pdf",
archive_serial_number=123,
)
first_doc.delete()
dupe_asn = settings.SCRATCH_DIR / "barcode-39-asn-123-second.pdf"
shutil.copy(test_file, dupe_asn)
with mock.patch("documents.tasks.ProgressManager", DummyProgressManager):
with self.assertRaisesRegex(ConsumerError, r"ASN 123.*trash"):
tasks.consume_file(
ConsumableDocument(
source=DocumentSource.ConsumeFolder,
original_file=dupe_asn,
),
None,
)
@override_settings(
CONSUMER_BARCODE_TIFF_SUPPORT=True,
)

View File

@@ -1,7 +1,6 @@
from datetime import datetime
from unittest import mock
from django.conf import settings
from django.contrib.auth.models import User
from django.test import SimpleTestCase
from django.test import TestCase
@@ -252,120 +251,3 @@ class TestRewriteNaturalDateKeywords(SimpleTestCase):
result = self._rewrite_with_now("added:today", fixed_now)
# Should convert to UTC properly
self.assertIn("added:[20250719", result)
class TestIndexResilience(DirectoriesMixin, SimpleTestCase):
def _assert_recreate_called(self, mock_create_in):
mock_create_in.assert_called_once()
path_arg, schema_arg = mock_create_in.call_args.args
self.assertEqual(path_arg, settings.INDEX_DIR)
self.assertEqual(schema_arg.__class__.__name__, "Schema")
def test_transient_missing_segment_does_not_force_recreate(self):
"""
GIVEN:
- Index directory exists
WHEN:
- open_index is called
- Opening the index raises FileNotFoundError once due to a
transient missing segment
THEN:
- Index is opened successfully on retry
- Index is not recreated
"""
file_marker = settings.INDEX_DIR / "file_marker.txt"
file_marker.write_text("keep")
expected_index = object()
with (
mock.patch("documents.index.exists_in", return_value=True),
mock.patch(
"documents.index.open_dir",
side_effect=[FileNotFoundError("missing"), expected_index],
) as mock_open_dir,
mock.patch(
"documents.index.create_in",
) as mock_create_in,
mock.patch(
"documents.index.rmtree",
) as mock_rmtree,
):
ix = index.open_index()
self.assertIs(ix, expected_index)
self.assertGreaterEqual(mock_open_dir.call_count, 2)
mock_rmtree.assert_not_called()
mock_create_in.assert_not_called()
self.assertEqual(file_marker.read_text(), "keep")
def test_transient_errors_exhaust_retries_and_recreate(self):
"""
GIVEN:
- Index directory exists
WHEN:
- open_index is called
- Opening the index raises FileNotFoundError multiple times due to
transient missing segments
THEN:
- Index is recreated after retries are exhausted
"""
recreated_index = object()
with (
self.assertLogs("paperless.index", level="ERROR") as cm,
mock.patch("documents.index.exists_in", return_value=True),
mock.patch(
"documents.index.open_dir",
side_effect=FileNotFoundError("missing"),
) as mock_open_dir,
mock.patch("documents.index.rmtree") as mock_rmtree,
mock.patch(
"documents.index.create_in",
return_value=recreated_index,
) as mock_create_in,
):
ix = index.open_index()
self.assertIs(ix, recreated_index)
self.assertEqual(mock_open_dir.call_count, 4)
mock_rmtree.assert_called_once_with(settings.INDEX_DIR)
self._assert_recreate_called(mock_create_in)
self.assertIn(
"Error while opening the index after retries, recreating.",
cm.output[0],
)
def test_non_transient_error_recreates_index(self):
"""
GIVEN:
- Index directory exists
WHEN:
- open_index is called
- Opening the index raises a "non-transient" error
THEN:
- Index is recreated
"""
recreated_index = object()
with (
self.assertLogs("paperless.index", level="ERROR") as cm,
mock.patch("documents.index.exists_in", return_value=True),
mock.patch(
"documents.index.open_dir",
side_effect=RuntimeError("boom"),
),
mock.patch("documents.index.rmtree") as mock_rmtree,
mock.patch(
"documents.index.create_in",
return_value=recreated_index,
) as mock_create_in,
):
ix = index.open_index()
self.assertIs(ix, recreated_index)
mock_rmtree.assert_called_once_with(settings.INDEX_DIR)
self._assert_recreate_called(mock_create_in)
self.assertIn(
"Error while opening the index, recreating.",
cm.output[0],
)

View File

@@ -2094,68 +2094,6 @@ class TestWorkflows(
doc.refresh_from_db()
self.assertIsNone(doc.owner)
def test_workflow_scheduled_recurring_respects_latest_run(self):
"""
GIVEN:
- Scheduled workflow marked as recurring with a 1-day interval
- Document that matches the trigger
- Two prior runs exist: one 2 days ago and one 1 hour ago
WHEN:
- Scheduled workflows are checked again
THEN:
- Workflow does not run because the most recent run is inside the interval
"""
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
schedule_date_field=WorkflowTrigger.ScheduleDateField.CREATED,
schedule_is_recurring=True,
schedule_recurring_interval_days=1,
)
action = WorkflowAction.objects.create(
assign_title="Doc assign owner",
assign_owner=self.user2,
)
w = Workflow.objects.create(
name="Workflow 1",
order=0,
)
w.triggers.add(trigger)
w.actions.add(action)
w.save()
doc = Document.objects.create(
title="sample test",
correspondent=self.c,
original_filename="sample.pdf",
created=timezone.now().date() - timedelta(days=3),
)
WorkflowRun.objects.create(
workflow=w,
document=doc,
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
run_at=timezone.now() - timedelta(days=2),
)
WorkflowRun.objects.create(
workflow=w,
document=doc,
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
run_at=timezone.now() - timedelta(hours=1),
)
tasks.check_scheduled_workflows()
doc.refresh_from_db()
self.assertIsNone(doc.owner)
self.assertEqual(
WorkflowRun.objects.filter(
workflow=w,
document=doc,
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
).count(),
2,
)
def test_workflow_scheduled_trigger_negative_offset_customfield(self):
"""
GIVEN:

View File

@@ -2,7 +2,7 @@ msgid ""
msgstr ""
"Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2026-01-06 17:11+0000\n"
"POT-Creation-Date: 2025-12-29 14:49+0000\n"
"PO-Revision-Date: 2022-02-17 04:17\n"
"Last-Translator: \n"
"Language-Team: English\n"
@@ -1702,151 +1702,151 @@ msgstr ""
msgid "paperless application settings"
msgstr ""
#: paperless/settings.py:767
#: paperless/settings.py:773
msgid "English (US)"
msgstr ""
#: paperless/settings.py:768
#: paperless/settings.py:774
msgid "Arabic"
msgstr ""
#: paperless/settings.py:769
#: paperless/settings.py:775
msgid "Afrikaans"
msgstr ""
#: paperless/settings.py:770
#: paperless/settings.py:776
msgid "Belarusian"
msgstr ""
#: paperless/settings.py:771
#: paperless/settings.py:777
msgid "Bulgarian"
msgstr ""
#: paperless/settings.py:772
#: paperless/settings.py:778
msgid "Catalan"
msgstr ""
#: paperless/settings.py:773
#: paperless/settings.py:779
msgid "Czech"
msgstr ""
#: paperless/settings.py:774
#: paperless/settings.py:780
msgid "Danish"
msgstr ""
#: paperless/settings.py:775
#: paperless/settings.py:781
msgid "German"
msgstr ""
#: paperless/settings.py:776
#: paperless/settings.py:782
msgid "Greek"
msgstr ""
#: paperless/settings.py:777
#: paperless/settings.py:783
msgid "English (GB)"
msgstr ""
#: paperless/settings.py:778
#: paperless/settings.py:784
msgid "Spanish"
msgstr ""
#: paperless/settings.py:779
#: paperless/settings.py:785
msgid "Persian"
msgstr ""
#: paperless/settings.py:780
#: paperless/settings.py:786
msgid "Finnish"
msgstr ""
#: paperless/settings.py:781
#: paperless/settings.py:787
msgid "French"
msgstr ""
#: paperless/settings.py:782
#: paperless/settings.py:788
msgid "Hungarian"
msgstr ""
#: paperless/settings.py:783
#: paperless/settings.py:789
msgid "Indonesian"
msgstr ""
#: paperless/settings.py:784
#: paperless/settings.py:790
msgid "Italian"
msgstr ""
#: paperless/settings.py:785
#: paperless/settings.py:791
msgid "Japanese"
msgstr ""
#: paperless/settings.py:786
#: paperless/settings.py:792
msgid "Korean"
msgstr ""
#: paperless/settings.py:787
#: paperless/settings.py:793
msgid "Luxembourgish"
msgstr ""
#: paperless/settings.py:788
#: paperless/settings.py:794
msgid "Norwegian"
msgstr ""
#: paperless/settings.py:789
#: paperless/settings.py:795
msgid "Dutch"
msgstr ""
#: paperless/settings.py:790
#: paperless/settings.py:796
msgid "Polish"
msgstr ""
#: paperless/settings.py:791
#: paperless/settings.py:797
msgid "Portuguese (Brazil)"
msgstr ""
#: paperless/settings.py:792
#: paperless/settings.py:798
msgid "Portuguese"
msgstr ""
#: paperless/settings.py:793
#: paperless/settings.py:799
msgid "Romanian"
msgstr ""
#: paperless/settings.py:794
#: paperless/settings.py:800
msgid "Russian"
msgstr ""
#: paperless/settings.py:795
#: paperless/settings.py:801
msgid "Slovak"
msgstr ""
#: paperless/settings.py:796
#: paperless/settings.py:802
msgid "Slovenian"
msgstr ""
#: paperless/settings.py:797
#: paperless/settings.py:803
msgid "Serbian"
msgstr ""
#: paperless/settings.py:798
#: paperless/settings.py:804
msgid "Swedish"
msgstr ""
#: paperless/settings.py:799
#: paperless/settings.py:805
msgid "Turkish"
msgstr ""
#: paperless/settings.py:800
#: paperless/settings.py:806
msgid "Ukrainian"
msgstr ""
#: paperless/settings.py:801
#: paperless/settings.py:807
msgid "Vietnamese"
msgstr ""
#: paperless/settings.py:802
#: paperless/settings.py:808
msgid "Chinese Simplified"
msgstr ""
#: paperless/settings.py:803
#: paperless/settings.py:809
msgid "Chinese Traditional"
msgstr ""

View File

@@ -8,6 +8,7 @@ import os
import tempfile
from os import PathLike
from pathlib import Path
from platform import machine
from typing import Final
from urllib.parse import urlparse
@@ -422,9 +423,14 @@ ASGI_APPLICATION = "paperless.asgi.application"
STATIC_URL = os.getenv("PAPERLESS_STATIC_URL", BASE_URL + "static/")
WHITENOISE_STATIC_PREFIX = "/static/"
if machine().lower() == "aarch64": # pragma: no cover
_static_backend = "django.contrib.staticfiles.storage.StaticFilesStorage"
else:
_static_backend = "whitenoise.storage.CompressedStaticFilesStorage"
STORAGES = {
"staticfiles": {
"BACKEND": "whitenoise.storage.CompressedStaticFilesStorage",
"BACKEND": _static_backend,
},
"default": {"BACKEND": "django.core.files.storage.FileSystemStorage"},
}
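The hunk above selects the static-files backend at startup: plain `StaticFilesStorage` on aarch64 and WhiteNoise's `CompressedStaticFilesStorage` everywhere else (the diff itself does not state the motivation, presumably a platform-specific issue with compression). To confirm which backend a running instance ended up with, something along these lines should work from `python manage.py shell`; `STORAGES` is the standard Django 4.2+ setting used above.

```python
# Run inside `python manage.py shell` so Django settings are configured.
from platform import machine

from django.conf import settings

print(machine())  # e.g. "x86_64" or "aarch64"
print(settings.STORAGES["staticfiles"]["BACKEND"])
# Expected on aarch64: django.contrib.staticfiles.storage.StaticFilesStorage
# Expected elsewhere:  whitenoise.storage.CompressedStaticFilesStorage
```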