Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2026-01-24 22:39:02 -06:00)

Compare commits: chore/get-… ... feature/mi… (84 commits)
Commits in this compare (SHA1 only):

e1655045ca, 1a638d8cc0, b21ff75a30, 58f1a186d4, 2a1c06c047, 770dc02833, af9d75dfcf, 7b23cdc0c1, 09892809f9, 94c6108006,
33c5d5bab0, 9beb508f1d, a290fcfe6f, 0846fe9845, 910d16374b, 35d77b144d, 5987e35101, 96259ce441, 283afb265d, 67564dd573,
046d65c2ba, 8761816635, a1cdc45f1a, 190e42e722, 75c6ffe01f, 2964b4b256, f52f9dd325, 5827a0ec25, 990ef05d99, 9f48b8e6e1,
42689070b3, 09f3cfdb93, 84f408fa43, e3c29fc626, 1f432a3378, d1aa76e4ce, 5381bc5907, 6c45455384, 2901693860, a527f5e244,
16cc704539, 245d9fb4a1, 771f3f150a, 62248f5702, ecfeff5054, fa6a0a81f4, 37477d391e, b2541f3e8c, f8ab81cef7, e9f7993ba5,
3ea5e05137, 56fddf1e58, d447a9fb32, 155d69b211, 4a7f9fa984, c471c201ee, a9548afb42, 2f1cd31e31, 742c136773, 939b2f7553,
8b58718fff, ad78c436c0, c6697cd82b, 0689c8ad3a, 825e9ca14c, 11cc2f8289, 055ce9172c, 1b41559067, 94a5af66eb, 948c664dcf,
eeb5639990, 6cf8abc5d3, 9c0de249a6, 71ecdc528e, 00ec8a577b, 3618c50b62, 6f4497185e, e816269db5, d4e60e13bf, cb091665e2,
00bb92e3e1, 11ec676909, 7c457466b7, 65aed2405c
.codecov.yml (56 lines changed)
@@ -1,6 +1,7 @@
+# https://docs.codecov.com/docs/codecovyml-reference#codecov
 codecov:
   require_ci_to_pass: true
 # https://docs.codecov.com/docs/components
 component_management:
   individual_components:
     - component_id: backend
@@ -9,35 +10,70 @@ component_management:
     - component_id: frontend
       paths:
        - src-ui/**
+# https://docs.codecov.com/docs/flags#step-2-flag-management-in-yaml
+# https://docs.codecov.com/docs/carryforward-flags
 flags:
-  backend:
+  # Backend Python versions
+  backend-python-3.10:
     paths:
       - src/**
     carryforward: true
-  frontend:
+  backend-python-3.11:
+    paths:
+      - src/**
+    carryforward: true
+  backend-python-3.12:
+    paths:
+      - src/**
+    carryforward: true
+  # Frontend (shards merge into single flag)
+  frontend-node-24.x:
     paths:
       - src-ui/**
     carryforward: true
-# https://docs.codecov.com/docs/pull-request-comments
 comment:
   layout: "header, diff, components, flags, files"
-  # https://docs.codecov.com/docs/javascript-bundle-analysis
   require_bundle_changes: true
   bundle_change_threshold: "50Kb"
 coverage:
+  # https://docs.codecov.com/docs/commit-status
   status:
     project:
-      default:
+      backend:
+        flags:
+          - backend-python-3.10
+          - backend-python-3.11
+          - backend-python-3.12
+        paths:
+          - src/**
         # https://docs.codecov.com/docs/commit-status#threshold
         threshold: 1%
+        removed_code_behavior: adjust_base
+      frontend:
+        flags:
+          - frontend-node-24.x
+        paths:
+          - src-ui/**
+        threshold: 1%
+        removed_code_behavior: adjust_base
     patch:
-      default:
-        # For the changed lines only, target 100% covered, but
-        # allow as low as 75%
+      backend:
+        flags:
+          - backend-python-3.10
+          - backend-python-3.11
+          - backend-python-3.12
+        paths:
+          - src/**
+        target: 100%
+        threshold: 25%
+      frontend:
+        flags:
+          - frontend-node-24.x
+        paths:
+          - src-ui/**
         target: 100%
         threshold: 25%
   # https://docs.codecov.com/docs/javascript-bundle-analysis
   bundle_analysis:
-    # Fail if the bundle size increases by more than 1MB
     warning_threshold: "1MB"
     status: true
.github/release-drafter.yml (vendored, 1 line changed)
@@ -44,6 +44,7 @@ include-labels:
   - 'notable'
 exclude-labels:
   - 'skip-changelog'
+filter-by-commitish: true
 category-template: '### $TITLE'
 change-template: '- $TITLE @$AUTHOR ([#$NUMBER]($URL))'
 change-title-escapes: '\<*_&#@'
.github/workflows/ci-backend.yml (vendored, 4 lines changed)
@@ -88,13 +88,13 @@ jobs:
         if: always()
         uses: codecov/codecov-action@v5
         with:
-          flags: backend,backend-python-${{ matrix.python-version }}
+          flags: backend-python-${{ matrix.python-version }}
           files: junit.xml
           report_type: test_results
       - name: Upload coverage to Codecov
         uses: codecov/codecov-action@v5
         with:
-          flags: backend,backend-python-${{ matrix.python-version }}
+          flags: backend-python-${{ matrix.python-version }}
           files: coverage.xml
           report_type: coverage
       - name: Stop containers
.github/workflows/ci-docker.yml (vendored, 36 lines changed)
@@ -35,7 +35,7 @@ jobs:
       contents: read
       packages: write
     outputs:
-      can-push: ${{ steps.check-push.outputs.can-push }}
+      should-push: ${{ steps.check-push.outputs.should-push }}
       push-external: ${{ steps.check-push.outputs.push-external }}
       repository: ${{ steps.repo.outputs.name }}
       ref-name: ${{ steps.ref.outputs.name }}
@@ -59,16 +59,28 @@ jobs:
         env:
           REF_NAME: ${{ steps.ref.outputs.name }}
         run: |
-          # can-push: Can we push to GHCR?
-          # True for: pushes, or PRs from the same repo (not forks)
-          can_push=${{ github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository }}
-          echo "can-push=${can_push}"
-          echo "can-push=${can_push}" >> $GITHUB_OUTPUT
+          # should-push: Should we push to GHCR?
+          # True for:
+          # 1. Pushes (tags/dev/beta) - filtered via the workflow triggers
+          # 2. Internal PRs where the branch name starts with 'feature-' - filtered here when a PR is synced
+          should_push="false"
+
+          if [[ "${{ github.event_name }}" == "push" ]]; then
+            should_push="true"
+          elif [[ "${{ github.event_name }}" == "pull_request" && "${{ github.event.pull_request.head.repo.full_name }}" == "${{ github.repository }}" ]]; then
+            if [[ "${REF_NAME}" == feature-* || "${REF_NAME}" == fix-* ]]; then
+              should_push="true"
+            fi
+          fi
+
+          echo "should-push=${should_push}"
+          echo "should-push=${should_push}" >> $GITHUB_OUTPUT

           # push-external: Should we also push to Docker Hub and Quay.io?
           # Only for main repo on dev/beta branches or version tags
           push_external="false"
-          if [[ "${can_push}" == "true" && "${{ github.repository_owner }}" == "paperless-ngx" ]]; then
+          if [[ "${should_push}" == "true" && "${{ github.repository_owner }}" == "paperless-ngx" ]]; then
            case "${REF_NAME}" in
              dev|beta)
                push_external="true"
@@ -125,20 +137,20 @@ jobs:
          labels: ${{ steps.docker-meta.outputs.labels }}
          build-args: |
            PNGX_TAG_VERSION=${{ steps.docker-meta.outputs.version }}
-          outputs: type=image,name=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }},push-by-digest=true,name-canonical=true,push=${{ steps.check-push.outputs.can-push }}
+          outputs: type=image,name=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }},push-by-digest=true,name-canonical=true,push=${{ steps.check-push.outputs.should-push }}
          cache-from: |
            type=registry,ref=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }}/cache/app:${{ steps.ref.outputs.cache-ref }}-${{ matrix.arch }}
            type=registry,ref=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }}/cache/app:dev-${{ matrix.arch }}
-          cache-to: ${{ steps.check-push.outputs.can-push == 'true' && format('type=registry,mode=max,ref={0}/{1}/cache/app:{2}-{3}', env.REGISTRY, steps.repo.outputs.name, steps.ref.outputs.cache-ref, matrix.arch) || '' }}
+          cache-to: ${{ steps.check-push.outputs.should-push == 'true' && format('type=registry,mode=max,ref={0}/{1}/cache/app:{2}-{3}', env.REGISTRY, steps.repo.outputs.name, steps.ref.outputs.cache-ref, matrix.arch) || '' }}
      - name: Export digest
-        if: steps.check-push.outputs.can-push == 'true'
+        if: steps.check-push.outputs.should-push == 'true'
        run: |
          mkdir -p /tmp/digests
          digest="${{ steps.build.outputs.digest }}"
          echo "digest=${digest}"
          touch "/tmp/digests/${digest#sha256:}"
      - name: Upload digest
-        if: steps.check-push.outputs.can-push == 'true'
+        if: steps.check-push.outputs.should-push == 'true'
        uses: actions/upload-artifact@v6.0.0
        with:
          name: digests-${{ matrix.arch }}
@@ -149,7 +161,7 @@ jobs:
     name: Merge and Push Manifest
     runs-on: ubuntu-24.04
     needs: build-arch
-    if: needs.build-arch.outputs.can-push == 'true'
+    if: needs.build-arch.outputs.should-push == 'true'
     permissions:
       contents: read
       packages: write
.github/workflows/ci-frontend.yml (vendored, 4 lines changed)
@@ -109,13 +109,13 @@ jobs:
         if: always()
         uses: codecov/codecov-action@v5
         with:
-          flags: frontend,frontend-node-${{ matrix.node-version }}
+          flags: frontend-node-${{ matrix.node-version }}
           directory: src-ui/
           report_type: test_results
       - name: Upload coverage to Codecov
         uses: codecov/codecov-action@v5
         with:
-          flags: frontend,frontend-node-${{ matrix.node-version }}
+          flags: frontend-node-${{ matrix.node-version }}
           directory: src-ui/coverage/
   e2e-tests:
     name: "E2E Tests (${{ matrix.shard-index }}/${{ matrix.shard-count }})"
@@ -30,7 +30,7 @@ RUN set -eux \
 # Purpose: Installs s6-overlay and rootfs
 # Comments:
 #  - Don't leave anything extra in here either
-FROM ghcr.io/astral-sh/uv:0.9.15-python3.12-trixie-slim AS s6-overlay-base
+FROM ghcr.io/astral-sh/uv:0.9.26-python3.12-trixie-slim AS s6-overlay-base

 WORKDIR /usr/src/s6

@@ -196,7 +196,11 @@ RUN set -eux \
   && apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
   && echo "Installing Python requirements" \
   && uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt \
-  && uv pip install --no-cache --system --no-python-downloads --python-preference system --requirements requirements.txt \
+  && uv pip install --no-cache --system --no-python-downloads --python-preference system \
+    --index https://pypi.org/simple \
+    --index https://download.pytorch.org/whl/cpu \
+    --index-strategy unsafe-best-match \
+    --requirements requirements.txt \
   && echo "Installing NLTK data" \
   && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" snowball_data \
   && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" stopwords \
@@ -8,6 +8,11 @@ echo "${log_prefix} Apply database migrations..."

 cd "${PAPERLESS_SRC_DIR}"

+if [[ "${PAPERLESS_MIGRATION_MODE:-0}" == "1" ]]; then
+    echo "${log_prefix} Migration mode enabled, skipping migrations."
+    exit 0
+fi
+
 # The whole migrate, with flock, needs to run as the right user
 if [[ -n "${USER_IS_NON_ROOT}" ]]; then
     exec s6-setlock -n "${data_dir}/migration_lock" python3 manage.py migrate --skip-checks --no-input
@@ -9,7 +9,15 @@ echo "${log_prefix} Running Django checks"
 cd "${PAPERLESS_SRC_DIR}"

 if [[ -n "${USER_IS_NON_ROOT}" ]]; then
-    python3 manage.py check
+    if [[ "${PAPERLESS_MIGRATION_MODE:-0}" == "1" ]]; then
+        python3 manage_migration.py check
+    else
+        python3 manage.py check
+    fi
 else
-    s6-setuidgid paperless python3 manage.py check
+    if [[ "${PAPERLESS_MIGRATION_MODE:-0}" == "1" ]]; then
+        s6-setuidgid paperless python3 manage_migration.py check
+    else
+        s6-setuidgid paperless python3 manage.py check
+    fi
 fi
@@ -13,8 +13,14 @@ if [[ -n "${PAPERLESS_FORCE_SCRIPT_NAME}" ]]; then
     export GRANIAN_URL_PATH_PREFIX=${PAPERLESS_FORCE_SCRIPT_NAME}
 fi

-if [[ -n "${USER_IS_NON_ROOT}" ]]; then
-    exec granian --interface asginl --ws --loop uvloop "paperless.asgi:application"
+if [[ "${PAPERLESS_MIGRATION_MODE:-0}" == "1" ]]; then
+    app_module="paperless.migration_asgi:application"
 else
-    exec s6-setuidgid paperless granian --interface asginl --ws --loop uvloop "paperless.asgi:application"
+    app_module="paperless.asgi:application"
+fi
+
+if [[ -n "${USER_IS_NON_ROOT}" ]]; then
+    exec granian --interface asginl --ws --loop uvloop "${app_module}"
+else
+    exec s6-setuidgid paperless granian --interface asginl --ws --loop uvloop "${app_module}"
 fi
@@ -1,9 +1,60 @@
 # Changelog

+## paperless-ngx 2.20.5
+
+### Bug Fixes
+
+- Fix: ensure horizontal scroll for long tag names in list, wrap tags without parent [@shamoon](https://github.com/shamoon) ([#11811](https://github.com/paperless-ngx/paperless-ngx/pull/11811))
+- Fix: use explicit order field for workflow actions [@shamoon](https://github.com/shamoon) [@stumpylog](https://github.com/stumpylog) ([#11781](https://github.com/paperless-ngx/paperless-ngx/pull/11781))
+
+### All App Changes
+
+<details>
+<summary>2 changes</summary>
+
+- Fix: ensure horizontal scroll for long tag names in list, wrap tags without parent [@shamoon](https://github.com/shamoon) ([#11811](https://github.com/paperless-ngx/paperless-ngx/pull/11811))
+- Fix: use explicit order field for workflow actions [@shamoon](https://github.com/shamoon) [@stumpylog](https://github.com/stumpylog) ([#11781](https://github.com/paperless-ngx/paperless-ngx/pull/11781))
+</details>
+
+## paperless-ngx 2.20.4
+
+### Security
+
+- Resolve [GHSA-28cf-xvcf-hw6m](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-28cf-xvcf-hw6m)
+
+### Bug Fixes
+
+- Fix: propagate metadata override created value [@shamoon](https://github.com/shamoon) ([#11659](https://github.com/paperless-ngx/paperless-ngx/pull/11659))
+- Fix: support ordering by storage path name [@shamoon](https://github.com/shamoon) ([#11661](https://github.com/paperless-ngx/paperless-ngx/pull/11661))
+- Fix: validate cf integer values within PostgreSQL range [@shamoon](https://github.com/shamoon) ([#11666](https://github.com/paperless-ngx/paperless-ngx/pull/11666))
+- Fixhancement: add error handling and retry when opening index [@shamoon](https://github.com/shamoon) ([#11731](https://github.com/paperless-ngx/paperless-ngx/pull/11731))
+- Fix: fix recurring workflow to respect latest run time [@shamoon](https://github.com/shamoon) ([#11735](https://github.com/paperless-ngx/paperless-ngx/pull/11735))
+
+### All App Changes
+
+<details>
+<summary>5 changes</summary>
+
+- Fix: propagate metadata override created value [@shamoon](https://github.com/shamoon) ([#11659](https://github.com/paperless-ngx/paperless-ngx/pull/11659))
+- Fix: support ordering by storage path name [@shamoon](https://github.com/shamoon) ([#11661](https://github.com/paperless-ngx/paperless-ngx/pull/11661))
+- Fix: validate cf integer values within PostgreSQL range [@shamoon](https://github.com/shamoon) ([#11666](https://github.com/paperless-ngx/paperless-ngx/pull/11666))
+- Fixhancement: add error handling and retry when opening index [@shamoon](https://github.com/shamoon) ([#11731](https://github.com/paperless-ngx/paperless-ngx/pull/11731))
+- Fix: fix recurring workflow to respect latest run time [@shamoon](https://github.com/shamoon) ([#11735](https://github.com/paperless-ngx/paperless-ngx/pull/11735))
+</details>
+
 ## paperless-ngx 2.20.3

+### Security
+
+- Resolve [GHSA-7cq3-mhxq-w946](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-7cq3-mhxq-w946)
+
 ## paperless-ngx 2.20.2

+### Security
+
+- Resolve [GHSA-6653-vcx4-69mc](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-6653-vcx4-69mc)
+- Resolve [GHSA-24x5-wp64-9fcc](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-24x5-wp64-9fcc)
+
 ### Features / Enhancements

 - Tweakhancement: dim inactive users in users-groups list [@shamoon](https://github.com/shamoon) ([#11537](https://github.com/paperless-ngx/paperless-ngx/pull/11537))
@@ -170,11 +170,18 @@ Available options are `postgresql` and `mariadb`.

 !!! note

-    A small pool is typically sufficient — for example, a size of 4.
-    Make sure your PostgreSQL server's max_connections setting is large enough to handle:
-    ```(Paperless workers + Celery workers) × pool size + safety margin```
-    For example, with 4 Paperless workers and 2 Celery workers, and a pool size of 4:
-    (4 + 2) × 4 + 10 = 34 connections required.
+    A pool of 8-10 connections per worker is typically sufficient.
+    If you encounter error messages such as `couldn't get a connection`
+    or database connection timeouts, you probably need to increase the pool size.
+
+!!! warning
+
+    Make sure your PostgreSQL `max_connections` setting is large enough to handle the connection pools:
+    `(NB_PAPERLESS_WORKERS + NB_CELERY_WORKERS) × POOL_SIZE + SAFETY_MARGIN`. For example, with
+    4 Paperless workers and 2 Celery workers, and a pool size of 8: `(4 + 2) × 8 + 10 = 58`,
+    so `max_connections = 60` (or even more) is appropriate.
+
+    This assumes only Paperless-ngx connects to your PostgreSQL instance. If you have other applications,
+    you should increase `max_connections` accordingly.

 #### [`PAPERLESS_DB_READ_CACHE_ENABLED=<bool>`](#PAPERLESS_DB_READ_CACHE_ENABLED) {#PAPERLESS_DB_READ_CACHE_ENABLED}

@@ -1866,7 +1873,7 @@ using the OpenAI API. This setting is required to be set to use the AI features.
 #### [`PAPERLESS_AI_LLM_MODEL=<str>`](#PAPERLESS_AI_LLM_MODEL) {#PAPERLESS_AI_LLM_MODEL}

 : The model to use for the AI backend, i.e. "gpt-3.5-turbo", "gpt-4" or any of the models supported by the
-current backend. If not supplied, defaults to "gpt-3.5-turbo" for OpenAI and "llama3" for Ollama.
+current backend. If not supplied, defaults to "gpt-3.5-turbo" for OpenAI and "llama3.1" for Ollama.

 Defaults to None.

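A quick sanity check of the connection arithmetic in the updated note above; a minimal sketch, where the 4 Paperless workers, 2 Celery workers and pool size of 8 are the example values from the docs, not defaults:

```python
# Rough estimate of the PostgreSQL max_connections needed for connection pooling.
def required_max_connections(paperless_workers: int, celery_workers: int,
                             pool_size: int, safety_margin: int = 10) -> int:
    # (NB_PAPERLESS_WORKERS + NB_CELERY_WORKERS) × POOL_SIZE + SAFETY_MARGIN
    return (paperless_workers + celery_workers) * pool_size + safety_margin


print(required_max_connections(4, 2, 8))  # 58 -> round up, e.g. max_connections = 60
```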
@@ -1,6 +1,6 @@
 [project]
 name = "paperless-ngx"
-version = "2.20.3"
+version = "2.20.5"
 description = "A community-supported supercharged document management system: scan, index and archive all your physical documents"
 readme = "README.md"
 requires-python = ">=3.10"
@@ -28,7 +28,7 @@ dependencies = [
   # Only patch versions are guaranteed to not introduce breaking changes.
   "django~=5.2.5",
   "django-allauth[mfa,socialaccount]~=65.12.1",
-  "django-auditlog~=3.3.0",
+  "django-auditlog~=3.4.1",
   "django-cachalot~=2.8.0",
   "django-celery-results~=2.6.0",
   "django-compression-middleware~=0.5.0",
@@ -47,20 +47,21 @@ dependencies = [
   "faiss-cpu>=1.10",
   "filelock~=3.20.0",
   "flower~=2.0.1",
-  "gotenberg-client~=0.12.0",
+  "gotenberg-client~=0.13.1",
   "httpx-oauth~=0.16",
+  "ijson~=3.3",
   "imap-tools~=1.11.0",
   "inotifyrecursive~=0.3",
   "jinja2~=3.1.5",
   "langdetect~=1.0.9",
-  "llama-index-core>=0.12.33.post1",
+  "llama-index-core>=0.14.12",
-  "llama-index-embeddings-huggingface>=0.5.3",
+  "llama-index-embeddings-huggingface>=0.6.1",
-  "llama-index-embeddings-openai>=0.3.1",
+  "llama-index-embeddings-openai>=0.5.1",
-  "llama-index-llms-ollama>=0.5.4",
+  "llama-index-llms-ollama>=0.9.1",
-  "llama-index-llms-openai>=0.3.38",
+  "llama-index-llms-openai>=0.6.13",
-  "llama-index-vector-stores-faiss>=0.3",
+  "llama-index-vector-stores-faiss>=0.5.2",
   "nltk~=3.9.1",
-  "ocrmypdf~=16.12.0",
+  "ocrmypdf~=16.13.0",
   "openai>=1.76",
   "pathvalidate~=3.3.1",
   "pdf2image~=1.17.0",
@@ -73,11 +74,14 @@ dependencies = [
   "rapidfuzz~=3.14.0",
   "redis[hiredis]~=5.2.1",
   "regex>=2025.9.18",
+  "rich~=14.1.0",
   "scikit-learn~=1.7.0",
   "sentence-transformers>=4.1",
   "setproctitle~=1.3.4",
   "tika-client~=0.10.0",
+  "torch~=2.9.1",
   "tqdm~=4.67.1",
+  "typer~=0.12",
   "watchdog~=6.0",
   "whitenoise~=6.9",
   "whoosh-reloaded>=2.7.5",
@@ -91,7 +95,7 @@ optional-dependencies.postgres = [
   "psycopg[c,pool]==3.2.12",
   # Direct dependency for proper resolution of the pre-built wheels
   "psycopg-c==3.2.12",
-  "psycopg-pool==3.2.7",
+  "psycopg-pool==3.3",
 ]
 optional-dependencies.webserver = [
   "granian[uvloop]~=2.5.1",
@@ -126,7 +130,7 @@ testing = [
 ]

 lint = [
-  "pre-commit~=4.4.0",
+  "pre-commit~=4.5.1",
   "pre-commit-uv~=4.2.0",
   "ruff~=0.14.0",
 ]
@@ -169,6 +173,15 @@ zxing-cpp = [
   { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_aarch64.whl", marker = "sys_platform == 'linux' and platform_machine == 'aarch64' and python_version == '3.12'" },
 ]

+torch = [
+  { index = "pytorch-cpu" },
+]
+
+[[tool.uv.index]]
+name = "pytorch-cpu"
+url = "https://download.pytorch.org/whl/cpu"
+explicit = true
+
 [tool.ruff]
 target-version = "py310"
 line-length = 88
@@ -274,7 +287,7 @@ addopts = [
   "--numprocesses=auto",
   "--maxprocesses=16",
   "--quiet",
-  "--durations=0",
+  "--durations=50",
   "--junitxml=junit.xml",
   "-o junit_family=legacy",
 ]
@@ -1,6 +1,6 @@
 {
   "name": "paperless-ngx-ui",
-  "version": "2.20.3",
+  "version": "2.20.5",
   "scripts": {
     "preinstall": "npx only-allow pnpm",
     "ng": "ng",
@@ -252,7 +252,7 @@ describe('WorkflowEditDialogComponent', () => {
     expect(component.object.actions.length).toEqual(2)
   })

-  it('should update order and remove ids from actions on drag n drop', () => {
+  it('should update order on drag n drop', () => {
     const action1 = workflow.actions[0]
     const action2 = workflow.actions[1]
     component.object = workflow
@@ -261,8 +261,6 @@ describe('WorkflowEditDialogComponent', () => {
       WorkflowAction[]
     >)
     expect(component.object.actions).toEqual([action2, action1])
-    expect(action1.id).toBeNull()
-    expect(action2.id).toBeNull()
   })

   it('should not include auto matching in algorithms', () => {
@@ -1283,11 +1283,6 @@ export class WorkflowEditDialogComponent
     const actionField = this.actionFields.at(event.previousIndex)
     this.actionFields.removeAt(event.previousIndex)
     this.actionFields.insert(event.currentIndex, actionField)
-    // removing id will effectively re-create the actions in this order
-    this.object.actions.forEach((a) => (a.id = null))
-    this.actionFields.controls.forEach((c) =>
-      c.get('id').setValue(null, { emitEvent: false })
-    )
   }

   save(): void {
@@ -28,7 +28,7 @@
         </button>
       </ng-template>
       <ng-template ng-option-tmp let-item="item" let-index="index" let-search="searchTerm">
-        <div class="tag-option-row d-flex align-items-center">
+        <div class="tag-option-row d-flex align-items-center" [class.w-auto]="!getTag(item.id)?.parent">
           @if (item.id && tags) {
             @if (getTag(item.id)?.parent) {
               <i-bs name="list-nested" class="me-1"></i-bs>
@@ -22,8 +22,8 @@
 }

 // Dropdown hierarchy reveal for ng-select options
-::ng-deep .ng-dropdown-panel .ng-option {
-  overflow-x: scroll;
+:host ::ng-deep .ng-dropdown-panel .ng-option {
+  overflow-x: auto !important;

   .tag-option-row {
     font-size: 1rem;
@@ -41,12 +41,12 @@
   }
 }

-::ng-deep .ng-dropdown-panel .ng-option:hover .hierarchy-reveal,
-::ng-deep .ng-dropdown-panel .ng-option.ng-option-marked .hierarchy-reveal {
+:host ::ng-deep .ng-dropdown-panel .ng-option:hover .hierarchy-reveal,
+:host ::ng-deep .ng-dropdown-panel .ng-option.ng-option-marked .hierarchy-reveal {
   max-width: 1000px;
 }

 ::ng-deep .ng-dropdown-panel .ng-option:hover .hierarchy-indicator,
-::ng-deep .ng-dropdown-panel .ng-option.ng-option-marked .hierarchy-indicator {
+:host ::ng-deep .ng-dropdown-panel .ng-option.ng-option-marked .hierarchy-indicator {
   background: transparent;
 }
@@ -285,10 +285,10 @@ export class DocumentDetailComponent
       if (
         element &&
         element.nativeElement.offsetParent !== null &&
-        this.nav?.activeId == 4
+        this.nav?.activeId == DocumentDetailNavIDs.Preview
       ) {
         // its visible
-        setTimeout(() => this.nav?.select(1))
+        setTimeout(() => this.nav?.select(DocumentDetailNavIDs.Details))
       }
     }

@@ -6,7 +6,7 @@ export const environment = {
   apiVersion: '9', // match src/paperless/settings.py
   appTitle: 'Paperless-ngx',
   tag: 'prod',
-  version: '2.20.3',
+  version: '2.20.5',
   webSocketHost: window.location.host,
   webSocketProtocol: window.location.protocol == 'https:' ? 'wss:' : 'ws:',
   webSocketBaseUrl: base_url.pathname + 'ws/',
src/documents/migrations/1075_workflowaction_order.py (new file, 28 lines)
@@ -0,0 +1,28 @@
+# Generated by Django 5.2.7 on 2026-01-14 16:53
+
+from django.db import migrations
+from django.db import models
+from django.db.models import F
+
+
+def populate_action_order(apps, schema_editor):
+    WorkflowAction = apps.get_model("documents", "WorkflowAction")
+    WorkflowAction.objects.all().update(order=F("id"))
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("documents", "1074_workflowrun_deleted_at_workflowrun_restored_at_and_more"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="workflowaction",
+            name="order",
+            field=models.PositiveIntegerField(default=0, verbose_name="order"),
+        ),
+        migrations.RunPython(
+            populate_action_order,
+            reverse_code=migrations.RunPython.noop,
+        ),
+    ]
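The data migration backfills the new `order` column from each row's primary key, so existing actions keep their creation order without a per-row loop. A minimal sketch of the same ORM call outside a migration (model import path as used in the hunk above, run inside the project's Django context):

```python
from django.db.models import F

from documents.models import WorkflowAction

# Single SQL UPDATE: every existing action's order is set to its id,
# mirroring the RunPython step in migration 1075 above.
WorkflowAction.objects.all().update(order=F("id"))

# New or reordered actions are then read back with the explicit ordering.
ordered = WorkflowAction.objects.order_by("order", "pk")
```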
@@ -6,7 +6,7 @@ from django.db import models

 class Migration(migrations.Migration):
     dependencies = [
-        ("documents", "1074_workflowrun_deleted_at_workflowrun_restored_at_and_more"),
+        ("documents", "1075_workflowaction_order"),
     ]

     operations = [
@@ -1295,6 +1295,8 @@ class WorkflowAction(models.Model):
         default=WorkflowActionType.ASSIGNMENT,
     )

+    order = models.PositiveIntegerField(_("order"), default=0)
+
     assign_title = models.TextField(
         _("assign title"),
         null=True,
@@ -2577,7 +2577,8 @@ class WorkflowSerializer(serializers.ModelSerializer):
             set_triggers.append(trigger_instance)

         if actions is not None and actions is not serializers.empty:
-            for action in actions:
+            for index, action in enumerate(actions):
+                action["order"] = index
                 assign_tags = action.pop("assign_tags", None)
                 assign_view_users = action.pop("assign_view_users", None)
                 assign_view_groups = action.pop("assign_view_groups", None)
@@ -2704,6 +2705,16 @@ class WorkflowSerializer(serializers.ModelSerializer):

         return instance

+    def to_representation(self, instance):
+        data = super().to_representation(instance)
+        actions = instance.actions.order_by("order", "pk")
+        data["actions"] = WorkflowActionSerializer(
+            actions,
+            many=True,
+            context=self.context,
+        ).data
+        return data
+

 class TrashSerializer(SerializerWithPerms):
     documents = serializers.ListField(
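The two serializer changes work together: incoming actions get their list position written into `order`, and the outgoing representation is re-sorted by that field. A minimal standalone sketch of the idea; the helper names here are illustrative, not the project's API:

```python
# Sketch: persist list position as an explicit order field, then read back sorted.
def assign_order(actions: list[dict]) -> list[dict]:
    for index, action in enumerate(actions):
        action["order"] = index          # mirrors: action["order"] = index
    return actions


def represent(actions: list[dict]) -> list[dict]:
    # mirrors: instance.actions.order_by("order", "pk")
    return sorted(actions, key=lambda a: (a["order"], a.get("pk", 0)))


saved = assign_order([{"pk": 7, "name": "webhook"}, {"pk": 3, "name": "email"}])
print([a["name"] for a in represent(saved)])  # ['webhook', 'email'] - list order wins, not pk
```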
@@ -421,7 +421,15 @@ def update_filename_and_move_files(
         return
     instance = instance.document

-def validate_move(instance, old_path: Path, new_path: Path):
+def validate_move(instance, old_path: Path, new_path: Path, root: Path):
+    if not new_path.is_relative_to(root):
+        msg = (
+            f"Document {instance!s}: Refusing to move file outside root {root}: "
+            f"{new_path}."
+        )
+        logger.warning(msg)
+        raise CannotMoveFilesException(msg)
+
     if not old_path.is_file():
         # Can't do anything if the old file does not exist anymore.
         msg = f"Document {instance!s}: File {old_path} doesn't exist."
@@ -510,12 +518,22 @@ def update_filename_and_move_files(
             return

         if move_original:
-            validate_move(instance, old_source_path, instance.source_path)
+            validate_move(
+                instance,
+                old_source_path,
+                instance.source_path,
+                settings.ORIGINALS_DIR,
+            )
             create_source_path_directory(instance.source_path)
             shutil.move(old_source_path, instance.source_path)

         if move_archive:
-            validate_move(instance, old_archive_path, instance.archive_path)
+            validate_move(
+                instance,
+                old_archive_path,
+                instance.archive_path,
+                settings.ARCHIVE_DIR,
+            )
             create_source_path_directory(instance.archive_path)
             shutil.move(old_archive_path, instance.archive_path)

@@ -763,7 +781,7 @@ def run_workflows(

         if matching.document_matches_workflow(document, workflow, trigger_type):
             action: WorkflowAction
-            for action in workflow.actions.all():
+            for action in workflow.actions.order_by("order", "pk"):
                 message = f"Applying {action} from {workflow}"
                 if not use_overrides:
                     logger.info(message, extra={"group": logging_group})
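The new `root` parameter makes `validate_move` refuse any destination outside the originals or archive directory, using `pathlib.Path.is_relative_to`. A minimal sketch of that containment check in isolation (the directory paths are example values, not Paperless defaults):

```python
from pathlib import Path


def is_inside(root: Path, candidate: Path) -> bool:
    # Same containment test the handler now applies before shutil.move;
    # it is a lexical check on the already-constructed target path.
    return candidate.is_relative_to(root)


root = Path("/data/media/documents/originals")
print(is_inside(root, root / "2026" / "invoice.pdf"))  # True  -> move allowed
print(is_inside(root, Path("/tmp/escaped.pdf")))       # False -> CannotMoveFilesException
```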
@@ -262,6 +262,17 @@ def get_custom_fields_context(
     return field_data


+def _is_safe_relative_path(value: str) -> bool:
+    if value == "":
+        return True
+
+    path = PurePath(value)
+    if path.is_absolute() or path.drive:
+        return False
+
+    return ".." not in path.parts
+
+
 def validate_filepath_template_and_render(
     template_string: str,
     document: Document | None = None,
@@ -309,6 +320,12 @@ def validate_filepath_template_and_render(
         )
         rendered_template = template.render(context)

+        if not _is_safe_relative_path(rendered_template):
+            logger.warning(
+                "Template rendered an unsafe path (absolute or containing traversal).",
+            )
+            return None
+
         # We're good!
         return rendered_template
     except UndefinedError:
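The helper is a purely lexical filter: it rejects absolute paths, drive-prefixed paths, and any `..` component before the rendered storage-path template is used. A short usage sketch; the function body is copied from the hunk above, the sample inputs are illustrative:

```python
from pathlib import PurePath


def _is_safe_relative_path(value: str) -> bool:
    if value == "":
        return True

    path = PurePath(value)
    if path.is_absolute() or path.drive:
        return False

    return ".." not in path.parts


print(_is_safe_relative_path("invoices/2026/acme.pdf"))  # True
print(_is_safe_relative_path("/etc/passwd"))             # False (absolute)
print(_is_safe_relative_path("../../../../tmp/proof"))   # False (traversal)
```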
@@ -219,6 +219,30 @@ class TestApiStoragePaths(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertEqual(StoragePath.objects.count(), 1)

+    def test_api_create_storage_path_rejects_traversal(self):
+        """
+        GIVEN:
+            - API request to create a storage paths
+            - Storage path attempts directory traversal
+        WHEN:
+            - API is called
+        THEN:
+            - Correct HTTP 400 response
+            - No storage path is created
+        """
+        response = self.client.post(
+            self.ENDPOINT,
+            json.dumps(
+                {
+                    "name": "Traversal path",
+                    "path": "../../../../../tmp/proof",
+                },
+            ),
+            content_type="application/json",
+        )
+        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+        self.assertEqual(StoragePath.objects.count(), 1)
+
     def test_api_storage_path_placeholders(self):
         """
         GIVEN:
@@ -20,9 +20,6 @@ def get_workflows_for_trigger(
     wrap it in a list; otherwise fetch enabled workflows for the trigger with
     the prefetches used by the runner.
     """
-    if workflow_to_run is not None:
-        return [workflow_to_run]
-
     annotated_actions = (
         WorkflowAction.objects.select_related(
             "assign_correspondent",
@@ -105,10 +102,25 @@ def get_workflows_for_trigger(
         )
     )

+    action_prefetch = Prefetch(
+        "actions",
+        queryset=annotated_actions.order_by("order", "pk"),
+    )
+
+    if workflow_to_run is not None:
+        return (
+            Workflow.objects.filter(pk=workflow_to_run.pk)
+            .prefetch_related(
+                action_prefetch,
+                "triggers",
+            )
+            .distinct()
+        )
+
     return (
         Workflow.objects.filter(enabled=True, triggers__type=trigger_type)
         .prefetch_related(
-            Prefetch("actions", queryset=annotated_actions),
+            action_prefetch,
             "triggers",
         )
         .order_by("order")
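Both branches now share a single `Prefetch` whose queryset is pre-ordered, so each workflow's actions arrive sorted by `(order, pk)` without extra queries at iteration time. A minimal sketch of the pattern with the models named in the hunk above (intended to run inside the project's Django context):

```python
from django.db.models import Prefetch

from documents.models import Workflow, WorkflowAction

# Actions are fetched once, already sorted by the new explicit order column.
ordered_actions = Prefetch(
    "actions",
    queryset=WorkflowAction.objects.order_by("order", "pk"),
)

workflows = (
    Workflow.objects.filter(enabled=True)
    .prefetch_related(ordered_actions, "triggers")
    .order_by("order")
)
# workflow.actions.all() now yields actions in (order, pk) order from the prefetch cache.
```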
@@ -2,7 +2,7 @@ msgid ""
|
|||||||
msgstr ""
|
msgstr ""
|
||||||
"Project-Id-Version: paperless-ngx\n"
|
"Project-Id-Version: paperless-ngx\n"
|
||||||
"Report-Msgid-Bugs-To: \n"
|
"Report-Msgid-Bugs-To: \n"
|
||||||
"POT-Creation-Date: 2026-01-13 16:26+0000\n"
|
"POT-Creation-Date: 2026-01-15 23:01+0000\n"
|
||||||
"PO-Revision-Date: 2022-02-17 04:17\n"
|
"PO-Revision-Date: 2022-02-17 04:17\n"
|
||||||
"Last-Translator: \n"
|
"Last-Translator: \n"
|
||||||
"Language-Team: English\n"
|
"Language-Team: English\n"
|
||||||
@@ -89,7 +89,7 @@ msgstr ""
|
|||||||
msgid "Automatic"
|
msgid "Automatic"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: documents/models.py:64 documents/models.py:456 documents/models.py:1527
|
#: documents/models.py:64 documents/models.py:456 documents/models.py:1529
|
||||||
#: paperless_mail/models.py:23 paperless_mail/models.py:143
|
#: paperless_mail/models.py:23 paperless_mail/models.py:143
|
||||||
msgid "name"
|
msgid "name"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
@@ -264,7 +264,7 @@ msgid "The position of this document in your physical document archive."
|
|||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: documents/models.py:318 documents/models.py:700 documents/models.py:754
|
#: documents/models.py:318 documents/models.py:700 documents/models.py:754
|
||||||
#: documents/models.py:1570
|
#: documents/models.py:1572
|
||||||
msgid "document"
|
msgid "document"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
@@ -1047,179 +1047,180 @@ msgstr ""
|
|||||||
msgid "Workflow Action Type"
|
msgid "Workflow Action Type"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: documents/models.py:1299
|
#: documents/models.py:1298 documents/models.py:1531
|
||||||
msgid "assign title"
|
#: paperless_mail/models.py:145
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1303
|
|
||||||
msgid "Assign a document title, must be a Jinja2 template, see documentation."
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1311 paperless_mail/models.py:274
|
|
||||||
msgid "assign this tag"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1320 paperless_mail/models.py:282
|
|
||||||
msgid "assign this document type"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1329 paperless_mail/models.py:296
|
|
||||||
msgid "assign this correspondent"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1338
|
|
||||||
msgid "assign this storage path"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1347
|
|
||||||
msgid "assign this owner"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1354
|
|
||||||
msgid "grant view permissions to these users"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1361
|
|
||||||
msgid "grant view permissions to these groups"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1368
|
|
||||||
msgid "grant change permissions to these users"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1375
|
|
||||||
msgid "grant change permissions to these groups"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1382
|
|
||||||
msgid "assign these custom fields"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1386
|
|
||||||
msgid "custom field values"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1390
|
|
||||||
msgid "Optional values to assign to the custom fields."
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1399
|
|
||||||
msgid "remove these tag(s)"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1404
|
|
||||||
msgid "remove all tags"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1411
|
|
||||||
msgid "remove these document type(s)"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1416
|
|
||||||
msgid "remove all document types"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1423
|
|
||||||
msgid "remove these correspondent(s)"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1428
|
|
||||||
msgid "remove all correspondents"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1435
|
|
||||||
msgid "remove these storage path(s)"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1440
|
|
||||||
msgid "remove all storage paths"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1447
|
|
||||||
msgid "remove these owner(s)"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1452
|
|
||||||
msgid "remove all owners"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1459
|
|
||||||
msgid "remove view permissions for these users"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1466
|
|
||||||
msgid "remove view permissions for these groups"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1473
|
|
||||||
msgid "remove change permissions for these users"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1480
|
|
||||||
msgid "remove change permissions for these groups"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1485
|
|
||||||
msgid "remove all permissions"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1492
|
|
||||||
msgid "remove these custom fields"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1497
|
|
||||||
msgid "remove all custom fields"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1506
|
|
||||||
msgid "email"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1515
|
|
||||||
msgid "webhook"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1519
|
|
||||||
msgid "workflow action"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1520
|
|
||||||
msgid "workflow actions"
|
|
||||||
msgstr ""
|
|
||||||
|
|
||||||
#: documents/models.py:1529 paperless_mail/models.py:145
|
|
||||||
msgid "order"
|
msgid "order"
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#: documents/models.py:1535
|
#: documents/models.py:1301
|
||||||
|
msgid "assign title"
|
||||||
|
msgstr ""
|
||||||
|
|
||||||
|
#: documents/models.py:1305
|
||||||
|
msgid "Assign a document title, must be a Jinja2 template, see documentation."
|
||||||
|
msgstr ""
|
||||||
|
|
||||||
|
#: documents/models.py:1313 paperless_mail/models.py:274
|
||||||
|
msgid "assign this tag"
|
||||||
|
msgstr ""
|
||||||
|
|
||||||
|
#: documents/models.py:1322 paperless_mail/models.py:282
|
||||||
|
msgid "assign this document type"
|
||||||
|
msgstr ""
|
||||||
|
|
||||||
|
#: documents/models.py:1331 paperless_mail/models.py:296
|
||||||
|
msgid "assign this correspondent"
|
||||||
|
msgstr ""
|
||||||
|
|
||||||
|
#: documents/models.py:1340
|
||||||
|
msgid "assign this storage path"
|
||||||
|
msgstr ""
|
||||||
|
|
||||||
|
#: documents/models.py:1349
|
||||||
|
msgid "assign this owner"
|
||||||
|
msgstr ""
|
||||||
|
|
||||||
|
#: documents/models.py:1356
|
||||||
|
msgid "grant view permissions to these users"
|
||||||
|
msgstr ""
|
||||||
|
|
||||||
|
#: documents/models.py:1363
|
||||||
|
msgid "grant view permissions to these groups"
|
||||||
|
msgstr ""
|
||||||
|
|
||||||
|
#: documents/models.py:1370
|
||||||
|
msgid "grant change permissions to these users"
|
||||||
|
msgstr ""
|
||||||
|
|
||||||
|
#: documents/models.py:1377
|
||||||
|
msgid "grant change permissions to these groups"
|
||||||
|
msgstr ""
|
||||||
|
|
||||||
|
#: documents/models.py:1384
|
||||||
|
msgid "assign these custom fields"
|
msgstr ""

#: documents/models.py:1388
msgid "custom field values"
msgstr ""

#: documents/models.py:1392
msgid "Optional values to assign to the custom fields."
msgstr ""

#: documents/models.py:1401
msgid "remove these tag(s)"
msgstr ""

#: documents/models.py:1406
msgid "remove all tags"
msgstr ""

#: documents/models.py:1413
msgid "remove these document type(s)"
msgstr ""

#: documents/models.py:1418
msgid "remove all document types"
msgstr ""

#: documents/models.py:1425
msgid "remove these correspondent(s)"
msgstr ""

#: documents/models.py:1430
msgid "remove all correspondents"
msgstr ""

#: documents/models.py:1437
msgid "remove these storage path(s)"
msgstr ""

#: documents/models.py:1442
msgid "remove all storage paths"
msgstr ""

#: documents/models.py:1449
msgid "remove these owner(s)"
msgstr ""

#: documents/models.py:1454
msgid "remove all owners"
msgstr ""

#: documents/models.py:1461
msgid "remove view permissions for these users"
msgstr ""

#: documents/models.py:1468
msgid "remove view permissions for these groups"
msgstr ""

#: documents/models.py:1475
msgid "remove change permissions for these users"
msgstr ""

#: documents/models.py:1482
msgid "remove change permissions for these groups"
msgstr ""

#: documents/models.py:1487
msgid "remove all permissions"
msgstr ""

#: documents/models.py:1494
msgid "remove these custom fields"
msgstr ""

#: documents/models.py:1499
msgid "remove all custom fields"
msgstr ""

#: documents/models.py:1508
msgid "email"
msgstr ""

#: documents/models.py:1517
msgid "webhook"
msgstr ""

#: documents/models.py:1521
msgid "workflow action"
msgstr ""

#: documents/models.py:1522
msgid "workflow actions"
msgstr ""

#: documents/models.py:1537
msgid "triggers"
msgstr ""

-#: documents/models.py:1542
+#: documents/models.py:1544
msgid "actions"
msgstr ""

-#: documents/models.py:1545 paperless_mail/models.py:154
+#: documents/models.py:1547 paperless_mail/models.py:154
msgid "enabled"
msgstr ""

-#: documents/models.py:1556
+#: documents/models.py:1558
msgid "workflow"
msgstr ""

-#: documents/models.py:1560
+#: documents/models.py:1562
msgid "workflow trigger type"
msgstr ""

-#: documents/models.py:1574
+#: documents/models.py:1576
msgid "date run"
msgstr ""

-#: documents/models.py:1580
+#: documents/models.py:1582
msgid "workflow run"
msgstr ""

-#: documents/models.py:1581
+#: documents/models.py:1583
msgid "workflow runs"
msgstr ""
src/manage.py
@@ -3,7 +3,12 @@ import os
 import sys

 if __name__ == "__main__":
-    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless.settings")
+    try:
+        from paperless_migration.detect import choose_settings_module
+
+        os.environ.setdefault("DJANGO_SETTINGS_MODULE", choose_settings_module())
+    except Exception:
+        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless.settings")

     from django.core.management import execute_from_command_line
13 src/manage_migration.py Executable file
@@ -0,0 +1,13 @@
#!/usr/bin/env python3
import os
import sys

if __name__ == "__main__":
    os.environ.setdefault(
        "DJANGO_SETTINGS_MODULE",
        "paperless_migration.settings",
    )

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
src/paperless/asgi.py
@@ -1,12 +1,18 @@
 import os

+try:
+    from paperless_migration.detect import choose_settings_module
+
+    os.environ.setdefault("DJANGO_SETTINGS_MODULE", choose_settings_module())
+except Exception:
+    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless.settings")
+
 from django.core.asgi import get_asgi_application

 # Fetch Django ASGI application early to ensure AppRegistry is populated
 # before importing consumers and AuthMiddlewareStack that may import ORM
 # models.

-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless.settings")
 django_asgi_app = get_asgi_application()

 from channels.auth import AuthMiddlewareStack  # noqa: E402
7 src/paperless/migration_asgi.py Normal file
@@ -0,0 +1,7 @@
import os

from django.core.asgi import get_asgi_application

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless_migration.settings")

application = get_asgi_application()
src/paperless/version.py
@@ -1,6 +1,6 @@
 from typing import Final

-__version__: Final[tuple[int, int, int]] = (2, 20, 3)
+__version__: Final[tuple[int, int, int]] = (2, 20, 5)
 # Version string like X.Y.Z
 __full_version_str__: Final[str] = ".".join(map(str, __version__))
 # Version string like X.Y
src/paperless/wsgi.py
@@ -9,9 +9,14 @@ https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/

 import os

-from django.core.wsgi import get_wsgi_application
+try:
+    from paperless_migration.detect import choose_settings_module
+
+    os.environ.setdefault("DJANGO_SETTINGS_MODULE", choose_settings_module())
+except Exception:
+    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless.settings")

-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless.settings")
+from django.core.wsgi import get_wsgi_application

 application = get_wsgi_application()
@@ -23,7 +23,7 @@ class AIClient:
     def get_llm(self) -> Ollama | OpenAI:
         if self.settings.llm_backend == "ollama":
             return Ollama(
-                model=self.settings.llm_model or "llama3",
+                model=self.settings.llm_model or "llama3.1",
                 base_url=self.settings.llm_endpoint or "http://localhost:11434",
                 request_timeout=120,
             )
@@ -52,7 +52,7 @@ class AIClient:
         )
         tool_calls = self.llm.get_tool_calls_from_response(
             result,
-            error_on_no_tool_calls=True,
+            error_on_no_tool_call=True,
         )
         logger.debug("LLM query result: %s", tool_calls)
         parsed = DocumentClassifierSchema(**tool_calls[0].tool_kwargs)
@@ -11,14 +11,12 @@ from paperless_ai.chat import stream_chat_with_documents
 @pytest.fixture(autouse=True)
 def patch_embed_model():
     from llama_index.core import settings as llama_settings
+    from llama_index.core.embeddings.mock_embed_model import MockEmbedding

-    mock_embed_model = MagicMock()
-    mock_embed_model._get_text_embedding_batch.return_value = [
-        [0.1] * 1536,
-    ]  # 1 vector per input
-    llama_settings.Settings._embed_model = mock_embed_model
+    # Use a real BaseEmbedding subclass to satisfy llama-index 0.14 validation
+    llama_settings.Settings.embed_model = MockEmbedding(embed_dim=1536)
     yield
-    llama_settings.Settings._embed_model = None
+    llama_settings.Settings.embed_model = None


 @pytest.fixture(autouse=True)
0 src/paperless_migration/__init__.py Normal file
6 src/paperless_migration/apps.py Normal file
@@ -0,0 +1,6 @@
from django.apps import AppConfig


class PaperlessMigrationConfig(AppConfig):
    default_auto_field = "django.db.models.BigAutoField"
    name = "paperless_migration"
150 src/paperless_migration/detect.py Normal file
@@ -0,0 +1,150 @@
"""Lightweight detection to decide if we should boot migration mode."""

from __future__ import annotations

import logging
import os
import sqlite3
from pathlib import Path
from typing import Any

logger = logging.getLogger(__name__)

BASE_DIR = Path(__file__).resolve().parent.parent

_DOC_EXISTS_QUERY = "SELECT 1 FROM documents_document LIMIT 1;"


def _get_db_config() -> dict[str, Any]:
    data_dir = Path(os.getenv("PAPERLESS_DATA_DIR", BASE_DIR.parent / "data")).resolve()
    if not os.getenv("PAPERLESS_DBHOST"):
        return {
            "ENGINE": "sqlite",
            "NAME": data_dir / "db.sqlite3",
        }

    engine = "mariadb" if os.getenv("PAPERLESS_DBENGINE") == "mariadb" else "postgres"
    cfg = {
        "ENGINE": engine,
        "HOST": os.getenv("PAPERLESS_DBHOST"),
        "PORT": os.getenv("PAPERLESS_DBPORT"),
        "NAME": os.getenv("PAPERLESS_DBNAME", "paperless"),
        "USER": os.getenv("PAPERLESS_DBUSER", "paperless"),
        "PASSWORD": os.getenv("PAPERLESS_DBPASS", "paperless"),
    }
    return cfg


def _probe_sqlite(path: Path) -> bool:
    if not path.exists():
        return False
    try:
        conn = sqlite3.connect(path, timeout=1)
        cur = conn.cursor()
        cur.execute(_DOC_EXISTS_QUERY)
        cur.fetchone()
        return True
    except sqlite3.Error:
        return False
    finally:
        try:
            conn.close()
        except Exception:
            pass


def _probe_postgres(cfg: dict[str, Any]) -> bool:
    try:
        import psycopg
    except ImportError:  # pragma: no cover
        logger.debug("psycopg not installed; skipping postgres probe")
        return False

    try:
        conn = psycopg.connect(
            host=cfg["HOST"],
            port=cfg["PORT"],
            dbname=cfg["NAME"],
            user=cfg["USER"],
            password=cfg["PASSWORD"],
            connect_timeout=2,
        )
        with conn, conn.cursor() as cur:
            cur.execute(_DOC_EXISTS_QUERY)
            cur.fetchone()
            return True
    except Exception:
        return False
    finally:
        try:
            conn.close()
        except Exception:
            pass


def _probe_mariadb(cfg: dict[str, Any]) -> bool:
    try:
        import MySQLdb  # type: ignore
    except ImportError:  # pragma: no cover
        logger.debug("mysqlclient not installed; skipping mariadb probe")
        return False

    try:
        conn = MySQLdb.connect(
            host=cfg["HOST"],
            port=int(cfg["PORT"] or 3306),
            user=cfg["USER"],
            passwd=cfg["PASSWORD"],
            db=cfg["NAME"],
            connect_timeout=2,
        )
        cur = conn.cursor()
        cur.execute("SELECT 1 FROM documents_document LIMIT 1;")
        cur.fetchone()
        return True
    except Exception:
        return False
    finally:
        try:
            conn.close()
        except Exception:
            pass


def is_v2_database() -> bool:
    cfg = _get_db_config()
    if cfg["ENGINE"] == "sqlite":
        return _probe_sqlite(cfg["NAME"])
    if cfg["ENGINE"] == "postgres":
        return _probe_postgres(cfg)
    if cfg["ENGINE"] == "mariadb":
        return _probe_mariadb(cfg)
    return False


def choose_settings_module() -> str:
    # ENV override
    toggle = os.getenv("PAPERLESS_MIGRATION_MODE")
    if toggle is not None:
        chosen = (
            "paperless_migration.settings"
            if str(toggle).lower() in ("1", "true", "yes", "on")
            else "paperless.settings"
        )
        os.environ["PAPERLESS_MIGRATION_MODE"] = "1" if "migration" in chosen else "0"
        return chosen

    # Auto-detect via DB probe
    if is_v2_database():
        logger.warning("Detected v2 schema; booting migration mode.")
        os.environ["PAPERLESS_MIGRATION_MODE"] = "1"
        return "paperless_migration.settings"

    os.environ["PAPERLESS_MIGRATION_MODE"] = "0"
    return "paperless.settings"


if __name__ == "__main__":  # pragma: no cover
    logger.info(
        "v2 database detected" if is_v2_database() else "v2 database not detected",
    )
0 src/paperless_migration/scripts/__init__.py Normal file
158 src/paperless_migration/scripts/transform.py Normal file
@@ -0,0 +1,158 @@
# /// script
# dependencies = [
#   "rich",
#   "ijson",
#   "typer-slim",
# ]
# ///

import json
import time
from collections import Counter
from collections.abc import Callable
from pathlib import Path
from typing import Any
from typing import TypedDict

import typer
from rich.console import Console
from rich.progress import BarColumn
from rich.progress import Progress
from rich.progress import SpinnerColumn
from rich.progress import TextColumn
from rich.progress import TimeElapsedColumn
from rich.table import Table

try:
    import ijson  # type: ignore
except ImportError as exc:  # pragma: no cover - handled at runtime
    raise SystemExit(
        "ijson is required for migration transform. "
        "Install dependencies (e.g., `uv pip install ijson`).",
    ) from exc

app = typer.Typer(add_completion=False)
console = Console()


class FixtureObject(TypedDict):
    model: str
    pk: int
    fields: dict[str, Any]


TransformFn = Callable[[FixtureObject], FixtureObject]


def transform_documents_document(obj: FixtureObject) -> FixtureObject:
    fields: dict[str, Any] = obj["fields"]
    fields.pop("storage_type", None)
    content: Any = fields.get("content")
    fields["content_length"] = len(content) if isinstance(content, str) else 0
    return obj


TRANSFORMS: dict[str, TransformFn] = {
    "documents.document": transform_documents_document,
}


def validate_output(value: Path) -> Path:
    if value.exists():
        raise typer.BadParameter(f"Output file '{value}' already exists.")
    return value


@app.command()
def migrate(
    input_path: Path = typer.Option(
        ...,
        "--input",
        "-i",
        exists=True,
        file_okay=True,
        dir_okay=False,
        readable=True,
    ),
    output_path: Path = typer.Option(
        ...,
        "--output",
        "-o",
        callback=validate_output,
    ),
) -> None:
    """
    Process JSON fixtures with detailed summary and timing.
    """
    if input_path.resolve() == output_path.resolve():
        console.print(
            "[bold red]Error:[/bold red] Input and output paths cannot be the same file.",
        )
        raise typer.Exit(code=1)

    stats: Counter[str] = Counter()
    total_processed: int = 0
    start_time: float = time.perf_counter()

    progress = Progress(
        SpinnerColumn(),
        TextColumn("[bold blue]{task.description}"),
        BarColumn(),
        TextColumn("{task.completed:,} rows"),
        TimeElapsedColumn(),
        console=console,
    )

    with (
        progress,
        input_path.open("rb") as infile,
        output_path.open("w", encoding="utf-8") as outfile,
    ):
        task = progress.add_task("Processing fixture", start=True)
        outfile.write("[\n")
        first: bool = True

        for i, obj in enumerate(ijson.items(infile, "item")):
            fixture: FixtureObject = obj
            model: str = fixture["model"]
            total_processed += 1

            transform: TransformFn | None = TRANSFORMS.get(model)
            if transform:
                fixture = transform(fixture)
                stats[model] += 1

            if not first:
                outfile.write(",\n")
            first = False

            json.dump(fixture, outfile, ensure_ascii=False)
            progress.advance(task, 1)

        outfile.write("\n]\n")

    end_time: float = time.perf_counter()
    duration: float = end_time - start_time

    # Final Statistics Table
    console.print("\n[bold green]Processing Complete[/bold green]")

    table = Table(show_header=True, header_style="bold magenta")
    table.add_column("Metric", style="dim")
    table.add_column("Value", justify="right")

    table.add_row("Total Time", f"{duration:.2f} seconds")
    table.add_row("Total Processed", f"{total_processed:,} rows")
    table.add_row(
        "Processing Speed",
        f"{total_processed / duration:.0f} rows/sec" if duration > 0 else "N/A",
    )

    for model, count in stats.items():
        table.add_row(f"Transformed: {model}", f"{count:,}")

    console.print(table)


if __name__ == "__main__":
    app()
61 src/paperless_migration/scripts/wipe_db.py Normal file
@@ -0,0 +1,61 @@
import django
from django.apps import apps
from django.db import connection
from django.db.migrations.recorder import MigrationRecorder


def _target_tables() -> list[str]:
    tables = {
        model._meta.db_table for model in apps.get_models(include_auto_created=True)
    }
    tables.add(MigrationRecorder.Migration._meta.db_table)
    existing = set(connection.introspection.table_names())
    return sorted(tables & existing)


def _drop_sqlite_tables() -> None:
    tables = _target_tables()
    with connection.cursor() as cursor:
        cursor.execute("PRAGMA foreign_keys=OFF;")
        for table in tables:
            cursor.execute(f'DROP TABLE IF EXISTS "{table}";')
        cursor.execute("PRAGMA foreign_keys=ON;")


def _drop_postgres_tables() -> None:
    tables = _target_tables()
    if not tables:
        return
    with connection.cursor() as cursor:
        for table in tables:
            cursor.execute(f'DROP TABLE IF EXISTS "{table}" CASCADE;')


def _drop_mysql_tables() -> None:
    tables = _target_tables()
    with connection.cursor() as cursor:
        cursor.execute("SET FOREIGN_KEY_CHECKS=0;")
        for table in tables:
            cursor.execute(f"DROP TABLE IF EXISTS `{table}`;")
        cursor.execute("SET FOREIGN_KEY_CHECKS=1;")


def main() -> None:
    django.setup()
    vendor = connection.vendor
    print(f"Wiping database for {vendor}...")  # noqa: T201

    if vendor == "sqlite":
        _drop_sqlite_tables()
    elif vendor == "postgresql":
        _drop_postgres_tables()
    elif vendor == "mysql":
        _drop_mysql_tables()
    else:
        raise SystemExit(f"Unsupported database vendor: {vendor}")

    print("Database wipe complete.")  # noqa: T201


if __name__ == "__main__":
    main()
217 src/paperless_migration/settings.py Normal file
@@ -0,0 +1,217 @@
"""Settings for migration-mode Django instance."""

from __future__ import annotations

import logging
import os
import secrets
from pathlib import Path
from typing import Any

from dotenv import load_dotenv

BASE_DIR = Path(__file__).resolve().parent.parent

DEBUG = os.getenv("PAPERLESS_DEBUG", "false").lower() == "true"

ALLOWED_HOSTS = ["*"]

# Tap paperless.conf if it's available
for path in [
    os.getenv("PAPERLESS_CONFIGURATION_PATH"),
    "../paperless.conf",
    "/etc/paperless.conf",
    "/usr/local/etc/paperless.conf",
]:
    if path and Path(path).exists():
        load_dotenv(path)
        break


def __get_path(
    key: str,
    default: str | Path,
) -> Path:
    if key in os.environ:
        return Path(os.environ[key]).resolve()
    return Path(default).resolve()


DATA_DIR = __get_path("PAPERLESS_DATA_DIR", BASE_DIR.parent / "data")
EXPORT_DIR = __get_path("PAPERLESS_EXPORT_DIR", BASE_DIR.parent / "export")


def _parse_db_settings() -> dict[str, dict[str, Any]]:
    databases: dict[str, dict[str, Any]] = {
        "default": {
            "ENGINE": "django.db.backends.sqlite3",
            "NAME": DATA_DIR / "db.sqlite3",
            "OPTIONS": {},
        },
    }
    if os.getenv("PAPERLESS_DBHOST"):
        databases["sqlite"] = databases["default"].copy()
        databases["default"] = {
            "HOST": os.getenv("PAPERLESS_DBHOST"),
            "NAME": os.getenv("PAPERLESS_DBNAME", "paperless"),
            "USER": os.getenv("PAPERLESS_DBUSER", "paperless"),
            "PASSWORD": os.getenv("PAPERLESS_DBPASS", "paperless"),
            "OPTIONS": {},
        }
        if os.getenv("PAPERLESS_DBPORT"):
            databases["default"]["PORT"] = os.getenv("PAPERLESS_DBPORT")

        if os.getenv("PAPERLESS_DBENGINE") == "mariadb":
            engine = "django.db.backends.mysql"
            options = {
                "read_default_file": "/etc/mysql/my.cnf",
                "charset": "utf8mb4",
                "ssl_mode": os.getenv("PAPERLESS_DBSSLMODE", "PREFERRED"),
                "ssl": {
                    "ca": os.getenv("PAPERLESS_DBSSLROOTCERT"),
                    "cert": os.getenv("PAPERLESS_DBSSLCERT"),
                    "key": os.getenv("PAPERLESS_DBSSLKEY"),
                },
            }
        else:
            engine = "django.db.backends.postgresql"
            options = {
                "sslmode": os.getenv("PAPERLESS_DBSSLMODE", "prefer"),
                "sslrootcert": os.getenv("PAPERLESS_DBSSLROOTCERT"),
                "sslcert": os.getenv("PAPERLESS_DBSSLCERT"),
                "sslkey": os.getenv("PAPERLESS_DBSSLKEY"),
            }

        databases["default"]["ENGINE"] = engine
        databases["default"]["OPTIONS"].update(options)

    if os.getenv("PAPERLESS_DB_TIMEOUT") is not None:
        timeout = int(os.getenv("PAPERLESS_DB_TIMEOUT"))
        if databases["default"]["ENGINE"] == "django.db.backends.sqlite3":
            databases["default"]["OPTIONS"].update({"timeout": timeout})
        else:
            databases["default"]["OPTIONS"].update({"connect_timeout": timeout})
            databases["sqlite"]["OPTIONS"].update({"timeout": timeout})
    return databases


DATABASES = _parse_db_settings()

SECRET_KEY = os.getenv(
    "PAPERLESS_SECRET_KEY",
)

AUTH_PASSWORD_VALIDATORS = [
    {
        "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
    },
]

LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_TZ = True
CSRF_TRUSTED_ORIGINS: list[str] = []

INSTALLED_APPS = [
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    "allauth",
    "allauth.account",
    "allauth.socialaccount",
    "allauth.mfa",
    "paperless_migration",
]

MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "django.middleware.clickjacking.XFrameOptionsMiddleware",
    "allauth.account.middleware.AccountMiddleware",
]

ROOT_URLCONF = "paperless_migration.urls"

TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [
            BASE_DIR / "paperless_migration" / "templates",
            BASE_DIR / "documents" / "templates",
        ],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
            ],
        },
    },
]

WSGI_APPLICATION = "paperless_migration.wsgi.application"

AUTHENTICATION_BACKENDS = [
    "django.contrib.auth.backends.ModelBackend",
    "allauth.account.auth_backends.AuthenticationBackend",
]

STATIC_URL = "/static/"
STATICFILES_DIRS = [
    BASE_DIR / ".." / "static",
    BASE_DIR / "static",
    BASE_DIR / "documents" / "static",
]

DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"

LOGIN_URL = "/accounts/login/"
LOGIN_REDIRECT_URL = "/migration/"
LOGOUT_REDIRECT_URL = "/accounts/login/?loggedout=1"

ACCOUNT_ADAPTER = "allauth.account.adapter.DefaultAccountAdapter"
ACCOUNT_AUTHENTICATED_LOGIN_REDIRECTS = False
SOCIALACCOUNT_ADAPTER = "allauth.socialaccount.adapter.DefaultSocialAccountAdapter"
SOCIALACCOUNT_ENABLED = False

SESSION_ENGINE = "django.contrib.sessions.backends.db"

MIGRATION_EXPORT_PATH = __get_path(
    "PAPERLESS_MIGRATION_EXPORT_PATH",
    EXPORT_DIR / "manifest.json",
)
MIGRATION_TRANSFORMED_PATH = __get_path(
    "PAPERLESS_MIGRATION_TRANSFORMED_PATH",
    EXPORT_DIR / "manifest.v3.json",
)
MIGRATION_IMPORTED_PATH = Path(EXPORT_DIR / "import.completed").resolve()

# One-time access code required for migration logins; stable across autoreload
_code = os.getenv("PAPERLESS_MIGRATION_ACCESS_CODE")
if not _code:
    _code = secrets.token_urlsafe(12)
    os.environ["PAPERLESS_MIGRATION_ACCESS_CODE"] = _code
MIGRATION_ACCESS_CODE = _code
if os.environ.get("PAPERLESS_MIGRATION_CODE_LOGGED") != "1":
    logging.getLogger(__name__).warning(
        "Migration one-time access code: %s",
        MIGRATION_ACCESS_CODE,
    )
    os.environ["PAPERLESS_MIGRATION_CODE_LOGGED"] = "1"
77 src/paperless_migration/templates/account/login.html Normal file
@@ -0,0 +1,77 @@
{% load i18n static %}
<!doctype html>
<html lang="en">
<head>
  <meta charset="utf-8">
  <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
  <meta name="author" content="Paperless-ngx project and contributors">
  <meta name="robots" content="noindex,nofollow">
  <meta name="color-scheme" content="light">
  <title>{% translate "Paperless-ngx sign in" %}</title>
  <link href="{% static 'bootstrap.min.css' %}" rel="stylesheet">
  <link href="{% static 'base.css' %}" rel="stylesheet">
  <style>
    :root, body, .form-control, .form-floating {
      color-scheme: light;
      --bs-body-bg: #f5f5f5;
      --bs-body-color: #212529;
      --bs-body-color-rgb: 33, 37, 41;
      --bs-border-color: #dee2e6;
      --bs-link-color: #17541f;
      --bs-link-color-rgb: 23, 84, 31;
    }
    @media (prefers-color-scheme: dark) { :root { color-scheme: light; } }
    body {
      min-height: 100vh;
      background:
        radial-gradient(circle at 20% 20%, #eef5ef, #f7fbf7),
        linear-gradient(120deg, rgba(23, 84, 31, 0.05) 0%, rgba(0,0,0,0) 30%),
        linear-gradient(300deg, rgba(15, 54, 20, 0.06) 0%, rgba(0,0,0,0) 40%);
    }
  </style>
</head>
<body class="d-flex align-items-center justify-content-center text-center p-3">
  <main class="w-100" style="max-width: 360px;">
    <form class="form-accounts p-4 rounded-4" id="form-account" method="post">
      {% csrf_token %}
      {% include "paperless-ngx/snippets/svg_logo.html" with extra_attrs="width='240' class='logo mb-3'" %}
      <p class="text-uppercase fw-semibold mb-1 text-secondary small" style="letter-spacing: 0.12rem;">{% translate "Migration Mode" %}</p>

      {% for message in messages %}
        <div class="alert alert-{{ message.level_tag }} mb-2" role="alert">{{ message }}</div>
      {% endfor %}

      <p class="mb-3">{% translate "Login with a superuser account to proceed." %}</p>

      {% if form.errors %}
        <div class="alert alert-danger" role="alert">
          {% for field, errors in form.errors.items %}
            {% for error in errors %}
              {{ error }}
            {% endfor %}
          {% endfor %}
        </div>
      {% endif %}

      {% translate "Username" as i18n_username %}
      {% translate "Password" as i18n_password %}
      <div class="form-floating form-stacked-top">
        <input type="text" name="login" id="inputUsername" placeholder="{{ i18n_username }}" class="form-control" autocorrect="off" autocapitalize="none" required autofocus>
        <label for="inputUsername">{{ i18n_username }}</label>
      </div>
      <div class="form-floating form-stacked-middle">
        <input type="password" name="password" id="inputPassword" placeholder="{{ i18n_password }}" class="form-control" required>
        <label for="inputPassword">{{ i18n_password }}</label>
      </div>
      <div class="form-floating form-stacked-bottom">
        <input type="text" name="code" id="inputCode" placeholder="One-time code" class="form-control" required>
        <label for="inputCode">One-time code</label>
      </div>
      <p class="mt-2 small fst-italic">{% translate "Code can be found in the startup logs." %}</p>
      <div class="d-grid mt-3">
        <button class="btn btn-lg btn-primary" type="submit">{% translate "Sign in" %}</button>
      </div>
    </form>
  </main>
</body>
</html>
src/paperless_migration/templates/paperless_migration/migration_home.html
@@ -0,0 +1,318 @@
<!doctype html>
{% load static %}
<html lang="en">
<head>
  <meta charset="utf-8" />
  <meta name="viewport" content="width=device-width, initial-scale=1" />
  <title>Paperless-ngx Migration Mode</title>
  <link rel="stylesheet" href="{% static 'bootstrap.min.css' %}" />
  <link rel="stylesheet" href="{% static 'base.css' %}" />
  <style>
    :root, .form-control {
      color-scheme: light;
      --bs-body-bg: #f5f5f5;
      --bs-body-color: #212529;
      --bs-body-color-rgb: 33, 37, 41;
      --bs-border-color: #dee2e6;
      --bs-link-color: var(--pngx-primary);
      --bs-link-color-rgb: 23, 84, 31;
    }
    @media (prefers-color-scheme: dark) { :root { color-scheme: light; } }

    .btn-primary:disabled {
      --bs-btn-disabled-bg: #4d7352;
      --bs-btn-disabled-border-color: #4d7352;
    }

    body {
      background:
        radial-gradient(circle at 20% 20%, #eef5ef, #f7fbf7),
        linear-gradient(120deg, rgba(23, 84, 31, 0.05) 0%, rgba(0,0,0,0) 30%),
        linear-gradient(300deg, rgba(15, 54, 20, 0.06) 0%, rgba(0,0,0,0) 40%);
      min-height: 100vh;
    }

    svg.logo .text {
      fill: #161616 !important;
    }

    .hero-card,
    .card-step {
      background: #fff;
      backdrop-filter: blur(6px);
      border: 1px solid rgba(23, 84, 31, 0.08);
      box-shadow: 0 16px 40px rgba(0, 0, 0, 0.06);
      border-radius: 18px;
    }

    .status-dot {
      width: 10px;
      height: 10px;
      border-radius: 50%;
      display: inline-block;
    }

    .card-step {
      border-radius: 16px;
      transition: transform 0.15s ease, box-shadow 0.15s ease;
    }

    .card-step.done-step {
      opacity: 0.4;
    }

    .path-pill {
      background: rgba(23, 84, 31, 0.08);
      color: var(--bs-body-color);
      border-radius: 12px;
      padding: 0.4rem 0.75rem;
      font-size: 0.9rem;
    }

    .step-rail {
      position: relative;
      height: 4px;
      background: rgba(23, 84, 31, 0.12);
      border-radius: 999px;
    }

    .step-rail .fill {
      position: absolute;
      left: 0;
      top: 0;
      bottom: 0;
      width: calc({{ export_exists|yesno:'33,0' }}% + {{ transformed_exists|yesno:'33,0' }}% + {{ imported_exists|yesno:'34,0' }}%);
      max-width: 100%;
      background: linear-gradient(90deg, #17541f, #2c7a3c);
      border-radius: 999px;
      transition: width 0.3s ease;
    }

    .step-chip {
      width: 38px;
      height: 38px;
      border-radius: 50%;
      display: grid;
      place-items: center;
      font-weight: 700;
      background: #fff;
      border: 2px solid rgba(23, 84, 31, 0.25);
      color: #17541f;
      box-shadow: 0 4px 12px rgba(0, 0, 0, 0.08);
    }

    .step-chip.done {
      background: #17541f;
      color: #fff;
      border-color: #17541f;
    }
  </style>
</head>
<body class="pb-4">
  <div class="container py-4">
    <div class="row justify-content-center mb-4">
      <div class="col-lg-9">
        <div class="hero-card p-4">
          <div class="d-flex flex-wrap align-items-center justify-content-between gap-3">
            <div class="d-flex align-items-center gap-3">
              {% include "paperless-ngx/snippets/svg_logo.html" with extra_attrs="width='280' class='logo'" %}
              <div class="ps-2">
                <p class="text-uppercase fw-semibold mb-1 text-secondary" style="letter-spacing: 0.12rem;">Migration Mode</p>
                <h1 class="h3 mb-2 text-primary">Paperless-ngx v2 → v3</h1>
                <p class="text-muted mb-0">Migrate your data from Paperless-ngx version 2 to version 3.</p>
              </div>
            </div>
            <div class="text-end">
              <span class="badge bg-success-subtle text-success border border-success-subtle px-3 py-2">Online</span>
            </div>
          </div>
          <div class="mt-4">
            <div class="d-flex justify-content-between align-items-center mb-2">
              <div class="d-flex align-items-center gap-2">
                <span class="step-chip {% if export_exists %}done{% endif %}">1</span>
                <div>
                  <div class="fw-semibold mb-0">Export</div>
                  <small class="text-muted">v2 data</small>
                </div>
              </div>
              <div class="d-flex align-items-center gap-2">
                <span class="step-chip {% if transformed_exists %}done{% endif %}">2</span>
                <div>
                  <div class="fw-semibold mb-0">Transform</div>
                  <small class="text-muted">to v3 schema</small>
                </div>
              </div>
              <div class="d-flex align-items-center gap-2">
                <span class="step-chip {% if imported_exists %}done{% endif %}">3</span>
                <div>
                  <div class="fw-semibold mb-0">Import</div>
                  <small class="text-muted">into v3</small>
                </div>
              </div>
            </div>
            <div class="step-rail">
              <div class="fill"></div>
            </div>
          </div>
          {% if messages %}
            <div class="mt-4">
              {% for message in messages %}
                <div class="alert alert-{{ message.level_tag }} mb-2" role="alert">{{ message }}</div>
              {% endfor %}
            </div>
          {% endif %}
          <div class="row g-3 mt-2">
            <div class="col-md-6">
              <div class="d-flex align-items-center gap-2">
                <span class="status-dot bg-{{ export_exists|yesno:'success,danger' }}"></span>
                <div>
                  <div class="fw-semibold">Export file</div>
                  <div class="small text-muted">{{ export_exists|yesno:"Ready,Missing" }}</div>
                </div>
              </div>
              <div class="path-pill mt-2 text-truncate" title="{{ export_path }}">{{ export_path }}</div>
            </div>
            <div class="col-md-6">
              <div class="d-flex align-items-center gap-2">
                <span class="status-dot bg-{{ transformed_exists|yesno:'success,warning' }}"></span>
                <div>
                  <div class="fw-semibold">Transformed file</div>
                  <div class="small text-muted">{{ transformed_exists|yesno:"Ready,Pending" }}</div>
                </div>
              </div>
              <div class="path-pill mt-2 text-truncate" title="{{ transformed_path }}">{{ transformed_path }}</div>
            </div>
          </div>
        </div>
      </div>
    </div>

    <div class="row gy-4 justify-content-center">
      <div class="col-lg-3 col-md-4">
        <div class="card card-step h-100 {% if export_exists %}done-step{% endif %}">
          <div class="card-body d-flex flex-column gap-3">
            <div>
              <p class="text-uppercase text-muted mb-1 fw-semibold" style="letter-spacing: 0.08rem;">Step 1</p>
              <h3 class="h5 mb-1">Export (v2)</h3>
              <p class="small text-muted mb-0">Generate and upload the v2 export file.</p>
            </div>
            <div class="mt-auto d-grid gap-2">
              <form method="post" enctype="multipart/form-data" class="d-flex gap-2 align-items-center">
                {% csrf_token %}
                <input class="form-control form-control-sm" type="file" name="export_file" accept=".json" {% if export_exists %}disabled{% endif %} required>
                <button class="btn btn-outline-secondary btn-sm" type="submit" name="action" value="upload" {% if export_exists %}disabled aria-disabled="true"{% endif %}>Upload</button>
              </form>
              <form method="post">
                {% csrf_token %}
                <button class="btn btn-primary w-100" type="submit" name="action" value="check" {% if export_exists %}disabled aria-disabled="true"{% endif %}>Re-check export</button>
              </form>
            </div>
          </div>
        </div>
      </div>

      <div class="col-lg-3 col-md-4">
        <div class="card card-step h-100 {% if transformed_exists %}done-step{% endif %}">
          <div class="card-body d-flex flex-column gap-3">
            <div>
              <p class="text-uppercase text-muted mb-1 fw-semibold" style="letter-spacing: 0.08rem;">Step 2</p>
              <h3 class="h5 mb-1">Transform</h3>
              <p class="small text-muted mb-0">Convert the export into the v3-ready structure.</p>
            </div>
            <div class="mt-auto">
              <form method="post">
                {% csrf_token %}
                <button
                  class="btn btn-outline-primary w-100"
                  type="submit"
                  name="action"
                  value="transform"
                  {% if not export_exists or transformed_exists %}disabled aria-disabled="true"{% endif %}
                >
                  Transform export
                </button>
              </form>
            </div>
          </div>
        </div>
      </div>

      <div class="col-lg-3 col-md-4">
        <div class="card card-step h-100 {% if imported_exists %}done-step{% endif %}">
          <div class="card-body d-flex flex-column gap-3">
            <div>
              <p class="text-uppercase text-muted mb-1 fw-semibold" style="letter-spacing: 0.08rem;">Step 3</p>
              <h3 class="h5 mb-1">Import (v3)</h3>
              <p class="small text-muted mb-0">Load the transformed data into your v3 instance.</p>
            </div>
            <div class="mt-auto">
              <form method="post">
                {% csrf_token %}
                <button
                  class="btn btn-outline-secondary w-100"
                  type="submit"
                  name="action"
                  value="import"
                  {% if not transformed_exists or imported_exists %}disabled aria-disabled="true"{% endif %}
                >
                  Import transformed data
                </button>
              </form>
            </div>
          </div>
        </div>
      </div>
    </div>

    <div class="row justify-content-center mt-4">
      <div class="col-lg-9">
        {% if not export_exists %}
          <div class="alert alert-info mb-3">
            <div class="fw-semibold mb-1">Export file not found</div>
            <div class="small">
              Run the v2 export from your Paperless instance, e.g.:
              <code>docker run --rm ghcr.io/paperless-ngx/paperless-ngx:2.20.6 document_exporter --data-only</code>
              (see <a href="https://docs.paperless-ngx.com/administration/#exporter" target="_blank" rel="noopener noreferrer">documentation</a>). Once the <code>manifest.json</code> is in place, upload it or (especially for larger files) place it directly at the expected location and click “Re-check export”.
              <p class="mt-2 mb-0 text-danger fst-italic">⚠️ The export must be generated with Paperless-ngx v2.20.6</p>
            </div>
          </div>
        {% endif %}
        <div class="card card-step">
          <div class="card-body">
            <div class="d-flex justify-content-between align-items-center mb-2">
              <div class="fw-semibold">Migration console</div>
              <span class="badge bg-secondary-subtle text-secondary border border-secondary-subtle">Live output</span>
            </div>
            <pre id="migration-log" class="mb-0" style="background:#0f1a12;color:#d1e7d6;border-radius:12px;min-height:180px;padding:12px;font-size:0.9rem;overflow:auto;">Ready</pre>
          </div>
        </div>
      </div>
    </div>
  </div>
  {% if stream_action %}
    <script>
      (() => {
        const logEl = document.getElementById('migration-log');
        if (!logEl) return;
        const streamUrl = "{% if stream_action == 'import' %}{% url 'import_stream' %}{% else %}{% url 'transform_stream' %}{% endif %}";
        const donePrefix = "{{ stream_action|capfirst }} finished";
        const evt = new EventSource(streamUrl);
        const append = (line) => {
          logEl.textContent += `\n${line}`;
          logEl.scrollTop = logEl.scrollHeight;
        };
        evt.onmessage = (e) => {
          append(e.data);
          if (e.data.startsWith(donePrefix)) {
            setTimeout(() => window.location.reload(), 500);
          }
        };
        evt.onerror = () => {
          append('[connection closed]');
          evt.close();
        };
      })();
    </script>
  {% endif %}
</body>
</html>
21 src/paperless_migration/urls.py Normal file
@@ -0,0 +1,21 @@
from django.conf import settings
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.urls import include
from django.urls import path

from paperless_migration import views

urlpatterns = [
    path("accounts/login/", views.migration_login, name="account_login"),
    path("accounts/", include("allauth.urls")),
    path("migration/", views.migration_home, name="migration_home"),
    path("migration/transform/stream", views.transform_stream, name="transform_stream"),
    path("migration/import/stream", views.import_stream, name="import_stream"),
    # redirect root to migration home
    path("", views.migration_home, name="migration_home"),
]

if settings.DEBUG:
    urlpatterns += staticfiles_urlpatterns()
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
269 src/paperless_migration/views.py Normal file
@@ -0,0 +1,269 @@
import os
import shutil
import subprocess
import sys
import tempfile
from pathlib import Path

from django.contrib import messages
from django.contrib.auth import authenticate
from django.contrib.auth import login
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseForbidden
from django.http import StreamingHttpResponse
from django.shortcuts import redirect
from django.shortcuts import render
from django.views.decorators.http import require_http_methods

from paperless_migration import settings


@login_required
@require_http_methods(["GET", "POST"])
def migration_home(request):
    if not request.session.get("migration_code_ok"):
        return HttpResponseForbidden("Access code required")
    if not request.user.is_superuser:
        return HttpResponseForbidden("Superuser access required")

    export_path = Path(settings.MIGRATION_EXPORT_PATH)
    transformed_path = Path(settings.MIGRATION_TRANSFORMED_PATH)
    imported_marker = Path(settings.MIGRATION_IMPORTED_PATH)

    if request.method == "POST":
        action = request.POST.get("action")
        if action == "check":
            messages.success(request, "Checked export paths.")
        elif action == "transform":
            messages.info(request, "Starting transform… live output below.")
            request.session["start_stream_action"] = "transform"
            if imported_marker.exists():
                imported_marker.unlink()
        elif action == "upload":
            upload = request.FILES.get("export_file")
            if not upload:
                messages.error(request, "No file selected.")
            else:
                try:
                    export_path.parent.mkdir(parents=True, exist_ok=True)
                    with export_path.open("wb") as dest:
                        for chunk in upload.chunks():
                            dest.write(chunk)
                    messages.success(request, f"Uploaded to {export_path}.")
                except Exception as exc:
                    messages.error(request, f"Failed to save file: {exc}")
        elif action == "import":
            messages.info(request, "Starting import… live output below.")
            request.session["start_stream_action"] = "import"
        else:
            messages.error(request, "Unknown action.")
        return redirect("migration_home")

    stream_action = request.session.pop("start_stream_action", None)
    context = {
        "export_path": export_path,
        "export_exists": export_path.exists(),
        "transformed_path": transformed_path,
        "transformed_exists": transformed_path.exists(),
        "imported_exists": imported_marker.exists(),
        "stream_action": stream_action,
    }
    return render(request, "paperless_migration/migration_home.html", context)


@require_http_methods(["GET", "POST"])
def migration_login(request):
    if request.method == "POST":
        username = request.POST.get("login", "")
        password = request.POST.get("password", "")
        code = request.POST.get("code", "")

        if not code or code != settings.MIGRATION_ACCESS_CODE:
            messages.error(request, "One-time code is required.")
            return redirect("account_login")

        user = authenticate(request, username=username, password=password)
        if user is None:
            messages.error(request, "Invalid username or password.")
            return redirect("account_login")

        if not user.is_superuser:
            messages.error(request, "Superuser access required.")
            return redirect("account_login")

        login(request, user)
        request.session["migration_code_ok"] = True
        return redirect(settings.LOGIN_REDIRECT_URL)

    return render(request, "account/login.html")


@login_required
@require_http_methods(["GET"])
def transform_stream(request):
    if not request.session.get("migration_code_ok"):
        return HttpResponseForbidden("Access code required")
    if not request.user.is_superuser:
        return HttpResponseForbidden("Superuser access required")

    input_path = Path(settings.MIGRATION_EXPORT_PATH)
    output_path = Path(settings.MIGRATION_TRANSFORMED_PATH)

    cmd = [
        sys.executable,
        "-m",
        "paperless_migration.scripts.transform",
        "--input",
        str(input_path),
        "--output",
        str(output_path),
    ]

    def event_stream():
        process = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            bufsize=1,
            text=True,
        )
        try:
            yield "data: Starting transform...\n\n"
            if process.stdout:
                for line in process.stdout:
                    yield f"data: {line.rstrip()}\n\n"
            process.wait()
            yield f"data: Transform finished with code {process.returncode}\n\n"
        finally:
            if process and process.poll() is None:
                process.kill()

    return StreamingHttpResponse(
        event_stream(),
        content_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "X-Accel-Buffering": "no",
        },
    )


@login_required
@require_http_methods(["GET"])
def import_stream(request):
    if not request.session.get("migration_code_ok"):
        return HttpResponseForbidden("Access code required")
    if not request.user.is_superuser:
        return HttpResponseForbidden("Superuser access required")

    export_path = Path(settings.MIGRATION_EXPORT_PATH)
    transformed_path = Path(settings.MIGRATION_TRANSFORMED_PATH)
    imported_marker = Path(settings.MIGRATION_IMPORTED_PATH)
    manage_path = Path(settings.BASE_DIR) / "manage.py"
    source_dir = export_path.parent

    env = os.environ.copy()
    env["DJANGO_SETTINGS_MODULE"] = "paperless.settings"
    env["PAPERLESS_MIGRATION_MODE"] = "0"

    def event_stream():
        if not export_path.exists():
            yield "data: Missing export manifest.json; upload or re-check export.\n\n"
            return
        if not transformed_path.exists():
            yield "data: Missing transformed manifest.v3.json; run transform first.\n\n"
            return

        backup_path: Path | None = None
        try:
            backup_fd, backup_name = tempfile.mkstemp(
                prefix="manifest.v2.",
                suffix=".json",
                dir=source_dir,
            )
            os.close(backup_fd)
            backup_path = Path(backup_name)
            shutil.copy2(export_path, backup_path)
            shutil.copy2(transformed_path, export_path)
        except Exception as exc:
            yield f"data: Failed to prepare import manifest: {exc}\n\n"
            return

        def run_cmd(args, label):
            yield f"data: {label}\n\n"
            process = subprocess.Popen(
                args,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                bufsize=1,
                text=True,
                env=env,
            )
            try:
                if process.stdout:
                    for line in process.stdout:
                        yield f"data: {line.rstrip()}\n\n"
                process.wait()
                return process.returncode
            finally:
                if process and process.poll() is None:
                    process.kill()

        wipe_cmd = [
            sys.executable,
            "-m",
            "paperless_migration.scripts.wipe_db",
        ]
        migrate_cmd = [
            sys.executable,
            str(manage_path),
            "migrate",
            "--noinput",
        ]
        import_cmd = [
            sys.executable,
            str(manage_path),
            "document_importer",
            str(source_dir),
            "--data-only",
        ]
        try:
            wipe_code = yield from run_cmd(
                wipe_cmd,
                "Wiping database...",
            )
            if wipe_code != 0:
                yield f"data: Wipe finished with code {wipe_code}\n\n"
                return

            migrate_code = yield from run_cmd(
                migrate_cmd,
                "Running migrations...",
            )
            if migrate_code != 0:
                yield f"data: Migrate finished with code {migrate_code}\n\n"
                return

            import_code = yield from run_cmd(
                import_cmd,
                "Starting import...",
            )
            if import_code == 0:
                imported_marker.parent.mkdir(parents=True, exist_ok=True)
                imported_marker.write_text("ok\n", encoding="utf-8")
            yield f"data: Import finished with code {import_code}\n\n"
        finally:
            if backup_path and backup_path.exists():
                try:
                    shutil.move(backup_path, export_path)
                except Exception:
                    pass

    return StreamingHttpResponse(
        event_stream(),
        content_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "X-Accel-Buffering": "no",
        },
    )
7 src/paperless_migration/wsgi.py Normal file
@@ -0,0 +1,7 @@
import os

from django.core.wsgi import get_wsgi_application

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless_migration.settings")

application = get_wsgi_application()
Block a user