Mirror of https://github.com/paperless-ngx/paperless-ngx.git, synced 2025-08-03 18:54:40 -05:00.
Compare commits
160 Commits
Commits in this compare (SHA1):

16fc7ebecc, 454264a87f, 7ecb76dddc, 64eabbe8d0, 197938eaab, 02a40055f5, 72bacc016a, aeecc10e45,
2b3edbaa46, 270f8677a7, 447edd1355, 024921212a, 5d08a34365, 20763e7c26, b33ba4c902, fae5e834b9,
4cb4bd13ad, 896304ccaa, 9ae186e6f9, c7690c05f5, 7273a8c7a5, 4195d5746f, 8b90b51b1a, e74af5c73c,
99c2442b28, 3c2df48a1a, a0c1c48dca, 4e05aba0a5, 299a69a2de, 7bc077ac08, 64752f6b57, c2880bcf9a,
159dcdbda5, 1838fa971e, d8d111f093, 31a03b1d30, 5004771d79, 92b9fc1ba9, 585cc24dd5, f261c70f1e,
8c9dfa449c, d94ca2962e, 3c7eacf923, 643486b14b, 87045da1e2, a109723ada, 151573a26e, 284e0d3f60,
7048af276a, e6cd3c1970, 623ac441d5, 003201bc1b, 1bf6d9165f, 4b49bd9de8, 69f82d503a, 6c7ff54aad,
0b53a8981c, c4dbd58efd, 959f80604a, dee691b72b, a4829ce26a, 7ed4dedd5e, 93d272f50b, 6fe5674ac3,
6024a862d6, 195f3a5dbf, 94f0808a2f, e3f062b981, 22142203ce, 412d9f5cd2, 133532a463, c9683808c9,
b25f083687, 62ba4b9730, 150c7f26a5, 4b4111ec03, 9e33344808, bba1fc7194, efaa1c4dd7, a88b318d7d,
2460c3e076, 9763b72f81, 19ab62c06c, eb8f37d846, 5c9e2d7070, da9f2b1a8c, 985f298c46, 2bb63b2d02,
ac75c61c8c, f8f0915a32, 7b87511e88, bb05c2218f, e96e8472d9, 3191c15889, d4af7aa411, 7b3719101a,
4def3bf5c2, 3daee46c3d, fbebd8d7c0, af5cb35531, 61a2dca81f, 4aa8e9b800, b81fe1695d, 3625e5080c,
c21775980f, 33e597f5bb, 6fa2ca648a, adecf5d927, e69d7d804b, a0eecb83cf, 9955315a10, ee7097b497,
387c23d27a, 359593728e, 9708832ccd, aa2ae8fe4c, 729845662f, 6ff28c92a4, d19bf59f47, a340b9c8a1,
d7939ca958, 00d67d53bf, b869ad02a1, 91d4941438, 5746e8b56d, 8e83f90952, 80910c72cf, ca4ece3ccd,
ac6c0484ed, 1e4923835b, 7be9ae9c02, da38efebdf, 0fd51e35e1, 59e0c1fe4e, cfe9528884, 1b45637e9c,
76acf2b01d, eda2bd2dbd, 6819decec3, c2220aa1ef, 0d87e529f3, 24ce1830eb, dfed4176ed, be8615741e,
fd1f6aa960, 067a6107f5, 62782be08e, 428fe4a372, 91e3302e54, 906d5d0bab, 06c62abfbd, 31e4a0a88b,
cf82cb35c9, 53fff1d54a, 60cf260b71, b9d1499d04, 3fe68d7bbe, 2eeb02638b, d8e02c6fa0, 3c8d7f2dee
88 .github/workflows/ci.yml (vendored)
@@ -13,6 +13,14 @@ on:
branches-ignore:
- 'translations**'

env:
# This is the version of pipenv all the steps will use
# If changing this, change Dockerfile
DEFAULT_PIP_ENV_VERSION: "2022.11.30"
# This is the default version of Python to use in most steps
# If changing this, change Dockerfile
DEFAULT_PYTHON_VERSION: "3.9"

jobs:
pre-commit:
name: Linting Checks

@@ -21,13 +29,11 @@ jobs:
-
name: Checkout repository
uses: actions/checkout@v3

-
name: Install tools
name: Install python
uses: actions/setup-python@v4
with:
python-version: "3.9"
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
-
name: Check files
uses: pre-commit/action@v3.0.0

@@ -41,29 +47,30 @@ jobs:
-
name: Checkout
uses: actions/checkout@v3
-
name: Install pipenv
run: |
pipx install pipenv==2022.11.30
-
name: Set up Python
id: setup-python
uses: actions/setup-python@v4
with:
python-version: 3.8
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
cache: "pipenv"
cache-dependency-path: 'Pipfile.lock'
-
name: Install pipenv
run: |
pip install --user pipenv==${DEFAULT_PIP_ENV_VERSION}
-
name: Install dependencies
run: |
pipenv sync --dev
pipenv --python ${{ steps.setup-python.outputs.python-version }} sync --dev
-
name: List installed Python dependencies
run: |
pipenv run pip list
pipenv --python ${{ steps.setup-python.outputs.python-version }} run pip list
-
name: Make documentation
run: |
pipenv run mkdocs build --config-file ./mkdocs.yml
pipenv --python ${{ steps.setup-python.outputs.python-version }} run mkdocs build --config-file ./mkdocs.yml
-
name: Upload artifact
uses: actions/upload-artifact@v3

@@ -121,17 +128,18 @@ jobs:
run: |
docker compose --file ${GITHUB_WORKSPACE}/docker/compose/docker-compose.ci-test.yml pull --quiet
docker compose --file ${GITHUB_WORKSPACE}/docker/compose/docker-compose.ci-test.yml up --detach
-
name: Install pipenv
run: |
pipx install pipenv==2022.11.30
-
name: Set up Python
id: setup-python
uses: actions/setup-python@v4
with:
python-version: "${{ matrix.python-version }}"
cache: "pipenv"
cache-dependency-path: 'Pipfile.lock'
-
name: Install pipenv
run: |
pip install --user pipenv==${DEFAULT_PIP_ENV_VERSION}
-
name: Install system dependencies
run: |

@@ -140,20 +148,21 @@ jobs:
-
name: Install Python dependencies
run: |
pipenv sync --dev
pipenv --python ${{ steps.setup-python.outputs.python-version }} run python --version
pipenv --python ${{ steps.setup-python.outputs.python-version }} sync --dev
-
name: List installed Python dependencies
run: |
pipenv run pip list
pipenv --python ${{ steps.setup-python.outputs.python-version }} run pip list
-
name: Tests
run: |
cd src/
pipenv run pytest -rfEp
pipenv --python ${{ steps.setup-python.outputs.python-version }} run pytest -ra
-
name: Get changed files
id: changed-files-specific
uses: tj-actions/changed-files@v34
uses: tj-actions/changed-files@v35
with:
files: |
src/**

@@ -165,13 +174,13 @@ jobs:
done
-
name: Publish coverage results
if: matrix.python-version == '3.9' && steps.changed-files-specific.outputs.any_changed == 'true'
if: matrix.python-version == ${{ env.DEFAULT_PYTHON_VERSION }} && steps.changed-files-specific.outputs.any_changed == 'true'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# https://github.com/coveralls-clients/coveralls-python/issues/251
run: |
cd src/
pipenv run coveralls --service=github
pipenv --python ${{ steps.setup-python.outputs.python-version }} run coveralls --service=github
-
name: Stop containers
if: always()

@@ -227,7 +236,7 @@ jobs:
name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.9"
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
-
name: Setup qpdf image
id: qpdf-setup

@@ -389,22 +398,22 @@ jobs:
-
name: Checkout
uses: actions/checkout@v3
-
name: Install pipenv
run: |
pip3 install --upgrade pip setuptools wheel pipx
pipx install pipenv
-
name: Set up Python
id: setup-python
uses: actions/setup-python@v4
with:
python-version: 3.9
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
cache: "pipenv"
cache-dependency-path: 'Pipfile.lock'
-
name: Install pipenv + tools
run: |
pip install --upgrade --user pipenv==${DEFAULT_PIP_ENV_VERSION} setuptools wheel
-
name: Install Python dependencies
run: |
pipenv sync --dev
pipenv --python ${{ steps.setup-python.outputs.python-version }} sync --dev
-
name: Install system dependencies
run: |

@@ -425,17 +434,17 @@ jobs:
-
name: Generate requirements file
run: |
pipenv requirements > requirements.txt
pipenv --python ${{ steps.setup-python.outputs.python-version }} requirements > requirements.txt
-
name: Compile messages
run: |
cd src/
pipenv run python3 manage.py compilemessages
pipenv --python ${{ steps.setup-python.outputs.python-version }} run python3 manage.py compilemessages
-
name: Collect static files
run: |
cd src/
pipenv run python3 manage.py collectstatic --no-input
pipenv --python ${{ steps.setup-python.outputs.python-version }} run python3 manage.py collectstatic --no-input
-
name: Move files
run: |

@@ -446,7 +455,7 @@ jobs:
cp paperless.conf.example dist/paperless-ngx/paperless.conf
cp gunicorn.conf.py dist/paperless-ngx/gunicorn.conf.py
cp -r docker/ dist/paperless-ngx/docker
cp scripts/*.service scripts/*.sh dist/paperless-ngx/scripts/
cp scripts/*.service scripts/*.sh scripts/*.socket dist/paperless-ngx/scripts/
cp -r src/ dist/paperless-ngx/src
cp -r docs/_build/html/ dist/paperless-ngx/docs
mv static dist/paperless-ngx

@@ -522,18 +531,17 @@ jobs:
uses: actions/checkout@v3
with:
ref: main
-
name: Install pipenv
run: |
pip3 install --upgrade pip setuptools wheel pipx
pipx install pipenv
-
name: Set up Python
uses: actions/setup-python@v4
with:
python-version: 3.9
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
cache: "pipenv"
cache-dependency-path: 'Pipfile.lock'
-
name: Install pipenv + tools
run: |
pip install --upgrade --user pipenv==${DEFAULT_PIP_ENV_VERSION} setuptools wheel
-
name: Append Changelog to docs
id: append-Changelog
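The recurring change above pins every pipenv call to the interpreter that `actions/setup-python` provided, via `--python`. A rough local equivalent, assuming pipenv 2022.11.30 and Python 3.9 (the versions named in the new `env` block), is only a sketch:

```shell
# Sketch of the updated CI flow run locally; versions mirror DEFAULT_PIP_ENV_VERSION / DEFAULT_PYTHON_VERSION
pip install --user pipenv==2022.11.30
pipenv --python 3.9 sync --dev        # install exactly what Pipfile.lock pins, against Python 3.9
pipenv --python 3.9 run pytest -ra    # -ra prints a short summary for all non-passing tests
```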
4 .github/workflows/installer-library.yml (vendored)
@@ -95,8 +95,8 @@ jobs:
name: Setup other versions
id: cache-bust-setup
run: |
pillow_version=$(jq ".default.pillow.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')
lxml_version=$(jq ".default.lxml.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')
pillow_version=$(jq -r '.default.pillow.version | gsub("=";"")' Pipfile.lock)
lxml_version=$(jq -r '.default.lxml.version | gsub("=";"")' Pipfile.lock)

echo "Pillow is ${pillow_version}"
echo "lxml is ${lxml_version}"
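The `jq -r ... | gsub("=";"")` form above replaces the earlier `sed` post-processing. A quick way to compare the two outside CI (the `==9.3.0`-style locked value is only illustrative):

```shell
# Old: strip '=' and '"' with sed after jq
jq ".default.pillow.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g'
# New: raw output (-r) plus gsub() inside the jq filter itself
jq -r '.default.pillow.version | gsub("=";"")' Pipfile.lock
# Both should print a bare version number, e.g. 9.3.0 for a locked value of "==9.3.0"
```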
31 .github/workflows/release-chart.yml (vendored)
@@ -1,31 +0,0 @@
---
name: Release Charts

on:
push:
tags:
- v*

jobs:
release_chart:
name: "Release Chart"
runs-on: ubuntu-22.04
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0

- name: Configure Git
run: |
git config user.name "$GITHUB_ACTOR"
git config user.email "$GITHUB_ACTOR@users.noreply.github.com"
- name: Install Helm
uses: azure/setup-helm@v3
with:
version: v3.10.0

- name: Run chart-releaser
uses: helm/chart-releaser-action@v1.4.1
env:
CR_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
@@ -11,14 +11,13 @@ repos:
- id: check-json
exclude: "tsconfig.*json"
- id: check-yaml
exclude: "charts/paperless-ngx/templates/common.yaml"
- id: check-toml
- id: check-executables-have-shebangs
- id: end-of-file-fixer
exclude_types:
- svg
- pofile
exclude: "^(LICENSE|charts/paperless-ngx/README.md)$"
exclude: "(^LICENSE$)"
- id: mixed-line-ending
args:
- "--fix=lf"

@@ -35,7 +34,7 @@ repos:
- javascript
- ts
- markdown
exclude: "(^Pipfile\\.lock$)|(^charts/paperless-ngx/README.md$)"
exclude: "(^Pipfile\\.lock$)"
# Python hooks
- repo: https://github.com/asottile/reorder_python_imports
rev: v3.9.0
1 .python-version (new file)
@@ -0,0 +1 @@
3.8.15
@@ -10,7 +10,7 @@ ARG PIKEPDF_VERSION
ARG PSYCOPG2_VERSION

FROM ghcr.io/paperless-ngx/paperless-ngx/builder/jbig2enc:${JBIG2ENC_VERSION} as jbig2enc-builder
FROM ghcr.io/paperless-ngx/paperless-ngx/builder/qpdf:${QPDF_VERSION} as qpdf-builder
FROM --platform=$BUILDPLATFORM ghcr.io/paperless-ngx/paperless-ngx/builder/qpdf:${QPDF_VERSION} as qpdf-builder
FROM ghcr.io/paperless-ngx/paperless-ngx/builder/pikepdf:${PIKEPDF_VERSION} as pikepdf-builder
FROM ghcr.io/paperless-ngx/paperless-ngx/builder/psycopg2:${PSYCOPG2_VERSION} as psycopg2-builder

@@ -234,9 +234,9 @@ RUN set -eux \
&& echo "Installing Python requirements" \
&& python3 -m pip install --default-timeout=1000 --no-cache-dir --requirement requirements.txt \
&& echo "Installing NLTK data" \
&& python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/local/share/nltk_data" snowball_data \
&& python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/local/share/nltk_data" stopwords \
&& python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/local/share/nltk_data" punkt \
&& python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" snowball_data \
&& python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" stopwords \
&& python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" punkt \
&& echo "Cleaning up image" \
&& apt-get -y purge ${BUILD_PACKAGES} \
&& apt-get -y autoremove --purge \
7 Pipfile
@@ -44,9 +44,6 @@ channels = "~=3.0"
uvicorn = {extras = ["standard"], version = "*"}
concurrent-log-handler = "*"
"pdfminer.six" = "*"
"backports.zoneinfo" = {version = "*", markers = "python_version < '3.9'"}
"importlib-resources" = {version = "*", markers = "python_version < '3.9'"}
zipp = {version = "*", markers = "python_version < '3.9'"}
pyzbar = "*"
mysqlclient = "*"
celery = {extras = ["redis"], version = "*"}

@@ -74,17 +71,13 @@ channels-redis = "==3.4.1"
[dev-packages]
coveralls = "*"
factory-boy = "*"
pycodestyle = "*"
pytest = "*"
pytest-cov = "*"
pytest-django = "*"
pytest-env = "*"
pytest-sugar = "*"
pytest-xdist = "*"
tox = "*"
black = "*"
pre-commit = "*"
sphinx-autobuild = "*"
myst-parser = "*"
imagehash = "*"
mkdocs-material = "*"
1005 Pipfile.lock (generated)
File diff suppressed because it is too large
@@ -108,15 +108,6 @@ Paperless has been around a while now, and people are starting to build stuff on
- [Scan to Paperless](https://github.com/sbrunner/scan-to-paperless): Scan and prepare (crop, deskew, OCR, ...) your documents for Paperless.
- [Paperless Mobile](https://github.com/astubenbord/paperless-mobile): A modern, feature rich mobile application for Paperless.

These projects also exist, but their status and compatibility with paperless-ngx is unknown.

- [paperless-cli](https://github.com/stgarf/paperless-cli): A golang command line binary to interact with a Paperless instance.

This project also exists, but needs updates to be compatible with paperless-ngx.

- [Paperless Desktop](https://github.com/thomasbrueggemann/paperless-desktop): A desktop UI for your Paperless installation. Runs on Mac, Linux, and Windows.
Known issues on Mac: (Could not load reminders and documents)

# Important Note

Document scanners are typically used to scan sensitive documents. Things like your social insurance number, tax records, invoices, etc. Everything is stored in the clear without encryption. This means that Paperless should never be run on an untrusted host. Instead, I recommend that if you do want to use it, run it locally on a server in your own home.
@@ -24,12 +24,12 @@ fi
branch_name=$(git rev-parse --abbrev-ref HEAD)

# Parse either Pipfile.lock or the .build-config.json
jbig2enc_version=$(jq ".jbig2enc.version" .build-config.json | sed 's/"//g')
qpdf_version=$(jq ".qpdf.version" .build-config.json | sed 's/"//g')
psycopg2_version=$(jq ".default.psycopg2.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')
pikepdf_version=$(jq ".default.pikepdf.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')
pillow_version=$(jq ".default.pillow.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')
lxml_version=$(jq ".default.lxml.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')
jbig2enc_version=$(jq -r '.jbig2enc.version' .build-config.json)
qpdf_version=$(jq -r '.qpdf.version' .build-config.json)
psycopg2_version=$(jq -r '.default.psycopg2.version | gsub("=";"")' Pipfile.lock)
pikepdf_version=$(jq -r '.default.pikepdf.version | gsub("=";"")' Pipfile.lock)
pillow_version=$(jq -r '.default.pillow.version | gsub("=";"")' Pipfile.lock)
lxml_version=$(jq -r '.default.lxml.version | gsub("=";"")' Pipfile.lock)

base_filename="$(basename -- "${1}")"
build_args_str=""
@@ -1,26 +0,0 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/
# OWNERS file for Kubernetes
OWNERS
# helm-docs templates
*.gotmpl
@@ -1,35 +0,0 @@
---
apiVersion: v2
appVersion: "1.9.2"
description: Paperless-ngx - Index and archive all of your scanned paper documents
name: paperless
version: 10.0.1
kubeVersion: ">=1.16.0-0"
keywords:
- paperless
- paperless-ngx
- dms
- document
home: https://github.com/paperless-ngx/paperless-ngx/tree/main/charts/paperless-ngx
icon: https://github.com/paperless-ngx/paperless-ngx/raw/main/resources/logo/web/svg/square.svg
sources:
- https://github.com/paperless-ngx/paperless-ngx
maintainers:
- name: Paperless-ngx maintainers
dependencies:
- name: common
repository: https://library-charts.k8s-at-home.com
version: 4.5.2
- name: postgresql
version: 11.6.12
repository: https://charts.bitnami.com/bitnami
condition: postgresql.enabled
- name: redis
version: 16.13.1
repository: https://charts.bitnami.com/bitnami
condition: redis.enabled
deprecated: false
annotations:
artifacthub.io/changes: |
- kind: changed
description: Moved to Paperless-ngx ownership
@@ -1,201 +0,0 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright 2020 k8s@Home
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
@@ -1,50 +0,0 @@
# paperless

Version: 10.0.1, AppVersion: 1.9.2

Paperless-ngx - Index and archive all of your scanned paper documents

**Homepage:** <https://github.com/paperless-ngx/paperless-ngx/tree/main/charts/paperless-ngx>

## Maintainers

| Name | Email | Url |
| ---- | ------ | --- |
| Paperless-ngx maintainers | | |

## Source Code

* <https://github.com/paperless-ngx/paperless-ngx>

## Requirements

Kubernetes: `>=1.16.0-0`

| Repository | Name | Version |
|------------|------|---------|
| https://charts.bitnami.com/bitnami | postgresql | 11.6.12 |
| https://charts.bitnami.com/bitnami | redis | 16.13.1 |
| https://library-charts.k8s-at-home.com | common | 4.5.2 |

## Values

| Key | Type | Default | Description |
|-----|------|---------|-------------|
| env | object | See below | See the following files for additional environment variables: https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose/ https://github.com/paperless-ngx/paperless-ngx/blob/main/paperless.conf.example |
| env.COMPOSE_PROJECT_NAME | string | `"paperless"` | Project name |
| env.PAPERLESS_DBHOST | string | `nil` | Database host to use |
| env.PAPERLESS_OCR_LANGUAGE | string | `"eng"` | OCR languages to install |
| env.PAPERLESS_PORT | int | `8000` | Port to use |
| env.PAPERLESS_REDIS | string | `nil` | Redis to use |
| image.pullPolicy | string | `"IfNotPresent"` | image pull policy |
| image.repository | string | `"ghcr.io/paperless-ngx/paperless-ngx"` | image repository |
| image.tag | string | chart.appVersion | image tag |
| ingress.main | object | See values.yaml | Enable and configure ingress settings for the chart under this key. |
| persistence.consume | object | See values.yaml | Configure volume to monitor for new documents. |
| persistence.data | object | See values.yaml | Configure persistence for data. |
| persistence.export | object | See values.yaml | Configure export volume. |
| persistence.media | object | See values.yaml | Configure persistence for media. |
| postgresql | object | See values.yaml | Enable and configure postgresql database subchart under this key. For more options see [postgresql chart documentation](https://github.com/bitnami/charts/tree/master/bitnami/postgresql) |
| redis | object | See values.yaml | Enable and configure redis subchart under this key. For more options see [redis chart documentation](https://github.com/bitnami/charts/tree/master/bitnami/redis) |
| service | object | See values.yaml | Configures service settings for the chart. |
@@ -1,8 +0,0 @@
{{- define "custom.custom.configuration.header" -}}
## Custom configuration
{{- end -}}

{{- define "custom.custom.configuration" -}}
{{ template "custom.custom.configuration.header" . }}
N/A
{{- end -}}
@@ -1,26 +0,0 @@
env:
PAPERLESS_REDIS: redis://paperless-redis-headless:6379

persistence:
data:
enabled: true
type: emptyDir
media:
enabled: true
type: emptyDir
consume:
enabled: true
type: emptyDir
export:
enabled: true
type: emptyDir

redis:
enabled: true
architecture: standalone
auth:
enabled: false
master:
persistence:
enabled: false
fullnameOverride: paperless-redis
@@ -1,4 +0,0 @@
{{- include "common.notes.defaultNotes" . }}
2. Create a super user by running the command:
export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "common.names.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
kubectl exec -it --namespace {{ .Release.Namespace }} $POD_NAME -- bash -c "python manage.py createsuperuser"
@@ -1,11 +0,0 @@
{{/* Make sure all variables are set properly */}}
{{- include "common.values.setup" . }}

{{/* Append the hardcoded settings */}}
{{- define "paperless.harcodedValues" -}}
env:
PAPERLESS_URL: http{{if ne ( len .Values.ingress.main.tls ) 0 }}s{{end}}://{{ (first .Values.ingress.main.hosts).host }}
{{- end -}}
{{- $_ := merge .Values (include "paperless.harcodedValues" . | fromYaml) -}}

{{ include "common.all" . }}
@@ -1,107 +0,0 @@
|
||||
#
|
||||
# IMPORTANT NOTE
|
||||
#
|
||||
# This chart inherits from our common library chart. You can check the default values/options here:
|
||||
# https://github.com/k8s-at-home/library-charts/tree/main/charts/stable/common/values.yaml
|
||||
#
|
||||
|
||||
image:
|
||||
# -- image repository
|
||||
repository: ghcr.io/paperless-ngx/paperless-ngx
|
||||
# -- image pull policy
|
||||
pullPolicy: IfNotPresent
|
||||
# -- image tag
|
||||
# @default -- chart.appVersion
|
||||
tag:
|
||||
|
||||
# -- See the following files for additional environment variables:
|
||||
# https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose/
|
||||
# https://github.com/paperless-ngx/paperless-ngx/blob/main/paperless.conf.example
|
||||
# @default -- See below
|
||||
env:
|
||||
# -- Project name
|
||||
COMPOSE_PROJECT_NAME: paperless
|
||||
# -- Redis to use
|
||||
PAPERLESS_REDIS:
|
||||
# -- OCR languages to install
|
||||
PAPERLESS_OCR_LANGUAGE: eng
|
||||
# USERMAP_UID: 1000
|
||||
# USERMAP_GID: 1000
|
||||
# PAPERLESS_TIME_ZONE: Europe/London
|
||||
# -- Database host to use
|
||||
PAPERLESS_DBHOST:
|
||||
# -- Port to use
|
||||
PAPERLESS_PORT: 8000
|
||||
# -- Username for the root user
|
||||
# PAPERLESS_ADMIN_USER: admin
|
||||
# -- Password for the root user
|
||||
# PAPERLESS_ADMIN_PASSWORD: admin
|
||||
# PAPERLESS_URL: <set to main ingress by default>
|
||||
|
||||
# -- Configures service settings for the chart.
|
||||
# @default -- See values.yaml
|
||||
service:
|
||||
main:
|
||||
ports:
|
||||
http:
|
||||
port: 8000
|
||||
|
||||
ingress:
|
||||
# -- Enable and configure ingress settings for the chart under this key.
|
||||
# @default -- See values.yaml
|
||||
main:
|
||||
enabled: false
|
||||
|
||||
persistence:
|
||||
# -- Configure persistence for data.
|
||||
# @default -- See values.yaml
|
||||
data:
|
||||
enabled: false
|
||||
mountPath: /usr/src/paperless/data
|
||||
accessMode: ReadWriteOnce
|
||||
emptyDir:
|
||||
enabled: false
|
||||
# -- Configure persistence for media.
|
||||
# @default -- See values.yaml
|
||||
media:
|
||||
enabled: false
|
||||
mountPath: /usr/src/paperless/media
|
||||
accessMode: ReadWriteOnce
|
||||
emptyDir:
|
||||
enabled: false
|
||||
# -- Configure volume to monitor for new documents.
|
||||
# @default -- See values.yaml
|
||||
consume:
|
||||
enabled: false
|
||||
mountPath: /usr/src/paperless/consume
|
||||
accessMode: ReadWriteOnce
|
||||
emptyDir:
|
||||
enabled: false
|
||||
# -- Configure export volume.
|
||||
# @default -- See values.yaml
|
||||
export:
|
||||
enabled: false
|
||||
mountPath: /usr/src/paperless/export
|
||||
accessMode: ReadWriteOnce
|
||||
emptyDir:
|
||||
enabled: false
|
||||
|
||||
# -- Enable and configure postgresql database subchart under this key.
|
||||
# For more options see [postgresql chart documentation](https://github.com/bitnami/charts/tree/master/bitnami/postgresql)
|
||||
# @default -- See values.yaml
|
||||
postgresql:
|
||||
enabled: false
|
||||
postgresqlUsername: paperless
|
||||
postgresqlPassword: paperless
|
||||
postgresqlDatabase: paperless
|
||||
persistence:
|
||||
enabled: false
|
||||
# storageClass: ""
|
||||
|
||||
# -- Enable and configure redis subchart under this key.
|
||||
# For more options see [redis chart documentation](https://github.com/bitnami/charts/tree/master/bitnami/redis)
|
||||
# @default -- See values.yaml
|
||||
redis:
|
||||
enabled: false
|
||||
auth:
|
||||
enabled: false
|
@@ -8,7 +8,7 @@
ARG REPO="paperless-ngx/paperless-ngx"

ARG QPDF_VERSION
FROM ghcr.io/${REPO}/builder/qpdf:${QPDF_VERSION} as qpdf-builder
FROM --platform=$BUILDPLATFORM ghcr.io/${REPO}/builder/qpdf:${QPDF_VERSION} as qpdf-builder

# This does nothing, except provide a name for a copy below
@@ -13,8 +13,8 @@ for line in $(printenv)
do
# Extract the name of the environment variable
env_name=${line%%=*}
# Check if it ends in "_FILE"
if [[ ${env_name} == *_FILE ]]; then
# Check if it starts with "PAPERLESS_" and ends in "_FILE"
if [[ ${env_name} == PAPERLESS_*_FILE ]]; then
# Extract the value of the environment
env_value=${line#*=}

@@ -32,8 +32,7 @@ do
export "${non_file_env_name}"="${val}"

else
echo "File ${env_value} doesn't exist"
exit 1
echo "File ${env_value} referenced by ${env_name} doesn't exist"
fi
fi
done
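The effect of the `_FILE` convention handled by this script can be sketched as follows (the path and value are only examples):

```shell
# Supply a secret via a file rather than a plain variable
echo "example-password" > /run/secrets/paperless_dbpass
export PAPERLESS_DBPASS_FILE=/run/secrets/paperless_dbpass
# On startup the entrypoint exports PAPERLESS_DBPASS from the file's contents;
# if the file is missing, the script now reports both the file path and the variable name.
```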
@@ -152,9 +152,11 @@ following:

```shell-session
$ cd src
$ python3 manage.py migrate
$ python3 manage.py migrate # (1)
```

1. Including `sudo -Hu <paperless_user>` may be required

This might not actually do anything. Not every new paperless version
comes with new database migrations.

@@ -210,9 +212,11 @@ Bare metal:

```shell-session
$ cd /path/to/paperless/src
$ python3 manage.py <command> <arguments>
$ python3 manage.py <command> <arguments> # (1)
```

1. Including `sudo -Hu <paperless_user>` may be required

All commands have built-in help, which can be accessed by executing them
with the argument `--help`.
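For a bare-metal install where paperless runs under its own account, note (1) typically amounts to something like the following sketch (the user name `paperless` is an example):

```shell
cd /path/to/paperless/src
sudo -Hu paperless python3 manage.py migrate                     # run pending migrations as the paperless user
sudo -Hu paperless python3 manage.py document_exporter --help    # every command describes its own arguments
```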
@@ -227,12 +231,16 @@ is not a TTY" errors. For example:
`docker-compose exec -T webserver document_exporter ../export`

```
document_exporter target [-c] [-f] [-d]
document_exporter target [-c] [-d] [-f] [-na] [-nt] [-p] [-sm] [-z]

optional arguments:
-c, --compare-checksums
-f, --use-filename-format
-d, --delete
-f, --use-filename-format
-na, --no-archive
-nt, --no-thumbnail
-p, --use-folder-prefix
-sm, --split-manifest
-z --zip
```

@@ -249,23 +257,53 @@ will assume that the contents of the export directory are a previous
export and will attempt to update the previous export. Paperless will
only export changed and added files. Paperless determines whether a file
has changed by inspecting the file attributes "date/time modified" and
"size". If that does not work out for you, specify
"size". If that does not work out for you, specify `-c` or
`--compare-checksums` and paperless will attempt to compare file
checksums instead. This is slower.

Paperless will not remove any existing files in the export directory. If
you want paperless to also remove files that do not belong to the
current export such as files from deleted documents, specify `--delete`.
current export such as files from deleted documents, specify `-d` or `--delete`.
Be careful when pointing paperless to a directory that already contains
other files.

If `-z` or `--zip` is provided, the export will be a zipfile
in the target directory, named according to the current date.

The filenames generated by this command follow the format
`[date created] [correspondent] [title].[extension]`. If you want
paperless to use `PAPERLESS_FILENAME_FORMAT` for exported filenames
instead, specify `--use-filename-format`.
instead, specify `-f` or `--use-filename-format`.

If `-na` or `--no-archive` is provided, no archive files will be exported,
only the original files.

If `-nt` or `--no-thumbnail` is provided, thumbnail files will not be exported.

!!! note

When using the `-na`/`--no-archive` or `-nt`/`--no-thumbnail` options
the exporter will not output these files for backup. After importing,
the [sanity checker](#sanity-checker) will warn about missing thumbnails and archive files
until they are regenerated with `document_thumbnails` or [`document_archiver`](#archiver).
It can make sense to omit these files from backup as their content and checksum
can change (new archiver algorithm) and may then cause additional used space in
a deduplicated backup.

If `-p` or `--use-folder-prefix` is provided, files will be exported
in dedicated folders according to their nature: `archive`, `originals`,
`thumbnails` or `json`

If `-sm` or `--split-manifest` is provided, information about document
will be placed in individual json files, instead of a single JSON file. The main
manifest.json will still contain application wide information (e.g. tags, correspondent,
documenttype, etc)

If `-z` or `--zip` is provided, the export will be a zipfile
in the target directory, named according to the current date.

!!! warning

If exporting with the file name format, there may be errors due to
your operating system's maximum path lengths. Try adjusting the export
target or consider not using the filename format.
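As an illustration, the flags can be combined; a deduplication-friendly backup might look like this sketch (the target path is an example):

```shell
# Skip archive files and thumbnails, use per-type folders, and split the manifest
docker-compose exec -T webserver document_exporter ../export -na -nt -p -sm
# Alternatively, produce a single dated zipfile in the target directory
docker-compose exec -T webserver document_exporter ../export -z
```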
### Document importer {#importer}

@@ -347,6 +385,14 @@ document_create_classifier

This command takes no arguments.

### Document thumbnails {#thumbnails}

Use this command to re-create document thumbnails. Optionally include the `--document {id}` option to generate thumbnails for a specific document only.

```
document_thumbnails
```
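For example, to regenerate the thumbnail of one specific document (the id is illustrative):

```shell
document_thumbnails --document 123
```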
### Managing the document search index {#index}

The document search index is responsible for delivering search results
@@ -336,6 +336,13 @@ value.
However, keep in mind that inside docker, if files get stored outside of
the predefined volumes, they will be lost after a restart of paperless.

!!! warning

When file naming handling, in particular when using `{tag_list}`,
you may run into the limits of your operating system's maximum
path lengths. Files will retain the previous path instead and
the issue logged.
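A comparatively short format such as the following sketch (shown as it might appear in `paperless.conf`; the placeholder names are assumptions based on the standard `PAPERLESS_FILENAME_FORMAT` placeholders) stays well clear of those limits, whereas adding `{tag_list}` to deeply nested folders can exceed them:

```shell
PAPERLESS_FILENAME_FORMAT={created_year}/{correspondent}/{title}
```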
## Storage paths

One of the best things in Paperless is that you can not only access the

@@ -392,7 +399,7 @@ structure as in the previous example above.
If you adjust the format of an existing storage path, old documents
don't get relocated automatically. You need to run the
[document renamer](/administration#renamer) to
adjust their pathes.
adjust their paths.

## Celery Monitoring {#celery-monitoring}
@@ -6,7 +6,7 @@ provides a browsable API for most of its endpoints, which you can
inspect at `http://<paperless-host>:<port>/api/`. This also documents
most of the available filters and ordering fields.

The API provides 5 main endpoints:
The API provides 7 main endpoints:

- `/api/documents/`: Full CRUD support, except POSTing new documents.
See below.
@@ -1,5 +1,28 @@
# Changelog

## paperless-ngx 1.11.3

### Breaking Changes

_Note: PR #2279 could represent a breaking change to the API which may affect third party applications that were only checking the `post_document` endpoint for e.g. result = 'OK' as opposed to e.g. HTTP status = 200_

- Bugfix: Return created task ID when posting document to API [@stumpylog](https://github.com/stumpylog) ([#2279](https://github.com/paperless-ngx/paperless-ngx/pull/2279))
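For API clients the practical difference is roughly the following sketch (host, token and file name are placeholders): the response body of `post_document` now carries the created consume task's id rather than the literal string `"OK"`, so clients should key on the HTTP status code instead of the body text.

```shell
curl -s -o response.body -w "%{http_code}\n" \
  -H "Authorization: Token <api-token>" \
  -F document=@example.pdf \
  https://paperless.example.com/api/documents/post_document/
# A 200 status still signals success; the body now contains the task id instead of "OK".
```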
### Bug Fixes

- Bugfix: Fix no content when processing some RTL files [@stumpylog](https://github.com/stumpylog) ([#2295](https://github.com/paperless-ngx/paperless-ngx/pull/2295))
- Bugfix: Handle email dates maybe being naive [@stumpylog](https://github.com/stumpylog) ([#2293](https://github.com/paperless-ngx/paperless-ngx/pull/2293))
- Fix: live filterable dropdowns broken in 1.11.x [@shamoon](https://github.com/shamoon) ([#2292](https://github.com/paperless-ngx/paperless-ngx/pull/2292))
- Bugfix: Reading environment from files didn't work for management commands [@stumpylog](https://github.com/stumpylog) ([#2261](https://github.com/paperless-ngx/paperless-ngx/pull/2261))
- Bugfix: Return created task ID when posting document to API [@stumpylog](https://github.com/stumpylog) ([#2279](https://github.com/paperless-ngx/paperless-ngx/pull/2279))

### All App Changes

- Bugfix: Fix no content when processing some RTL files [@stumpylog](https://github.com/stumpylog) ([#2295](https://github.com/paperless-ngx/paperless-ngx/pull/2295))
- Bugfix: Handle email dates maybe being naive [@stumpylog](https://github.com/stumpylog) ([#2293](https://github.com/paperless-ngx/paperless-ngx/pull/2293))
- Fix: live filterable dropdowns broken in 1.11.x [@shamoon](https://github.com/shamoon) ([#2292](https://github.com/paperless-ngx/paperless-ngx/pull/2292))
- Bugfix: Return created task ID when posting document to API [@stumpylog](https://github.com/stumpylog) ([#2279](https://github.com/paperless-ngx/paperless-ngx/pull/2279))

## paperless-ngx 1.11.2

Versions 1.11.1 and 1.11.2 contain bug fixes from v1.11.0 that prevented use of the new email consumption feature
@@ -179,7 +179,7 @@ Previously, the location defaulted to `PAPERLESS_DATA_DIR/nltk`.
Unless you are using this in a bare metal install or other setup,
this folder is no longer needed and can be removed manually.

Defaults to `/usr/local/share/nltk_data`
Defaults to `/usr/share/nltk_data`
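On a bare-metal or otherwise customised install, the data can be fetched into that directory by hand, mirroring what the Docker image does (a sketch; requires the `nltk` package, and the target path shown is the new default):

```shell
python3 -W ignore::RuntimeWarning -m nltk.downloader -d /usr/share/nltk_data snowball_data stopwords punkt
```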
## Logging

@@ -217,6 +217,11 @@ not include a trailing slash. E.g. <https://paperless.domain.com>

Defaults to empty string, leaving the other settings unaffected.

!!! note

This value cannot contain a path (e.g. domain.com/path), even if
you are installing paperless-ngx at a subpath.

`PAPERLESS_CSRF_TRUSTED_ORIGINS=<comma-separated-list>`

: A list of trusted origins for unsafe requests (e.g. POST). As of

@@ -711,12 +716,47 @@ for details on how to set it.

: Enables or disables the advanced natural language processing
used during automatic classification. If disabled, paperless will
still preform some basic text pre-processing before matching.
still perform some basic text pre-processing before matching.

See also `PAPERLESS_NLTK_DIR`.
: See also `PAPERLESS_NLTK_DIR`.

Defaults to 1.

`PAPERLESS_EMAIL_TASK_CRON=<cron expression>`

: Configures the scheduled email fetching frequency. The value
should be a valid crontab(5) expression describing when to run.

: If set to the string "disable", no emails will be fetched automatically.

Defaults to `*/10 * * * *` or every ten minutes.

`PAPERLESS_TRAIN_TASK_CRON=<cron expression>`

: Configures the scheduled automatic classifier training frequency. The value
should be a valid crontab(5) expression describing when to run.

: If set to the string "disable", the classifier will not be trained automatically.

Defaults to `5 */1 * * *` or every hour at 5 minutes past the hour.

`PAPERLESS_INDEX_TASK_CRON=<cron expression>`

: Configures the scheduled search index update frequency. The value
should be a valid crontab(5) expression describing when to run.

: If set to the string "disable", the search index will not be automatically updated.

Defaults to `0 0 * * *` or daily at midnight.

`PAPERLESS_SANITY_TASK_CRON=<cron expression>`

: Configures the scheduled sanity checker frequency.

: If set to the string "disable", the sanity checker will not run automatically.

Defaults to `30 0 * * sun` or Sunday at 30 minutes past midnight.
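For example, in `docker-compose.env` or the process environment (the values shown are illustrative, not recommendations):

```shell
PAPERLESS_EMAIL_TASK_CRON="*/30 * * * *"   # fetch mail every 30 minutes instead of every 10
PAPERLESS_TRAIN_TASK_CRON="disable"        # never retrain the classifier automatically
PAPERLESS_INDEX_TASK_CRON="0 3 * * *"      # update the search index daily at 03:00
```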
## Polling {#polling}

`PAPERLESS_CONSUMER_POLLING=<num>`

@@ -813,7 +853,7 @@ PAPERLESS_CONSUMER_ENABLE_BARCODES has been enabled.

Defaults to false.

PAPERLESS_CONSUMER_BARCODE_STRING=PATCHT
`PAPERLESS_CONSUMER_BARCODE_STRING=PATCHT`

: Defines the string to be detected as a separator barcode. If
paperless is used with the PATCH-T separator pages, users shouldn't

@@ -821,6 +861,31 @@ change this.

Defaults to "PATCHT"

`PAPERLESS_CONSUMER_ENABLE_ASN_BARCODE=<bool>`

: Enables the detection of barcodes in the scanned document and
setting the ASN (archive serial number) if a properly formatted
barcode is detected.

The barcode must consist of a (configurable) prefix and the ASN
to be set, for instance `ASN00123`.

This option is compatible with barcode page separation, since
pages will be split up before reading the ASN.

If no ASN barcodes are detected in the uploaded file, no ASN will
be set. If a barcode with an already existing ASN is detected, no ASN
will be set either and a warning will be logged.

Defaults to false.

`PAPERLESS_CONSUMER_ASN_BARCODE_PREFIX=ASN`

: Defines the prefix that is used to identify a barcode as an ASN
barcode.

Defaults to "ASN"
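A minimal example of turning this on (the prefix shown is simply the default):

```shell
PAPERLESS_CONSUMER_ENABLE_ASN_BARCODE=true
PAPERLESS_CONSUMER_ASN_BARCODE_PREFIX=ASN   # a barcode such as ASN00123 then sets the document's ASN
```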
`PAPERLESS_CONVERT_MEMORY_LIMIT=<num>`

: On smaller systems, or even in the case of Very Large Documents, the
@@ -248,7 +248,7 @@ Testing and code style:
In order to build the front end and serve it as part of django, execute

```shell-session
$ ng build --prod
$ ng build --configuration production
```

This will build the front end and put it in a location from which the
101 docs/setup.md
@@ -46,7 +46,7 @@ steps described in [Docker setup](#docker_hub) automatically.
|
||||
page](https://github.com/paperless-ngx/paperless-ngx/tree/master/docker/compose)
|
||||
and download one of the `docker-compose.*.yml` files,
|
||||
depending on which database backend you want to use. Rename this
|
||||
file to `docker-compose.*.yml`. If you want to enable
|
||||
file to `docker-compose.yml`. If you want to enable
|
||||
optional support for Office documents, download a file with
|
||||
`-tika` in the file name. Download the
|
||||
`docker-compose.env` file and the `.env` file as well and store them
|
||||
@@ -64,8 +64,7 @@ steps described in [Docker setup](#docker_hub) automatically.
|
||||
|
||||
If you want to use the included `docker-compose.*.yml` file, you
|
||||
need to have at least Docker version **17.09.0** and docker-compose
|
||||
version **1.17.0**. To check do: `docker-compose -v` or
|
||||
`docker -v`
|
||||
version **1.17.0**. To check do: `docker-compose -v` or `docker -v`
|
||||
|
||||
See the [Docker installation guide](https://docs.docker.com/engine/install/) on how to install the current
|
||||
version of Docker for your operating system or Linux distribution of
|
||||
@@ -144,21 +143,13 @@ steps described in [Docker setup](#docker_hub) automatically.
!!! note

You can copy any setting from the file `paperless.conf.example` and
-paste it here. Have a look at [configuration](/configuration] to see what's available.
+paste it here. Have a look at [configuration](/configuration) to see what's available.

!!! note

-You can utilize Docker secrets for some configuration settings by
-appending `_FILE` to some configuration values. This is
-supported currently only by:

-- PAPERLESS_DBUSER
-- PAPERLESS_DBPASS
-- PAPERLESS_SECRET_KEY
-- PAPERLESS_AUTO_LOGIN_USERNAME
-- PAPERLESS_ADMIN_USER
-- PAPERLESS_ADMIN_MAIL
-- PAPERLESS_ADMIN_PASSWORD
+You can utilize Docker secrets for configuration settings by
+appending `_FILE` to configuration values. For example `PAPERLESS_DBUSER`
+can be set using `PAPERLESS_DBUSER_FILE=/var/run/secrets/password.txt`.

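For example (the paths and secret names here are hypothetical, and mounting the secret file into the container, e.g. via the compose `secrets:` mechanism, is up to your setup), the sensitive value lives in a file and only the `_FILE` variable goes into the environment:

```bash
# docker-compose.env — hypothetical secret file locations inside the container
PAPERLESS_DBPASS_FILE=/run/secrets/paperless_dbpass
PAPERLESS_SECRET_KEY_FILE=/run/secrets/paperless_secret_key
```
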
!!! warning

@@ -314,14 +305,34 @@ supported.
extension](https://code.djangoproject.com/wiki/JSON1Extension) is
enabled. This is usually the case, but not always.

-4. Get the release archive from
-<https://github.com/paperless-ngx/paperless-ngx/releases>. Extract the
-archive to a place from where you wish to execute it, such as
-`/opt/paperless`. If you clone the git repo as it is, you also have to
+4. Create a system user with a new home folder under which you wish
+to run paperless.

+```shell-session
+adduser paperless --system --home /opt/paperless --group
+```

+5. Get the release archive from
+<https://github.com/paperless-ngx/paperless-ngx/releases> for example with

+```shell-session
+curl -O -L https://github.com/paperless-ngx/paperless-ngx/releases/download/v1.10.2/paperless-ngx-v1.10.2.tar.xz
+```

+Extract the archive with

+```shell-session
+tar -xf paperless-ngx-v1.10.2.tar.xz
+```

+and copy the contents to the
+home folder of the user you created before (`/opt/paperless`).

+Optional: If you cloned the git repo, you will have to
+compile the frontend yourself, see [here](/development#front-end-development)
+and use the `build` step, not `serve`.

-5. Configure paperless. See [configuration](/configuration) for details.
+6. Configure paperless. See [configuration](/configuration) for details.
Edit the included `paperless.conf` and adjust the settings to your
needs. Required settings for getting
paperless running are:
@@ -354,20 +365,27 @@ supported.
documents are written in.
- Set `PAPERLESS_TIME_ZONE` to your local time zone.

-6. Create a system user under which you wish to run paperless.

-```shell-session
-adduser paperless --system --home /opt/paperless --group
-```

-7. Ensure that these directories exist and that the paperless user has
-write permissions to the following directories:
+7. Create the following directories if they are missing:

- `/opt/paperless/media`
- `/opt/paperless/data`
- `/opt/paperless/consume`

Adjust as necessary if you configured different folders.
+Ensure that the paperless user has write permissions for every one
+of these folders with

+```shell-session
+ls -l -d /opt/paperless/media
+```

+If needed, change the owner with

+```shell-session
+sudo chown paperless:paperless /opt/paperless/media
+sudo chown paperless:paperless /opt/paperless/data
+sudo chown paperless:paperless /opt/paperless/consume
+```

8. Install python requirements from the `requirements.txt` file. It is
up to you if you wish to use a virtual environment or not. First you
@@ -397,11 +415,15 @@ supported.
10. Optional: Test that paperless is working by executing

```bash
# This collects static files from paperless and django.
# Manually starts the webserver
sudo -Hu paperless python3 manage.py runserver
```

-and pointing your browser to <http://localhost:8000/>.
+and pointing your browser to http://localhost:8000 if
+accessing from the same device on which paperless is installed.
+If accessing from another machine, set up systemd services. You may need
+to set `PAPERLESS_DEBUG=true` in order for the development server to work
+normally in your browser.

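For a quick look from another machine before the services are in place, one option is to bind the development server to all interfaces; this is a sketch only (Django's `runserver` accepts an address and port), not part of the official steps, and should never be left running in production:

```bash
# paperless.conf — only while testing the development server
PAPERLESS_DEBUG=true

# run the development server reachable from other machines on the network
sudo -Hu paperless python3 manage.py runserver 0.0.0.0:8000
```
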
!!! warning

@@ -452,6 +474,14 @@ supported.
For instructions on how to use nginx for that,
[see the instructions below](/setup#nginx).

+!!! warning

+If celery won't start (check with
+`sudo systemctl status paperless-task-queue.service` for
+paperless-task-queue.service and paperless-scheduler.service
+) you need to change the path in the files. Example:
+`ExecStart=/opt/paperless/.local/bin/celery --app paperless worker --loglevel INFO`

12. Optional: Install a samba server and make the consumption folder
available as a network share.

@@ -746,7 +776,9 @@ with a few simple steps.

Paperless-ngx modified the database schema slightly, however, these
changes can be reverted while keeping your current data, so that your
-current data will be compatible with original Paperless.
+current data will be compatible with original Paperless. Thumbnails
+were also changed from PNG to WEBP format and will need to be
+re-generated.

Execute this:

@@ -762,9 +794,9 @@ $ cd /path/to/paperless/src
$ python3 manage.py migrate documents 0023
```

-After that, you need to clear your cookies (Paperless-ngx comes with
-updated dependencies that do cookie-processing differently) and probably
-your cache as well.
+After regenerating thumbnails, you'll need to clear your cookies
+(Paperless-ngx comes with updated dependencies that do cookie-processing
+differently) and probably your cache as well.

# Considerations for less powerful devices {#less-powerful-devices}

@@ -843,6 +875,7 @@ http {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Host $server_name;
+add_header P3P 'CP=""'; # may not be required in all setups
}
}
}

@@ -108,6 +108,8 @@ Furthermore, there is the [Paperless
App](https://github.com/bauerj/paperless_app) as well, which not only
has document upload, but also document browsing and download features.

+Another option is [Paperless Mobile](https://github.com/astubenbord/paperless-mobile), an Android app that supports document upload, scanning, management of labels and more.

### IMAP (Email) {#usage-email}

You can tell paperless-ngx to consume documents from your email
@@ -151,6 +153,8 @@ different means. These are as follows:
will not consume mails already tagged. Not all mail servers support
this feature!

+- **Apple Mail support:** Apple Mail clients allow differently colored tags. For this to work use `apple:<color>` (e.g. _apple:green_) as a custom tag. Available colors are _red_, _orange_, _yellow_, _blue_, _green_, _violet_ and _grey_.

!!! warning

The mail consumer will perform these actions on all mails it has
@@ -191,7 +195,7 @@ different means. These are as follows:
them further.

Paperless is set up to check your mails every 10 minutes. This can be
-configured on the 'Scheduled tasks' page in the admin.
+configured via `PAPERLESS_EMAIL_TASK_CRON` (see [software tweaks](/configuration#software_tweaks))

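For example, to poll the mailbox hourly instead (the value is just an illustration; the cron syntax is the same as for the other `*_TASK_CRON` options):

```bash
# docker-compose.env or paperless.conf
PAPERLESS_EMAIL_TASK_CRON="0 * * * *"   # check mail at the top of every hour
```
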
### REST API

@@ -359,6 +363,14 @@ documents in your inbox:
sorted by ASN. Don't order this binder in any other way.
5. If the document has no ASN, throw it away. Yay!

+!!! tip

+Instead of writing a number on the document by hand, you may also prepare
+a spool of labels with barcodes with an ascending serial number, that are
+formatted like `ASN00001`.
+This also enables Paperless to automatically parse and process the ASN
+(if enabled in the config), so that you don't need to manually assign it.

Over time, you will notice that your physical binder will fill up. If it
is full, label the binder with the range of ASNs in this binder (i.e.,
"Documents 1 to 343"), store the binder in your cellar or elsewhere,

@@ -1,18 +0,0 @@
-# This file is used by the build system to adjust CSS and JS output to support the specified browsers below.
-# For additional information regarding the format and rule options, please see:
-# https://github.com/browserslist/browserslist#queries

-# For the full list of supported browsers by the Angular framework, please see:
-# https://angular.io/guide/browser-support

-# You can see what browsers were selected by your queries by running:
-# npx browserslist

-last 1 Chrome version
-last 1 Firefox version
-last 2 Edge major versions
-last 2 Safari major versions
-last 2 iOS major versions
-Firefox ESR
-not IE 9-10 # Angular support for IE 9-10 has been deprecated and will be removed as of Angular v11. To opt-in, remove the 'not' prefix on this line.
-not IE 11 # Angular supports IE 11 only as an opt-in. To opt-in, remove the 'not' prefix on this line.
@@ -12,7 +12,7 @@ Run `ng generate component component-name` to generate a new component. You can

## Build

-Run `ng build` to build the project. The build artifacts will be stored in the `dist/` directory. Use the `--prod` flag for a production build.
+Run `ng build` to build the project. The build artifacts will be stored in the `dist/` directory. Use the `--configuration production` flag for a production build.

## Running unit tests

@@ -16,6 +16,7 @@
|
||||
"i18n": {
|
||||
"sourceLocale": "en-US",
|
||||
"locales": {
|
||||
"ar-AR": "src/locale/messages.ar_AR.xlf",
|
||||
"be-BY": "src/locale/messages.be_BY.xlf",
|
||||
"cs-CZ": "src/locale/messages.cs_CZ.xlf",
|
||||
"da-DK": "src/locale/messages.da_DK.xlf",
|
||||
@@ -192,5 +193,13 @@
|
||||
"schematicCollections": [
|
||||
"@angular-eslint/schematics"
|
||||
]
|
||||
},
|
||||
"schematics": {
|
||||
"@angular-eslint/schematics:application": {
|
||||
"setParserOptionsProject": true
|
||||
},
|
||||
"@angular-eslint/schematics:library": {
|
||||
"setParserOptionsProject": true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -44,7 +44,7 @@ describe('document-detail', () => {
|
||||
})
|
||||
|
||||
cy.viewport(1024, 1024)
|
||||
cy.visit('/documents/1/')
|
||||
cy.visit('/documents/1/').wait('@ui-settings')
|
||||
})
|
||||
|
||||
it('should activate / deactivate save button when changes are saved', () => {
|
||||
@@ -66,8 +66,21 @@ describe('document-detail', () => {
|
||||
cy.contains('You have unsaved changes').should('not.exist')
|
||||
})
|
||||
|
||||
it('should show a mobile preview', () => {
|
||||
cy.viewport(440, 1000)
|
||||
cy.get('a')
|
||||
.contains('Preview')
|
||||
.scrollIntoView({ offset: { top: 150, left: 0 } })
|
||||
.click()
|
||||
cy.get('pdf-viewer').should('be.visible')
|
||||
})
|
||||
|
||||
it('should show a list of comments', () => {
|
||||
cy.wait(1000).get('a').contains('Comments').click().wait(1000)
|
||||
cy.wait(1000)
|
||||
.get('a')
|
||||
.contains('Comments')
|
||||
.click({ force: true })
|
||||
.wait(1000)
|
||||
cy.get('app-document-comments').find('.card').its('length').should('eq', 3)
|
||||
})
|
||||
|
||||
|
@@ -52,6 +52,10 @@ describe('documents-list', () => {
|
||||
|
||||
req.reply(response)
|
||||
})
|
||||
|
||||
cy.intercept('http://localhost:8000/api/documents/selection_data/', {
|
||||
fixture: 'documents/selection_data.json',
|
||||
}).as('selection-data')
|
||||
})
|
||||
|
||||
cy.viewport(1280, 1024)
|
||||
@@ -76,6 +80,28 @@ describe('documents-list', () => {
|
||||
cy.get('app-document-card-large')
|
||||
})
|
||||
|
||||
it('should show partial tag selection', () => {
|
||||
cy.get('app-document-card-small:nth-child(1)').click()
|
||||
cy.get('app-document-card-small:nth-child(4)').click()
|
||||
cy.get('app-bulk-editor button')
|
||||
.contains('Tags')
|
||||
.click()
|
||||
.wait('@selection-data')
|
||||
cy.get('svg.bi-dash').should('be.visible')
|
||||
cy.get('svg.bi-check').should('be.visible')
|
||||
})
|
||||
|
||||
it('should allow bulk removal', () => {
|
||||
cy.get('app-document-card-small:nth-child(1)').click()
|
||||
cy.get('app-document-card-small:nth-child(4)').click()
|
||||
cy.get('app-bulk-editor').within(() => {
|
||||
cy.get('button').contains('Tags').click().wait('@selection-data')
|
||||
cy.get('button').contains('Another Sample Tag').click()
|
||||
cy.get('button').contains('Apply').click()
|
||||
})
|
||||
cy.contains('operation will remove the tag')
|
||||
})
|
||||
|
||||
it('should filter tags', () => {
|
||||
cy.get('app-filter-editor app-filterable-dropdown[title="Tags"]').within(
|
||||
() => {
|
||||
|
@@ -35,16 +35,58 @@ describe('settings', () => {
|
||||
req.reply(response)
|
||||
}
|
||||
).as('savedViews')
|
||||
})
|
||||
|
||||
cy.intercept('http://localhost:8000/api/mail_accounts/*', {
|
||||
fixture: 'mail_accounts/mail_accounts.json',
|
||||
})
|
||||
cy.intercept('http://localhost:8000/api/mail_rules/*', {
|
||||
fixture: 'mail_rules/mail_rules.json',
|
||||
}).as('mailRules')
|
||||
cy.intercept('http://localhost:8000/api/tasks/', {
|
||||
fixture: 'tasks/tasks.json',
|
||||
})
|
||||
this.newMailAccounts = []
|
||||
|
||||
cy.intercept(
|
||||
'POST',
|
||||
'http://localhost:8000/api/mail_accounts/',
|
||||
(req) => {
|
||||
const newRule = req.body
|
||||
newRule.id = 3
|
||||
this.newMailAccounts.push(newRule) // store this for later
|
||||
req.reply({ result: 'OK' })
|
||||
}
|
||||
).as('saveAccount')
|
||||
|
||||
cy.fixture('mail_accounts/mail_accounts.json').then(
|
||||
(mailAccountsJson) => {
|
||||
cy.intercept(
|
||||
'GET',
|
||||
'http://localhost:8000/api/mail_accounts/*',
|
||||
(req) => {
|
||||
console.log(req, this.newMailAccounts)
|
||||
|
||||
let response = { ...mailAccountsJson }
|
||||
if (this.newMailAccounts.length) {
|
||||
response.results = response.results.concat(this.newMailAccounts)
|
||||
}
|
||||
|
||||
req.reply(response)
|
||||
}
|
||||
).as('getAccounts')
|
||||
}
|
||||
)
|
||||
|
||||
this.newMailRules = []
|
||||
|
||||
cy.intercept('POST', 'http://localhost:8000/api/mail_rules/', (req) => {
|
||||
const newRule = req.body
|
||||
newRule.id = 2
|
||||
this.newMailRules.push(newRule) // store this for later
|
||||
req.reply({ result: 'OK' })
|
||||
}).as('saveRule')
|
||||
|
||||
cy.fixture('mail_rules/mail_rules.json').then((mailRulesJson) => {
|
||||
cy.intercept('GET', 'http://localhost:8000/api/mail_rules/*', (req) => {
|
||||
let response = { ...mailRulesJson }
|
||||
if (this.newMailRules.length) {
|
||||
response.results = response.results.concat(this.newMailRules)
|
||||
}
|
||||
|
||||
req.reply(response)
|
||||
}).as('getRules')
|
||||
})
|
||||
|
||||
cy.fixture('documents/documents.json').then((documentsJson) => {
|
||||
@@ -99,4 +141,42 @@ describe('settings', () => {
|
||||
cy.visit('/dashboard')
|
||||
cy.get('app-saved-view-widget').contains('Inbox').should('not.exist')
|
||||
})
|
||||
|
||||
it('should show a list of mail accounts & rules & support creation', () => {
|
||||
cy.contains('a', 'Mail').click()
|
||||
cy.get('app-settings .tab-content ul li').its('length').should('eq', 5) // 2 headers, 2 accounts, 1 rule
|
||||
cy.contains('button', 'Add Account').click()
|
||||
cy.contains('Create new mail account')
|
||||
cy.get('app-input-text[formcontrolname="name"]').type(
|
||||
'Example Mail Account'
|
||||
)
|
||||
cy.get('app-input-text[formcontrolname="imap_server"]').type(
|
||||
'mail.example.com'
|
||||
)
|
||||
cy.get('app-input-text[formcontrolname="imap_port"]').type('993')
|
||||
cy.get('app-input-text[formcontrolname="username"]').type('username')
|
||||
cy.get('app-input-password[formcontrolname="password"]').type('pass')
|
||||
cy.contains('app-mail-account-edit-dialog button', 'Save')
|
||||
.click()
|
||||
.wait('@saveAccount')
|
||||
.wait('@getAccounts')
|
||||
cy.contains('Saved account')
|
||||
|
||||
cy.wait(1000)
|
||||
cy.contains('button', 'Add Rule').click()
|
||||
cy.contains('Create new mail rule')
|
||||
cy.get('app-input-text[formcontrolname="name"]').type('Example Rule')
|
||||
cy.get('app-input-select[formcontrolname="account"]').type('Example{enter}')
|
||||
cy.get('app-input-number[formcontrolname="maximum_age"]').type('30')
|
||||
cy.get('app-input-text[formcontrolname="filter_subject"]').type(
|
||||
'[paperless]'
|
||||
)
|
||||
cy.contains('app-mail-rule-edit-dialog button', 'Save')
|
||||
.click()
|
||||
.wait('@saveRule')
|
||||
.wait('@getRules')
|
||||
cy.contains('Saved rule').wait(1000)
|
||||
|
||||
cy.get('app-settings .tab-content ul li').its('length').should('eq', 7)
|
||||
})
|
||||
})
|
||||
|
293
src-ui/cypress/fixtures/documents/selection_data.json
Normal file
@@ -0,0 +1,293 @@
|
||||
{
|
||||
"selected_correspondents": [
|
||||
{
|
||||
"id": 62,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 75,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 55,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 56,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 73,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 58,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 44,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 42,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 74,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 54,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 29,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 71,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 68,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 82,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 34,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 41,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 51,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 46,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 40,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 43,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 80,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 70,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 52,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 67,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 53,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 32,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 63,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 35,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 45,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 38,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 79,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 48,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 72,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 78,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 39,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 57,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 61,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 81,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 77,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 69,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 36,
|
||||
"document_count": 3
|
||||
},
|
||||
{
|
||||
"id": 31,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 30,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 50,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 49,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 60,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 47,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 66,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 37,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 28,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 59,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 33,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 76,
|
||||
"document_count": 0
|
||||
}
|
||||
],
|
||||
"selected_tags": [
|
||||
{
|
||||
"id": 4,
|
||||
"document_count": 2
|
||||
},
|
||||
{
|
||||
"id": 7,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 5,
|
||||
"document_count": 1
|
||||
},
|
||||
{
|
||||
"id": 6,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 3,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"document_count": 1
|
||||
},
|
||||
{
|
||||
"id": 1,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 8,
|
||||
"document_count": 0
|
||||
}
|
||||
],
|
||||
"selected_document_types": [
|
||||
{
|
||||
"id": 4,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 10,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 11,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 9,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 7,
|
||||
"document_count": 2
|
||||
},
|
||||
{
|
||||
"id": 3,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 1,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 5,
|
||||
"document_count": 0
|
||||
},
|
||||
{
|
||||
"id": 8,
|
||||
"document_count": 1
|
||||
}
|
||||
],
|
||||
"selected_storage_paths": []
|
||||
}
|
@@ -23,7 +23,8 @@
|
||||
"assign_correspondent": 2,
|
||||
"assign_document_type": null,
|
||||
"order": 0,
|
||||
"attachment_type": 2
|
||||
"attachment_type": 2,
|
||||
"consumption_scope": 1
|
||||
}
|
||||
]
|
||||
}
|
||||
|
@@ -3,7 +3,7 @@
|
||||
beforeEach(() => {
|
||||
cy.intercept('http://localhost:8000/api/ui_settings/', {
|
||||
fixture: 'ui_settings/settings.json',
|
||||
})
|
||||
}).as('ui-settings')
|
||||
|
||||
cy.intercept('http://localhost:8000/api/remote_version/', {
|
||||
fixture: 'remote_version/remote_version.json',
|
||||
@@ -29,6 +29,10 @@ beforeEach(() => {
|
||||
fixture: 'storage_paths/storage_paths.json',
|
||||
})
|
||||
|
||||
cy.intercept('http://localhost:8000/api/tasks/', {
|
||||
fixture: 'tasks/tasks.json',
|
||||
})
|
||||
|
||||
cy.intercept('http://localhost:8000/api/documents/1/metadata/', {
|
||||
fixture: 'documents/1/metadata.json',
|
||||
})
|
||||
|
9692
src-ui/package-lock.json
generated
File diff suppressed because it is too large
@@ -13,46 +13,46 @@
|
||||
},
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
"@angular/common": "~14.2.8",
|
||||
"@angular/compiler": "~14.2.8",
|
||||
"@angular/core": "~14.2.8",
|
||||
"@angular/forms": "~14.2.8",
|
||||
"@angular/localize": "~14.2.8",
|
||||
"@angular/platform-browser": "~14.2.8",
|
||||
"@angular/platform-browser-dynamic": "~14.2.8",
|
||||
"@angular/router": "~14.2.8",
|
||||
"@ng-bootstrap/ng-bootstrap": "^13.0.0",
|
||||
"@ng-select/ng-select": "^9.0.2",
|
||||
"@angular/common": "~15.1.0",
|
||||
"@angular/compiler": "~15.1.0",
|
||||
"@angular/core": "~15.1.0",
|
||||
"@angular/forms": "~15.1.0",
|
||||
"@angular/localize": "~15.1.0",
|
||||
"@angular/platform-browser": "~15.1.0",
|
||||
"@angular/platform-browser-dynamic": "~15.1.0",
|
||||
"@angular/router": "~15.1.0",
|
||||
"@ng-bootstrap/ng-bootstrap": "^14.0.1",
|
||||
"@ng-select/ng-select": "^10.0.1",
|
||||
"@ngneat/dirty-check-forms": "^3.0.3",
|
||||
"@popperjs/core": "^2.11.6",
|
||||
"bootstrap": "^5.2.1",
|
||||
"bootstrap": "^5.2.3",
|
||||
"file-saver": "^2.0.5",
|
||||
"ng2-pdf-viewer": "^9.1.2",
|
||||
"ngx-color": "^8.0.3",
|
||||
"ngx-cookie-service": "^14.0.1",
|
||||
"ngx-cookie-service": "^15.0.0",
|
||||
"ngx-file-drop": "^14.0.2",
|
||||
"ngx-ui-tour-ng-bootstrap": "^11.1.0",
|
||||
"rxjs": "~7.5.7",
|
||||
"ngx-ui-tour-ng-bootstrap": "^12.0.0",
|
||||
"rxjs": "^7.8.0",
|
||||
"tslib": "^2.4.1",
|
||||
"uuid": "^9.0.0",
|
||||
"zone.js": "~0.11.8"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@angular-builders/jest": "14.1.0",
|
||||
"@angular-devkit/build-angular": "~14.2.7",
|
||||
"@angular-eslint/builder": "14.4.0",
|
||||
"@angular-eslint/eslint-plugin": "14.4.0",
|
||||
"@angular-eslint/eslint-plugin-template": "14.4.0",
|
||||
"@angular-eslint/schematics": "14.4.0",
|
||||
"@angular-eslint/template-parser": "14.4.0",
|
||||
"@angular/cli": "~14.2.7",
|
||||
"@angular/compiler-cli": "~14.2.8",
|
||||
"@angular-builders/jest": "15.0.0",
|
||||
"@angular-devkit/build-angular": "~15.1.0",
|
||||
"@angular-eslint/builder": "15.1.0",
|
||||
"@angular-eslint/eslint-plugin": "15.1.0",
|
||||
"@angular-eslint/eslint-plugin-template": "15.1.0",
|
||||
"@angular-eslint/schematics": "15.1.0",
|
||||
"@angular-eslint/template-parser": "15.1.0",
|
||||
"@angular/cli": "~15.1.0",
|
||||
"@angular/compiler-cli": "~15.1.0",
|
||||
"@types/jest": "28.1.6",
|
||||
"@types/node": "^18.7.23",
|
||||
"@typescript-eslint/eslint-plugin": "5.43.0",
|
||||
"@typescript-eslint/parser": "5.43.0",
|
||||
"@typescript-eslint/eslint-plugin": "^5.43.0",
|
||||
"@typescript-eslint/parser": "^5.43.0",
|
||||
"concurrently": "7.4.0",
|
||||
"eslint": "^8.28.0",
|
||||
"eslint": "^8.31.0",
|
||||
"jest": "28.1.3",
|
||||
"jest-environment-jsdom": "^29.2.2",
|
||||
"jest-preset-angular": "^12.2.3",
|
||||
|
@@ -61,7 +61,7 @@ const routes: Routes = [
|
||||
]
|
||||
|
||||
@NgModule({
|
||||
imports: [RouterModule.forRoot(routes, { relativeLinkResolution: 'legacy' })],
|
||||
imports: [RouterModule.forRoot(routes)],
|
||||
exports: [RouterModule],
|
||||
})
|
||||
export class AppRoutingModule {}
|
||||
|
@@ -80,6 +80,7 @@ import { TourNgBootstrapModule } from 'ngx-ui-tour-ng-bootstrap'
|
||||
import { MailAccountEditDialogComponent } from './components/common/edit-dialog/mail-account-edit-dialog/mail-account-edit-dialog.component'
|
||||
import { MailRuleEditDialogComponent } from './components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component'
|
||||
|
||||
import localeAr from '@angular/common/locales/ar'
|
||||
import localeBe from '@angular/common/locales/be'
|
||||
import localeCs from '@angular/common/locales/cs'
|
||||
import localeDa from '@angular/common/locales/da'
|
||||
@@ -100,6 +101,7 @@ import localeSv from '@angular/common/locales/sv'
|
||||
import localeTr from '@angular/common/locales/tr'
|
||||
import localeZh from '@angular/common/locales/zh'
|
||||
|
||||
registerLocaleData(localeAr)
|
||||
registerLocaleData(localeBe)
|
||||
registerLocaleData(localeCs)
|
||||
registerLocaleData(localeDa)
|
||||
@@ -198,7 +200,7 @@ function initializeApp(settings: SettingsService) {
|
||||
PdfViewerModule,
|
||||
NgSelectModule,
|
||||
ColorSliderModule,
|
||||
TourNgBootstrapModule.forRoot(),
|
||||
TourNgBootstrapModule,
|
||||
],
|
||||
providers: [
|
||||
{
|
||||
|
@@ -13,6 +13,7 @@
|
||||
<app-input-number i18n-title title="Maximum age (days)" formControlName="maximum_age" [showAdd]="false" [error]="error?.maximum_age"></app-input-number>
|
||||
<app-input-select i18n-title title="Attachment type" [items]="attachmentTypeOptions" formControlName="attachment_type"></app-input-select>
|
||||
<app-input-select i18n-title title="Consumption scope" [items]="consumptionScopeOptions" formControlName="consumption_scope" i18n-hint hint="See docs for .eml processing requirements"></app-input-select>
|
||||
<app-input-number i18n-title title="Rule order" formControlName="order" [showAdd]="false" [error]="error?.order"></app-input-number>
|
||||
</div>
|
||||
<div class="col">
|
||||
<p class="small" i18n>Paperless will only process mails that match <em>all</em> of the filters specified below.</p>
|
||||
|
@@ -153,6 +153,7 @@ export class MailRuleEditDialogComponent extends EditDialogComponent<PaperlessMa
|
||||
maximum_age: new FormControl(null),
|
||||
attachment_type: new FormControl(MailFilterAttachmentType.Attachments),
|
||||
consumption_scope: new FormControl(MailRuleConsumptionScope.Attachments),
|
||||
order: new FormControl(null),
|
||||
action: new FormControl(MailAction.MarkRead),
|
||||
action_parameter: new FormControl(null),
|
||||
assign_title_from: new FormControl(MailMetadataTitleOption.FromSubject),
|
||||
|
@@ -91,7 +91,7 @@
|
||||
<a ngbNavLink i18n>Content</a>
|
||||
<ng-template ngbNavContent>
|
||||
<div class="mb-3">
|
||||
<textarea class="form-control" id="content" rows="20" formControlName='content'></textarea>
|
||||
<textarea class="form-control" id="content" rows="20" formControlName='content' [class.rtl]="isRTL"></textarea>
|
||||
</div>
|
||||
</ng-template>
|
||||
</li>
|
||||
@@ -149,7 +149,7 @@
|
||||
|
||||
<li [ngbNavItem]="4" class="d-md-none">
|
||||
<a ngbNavLink>Preview</a>
|
||||
<ng-template ngbNavContent *ngIf="pdfPreview.offsetParent === undefined">
|
||||
<ng-template ngbNavContent *ngIf="!pdfPreview.offsetParent">
|
||||
<div class="position-relative">
|
||||
<ng-container *ngIf="getContentType() === 'application/pdf'">
|
||||
<div class="preview-sticky pdf-viewer-container" *ngIf="!useNativePdfViewer ; else nativePdfViewer">
|
||||
@@ -180,9 +180,9 @@
|
||||
|
||||
<div [ngbNavOutlet]="nav" class="mt-2"></div>
|
||||
|
||||
<button type="button" class="btn btn-outline-secondary" (click)="discard()" i18n [disabled]="networkActive || (isDirty$ | async) === false">Discard</button>
|
||||
<button type="button" class="btn btn-outline-primary" (click)="saveEditNext()" *ngIf="hasNext()" i18n [disabled]="networkActive || (isDirty$ | async) === false || error">Save & next</button>
|
||||
<button type="submit" class="btn btn-primary" i18n [disabled]="networkActive || (isDirty$ | async) === false || error">Save</button>
|
||||
<button type="button" class="btn btn-outline-secondary" (click)="discard()" i18n [disabled]="networkActive || (isDirty$ | async) !== true">Discard</button>
|
||||
<button type="button" class="btn btn-outline-primary" (click)="saveEditNext()" *ngIf="hasNext()" i18n [disabled]="networkActive || (isDirty$ | async) !== true || error">Save & next</button>
|
||||
<button type="submit" class="btn btn-primary" i18n [disabled]="networkActive || (isDirty$ | async) !== true || error">Save</button>
|
||||
</form>
|
||||
</div>
|
||||
|
||||
|
@@ -28,3 +28,7 @@
|
||||
left: 30%;
|
||||
right: 30%;
|
||||
}
|
||||
|
||||
textarea.rtl {
|
||||
direction: rtl;
|
||||
}
|
||||
|
@@ -135,6 +135,13 @@ export class DocumentDetailComponent
|
||||
: this.metadata?.original_mime_type
|
||||
}
|
||||
|
||||
get isRTL() {
|
||||
if (!this.metadata || !this.metadata.lang) return false
|
||||
else {
|
||||
return ['ar', 'he', 'fa'].includes(this.metadata.lang)
|
||||
}
|
||||
}
|
||||
|
||||
ngOnInit(): void {
|
||||
this.documentForm.valueChanges
|
||||
.pipe(takeUntil(this.unsubscribeNotifier))
|
||||
|
@@ -25,7 +25,13 @@
|
||||
</h5>
|
||||
</div>
|
||||
<p class="card-text">
|
||||
<span *ngIf="document.__search_hit__" [innerHtml]="document.__search_hit__.highlights"></span>
|
||||
<span *ngIf="document.__search_hit__ && document.__search_hit__.highlights" [innerHtml]="document.__search_hit__.highlights"></span>
|
||||
<span *ngIf="document.__search_hit__ && document.__search_hit__.comment_highlights">
|
||||
<svg width="1em" height="1em" fill="currentColor" class="me-2">
|
||||
<use xlink:href="assets/bootstrap-icons.svg#chat-left-text"/>
|
||||
</svg>
|
||||
<span [innerHtml]="document.__search_hit__.comment_highlights"></span>
|
||||
</span>
|
||||
<span *ngIf="!document.__search_hit__" class="result-content">{{contentTrimmed}}</span>
|
||||
</p>
|
||||
|
||||
|
@@ -10,4 +10,6 @@ export interface PaperlessDocumentMetadata {
|
||||
original_filename?: string
|
||||
|
||||
has_archive_version?: boolean
|
||||
|
||||
lang?: string
|
||||
}
|
||||
|
@@ -10,6 +10,7 @@ export interface SearchHit {
|
||||
rank?: number
|
||||
|
||||
highlights?: string
|
||||
comment_highlights?: string
|
||||
}
|
||||
|
||||
export interface PaperlessDocument extends ObjectWithId {
|
||||
|
@@ -36,6 +36,8 @@ export interface PaperlessMailRule extends ObjectWithId {
|
||||
|
||||
account: number // PaperlessMailAccount.id
|
||||
|
||||
order: number
|
||||
|
||||
folder: string
|
||||
|
||||
filter_from: string
|
||||
|
@@ -13,6 +13,7 @@ export enum FileStatusPhase {
|
||||
|
||||
export const FILE_STATUS_MESSAGES = {
|
||||
document_already_exists: $localize`Document already exists.`,
|
||||
asn_already_exists: $localize`Document with ASN already exists.`,
|
||||
file_not_found: $localize`File not found.`,
|
||||
pre_consume_script_not_found: $localize`:Pre-Consume is a term that appears like that in the documentation as well and does not need a specific translation:Pre-consume script does not exist.`,
|
||||
pre_consume_script_error: $localize`:Pre-Consume is a term that appears like that in the documentation as well and does not need a specific translation:Error while executing pre-consume script.`,
|
||||
|
@@ -146,6 +146,12 @@ export class SettingsService {
|
||||
englishName: 'English (US)',
|
||||
dateInputFormat: 'mm/dd/yyyy',
|
||||
},
|
||||
{
|
||||
code: 'ar-ar',
|
||||
name: $localize`Arabic`,
|
||||
englishName: 'Arabic',
|
||||
dateInputFormat: 'yyyy-mm-dd',
|
||||
},
|
||||
{
|
||||
code: 'be-by',
|
||||
name: $localize`Belarusian`,
|
||||
|
@@ -5,7 +5,7 @@ export const environment = {
|
||||
apiBaseUrl: document.baseURI + 'api/',
|
||||
apiVersion: '2',
|
||||
appTitle: 'Paperless-ngx',
|
||||
version: '1.11.3',
|
||||
version: '1.12.0',
|
||||
webSocketHost: window.location.host,
|
||||
webSocketProtocol: window.location.protocol == 'https:' ? 'wss:' : 'ws:',
|
||||
webSocketBaseUrl: base_url.pathname + 'ws/',
|
||||
|
@@ -1,5 +1,5 @@
|
||||
// This file can be replaced during build by using the `fileReplacements` array.
|
||||
// `ng build --prod` replaces `environment.ts` with `environment.prod.ts`.
|
||||
// `ng build --configuration production` replaces `environment.ts` with `environment.prod.ts`.
|
||||
// The list of file replacements can be found in `angular.json`.
|
||||
|
||||
export const environment = {
|
||||
|
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1326,7 +1326,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.html</context>
|
||||
<context context-type="linenumber">15</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Consumption scope</target>
|
||||
<target state="translated">Umfang der Verarbeitung</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="56643687972548912" datatype="html">
|
||||
<source>See docs for .eml processing requirements</source>
|
||||
@@ -1334,7 +1334,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.html</context>
|
||||
<context context-type="linenumber">15</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">See docs for .eml processing requirements</target>
|
||||
<target state="translated">Für die Voraussetzungen zur Verarbeitung von E-Mails als .eml siehe Dokumentation</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="5488632521862493221" datatype="html">
|
||||
<source>Paperless will only process mails that match <x id="START_EMPHASISED_TEXT" ctype="x-em" equiv-text="<em>"/>all<x id="CLOSE_EMPHASISED_TEXT" ctype="x-em" equiv-text="</em>"/> of the filters specified below.</source>
|
||||
@@ -1462,7 +1462,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">36</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Only process attachments</target>
|
||||
<target state="translated">Nur Anhänge verarbeiten</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="936923743212522897" datatype="html">
|
||||
<source>Process all files, including 'inline' attachments</source>
|
||||
@@ -1470,7 +1470,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">29</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Process all files, including 'inline' attachments</target>
|
||||
<target state="translated">Alle Dateien verarbeiten, auch Anhänge im Textkörper</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="9025522236384167767" datatype="html">
|
||||
<source>Process message as .eml</source>
|
||||
@@ -1478,7 +1478,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">40</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Process message as .eml</target>
|
||||
<target state="translated">E-mail als .eml verarbeiten</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="7411485377918318115" datatype="html">
|
||||
<source>Process message as .eml and attachments separately</source>
|
||||
@@ -1486,7 +1486,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">44</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Process message as .eml and attachments separately</target>
|
||||
<target state="translated">E-mail als .eml und Anhänge separat verarbeiten</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="7022070615528435141" datatype="html" approved="yes">
|
||||
<source>Delete</source>
|
||||
|
@@ -227,7 +227,7 @@
|
||||
<context context-type="sourcefile">node_modules/src/timepicker/timepicker.ts</context>
|
||||
<context context-type="linenumber">429</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">SS</target>
|
||||
<target state="translated">SS</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="ngb.timepicker.seconds" datatype="html">
|
||||
<source>Seconds</source>
|
||||
@@ -345,7 +345,7 @@
|
||||
<context context-type="sourcefile">src/app/app.component.ts</context>
|
||||
<context context-type="linenumber">119</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Prev</target>
|
||||
<target state="translated">Anterior</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="3885497195825665706" datatype="html">
|
||||
<source>Next</source>
|
||||
@@ -365,7 +365,7 @@
|
||||
<context context-type="sourcefile">src/app/app.component.ts</context>
|
||||
<context context-type="linenumber">121</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">End</target>
|
||||
<target state="translated">Fin</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="3909462337752654810" datatype="html">
|
||||
<source>The dashboard can be used to show saved views, such as an 'Inbox'. Those settings are found under Settings > Saved Views once you have created some.</source>
|
||||
@@ -381,7 +381,7 @@
|
||||
<context context-type="sourcefile">src/app/app.component.ts</context>
|
||||
<context context-type="linenumber">136</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Drag-and-drop documents here to start uploading or place them in the consume folder. You can also drag-and-drop documents anywhere on all other pages of the web app. Once you do, Paperless-ngx will start training its machine learning algorithms.</target>
|
||||
<target state="translated">Arrastra los documentos aquí para subirlos o colócalos en la carpeta de consumo. También puedes arrastrar los documentos en cualquier parte del resto de páginas de la aplicación. Una vez lo hagas, Paperless-ngx comenzará a entrenar los algoritmos de machine learning.</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="7495498057594070122" datatype="html">
|
||||
<source>The documents list shows all of your documents and allows for filtering as well as bulk-editing. There are three different view styles: list, small cards and large cards. A list of documents currently opened for editing is shown in the sidebar.</source>
|
||||
@@ -389,7 +389,7 @@
|
||||
<context context-type="sourcefile">src/app/app.component.ts</context>
|
||||
<context context-type="linenumber">145</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">The documents list shows all of your documents and allows for filtering as well as bulk-editing. There are three different view styles: list, small cards and large cards. A list of documents currently opened for editing is shown in the sidebar.</target>
|
||||
<target state="translated">La lista de documentos muestra todos tus documentos y te permite filtrar y editar en masa. Hay disponibles tres vistas diferentes: lista, tarjetas pequeñas y tarjetas grandes. La lista de los documentos que se encuentran abiertos en un momento dado se muestra en la barra lateral.</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="1334220418719920556" datatype="html">
|
||||
<source>The filtering tools allow you to quickly find documents using various searches, dates, tags, etc.</source>
|
||||
@@ -437,7 +437,7 @@
|
||||
<context context-type="sourcefile">src/app/app.component.ts</context>
|
||||
<context context-type="linenumber">203</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Thank you! 🙏</target>
|
||||
<target state="translated">¡Gracias! 🙏</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="7354947513482088740" datatype="html">
|
||||
<source>There are <em>tons</em> more features and info we didn't cover here, but this should get you started. Check out the documentation or visit the project on GitHub to learn more or to report issues.</source>
|
||||
@@ -453,7 +453,7 @@
|
||||
<context context-type="sourcefile">src/app/app.component.ts</context>
|
||||
<context context-type="linenumber">207</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Lastly, on behalf of every contributor to this community-supported project, thank you for using Paperless-ngx!</target>
|
||||
<target state="translated">Por último, en nombre de todos los colaboradores de este proyecto apoyado por la comunidad, ¡gracias por utilizar Paperless-ngx!</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="5749300816154614125" datatype="html">
|
||||
<source>Initiating upload...</source>
|
||||
@@ -770,7 +770,7 @@
|
||||
<context context-type="sourcefile">src/app/components/app-frame/app-frame.component.html</context>
|
||||
<context context-type="linenumber">214</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Paperless-ngx can automatically check for updates</target>
|
||||
<target state="translated">Paperless-ngx puede comprobar automáticamente si hay actualizaciones</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="894819944961861800" datatype="html">
|
||||
<source> How does this work? </source>
|
||||
@@ -778,7 +778,7 @@
|
||||
<context context-type="sourcefile">src/app/components/app-frame/app-frame.component.html</context>
|
||||
<context context-type="linenumber">221,223</context>
|
||||
</context-group>
|
||||
<target state="needs-translation"> How does this work? </target>
|
||||
<target state="translated"> ¿Cómo funciona? </target>
|
||||
</trans-unit>
|
||||
<trans-unit id="509090351011426949" datatype="html">
|
||||
<source>Update available</source>
|
||||
@@ -806,7 +806,7 @@
|
||||
<context context-type="sourcefile">src/app/components/app-frame/app-frame.component.ts</context>
|
||||
<context context-type="linenumber">216</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">An error occurred while saving update checking settings.</target>
|
||||
<target state="translated">Se produjo un error al guardar la configuración de comprobación de actualizaciones.</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="8700121026680200191" datatype="html" approved="yes">
|
||||
<source>Clear</source>
|
||||
@@ -1194,7 +1194,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-account-edit-dialog/mail-account-edit-dialog.component.html</context>
|
||||
<context context-type="linenumber">11</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">IMAP Server</target>
|
||||
<target state="translated">Servidor IMAP</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="6575044156016560168" datatype="html">
|
||||
<source>IMAP Port</source>
|
||||
@@ -1202,7 +1202,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-account-edit-dialog/mail-account-edit-dialog.component.html</context>
|
||||
<context context-type="linenumber">12</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">IMAP Port</target>
|
||||
<target state="translated">Puerto IMAP</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="5418425343712813426" datatype="html">
|
||||
<source>IMAP Security</source>
|
||||
@@ -1210,7 +1210,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-account-edit-dialog/mail-account-edit-dialog.component.html</context>
|
||||
<context context-type="linenumber">13</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">IMAP Security</target>
|
||||
<target state="translated">Seguridad IMAP</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="5248717555542428023" datatype="html">
|
||||
<source>Username</source>
|
||||
@@ -1218,7 +1218,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-account-edit-dialog/mail-account-edit-dialog.component.html</context>
|
||||
<context context-type="linenumber">16</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Username</target>
|
||||
<target state="translated">Usuario</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="1431416938026210429" datatype="html">
|
||||
<source>Password</source>
|
||||
@@ -1226,7 +1226,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-account-edit-dialog/mail-account-edit-dialog.component.html</context>
|
||||
<context context-type="linenumber">17</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Password</target>
|
||||
<target state="translated">Contraseña</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="6124167940736826613" datatype="html">
|
||||
<source>Character Set</source>
|
||||
@@ -1234,7 +1234,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-account-edit-dialog/mail-account-edit-dialog.component.html</context>
|
||||
<context context-type="linenumber">18</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Character Set</target>
|
||||
<target state="translated">Conjunto de caracteres</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="451418349275958054" datatype="html">
|
||||
<source>No encryption</source>
|
||||
@@ -1242,7 +1242,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-account-edit-dialog/mail-account-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">12</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">No encryption</target>
|
||||
<target state="translated">Sin cifrado</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="3719080555538542367" datatype="html">
|
||||
<source>SSL</source>
|
||||
@@ -1250,7 +1250,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-account-edit-dialog/mail-account-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">13</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">SSL</target>
|
||||
<target state="translated">SSL</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="2620794666957669114" datatype="html">
|
||||
<source>STARTTLS</source>
|
||||
@@ -1258,7 +1258,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-account-edit-dialog/mail-account-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">14</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">STARTTLS</target>
|
||||
<target state="translated">STARTTLS</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="8758081884575368561" datatype="html">
|
||||
<source>Create new mail account</source>
|
||||
@@ -1266,7 +1266,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-account-edit-dialog/mail-account-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">28</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Create new mail account</target>
|
||||
<target state="translated">Crear una nueva cuenta de correo</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="5559445021532852612" datatype="html">
|
||||
<source>Edit mail account</source>
|
||||
@@ -1274,7 +1274,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-account-edit-dialog/mail-account-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">32</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Edit mail account</target>
|
||||
<target state="translated">Editar cuenta de correo</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="4086606389696938932" datatype="html">
|
||||
<source>Account</source>
|
||||
@@ -1286,7 +1286,7 @@
|
||||
<context context-type="sourcefile">src/app/components/manage/settings/settings.component.html</context>
|
||||
<context context-type="linenumber">284</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Account</target>
|
||||
<target state="translated">Cuenta</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="7046259383943324039" datatype="html">
|
||||
<source>Folder</source>
|
||||
@@ -1294,7 +1294,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.html</context>
|
||||
<context context-type="linenumber">12</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Folder</target>
|
||||
<target state="translated">Carpeta</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="1391527525114848695" datatype="html">
|
||||
<source>Subfolders must be separated by a delimiter, often a dot ('.') or slash ('/'), but it varies by mail server.</source>
|
||||
@@ -1302,7 +1302,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.html</context>
|
||||
<context context-type="linenumber">12</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Subfolders must be separated by a delimiter, often a dot ('.') or slash ('/'), but it varies by mail server.</target>
|
||||
<target state="translated">Las subcarpetas deben estar separadas por un delimitador, típicamente un punto ('.') o una barra ('/'), aunque varía entre servidores de correo.</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="101686279614365671" datatype="html">
|
||||
<source>Maximum age (days)</source>
|
||||
@@ -1318,7 +1318,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.html</context>
|
||||
<context context-type="linenumber">14</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Attachment type</target>
|
||||
<target state="translated">Tipo de archivo adjunto</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="559099472394646919" datatype="html">
|
||||
<source>Consumption scope</source>
|
||||
@@ -1334,7 +1334,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.html</context>
|
||||
<context context-type="linenumber">15</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">See docs for .eml processing requirements</target>
|
||||
<target state="translated">Vea la documentación para los requerimientos de procesado para .eml</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="5488632521862493221" datatype="html">
|
||||
<source>Paperless will only process mails that match <x id="START_EMPHASISED_TEXT" ctype="x-em" equiv-text="<em>"/>all<x id="CLOSE_EMPHASISED_TEXT" ctype="x-em" equiv-text="</em>"/> of the filters specified below.</source>
|
||||
@@ -1390,7 +1390,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.html</context>
|
||||
<context context-type="linenumber">25</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Action</target>
|
||||
<target state="translated">Acción</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="4274038999388817994" datatype="html">
|
||||
<source>Action is only performed when documents are consumed from the mail. Mails without attachments remain entirely untouched.</source>
|
||||
@@ -1398,7 +1398,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.html</context>
|
||||
<context context-type="linenumber">25</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Action is only performed when documents are consumed from the mail. Mails without attachments remain entirely untouched.</target>
|
||||
<target state="translated">La acción solo es ejecutada cuando se consumen documentos desde el correo. Los correos sin adjuntos permanecen intactos.</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="1261794314435932203" datatype="html">
|
||||
<source>Action parameter</source>
|
||||
@@ -1430,7 +1430,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.html</context>
|
||||
<context context-type="linenumber">30</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Assign correspondent from</target>
|
||||
<target state="translated">Asignar interlocutor desde</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="4875491778188965469" datatype="html">
|
||||
<source>Assign correspondent</source>
|
||||
@@ -1438,7 +1438,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.html</context>
|
||||
<context context-type="linenumber">31</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Assign correspondent</target>
|
||||
<target state="translated">Asignar interlocutor</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="1519954996184640001" datatype="html">
|
||||
<source>Error</source>
|
||||
@@ -1462,7 +1462,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">36</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Only process attachments</target>
|
||||
<target state="translated">Solo procesar ficheros adjuntos</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="936923743212522897" datatype="html">
|
||||
<source>Process all files, including 'inline' attachments</source>
|
||||
@@ -1470,7 +1470,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">29</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Process all files, including 'inline' attachments</target>
|
||||
<target state="translated">Procesar todos los archivos, incluyendo los incrustados en el cuerpo del mensaje</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="9025522236384167767" datatype="html">
|
||||
<source>Process message as .eml</source>
|
||||
@@ -1478,7 +1478,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">40</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Process message as .eml</target>
|
||||
<target state="translated">Procesar mensaje como .eml</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="7411485377918318115" datatype="html">
|
||||
<source>Process message as .eml and attachments separately</source>
|
||||
@@ -1486,7 +1486,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">44</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Process message as .eml and attachments separately</target>
|
||||
<target state="translated">Procesar mensaje como .eml y los adjuntos por separado</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="7022070615528435141" datatype="html" approved="yes">
|
||||
<source>Delete</source>
|
||||
@@ -1558,7 +1558,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">55</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Move to specified folder</target>
|
||||
<target state="translated">Mover a la carpeta especificada</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="4593278936733161020" datatype="html">
|
||||
<source>Mark as read, don't process read mails</source>
|
||||
@@ -1590,7 +1590,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">74</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Use subject as title</target>
|
||||
<target state="translated">Utilizar asunto como título</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="8645471396972938185" datatype="html">
|
||||
<source>Use attachment filename as title</source>
|
||||
@@ -1598,7 +1598,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">78</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Use attachment filename as title</target>
|
||||
<target state="translated">Usar nombre de archivo adjunto como título</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="1568902914205618549" datatype="html">
|
||||
<source>Do not assign a correspondent</source>
|
||||
@@ -1606,7 +1606,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">85</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Do not assign a correspondent</target>
|
||||
<target state="translated">No asignar un interlocutor</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="3567746385454588269" datatype="html">
|
||||
<source>Use mail address</source>
|
||||
@@ -1614,7 +1614,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">89</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Use mail address</target>
|
||||
<target state="translated">Usar dirección de correo</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="445154175758965852" datatype="html">
|
||||
<source>Use name (or mail address if not available)</source>
|
||||
@@ -1622,7 +1622,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">93</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Use name (or mail address if not available)</target>
|
||||
<target state="translated">Usar nombre (o dirección de correo si no está disponible)</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="1258862217749148424" datatype="html">
|
||||
<source>Use correspondent selected below</source>
|
||||
@@ -1630,7 +1630,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">97</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Use correspondent selected below</target>
|
||||
<target state="translated">Usar el interlocutor seleccionado a continuación</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="3147349817770432927" datatype="html">
|
||||
<source>Create new mail rule</source>
|
||||
@@ -1638,7 +1638,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">137</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Create new mail rule</target>
|
||||
<target state="translated">Crear nueva regla de correo</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="3374331029704382439" datatype="html">
|
||||
<source>Edit mail rule</source>
|
||||
@@ -1646,7 +1646,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">141</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Edit mail rule</target>
|
||||
<target state="translated">Editar regla de correo</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="6036319582202941456" datatype="html">
|
||||
<source><x id="START_EMPHASISED_TEXT" ctype="x-em" equiv-text="<em>"/>Note that editing a path does not apply changes to stored files until you have run the 'document_renamer' utility. See the <x id="START_LINK" ctype="x-a" equiv-text="<a target="_blank" href="https://docs.paperless-ngx.com/administration/#renamer">"/>documentation<x id="CLOSE_LINK" ctype="x-a" equiv-text="</a>"/>.<x id="CLOSE_EMPHASISED_TEXT" ctype="x-em" equiv-text="</em>"/></source>
|
||||
@@ -1908,7 +1908,7 @@
|
||||
<context context-type="sourcefile">src/app/components/dashboard/dashboard.component.ts</context>
|
||||
<context context-type="linenumber">20</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Welcome to Paperless-ngx</target>
|
||||
<target state="translated">Bienvenido a Paperless-ngx</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="2946624699882754313" datatype="html" approved="yes">
|
||||
<source>Show all</source>
|
||||
@@ -2079,7 +2079,7 @@
|
||||
<context context-type="sourcefile">src/app/components/dashboard/widgets/welcome-widget/welcome-widget.component.html</context>
|
||||
<context context-type="linenumber">3</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Paperless-ngx is running!</target>
|
||||
<target state="translated">¡Paperless-ngx está corriendo!</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="3326049540711826572" datatype="html">
|
||||
<source>You're ready to start uploading documents! Explore the various features of this web app on your own, or start a quick tour using the button below.</source>
|
||||
@@ -2095,7 +2095,7 @@
|
||||
<context context-type="sourcefile">src/app/components/dashboard/widgets/welcome-widget/welcome-widget.component.html</context>
|
||||
<context context-type="linenumber">5</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">More detail on how to use and configure Paperless-ngx is always available in the <x id="START_LINK" ctype="x-a" equiv-text="<a href="https://docs.paperless-ngx.com" target="_blank">"/>documentation<x id="CLOSE_LINK" ctype="x-a" equiv-text="</a>"/>.</target>
|
||||
<target state="translated">Encontrarás más información sobre cómo utilizar y configurar Paperless-ngx en la <x id="START_LINK" ctype="x-a" equiv-text="<a href="https://docs.paperless-ngx.com" target="_blank">"/>documentación<x id="CLOSE_LINK" ctype="x-a" equiv-text="</a>"/>.</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="4294899532887357745" datatype="html">
|
||||
<source>Thanks for being a part of the Paperless-ngx community!</source>
|
||||
@@ -2103,7 +2103,7 @@
|
||||
<context context-type="sourcefile">src/app/components/dashboard/widgets/welcome-widget/welcome-widget.component.html</context>
|
||||
<context context-type="linenumber">8</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Thanks for being a part of the Paperless-ngx community!</target>
|
||||
<target state="translated">¡Gracias por formar parte de la comunidad de Paperless-ngx!</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="1415832194529539652" datatype="html">
|
||||
<source>Start the tour</source>
|
||||
@@ -3713,7 +3713,7 @@
|
||||
<context context-type="sourcefile">src/app/components/manage/settings/settings.component.html</context>
|
||||
<context context-type="linenumber">2</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Start tour</target>
|
||||
<target state="translated">Iniciar la visita</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="4798013226763881638" datatype="html">
|
||||
<source>Open Django Admin</source>
|
||||
@@ -3721,7 +3721,7 @@
|
||||
<context context-type="sourcefile">src/app/components/manage/settings/settings.component.html</context>
|
||||
<context context-type="linenumber">4</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Open Django Admin</target>
|
||||
<target state="translated">Abrir administración de Django</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="6439365426343089851" datatype="html">
|
||||
<source>General</source>
|
||||
@@ -3833,7 +3833,7 @@
|
||||
<context context-type="sourcefile">src/app/components/manage/settings/settings.component.html</context>
|
||||
<context context-type="linenumber">99</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Sidebar</target>
|
||||
<target state="translated">Barra lateral</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="4608457133854405683" datatype="html">
|
||||
<source>Use 'slim' sidebar (icons only)</source>
|
||||
@@ -3841,7 +3841,7 @@
|
||||
<context context-type="sourcefile">src/app/components/manage/settings/settings.component.html</context>
|
||||
<context context-type="linenumber">103</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Use 'slim' sidebar (icons only)</target>
|
||||
<target state="translated">Usar barra lateral compacta (solo iconos)</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="1356890996281769972" datatype="html" approved="yes">
|
||||
<source>Dark mode</source>
|
||||
@@ -3897,7 +3897,7 @@
|
||||
<context context-type="sourcefile">src/app/components/manage/settings/settings.component.html</context>
|
||||
<context context-type="linenumber">135</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Update checking</target>
|
||||
<target state="translated">Comprobación de actualizaciones</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="7890007688616707209" datatype="html">
|
||||
<source> Update checking works by pinging the the public <x id="START_LINK" ctype="x-a" equiv-text="<a href="https://api.github.com/repos/paperless-ngx/paperless-ngx/releases/latest" target="_blank" rel="noopener noreferrer">"/>Github API<x id="CLOSE_LINK" ctype="x-a" equiv-text="</a>"/> for the latest release to determine whether a new version is available.<x id="LINE_BREAK" ctype="lb" equiv-text="<br/>"/> Actual updating of the app must still be performed manually. </source>
|
||||
@@ -3905,7 +3905,7 @@
|
||||
<context context-type="sourcefile">src/app/components/manage/settings/settings.component.html</context>
|
||||
<context context-type="linenumber">139,142</context>
|
||||
</context-group>
|
||||
<target state="needs-translation"> Update checking works by pinging the the public <x id="START_LINK" ctype="x-a" equiv-text="<a href="https://api.github.com/repos/paperless-ngx/paperless-ngx/releases/latest" target="_blank" rel="noopener noreferrer">"/>Github API<x id="CLOSE_LINK" ctype="x-a" equiv-text="</a>"/> for the latest release to determine whether a new version is available.<x id="LINE_BREAK" ctype="lb" equiv-text="<br/>"/> Actual updating of the app must still be performed manually. </target>
|
||||
<target state="translated"> La comprobación de actualizaciones funciona contactando con la <x id="START_LINK" ctype="x-a" equiv-text="<a href="https://api.github.com/repos/paperless-ngx/paperless-ngx/releases/latest" target="_blank" rel="noopener noreferrer">"/>API pública de Github<x id="CLOSE_LINK" ctype="x-a" equiv-text="</a>"/> para obtener la información de la última versión y así determinar si hay una nueva disponible.<x id="LINE_BREAK" ctype="lb" equiv-text="<br/>"/> La propia aplicación debe ser actualizada manualmente. </target>
|
||||
</trans-unit>
|
||||
<trans-unit id="5489945693955857309" datatype="html">
|
||||
<source><x id="START_EMPHASISED_TEXT" ctype="x-em" equiv-text="<em>"/>No tracking data is collected by the app in any way.<x id="CLOSE_EMPHASISED_TEXT" ctype="x-em" equiv-text="</em>"/></source>
|
||||
@@ -3921,7 +3921,7 @@
|
||||
<context context-type="sourcefile">src/app/components/manage/settings/settings.component.html</context>
|
||||
<context context-type="linenumber">146</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Enable update checking</target>
|
||||
<target state="translated">Habilitar comprobación de actualizaciones</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="5478370193831195440" datatype="html">
|
||||
<source>Note that for users of thirdy-party containers e.g. linuxserver.io this notification may be 'ahead' of the current third-party release.</source>
|
||||
@@ -4049,7 +4049,7 @@
|
||||
<context context-type="sourcefile">src/app/components/manage/settings/settings.component.html</context>
|
||||
<context context-type="linenumber">231</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Mail</target>
|
||||
<target state="translated">Correo</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="8913167930428886792" datatype="html">
|
||||
<source>Mail accounts</source>
|
||||
@@ -4057,7 +4057,7 @@
|
||||
<context context-type="sourcefile">src/app/components/manage/settings/settings.component.html</context>
|
||||
<context context-type="linenumber">236</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Mail accounts</target>
|
||||
<target state="translated">Cuentas de correo</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="1259421956660976189" datatype="html">
|
||||
<source>Add Account</source>
|
||||
@@ -4065,7 +4065,7 @@
|
||||
<context context-type="sourcefile">src/app/components/manage/settings/settings.component.html</context>
|
||||
<context context-type="linenumber">241</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Add Account</target>
|
||||
<target state="translated">Añadir cuenta</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="2188854519574316630" datatype="html">
|
||||
<source>Server</source>
|
||||
@@ -4073,7 +4073,7 @@
|
||||
<context context-type="sourcefile">src/app/components/manage/settings/settings.component.html</context>
|
||||
<context context-type="linenumber">249</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Server</target>
|
||||
<target state="translated">Servidor</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="6235247415162820954" datatype="html">
|
||||
<source>No mail accounts defined.</source>
|
||||
@@ -4081,7 +4081,7 @@
|
||||
<context context-type="sourcefile">src/app/components/manage/settings/settings.component.html</context>
|
||||
<context context-type="linenumber">267</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">No mail accounts defined.</target>
|
||||
<target state="translated">No hay ninguna cuenta de correo configurada.</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="5364020217520256833" datatype="html">
|
||||
<source>Mail rules</source>
|
||||
@@ -4089,7 +4089,7 @@
|
||||
<context context-type="sourcefile">src/app/components/manage/settings/settings.component.html</context>
|
||||
<context context-type="linenumber">271</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Mail rules</target>
|
||||
<target state="translated">Reglas de correo</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="1372022816709469401" datatype="html">
|
||||
<source>Add Rule</source>
|
||||
@@ -4097,7 +4097,7 @@
|
||||
<context context-type="sourcefile">src/app/components/manage/settings/settings.component.html</context>
|
||||
<context context-type="linenumber">276</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Add Rule</target>
|
||||
<target state="translated">Añadir regla</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="6751234988479444294" datatype="html">
|
||||
<source>No mail rules defined.</source>
|
||||
@@ -4105,7 +4105,7 @@
|
||||
<context context-type="sourcefile">src/app/components/manage/settings/settings.component.html</context>
|
||||
<context context-type="linenumber">302</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">No mail rules defined.</target>
|
||||
<target state="translated">No hay reglas de correo definidas.</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="5610279464668232148" datatype="html" approved="yes">
|
||||
<source>Saved view "<x id="PH" equiv-text="savedView.name"/>" deleted.</source>
|
||||
@@ -4619,7 +4619,7 @@
|
||||
<context context-type="sourcefile">src/app/guards/dirty-saved-view.guard.ts</context>
|
||||
<context context-type="linenumber">34</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Save and close</target>
|
||||
<target state="translated">Guardar y cerrar</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="7536524521722799066" datatype="html" approved="yes">
|
||||
<source>(no title)</source>
|
||||
File diff suppressed because it is too large
@@ -2837,7 +2837,7 @@
|
||||
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
|
||||
<context context-type="linenumber">267,269</context>
|
||||
</context-group>
|
||||
<target state="translated">Ta operacija bo odstranila oznake <x id="PH" equiv-text="this._localizeList( changedTags.itemsToRemove )"/> iz <x id="PH_1" equiv-text="this.list.selected.size" /> izbranih dokumentov.</target>
|
||||
<target state="translated">Ta operacija bo odstranila oznake <x id="PH" equiv-text="this._localizeList( changedTags.itemsToRemove )"/> iz <x id="PH_1" equiv-text="this.list.selected.size"/> izbranih dokumentov.</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="2739066218579571288" datatype="html">
|
||||
<source>This operation will add the tags <x id="PH" equiv-text="this._localizeList( changedTags.itemsToAdd )"/> and remove the tags <x id="PH_1" equiv-text="this._localizeList( changedTags.itemsToRemove )"/> on <x id="PH_2" equiv-text="this.list.selected.size"/> selected document(s).</source>
|
||||
|
@@ -1326,7 +1326,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.html</context>
|
||||
<context context-type="linenumber">15</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Consumption scope</target>
|
||||
<target state="translated">Obim obrade priloga</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="56643687972548912" datatype="html">
|
||||
<source>See docs for .eml processing requirements</source>
|
||||
@@ -1462,7 +1462,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">36</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Only process attachments</target>
|
||||
<target state="translated">Obradi samo priloge</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="936923743212522897" datatype="html">
|
||||
<source>Process all files, including 'inline' attachments</source>
|
||||
@@ -1470,7 +1470,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">29</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Process all files, including 'inline' attachments</target>
|
||||
<target state="translated">Obradite sve fajlove, uključujući "umetnute" priloge</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="9025522236384167767" datatype="html">
|
||||
<source>Process message as .eml</source>
|
||||
@@ -1478,7 +1478,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">40</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Process message as .eml</target>
|
||||
<target state="translated">Obradi poruku kao .eml</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="7411485377918318115" datatype="html">
|
||||
<source>Process message as .eml and attachments separately</source>
|
||||
@@ -1486,7 +1486,7 @@
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">44</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">Process message as .eml and attachments separately</target>
|
||||
<target state="translated">Obradite poruku kao .eml i priloge odvojeno</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="7022070615528435141" datatype="html">
|
||||
<source>Delete</source>
|
||||
|
@@ -3469,7 +3469,7 @@
|
||||
<context context-type="sourcefile">src/app/components/manage/correspondent-list/correspondent-list.component.ts</context>
|
||||
<context context-type="linenumber">33</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">correspondent</target>
|
||||
<target state="translated">ek yazar</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="1612355304340685070" datatype="html">
|
||||
<source>correspondents</source>
|
||||
@@ -3477,7 +3477,7 @@
|
||||
<context context-type="sourcefile">src/app/components/manage/correspondent-list/correspondent-list.component.ts</context>
|
||||
<context context-type="linenumber">34</context>
|
||||
</context-group>
|
||||
<target state="needs-translation">correspondents</target>
|
||||
<target state="translated">ek yazarlar</target>
|
||||
</trans-unit>
|
||||
<trans-unit id="6360600151505327572" datatype="html">
|
||||
<source>Last used</source>
|
||||
|
@@ -509,6 +509,10 @@ table.table {
|
||||
|
||||
.progress {
|
||||
background-color: var(--bs-body-bg);
|
||||
|
||||
.text-bg-primary {
|
||||
background-color: var(--bs-primary) !important;
|
||||
}
|
||||
}
|
||||
|
||||
.ngb-dp-header,
|
||||
|
@@ -10,12 +10,13 @@
|
||||
"experimentalDecorators": true,
|
||||
"moduleResolution": "node",
|
||||
"importHelpers": true,
|
||||
"target": "es2015",
|
||||
"target": "ES2022",
|
||||
"module": "es2020",
|
||||
"lib": [
|
||||
"es2018",
|
||||
"es2020",
|
||||
"dom"
|
||||
]
|
||||
],
|
||||
"useDefineForClassFields": false
|
||||
},
|
||||
"angularCompilerOptions": {
|
||||
"enableI18nLegacyMessageIdFormat": false,
|
||||
|
@@ -2,10 +2,12 @@ import logging
import os
import shutil
import tempfile
from dataclasses import dataclass
from functools import lru_cache
from math import ceil
from pathlib import Path
from typing import List
from typing import Optional
from typing import Tuple

import magic
from django.conf import settings
@@ -25,6 +27,42 @@ class BarcodeImageFormatError(Exception):
    pass


@dataclass(frozen=True)
class Barcode:
    """
    Holds the information about a single barcode and its location
    """

    page: int
    value: str

    @property
    def is_separator(self) -> bool:
        """
        Returns True if the barcode value equals the configured separation value,
        False otherwise
        """
        return self.value == settings.CONSUMER_BARCODE_STRING

    @property
    def is_asn(self) -> bool:
        """
        Returns True if the barcode value matches the configured ASN prefix,
        False otherwise
        """
        return self.value.startswith(settings.CONSUMER_ASN_BARCODE_PREFIX)


@dataclass
class DocumentBarcodeInfo:
    """
    Describes a single document's barcode status
    """

    pdf_path: Path
    barcodes: List[Barcode]


@lru_cache(maxsize=8)
def supported_file_type(mime_type) -> bool:
    """
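For orientation, a minimal sketch (outside the patch itself) of how the new Barcode value object behaves: each barcode carries its page number and raw text, and the two properties classify it against the configured settings. The constants below stand in for settings.CONSUMER_BARCODE_STRING and settings.CONSUMER_ASN_BARCODE_PREFIX; the values "PATCHT" and "ASN" are illustrative assumptions, not a claim about the project defaults.

    from dataclasses import dataclass

    SEPARATOR_VALUE = "PATCHT"  # assumed stand-in for settings.CONSUMER_BARCODE_STRING
    ASN_PREFIX = "ASN"          # assumed stand-in for settings.CONSUMER_ASN_BARCODE_PREFIX

    @dataclass(frozen=True)
    class BarcodeSketch:
        page: int
        value: str

        @property
        def is_separator(self) -> bool:
            return self.value == SEPARATOR_VALUE

        @property
        def is_asn(self) -> bool:
            return self.value.startswith(ASN_PREFIX)

    found = [BarcodeSketch(0, "PATCHT"), BarcodeSketch(2, "ASN00042")]
    assert found[0].is_separator and not found[0].is_asn
    assert found[1].is_asn and not found[1].is_separator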
@@ -107,14 +145,17 @@ def convert_from_tiff_to_pdf(filepath: str) -> str:
    return newpath


def scan_file_for_separating_barcodes(filepath: str) -> Tuple[Optional[str], List[int]]:
def scan_file_for_barcodes(
    filepath: str,
) -> DocumentBarcodeInfo:
    """
    Scan the provided pdf file for page separating barcodes
    Returns a PDF filepath and a list of pagenumbers,
    which separate the file into new files
    Scan the provided pdf file for any barcodes
    Returns a PDF filepath and a list of
    (page_number, barcode_text) tuples
    """

    def _pikepdf_barcode_scan(pdf_filepath: str):
    def _pikepdf_barcode_scan(pdf_filepath: str) -> List[Barcode]:
        detected_barcodes = []
        with Pdf.open(pdf_filepath) as pdf:
            for page_num, page in enumerate(pdf.pages):
                for image_key in page.images:
@@ -132,24 +173,43 @@ def scan_file_for_separating_barcodes(filepath: str) -> Tuple[Optional[str], Lis
                    # raise an exception, triggering fallback
                    pillow_img = pdfimage.as_pil_image()

                    detected_barcodes = barcode_reader(pillow_img)
                    # Scale the image down
                    # See: https://github.com/paperless-ngx/paperless-ngx/issues/2385
                    # TLDR: zbar has issues with larger images
                    width, height = pillow_img.size
                    if width > 1024:
                        scaler = ceil(width / 1024)
                        new_width = int(width / scaler)
                        new_height = int(height / scaler)
                        pillow_img = pillow_img.resize((new_width, new_height))

                    if settings.CONSUMER_BARCODE_STRING in detected_barcodes:
                        separator_page_numbers.append(page_num)
                    width, height = pillow_img.size
                    if height > 2048:
                        scaler = ceil(height / 2048)
                        new_width = int(width / scaler)
                        new_height = int(height / scaler)
                        pillow_img = pillow_img.resize((new_width, new_height))

    def _pdf2image_barcode_scan(pdf_filepath: str):
                    for barcode_value in barcode_reader(pillow_img):
                        detected_barcodes.append(Barcode(page_num, barcode_value))

        return detected_barcodes

    def _pdf2image_barcode_scan(pdf_filepath: str) -> List[Barcode]:
        detected_barcodes = []
        # use a temporary directory in case the file is too big to handle in memory
        with tempfile.TemporaryDirectory() as path:
            pages_from_path = convert_from_path(pdf_filepath, output_folder=path)
            for current_page_number, page in enumerate(pages_from_path):
                current_barcodes = barcode_reader(page)
                if settings.CONSUMER_BARCODE_STRING in current_barcodes:
                    separator_page_numbers.append(current_page_number)
                for barcode_value in barcode_reader(page):
                    detected_barcodes.append(
                        Barcode(current_page_number, barcode_value),
                    )
        return detected_barcodes

    separator_page_numbers = []
    pdf_filepath = None

    mime_type = get_file_mime_type(filepath)
    barcodes = []

    if supported_file_type(mime_type):
        pdf_filepath = filepath
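The two resize blocks above cap the rendered page image before it is handed to zbar (see issue #2385 referenced in the comment): first the width is capped at 1024 px, then the height at 2048 px, each step dividing both dimensions by the same ceil-based scaler so the aspect ratio is preserved. A self-contained sketch of that arithmetic:

    from math import ceil

    def downscale(width: int, height: int) -> tuple:
        if width > 1024:
            scaler = ceil(width / 1024)              # e.g. 3000 px wide -> scaler 3
            width, height = int(width / scaler), int(height / scaler)
        if height > 2048:
            scaler = ceil(height / 2048)
            width, height = int(width / scaler), int(height / scaler)
        return width, height

    assert downscale(3000, 4500) == (1000, 1500)     # width cap alone is enough here
    assert downscale(800, 6000) == (266, 2000)       # height cap kicks in on tall, narrow scans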
@@ -159,7 +219,7 @@ def scan_file_for_separating_barcodes(filepath: str) -> Tuple[Optional[str], Lis
        # Always try pikepdf first, it's usually fine, faster and
        # uses less memory
        try:
            _pikepdf_barcode_scan(pdf_filepath)
            barcodes = _pikepdf_barcode_scan(pdf_filepath)
        # Password protected files can't be checked
        except PasswordError as e:
            logger.warning(
@@ -172,9 +232,7 @@ def scan_file_for_separating_barcodes(filepath: str) -> Tuple[Optional[str], Lis
                f"Falling back to pdf2image because: {e}",
            )
            try:
                # Clear the list in case some processing worked
                separator_page_numbers = []
                _pdf2image_barcode_scan(pdf_filepath)
                barcodes = _pdf2image_barcode_scan(pdf_filepath)
            # This file is really borked, allow the consumption to continue
            # but it may fail further on
            except Exception as e: # pragma: no cover
@@ -186,7 +244,49 @@ def scan_file_for_separating_barcodes(filepath: str) -> Tuple[Optional[str], Lis
        logger.warning(
            f"Unsupported file format for barcode reader: {str(mime_type)}",
        )
    return pdf_filepath, separator_page_numbers

    return DocumentBarcodeInfo(pdf_filepath, barcodes)


def get_separating_barcodes(barcodes: List[Barcode]) -> List[int]:
    """
    Search the parsed barcodes for separators
    and returns a list of page numbers, which
    separate the file into new files.
    """
    # filter all barcodes for the separator string
    # get the page numbers of the separating barcodes

    return list({bc.page for bc in barcodes if bc.is_separator})


def get_asn_from_barcodes(barcodes: List[Barcode]) -> Optional[int]:
    """
    Search the parsed barcodes for any ASNs.
    The first barcode that starts with CONSUMER_ASN_BARCODE_PREFIX
    is considered the ASN to be used.
    Returns the detected ASN (or None)
    """
    asn = None

    # get the first barcode that starts with CONSUMER_ASN_BARCODE_PREFIX
    asn_text = next(
        (x.value for x in barcodes if x.is_asn),
        None,
    )

    if asn_text:
        logger.debug(f"Found ASN Barcode: {asn_text}")
        # remove the prefix and remove whitespace
        asn_text = asn_text[len(settings.CONSUMER_ASN_BARCODE_PREFIX) :].strip()

        # now, try parsing the ASN number
        try:
            asn = int(asn_text)
        except ValueError as e:
            logger.warning(f"Failed to parse ASN number because: {e}")

    return asn


def separate_pages(filepath: str, pages_to_split_on: List[int]) -> List[str]:
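Taken together, scan_file_for_barcodes now reads every barcode once, and the two helpers above derive the separator pages and the ASN from that single scan result. A self-contained sketch with (page, value) tuples standing in for Barcode objects; "PATCHT" and the "ASN" prefix are illustrative assumptions for the two settings involved:

    from typing import List, Optional, Tuple

    ASN_PREFIX = "ASN"  # assumed stand-in for settings.CONSUMER_ASN_BARCODE_PREFIX

    scanned: List[Tuple[int, str]] = [(0, "ASN00042"), (3, "PATCHT"), (7, "PATCHT")]

    # separator pages (sorted here only for readability; the real helper returns a set-derived list)
    separator_pages = sorted({page for page, value in scanned if value == "PATCHT"})
    assert separator_pages == [3, 7]

    # first ASN-prefixed value, stripped of its prefix and parsed as an integer
    asn_text: Optional[str] = next((v for _, v in scanned if v.startswith(ASN_PREFIX)), None)
    asn = int(asn_text[len(ASN_PREFIX):].strip()) if asn_text else None
    assert asn == 42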
@@ -39,6 +39,8 @@ class ConsumerError(Exception):
|
||||
|
||||
|
||||
MESSAGE_DOCUMENT_ALREADY_EXISTS = "document_already_exists"
|
||||
MESSAGE_ASN_ALREADY_EXISTS = "asn_already_exists"
|
||||
MESSAGE_ASN_RANGE = "asn_value_out_of_range"
|
||||
MESSAGE_FILE_NOT_FOUND = "file_not_found"
|
||||
MESSAGE_PRE_CONSUME_SCRIPT_NOT_FOUND = "pre_consume_script_not_found"
|
||||
MESSAGE_PRE_CONSUME_SCRIPT_ERROR = "pre_consume_script_error"
|
||||
@@ -98,6 +100,7 @@ class Consumer(LoggingMixin):
|
||||
self.override_correspondent_id = None
|
||||
self.override_tag_ids = None
|
||||
self.override_document_type_id = None
|
||||
self.override_asn = None
|
||||
self.task_id = None
|
||||
|
||||
self.channel_layer = get_channel_layer()
|
||||
@@ -130,6 +133,27 @@ class Consumer(LoggingMixin):
        os.makedirs(settings.ORIGINALS_DIR, exist_ok=True)
        os.makedirs(settings.ARCHIVE_DIR, exist_ok=True)

    def pre_check_asn_value(self):
        """
        Check that if override_asn is given, it is unique and within a valid range
        """
        if not self.override_asn:
            # check not necessary in case no ASN gets set
            return
        # Validate the range is above zero and less than uint32_t max
        # otherwise, Whoosh can't handle it in the index
        if self.override_asn < 0 or self.override_asn > 0xFF_FF_FF_FF:
            self._fail(
                MESSAGE_ASN_RANGE,
                f"Not consuming {self.filename}: "
                f"Given ASN {self.override_asn} is out of range [0, 4,294,967,295]",
            )
        if Document.objects.filter(archive_serial_number=self.override_asn).exists():
            self._fail(
                MESSAGE_ASN_ALREADY_EXISTS,
                f"Not consuming {self.filename}: Given ASN already exists!",
            )

    def run_pre_consume_script(self):
        if not settings.PRE_CONSUME_SCRIPT:
            return
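The new pre-check rejects an ASN override that falls outside the unsigned 32-bit window or is already taken, before any parsing work starts. A simplified, framework-free sketch of the same rule (the real method reports failures through self._fail with MESSAGE_ASN_RANGE or MESSAGE_ASN_ALREADY_EXISTS rather than returning a boolean):

    ASN_MAX = 0xFF_FF_FF_FF  # 4,294,967,295, the largest value the Whoosh index can handle

    def asn_override_is_acceptable(asn, existing_asns) -> bool:
        if not asn:
            return True                    # no override requested, nothing to check
        if asn < 0 or asn > ASN_MAX:
            return False                   # would fail with MESSAGE_ASN_RANGE
        return asn not in existing_asns    # otherwise MESSAGE_ASN_ALREADY_EXISTS

    assert asn_override_is_acceptable(None, set())
    assert not asn_override_is_acceptable(2**32, set())
    assert not asn_override_is_acceptable(42, {42})
    assert asn_override_is_acceptable(42, {7, 13})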
@@ -255,6 +279,7 @@ class Consumer(LoggingMixin):
|
||||
override_tag_ids=None,
|
||||
task_id=None,
|
||||
override_created=None,
|
||||
override_asn=None,
|
||||
) -> Document:
|
||||
"""
|
||||
Return the document object if it was successfully created.
|
||||
@@ -268,6 +293,7 @@ class Consumer(LoggingMixin):
|
||||
self.override_tag_ids = override_tag_ids
|
||||
self.task_id = task_id or str(uuid.uuid4())
|
||||
self.override_created = override_created
|
||||
self.override_asn = override_asn
|
||||
|
||||
self._send_progress(0, 100, "STARTING", MESSAGE_NEW_FILE)
|
||||
|
||||
@@ -281,6 +307,7 @@ class Consumer(LoggingMixin):
|
||||
self.pre_check_file_exists()
|
||||
self.pre_check_directories()
|
||||
self.pre_check_duplicate()
|
||||
self.pre_check_asn_value()
|
||||
|
||||
self.log("info", f"Consuming {self.filename}")
|
||||
|
||||
@@ -526,6 +553,9 @@ class Consumer(LoggingMixin):
|
||||
for tag_id in self.override_tag_ids:
|
||||
document.tags.add(Tag.objects.get(pk=tag_id))
|
||||
|
||||
if self.override_asn:
|
||||
document.archive_serial_number = self.override_asn
|
||||
|
||||
def _write(self, storage_type, source, target):
|
||||
with open(source, "rb") as read_file:
|
||||
with open(target, "wb") as write_file:
|
||||
|
@@ -5,6 +5,7 @@ from contextlib import contextmanager
|
||||
|
||||
from dateutil.parser import isoparse
|
||||
from django.conf import settings
|
||||
from documents.models import Comment
|
||||
from documents.models import Document
|
||||
from whoosh import classify
|
||||
from whoosh import highlight
|
||||
@@ -33,7 +34,7 @@ def get_schema():
|
||||
id=NUMERIC(stored=True, unique=True),
|
||||
title=TEXT(sortable=True),
|
||||
content=TEXT(),
|
||||
asn=NUMERIC(sortable=True),
|
||||
asn=NUMERIC(sortable=True, signed=False),
|
||||
correspondent=TEXT(sortable=True),
|
||||
correspondent_id=NUMERIC(),
|
||||
has_correspondent=BOOLEAN(),
|
||||
@@ -49,6 +50,7 @@ def get_schema():
|
||||
path=TEXT(sortable=True),
|
||||
path_id=NUMERIC(),
|
||||
has_path=BOOLEAN(),
|
||||
comments=TEXT(),
|
||||
)
|
||||
|
||||
|
||||
@@ -90,6 +92,7 @@ def open_index_searcher():
|
||||
def update_document(writer, doc):
|
||||
tags = ",".join([t.name for t in doc.tags.all()])
|
||||
tags_ids = ",".join([str(t.id) for t in doc.tags.all()])
|
||||
comments = ",".join([str(c.comment) for c in Comment.objects.filter(document=doc)])
|
||||
writer.update_document(
|
||||
id=doc.pk,
|
||||
title=doc.title,
|
||||
@@ -110,6 +113,7 @@ def update_document(writer, doc):
|
||||
path=doc.storage_path.name if doc.storage_path else None,
|
||||
path_id=doc.storage_path.id if doc.storage_path else None,
|
||||
has_path=doc.storage_path is not None,
|
||||
comments=comments,
|
||||
)
|
||||
|
||||
|
||||
@@ -255,7 +259,7 @@ class DelayedFullTextQuery(DelayedQuery):
|
||||
def _get_query(self):
|
||||
q_str = self.query_params["query"]
|
||||
qp = MultifieldParser(
|
||||
["content", "title", "correspondent", "tag", "type"],
|
||||
["content", "title", "correspondent", "tag", "type", "comments"],
|
||||
self.searcher.ixreader.schema,
|
||||
)
|
||||
qp.add_plugin(DateParserPlugin())
|
||||
|
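With "comments" added to the MultifieldParser field list (and to the schema earlier in this file), a plain search-box query now also matches text stored in document comments. A minimal, self-contained Whoosh sketch (toy schema and data, not Paperless's actual index) illustrating the effect:

    from whoosh.fields import NUMERIC, TEXT, Schema
    from whoosh.filedb.filestore import RamStorage
    from whoosh.qparser import MultifieldParser

    schema = Schema(id=NUMERIC(stored=True, unique=True), title=TEXT, content=TEXT, comments=TEXT)
    ix = RamStorage().create_index(schema)
    with ix.writer() as writer:
        writer.add_document(id=1, title="Invoice", content="total 12.50", comments="paid via bank transfer")

    parser = MultifieldParser(["content", "title", "comments"], schema)
    with ix.searcher() as searcher:
        # "bank transfer" only occurs in the comment, yet the document is found
        assert len(searcher.search(parser.parse("bank transfer"))) == 1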
@@ -19,7 +19,7 @@ from watchdog.observers.polling import PollingObserver
|
||||
|
||||
try:
|
||||
from inotifyrecursive import INotify, flags
|
||||
except ImportError:
|
||||
except ImportError: # pragma: nocover
|
||||
INotify = flags = None
|
||||
|
||||
logger = logging.getLogger("paperless.management.consumer")
|
||||
|
@@ -4,6 +4,9 @@ import os
|
||||
import shutil
|
||||
import tempfile
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
from typing import Set
|
||||
|
||||
import tqdm
|
||||
from django.conf import settings
|
||||
@@ -60,15 +63,6 @@ class Command(BaseCommand):
|
||||
"modified is used instead.",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-f",
|
||||
"--use-filename-format",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Use PAPERLESS_FILENAME_FORMAT for storing files in the "
|
||||
"export directory, if configured.",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-d",
|
||||
"--delete",
|
||||
@@ -80,10 +74,45 @@ class Command(BaseCommand):
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--no-progress-bar",
|
||||
"-f",
|
||||
"--use-filename-format",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="If set, the progress bar will not be shown",
|
||||
help="Use PAPERLESS_FILENAME_FORMAT for storing files in the "
|
||||
"export directory, if configured.",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-na",
|
||||
"--no-archive",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Avoid exporting archive files",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-nt",
|
||||
"--no-thumbnail",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Avoid exporting thumbnail files",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-p",
|
||||
"--use-folder-prefix",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Export files in dedicated folders according to their nature: "
|
||||
"archive, originals or thumbnails",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-sm",
|
||||
"--split-manifest",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Export document information in individual manifest json files.",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
@@ -94,21 +123,36 @@ class Command(BaseCommand):
|
||||
help="Export the documents to a zip file in the given directory",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--no-progress-bar",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="If set, the progress bar will not be shown",
|
||||
)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
BaseCommand.__init__(self, *args, **kwargs)
|
||||
self.target = None
|
||||
self.files_in_export_dir = []
|
||||
self.exported_files = []
|
||||
self.target: Path = None
|
||||
self.split_manifest = False
|
||||
self.files_in_export_dir: Set[Path] = set()
|
||||
self.exported_files: List[Path] = []
|
||||
self.compare_checksums = False
|
||||
self.use_filename_format = False
|
||||
self.use_folder_prefix = False
|
||||
self.delete = False
|
||||
self.no_archive = False
|
||||
self.no_thumbnail = False
|
||||
|
||||
def handle(self, *args, **options):
|
||||
|
||||
self.target = options["target"]
|
||||
self.target = Path(options["target"]).resolve()
|
||||
self.split_manifest = options["split_manifest"]
|
||||
self.compare_checksums = options["compare_checksums"]
|
||||
self.use_filename_format = options["use_filename_format"]
|
||||
self.use_folder_prefix = options["use_folder_prefix"]
|
||||
self.delete = options["delete"]
|
||||
self.no_archive = options["no_archive"]
|
||||
self.no_thumbnail = options["no_thumbnail"]
|
||||
zip_export: bool = options["zip"]
|
||||
|
||||
# If zipping, save the original target for later and
|
||||
@@ -121,11 +165,14 @@ class Command(BaseCommand):
|
||||
dir=settings.SCRATCH_DIR,
|
||||
prefix="paperless-export",
|
||||
)
|
||||
self.target = temp_dir.name
|
||||
self.target = Path(temp_dir.name).resolve()
|
||||
|
||||
if not os.path.exists(self.target):
|
||||
if not self.target.exists():
|
||||
raise CommandError("That path doesn't exist")
|
||||
|
||||
if not self.target.is_dir():
|
||||
raise CommandError("That path isn't a directory")
|
||||
|
||||
if not os.access(self.target, os.W_OK):
|
||||
raise CommandError("That path doesn't appear to be writable")
|
||||
|
||||
@@ -152,10 +199,9 @@ class Command(BaseCommand):
|
||||
|
||||
def dump(self, progress_bar_disable=False):
|
||||
# 1. Take a snapshot of what files exist in the current export folder
|
||||
for root, dirs, files in os.walk(self.target):
|
||||
self.files_in_export_dir.extend(
|
||||
map(lambda f: os.path.abspath(os.path.join(root, f)), files),
|
||||
)
|
||||
for x in self.target.glob("**/*"):
|
||||
if x.is_file():
|
||||
self.files_in_export_dir.add(x.resolve())
|
||||
|
||||
# 2. Create manifest, containing all correspondents, types, tags, storage paths
|
||||
# comments, documents and ui_settings
|
||||
@@ -174,14 +220,17 @@ class Command(BaseCommand):
|
||||
serializers.serialize("json", StoragePath.objects.all()),
|
||||
)
|
||||
|
||||
manifest += json.loads(
|
||||
comments = json.loads(
|
||||
serializers.serialize("json", Comment.objects.all()),
|
||||
)
|
||||
if not self.split_manifest:
|
||||
manifest += comments
|
||||
|
||||
documents = Document.objects.order_by("id")
|
||||
document_map = {d.pk: d for d in documents}
|
||||
document_manifest = json.loads(serializers.serialize("json", documents))
|
||||
manifest += document_manifest
|
||||
if not self.split_manifest:
|
||||
manifest += document_manifest
|
||||
|
||||
manifest += json.loads(
|
||||
serializers.serialize("json", MailAccount.objects.all()),
|
||||
@@ -238,16 +287,25 @@ class Command(BaseCommand):
|
||||
|
||||
# 3.3. write filenames into manifest
|
||||
original_name = base_name
|
||||
original_target = os.path.join(self.target, original_name)
|
||||
if self.use_folder_prefix:
|
||||
original_name = os.path.join("originals", original_name)
|
||||
original_target = (self.target / Path(original_name)).resolve()
|
||||
document_dict[EXPORTER_FILE_NAME] = original_name
|
||||
|
||||
thumbnail_name = base_name + "-thumbnail.webp"
|
||||
thumbnail_target = os.path.join(self.target, thumbnail_name)
|
||||
document_dict[EXPORTER_THUMBNAIL_NAME] = thumbnail_name
|
||||
if not self.no_thumbnail:
|
||||
thumbnail_name = base_name + "-thumbnail.webp"
|
||||
if self.use_folder_prefix:
|
||||
thumbnail_name = os.path.join("thumbnails", thumbnail_name)
|
||||
thumbnail_target = (self.target / Path(thumbnail_name)).resolve()
|
||||
document_dict[EXPORTER_THUMBNAIL_NAME] = thumbnail_name
|
||||
else:
|
||||
thumbnail_target = None
|
||||
|
||||
if document.has_archive_version:
|
||||
if not self.no_archive and document.has_archive_version:
|
||||
archive_name = base_name + "-archive.pdf"
|
||||
archive_target = os.path.join(self.target, archive_name)
|
||||
if self.use_folder_prefix:
|
||||
archive_name = os.path.join("archive", archive_name)
|
||||
archive_target = (self.target / Path(archive_name)).resolve()
|
||||
document_dict[EXPORTER_ARCHIVE_NAME] = archive_name
|
||||
else:
|
||||
archive_target = None
|
||||
@@ -256,24 +314,22 @@ class Command(BaseCommand):
|
||||
t = int(time.mktime(document.created.timetuple()))
|
||||
if document.storage_type == Document.STORAGE_TYPE_GPG:
|
||||
|
||||
os.makedirs(os.path.dirname(original_target), exist_ok=True)
|
||||
with open(original_target, "wb") as f:
|
||||
with document.source_file as out_file:
|
||||
f.write(GnuPG.decrypted(out_file))
|
||||
os.utime(original_target, times=(t, t))
|
||||
original_target.parent.mkdir(parents=True, exist_ok=True)
|
||||
with document.source_file as out_file:
|
||||
original_target.write_bytes(GnuPG.decrypted(out_file))
|
||||
os.utime(original_target, times=(t, t))
|
||||
|
||||
os.makedirs(os.path.dirname(thumbnail_target), exist_ok=True)
|
||||
with open(thumbnail_target, "wb") as f:
|
||||
if thumbnail_target:
|
||||
thumbnail_target.parent.mkdir(parents=True, exist_ok=True)
|
||||
with document.thumbnail_file as out_file:
|
||||
f.write(GnuPG.decrypted(out_file))
|
||||
thumbnail_target.write_bytes(GnuPG.decrypted(out_file))
|
||||
os.utime(thumbnail_target, times=(t, t))
|
||||
|
||||
if archive_target:
|
||||
os.makedirs(os.path.dirname(archive_target), exist_ok=True)
|
||||
with open(archive_target, "wb") as f:
|
||||
with document.archive_path as out_file:
|
||||
f.write(GnuPG.decrypted(out_file))
|
||||
os.utime(archive_target, times=(t, t))
|
||||
archive_target.parent.mkdir(parents=True, exist_ok=True)
|
||||
with document.archive_path as out_file:
|
||||
archive_target.write_bytes(GnuPG.decrypted(out_file))
|
||||
os.utime(archive_target, times=(t, t))
|
||||
else:
|
||||
self.check_and_copy(
|
||||
document.source_path,
|
||||
@@ -281,7 +337,8 @@ class Command(BaseCommand):
|
||||
original_target,
|
||||
)
|
||||
|
||||
self.check_and_copy(document.thumbnail_path, None, thumbnail_target)
|
||||
if thumbnail_target:
|
||||
self.check_and_copy(document.thumbnail_path, None, thumbnail_target)
|
||||
|
||||
if archive_target:
|
||||
self.check_and_copy(
|
||||
@@ -290,44 +347,59 @@ class Command(BaseCommand):
|
||||
archive_target,
|
||||
)
|
||||
|
||||
# 4.1 write manifest to target folder
|
||||
manifest_path = os.path.abspath(os.path.join(self.target, "manifest.json"))
|
||||
if self.split_manifest:
|
||||
manifest_name = base_name + "-manifest.json"
|
||||
if self.use_folder_prefix:
|
||||
manifest_name = os.path.join("json", manifest_name)
|
||||
manifest_name = (self.target / Path(manifest_name)).resolve()
|
||||
manifest_name.parent.mkdir(parents=True, exist_ok=True)
|
||||
content = [document_manifest[index]]
|
||||
content += list(
|
||||
filter(
|
||||
lambda d: d["fields"]["document"] == document_dict["pk"],
|
||||
comments,
|
||||
),
|
||||
)
|
||||
manifest_name.write_text(json.dumps(content, indent=2))
|
||||
if manifest_name in self.files_in_export_dir:
|
||||
self.files_in_export_dir.remove(manifest_name)
|
||||
|
||||
with open(manifest_path, "w") as f:
|
||||
json.dump(manifest, f, indent=2)
|
||||
# 4.1 write manifest to target folder
|
||||
manifest_path = (self.target / Path("manifest.json")).resolve()
|
||||
manifest_path.write_text(json.dumps(manifest, indent=2))
|
||||
if manifest_path in self.files_in_export_dir:
|
||||
self.files_in_export_dir.remove(manifest_path)
|
||||
|
||||
# 4.2 write version information to target folder
|
||||
version_path = os.path.abspath(os.path.join(self.target, "version.json"))
|
||||
|
||||
with open(version_path, "w") as f:
|
||||
json.dump({"version": version.__full_version_str__}, f, indent=2)
|
||||
version_path = (self.target / Path("version.json")).resolve()
|
||||
version_path.write_text(
|
||||
json.dumps({"version": version.__full_version_str__}, indent=2),
|
||||
)
|
||||
if version_path in self.files_in_export_dir:
|
||||
self.files_in_export_dir.remove(version_path)
|
||||
|
||||
if self.delete:
|
||||
# 5. Remove files which we did not explicitly export in this run
|
||||
|
||||
if manifest_path in self.files_in_export_dir:
|
||||
self.files_in_export_dir.remove(manifest_path)
|
||||
|
||||
for f in self.files_in_export_dir:
|
||||
os.remove(f)
|
||||
f.unlink()
|
||||
|
||||
delete_empty_directories(
|
||||
os.path.abspath(os.path.dirname(f)),
|
||||
os.path.abspath(self.target),
|
||||
f.parent,
|
||||
self.target,
|
||||
)
|
||||
|
||||
def check_and_copy(self, source, source_checksum, target):
|
||||
if os.path.abspath(target) in self.files_in_export_dir:
|
||||
self.files_in_export_dir.remove(os.path.abspath(target))
|
||||
def check_and_copy(self, source, source_checksum, target: Path):
|
||||
if target in self.files_in_export_dir:
|
||||
self.files_in_export_dir.remove(target)
|
||||
|
||||
perform_copy = False
|
||||
|
||||
if os.path.exists(target):
|
||||
if target.exists():
|
||||
source_stat = os.stat(source)
|
||||
target_stat = os.stat(target)
|
||||
target_stat = target.stat()
|
||||
if self.compare_checksums and source_checksum:
|
||||
with open(target, "rb") as f:
|
||||
target_checksum = hashlib.md5(f.read()).hexdigest()
|
||||
target_checksum = hashlib.md5(target.read_bytes()).hexdigest()
|
||||
perform_copy = target_checksum != source_checksum
|
||||
elif source_stat.st_mtime != target_stat.st_mtime:
|
||||
perform_copy = True
|
||||
@@ -338,5 +410,5 @@ class Command(BaseCommand):
|
||||
perform_copy = True
|
||||
|
||||
if perform_copy:
|
||||
os.makedirs(os.path.dirname(target), exist_ok=True)
|
||||
target.parent.mkdir(parents=True, exist_ok=True)
|
||||
shutil.copy2(source, target)
|
||||
|
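The exporter now accepts these switches in any combination. A sketch of one possible invocation through Django's call_command; the target directory is hypothetical, while the flag names are exactly those defined in the argument parser above:

    from django.core.management import call_command

    call_command(
        "document_exporter",
        "/tmp/paperless-export",    # hypothetical, writable target directory
        "--no-archive",             # -na: skip archived PDFs
        "--no-thumbnail",           # -nt: skip thumbnail files
        "--use-folder-prefix",      # -p: sort output into originals/, archive/, thumbnails/, json/
        "--split-manifest",         # -sm: one <document>-manifest.json per document
    )

The equivalent command line would be document_exporter /tmp/paperless-export -na -nt -p -sm.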
@@ -72,11 +72,21 @@ class Command(BaseCommand):
|
||||
if not os.access(self.source, os.R_OK):
|
||||
raise CommandError("That path doesn't appear to be readable")
|
||||
|
||||
manifest_path = os.path.normpath(os.path.join(self.source, "manifest.json"))
|
||||
self._check_manifest_exists(manifest_path)
|
||||
manifest_paths = []
|
||||
|
||||
with open(manifest_path) as f:
|
||||
main_manifest_path = os.path.normpath(
|
||||
os.path.join(self.source, "manifest.json"),
|
||||
)
|
||||
self._check_manifest_exists(main_manifest_path)
|
||||
|
||||
with open(main_manifest_path) as f:
|
||||
self.manifest = json.load(f)
|
||||
manifest_paths.append(main_manifest_path)
|
||||
|
||||
for file in Path(self.source).glob("**/*-manifest.json"):
|
||||
with open(file) as f:
|
||||
self.manifest += json.load(f)
|
||||
manifest_paths.append(file)
|
||||
|
||||
version_path = os.path.normpath(os.path.join(self.source, "version.json"))
|
||||
if os.path.exists(version_path):
|
||||
@@ -109,7 +119,8 @@ class Command(BaseCommand):
|
||||
):
|
||||
# Fill up the database with whatever is in the manifest
|
||||
try:
|
||||
call_command("loaddata", manifest_path)
|
||||
for manifest_path in manifest_paths:
|
||||
call_command("loaddata", manifest_path)
|
||||
except (FieldDoesNotExist, DeserializationError) as e:
|
||||
self.stdout.write(self.style.ERROR("Database import failed"))
|
||||
if (
|
||||
@@ -193,8 +204,11 @@ class Command(BaseCommand):
|
||||
doc_file = record[EXPORTER_FILE_NAME]
|
||||
document_path = os.path.join(self.source, doc_file)
|
||||
|
||||
thumb_file = record[EXPORTER_THUMBNAIL_NAME]
|
||||
thumbnail_path = Path(os.path.join(self.source, thumb_file)).resolve()
|
||||
if EXPORTER_THUMBNAIL_NAME in record:
|
||||
thumb_file = record[EXPORTER_THUMBNAIL_NAME]
|
||||
thumbnail_path = Path(os.path.join(self.source, thumb_file)).resolve()
|
||||
else:
|
||||
thumbnail_path = None
|
||||
|
||||
if EXPORTER_ARCHIVE_NAME in record:
|
||||
archive_file = record[EXPORTER_ARCHIVE_NAME]
|
||||
@@ -212,19 +226,21 @@ class Command(BaseCommand):
|
||||
|
||||
shutil.copy2(document_path, document.source_path)
|
||||
|
||||
if thumbnail_path.suffix in {".png", ".PNG"}:
|
||||
run_convert(
|
||||
density=300,
|
||||
scale="500x5000>",
|
||||
alpha="remove",
|
||||
strip=True,
|
||||
trim=False,
|
||||
auto_orient=True,
|
||||
input_file=f"{thumbnail_path}[0]",
|
||||
output_file=str(document.thumbnail_path),
|
||||
)
|
||||
else:
|
||||
shutil.copy2(thumbnail_path, document.thumbnail_path)
|
||||
if thumbnail_path:
|
||||
if thumbnail_path.suffix in {".png", ".PNG"}:
|
||||
run_convert(
|
||||
density=300,
|
||||
scale="500x5000>",
|
||||
alpha="remove",
|
||||
strip=True,
|
||||
trim=False,
|
||||
auto_orient=True,
|
||||
input_file=f"{thumbnail_path}[0]",
|
||||
output_file=str(document.thumbnail_path),
|
||||
)
|
||||
else:
|
||||
shutil.copy2(thumbnail_path, document.thumbnail_path)
|
||||
|
||||
if archive_path:
|
||||
create_source_path_directory(document.archive_path)
|
||||
# TODO: this assumes that the export is valid and
|
||||
|
@@ -24,7 +24,7 @@ class Migration(migrations.Migration):
|
||||
),
|
||||
),
|
||||
("task_id", models.CharField(max_length=128)),
|
||||
("name", models.CharField(max_length=256)),
|
||||
("name", models.CharField(max_length=256, null=True)),
|
||||
(
|
||||
"created",
|
||||
models.DateTimeField(auto_now=True, verbose_name="created"),
|
||||
|
@@ -0,0 +1,30 @@
|
||||
# Generated by Django 4.1.4 on 2023-01-24 17:56
|
||||
|
||||
import django.core.validators
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("documents", "1028_remove_paperlesstask_task_args_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="archive_serial_number",
|
||||
field=models.PositiveIntegerField(
|
||||
blank=True,
|
||||
db_index=True,
|
||||
help_text="The position of this document in your physical document archive.",
|
||||
null=True,
|
||||
unique=True,
|
||||
validators=[
|
||||
django.core.validators.MaxValueValidator(4294967295),
|
||||
django.core.validators.MinValueValidator(0),
|
||||
],
|
||||
verbose_name="archive serial number",
|
||||
),
|
||||
),
|
||||
]
|
@@ -10,6 +10,8 @@ import pathvalidate
from celery import states
from django.conf import settings
from django.contrib.auth.models import User
from django.core.validators import MaxValueValidator
from django.core.validators import MinValueValidator
from django.db import models
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
@@ -227,12 +229,16 @@ class Document(models.Model):
        help_text=_("The original name of the file when it was uploaded"),
    )

    archive_serial_number = models.IntegerField(
    archive_serial_number = models.PositiveIntegerField(
        _("archive serial number"),
        blank=True,
        null=True,
        unique=True,
        db_index=True,
        validators=[
            MaxValueValidator(0xFF_FF_FF_FF),
            MinValueValidator(0),
        ],
        help_text=_(
            "The position of this document in your physical document " "archive.",
        ),
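The model-level validators mirror the bounds in the generated migration a few hunks earlier: the hexadecimal literal on the field and the decimal constant in the migration describe the same unsigned 32-bit window, which is also the range enforced by the consumer's ASN pre-check. A quick arithmetic check, as a standalone sketch:

    ASN_MIN, ASN_MAX = 0, 0xFF_FF_FF_FF

    assert ASN_MAX == 4294967295      # MaxValueValidator(4294967295) in the migration
    assert ASN_MAX == 2**32 - 1       # the uint32 ceiling used in pre_check_asn_value

    def in_asn_range(value: int) -> bool:
        return ASN_MIN <= value <= ASN_MAX

    assert in_asn_range(0) and in_asn_range(4294967295) and not in_asn_range(2**32)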
@@ -6,12 +6,12 @@ import re
import shutil
import subprocess
import tempfile
from functools import lru_cache
from typing import Iterator
from typing import Match
from typing import Optional
from typing import Set

import magic
from django.conf import settings
from django.utils import timezone
from documents.loggers import LoggingMixin
@@ -45,11 +45,20 @@ DATE_REGEX = re.compile(
logger = logging.getLogger("paperless.parsing")


def is_mime_type_supported(mime_type) -> bool:
@lru_cache(maxsize=8)
def is_mime_type_supported(mime_type: str) -> bool:
"""
Returns True if the mime type is supported, False otherwise
"""
return get_parser_class_for_mime_type(mime_type) is not None


def get_default_file_extension(mime_type) -> str:
@lru_cache(maxsize=8)
def get_default_file_extension(mime_type: str) -> str:
"""
Returns the default file extension for a mimetype, or
an empty string if it could not be determined
"""
for response in document_consumer_declaration.send(None):
parser_declaration = response[1]
supported_mime_types = parser_declaration["mime_types"]
@@ -64,7 +73,12 @@ def get_default_file_extension(mime_type) -> str:
return ""


def is_file_ext_supported(ext) -> bool:
@lru_cache(maxsize=8)
def is_file_ext_supported(ext: str) -> bool:
"""
Returns True if the file extension is supported, False otherwise
TODO: Investigate why this really exists, why not use mimetype
"""
if ext:
return ext.lower() in get_supported_file_extensions()
else:
@@ -79,11 +93,19 @@ def get_supported_file_extensions() -> Set[str]:

for mime_type in supported_mime_types:
extensions.update(mimetypes.guess_all_extensions(mime_type))
# Python's stdlib might be behind, so also add what the parser
# says is the default extension
# This makes image/webp supported on Python < 3.11
extensions.add(supported_mime_types[mime_type])

return extensions


def get_parser_class_for_mime_type(mime_type):
def get_parser_class_for_mime_type(mime_type: str) -> Optional["DocumentParser"]:
"""
Returns the best parser (by weight) for the given mimetype or
None if no parser exists
"""

options = []

@@ -103,16 +125,6 @@ def get_parser_class_for_mime_type(mime_type):
return sorted(options, key=lambda _: _["weight"], reverse=True)[0]["parser"]


def get_parser_class(path):
"""
Determine the appropriate parser class based on the file
"""

mime_type = magic.from_file(path, mime=True)

return get_parser_class_for_mime_type(mime_type)


def run_convert(
input_file,
output_file,
@@ -447,7 +447,7 @@ def update_filename_and_move_files(sender, instance, **kwargs):
)

except (OSError, DatabaseError, CannotMoveFilesException) as e:
logger.warn(f"Exception during file handling: {e}")
logger.warning(f"Exception during file handling: {e}")
# This happens when either:
# - moving the files failed due to file system errors
# - saving to the database failed due to database errors
@@ -3,10 +3,10 @@ import logging
|
||||
import os
|
||||
import shutil
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Type
|
||||
|
||||
import dateutil.parser
|
||||
import tqdm
|
||||
from asgiref.sync import async_to_sync
|
||||
from celery import shared_task
|
||||
@@ -98,6 +98,7 @@ def consume_file(
|
||||
):
|
||||
|
||||
path = Path(path).resolve()
|
||||
asn = None
|
||||
|
||||
# Celery converts this to a string, but everything expects a datetime
|
||||
# Long term solution is to not use JSON for the serializer but pickle instead
|
||||
@@ -105,75 +106,87 @@ def consume_file(
|
||||
# More types will be retained through JSON encode/decode
|
||||
if override_created is not None and isinstance(override_created, str):
|
||||
try:
|
||||
override_created = datetime.fromisoformat(override_created)
|
||||
override_created = dateutil.parser.isoparse(override_created)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# check for separators in current document
|
||||
if settings.CONSUMER_ENABLE_BARCODES:
|
||||
# read all barcodes in the current document
|
||||
if settings.CONSUMER_ENABLE_BARCODES or settings.CONSUMER_ENABLE_ASN_BARCODE:
|
||||
doc_barcode_info = barcodes.scan_file_for_barcodes(path)
|
||||
|
||||
pdf_filepath, separators = barcodes.scan_file_for_separating_barcodes(path)
|
||||
# split document by separator pages, if enabled
|
||||
if settings.CONSUMER_ENABLE_BARCODES:
|
||||
separators = barcodes.get_separating_barcodes(doc_barcode_info.barcodes)
|
||||
|
||||
if separators:
|
||||
logger.debug(
|
||||
f"Pages with separators found in: {str(path)}",
|
||||
)
|
||||
document_list = barcodes.separate_pages(pdf_filepath, separators)
|
||||
if len(separators) > 0:
|
||||
logger.debug(
|
||||
f"Pages with separators found in: {str(path)}",
|
||||
)
|
||||
document_list = barcodes.separate_pages(
|
||||
doc_barcode_info.pdf_path,
|
||||
separators,
|
||||
)
|
||||
|
||||
if document_list:
|
||||
for n, document in enumerate(document_list):
|
||||
# save to consumption dir
|
||||
# rename it to the original filename with number prefix
|
||||
if override_filename:
|
||||
newname = f"{str(n)}_" + override_filename
|
||||
else:
|
||||
newname = None
|
||||
if document_list:
|
||||
for n, document in enumerate(document_list):
|
||||
# save to consumption dir
|
||||
# rename it to the original filename with number prefix
|
||||
if override_filename:
|
||||
newname = f"{str(n)}_" + override_filename
|
||||
else:
|
||||
newname = None
|
||||
|
||||
# If the file is an upload, it's in the scratch directory
|
||||
# Move it to consume directory to be picked up
|
||||
# Otherwise, use the current parent to keep possible tags
|
||||
# from subdirectories
|
||||
# If the file is an upload, it's in the scratch directory
|
||||
# Move it to consume directory to be picked up
|
||||
# Otherwise, use the current parent to keep possible tags
|
||||
# from subdirectories
|
||||
try:
|
||||
# is_relative_to would be nicer, but new in 3.9
|
||||
_ = path.relative_to(settings.SCRATCH_DIR)
|
||||
save_to_dir = settings.CONSUMPTION_DIR
|
||||
except ValueError:
|
||||
save_to_dir = path.parent
|
||||
|
||||
barcodes.save_to_dir(
|
||||
document,
|
||||
newname=newname,
|
||||
target_dir=save_to_dir,
|
||||
)
|
||||
|
||||
# Delete the PDF file which was split
|
||||
os.remove(doc_barcode_info.pdf_path)
|
||||
|
||||
# If the original was a TIFF, remove the original file as well
|
||||
if str(doc_barcode_info.pdf_path) != str(path):
|
||||
logger.debug(f"Deleting file {path}")
|
||||
os.unlink(path)
|
||||
|
||||
# notify the sender, otherwise the progress bar
|
||||
# in the UI stays stuck
|
||||
payload = {
|
||||
"filename": override_filename,
|
||||
"task_id": task_id,
|
||||
"current_progress": 100,
|
||||
"max_progress": 100,
|
||||
"status": "SUCCESS",
|
||||
"message": "finished",
|
||||
}
|
||||
try:
|
||||
# is_relative_to would be nicer, but new in 3.9
|
||||
_ = path.relative_to(settings.SCRATCH_DIR)
|
||||
save_to_dir = settings.CONSUMPTION_DIR
|
||||
except ValueError:
|
||||
save_to_dir = path.parent
|
||||
async_to_sync(get_channel_layer().group_send)(
|
||||
"status_updates",
|
||||
{"type": "status_update", "data": payload},
|
||||
)
|
||||
except ConnectionError as e:
|
||||
logger.warning(f"ConnectionError on status send: {str(e)}")
|
||||
# consuming stops here, since the original document with
|
||||
# the barcodes has been split and will be consumed separately
|
||||
return "File successfully split"
|
||||
|
||||
barcodes.save_to_dir(
|
||||
document,
|
||||
newname=newname,
|
||||
target_dir=save_to_dir,
|
||||
)
|
||||
|
||||
# Delete the PDF file which was split
|
||||
os.remove(pdf_filepath)
|
||||
|
||||
# If the original was a TIFF, remove the original file as well
|
||||
if str(pdf_filepath) != str(path):
|
||||
logger.debug(f"Deleting file {path}")
|
||||
os.unlink(path)
|
||||
|
||||
# notify the sender, otherwise the progress bar
|
||||
# in the UI stays stuck
|
||||
payload = {
|
||||
"filename": override_filename,
|
||||
"task_id": task_id,
|
||||
"current_progress": 100,
|
||||
"max_progress": 100,
|
||||
"status": "SUCCESS",
|
||||
"message": "finished",
|
||||
}
|
||||
try:
|
||||
async_to_sync(get_channel_layer().group_send)(
|
||||
"status_updates",
|
||||
{"type": "status_update", "data": payload},
|
||||
)
|
||||
except ConnectionError as e:
|
||||
logger.warning(f"ConnectionError on status send: {str(e)}")
|
||||
# consuming stops here, since the original document with
|
||||
# the barcodes has been split and will be consumed separately
|
||||
return "File successfully split"
|
||||
# try reading the ASN from barcode
|
||||
if settings.CONSUMER_ENABLE_ASN_BARCODE:
|
||||
asn = barcodes.get_asn_from_barcodes(doc_barcode_info.barcodes)
|
||||
if asn:
|
||||
logger.info(f"Found ASN in barcode: {asn}")
|
||||
|
||||
# continue with consumption if no barcode was found
|
||||
document = Consumer().try_consume_file(
|
||||
@@ -185,6 +198,7 @@ def consume_file(
|
||||
override_tag_ids=override_tag_ids,
|
||||
task_id=task_id,
|
||||
override_created=override_created,
|
||||
override_asn=asn,
|
||||
)
|
||||
|
||||
if document:
|
||||
|
Binary files not shown. New test sample files added:
src/documents/tests/samples/barcodes/barcode-39-asn-123.pdf
src/documents/tests/samples/barcodes/barcode-39-asn-123.png
src/documents/tests/samples/barcodes/barcode-39-asn-invalid.pdf
src/documents/tests/samples/barcodes/barcode-39-asn-invalid.png
src/documents/tests/samples/barcodes/many-qr-codes.pdf
@@ -1,5 +1,3 @@
from unittest import mock

from django.contrib.admin.sites import AdminSite
from django.test import TestCase
from django.utils import timezone
@@ -7,6 +7,7 @@ import tempfile
import urllib.request
import uuid
import zipfile
from pathlib import Path
from unittest import mock
from unittest.mock import MagicMock

@@ -33,7 +34,6 @@ from documents.models import SavedView
from documents.models import StoragePath
from documents.models import Tag
from documents.models import Comment
from documents.models import StoragePath
from documents.tests.utils import DirectoriesMixin
from paperless import version
from rest_framework.test import APITestCase
@@ -480,7 +480,7 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):
|
||||
self.assertNotIn(result["id"], seen_ids)
|
||||
seen_ids.append(result["id"])
|
||||
|
||||
response = self.client.get(f"/api/documents/?query=content&page=6&page_size=10")
|
||||
response = self.client.get("/api/documents/?query=content&page=6&page_size=10")
|
||||
results = response.data["results"]
|
||||
self.assertEqual(response.data["count"], 55)
|
||||
self.assertEqual(len(results), 5)
|
||||
@@ -500,9 +500,9 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):
|
||||
)
|
||||
index.update_document(writer, doc)
|
||||
|
||||
response = self.client.get(f"/api/documents/?query=content&page=0&page_size=10")
|
||||
response = self.client.get("/api/documents/?query=content&page=0&page_size=10")
|
||||
self.assertEqual(response.status_code, 404)
|
||||
response = self.client.get(f"/api/documents/?query=content&page=3&page_size=10")
|
||||
response = self.client.get("/api/documents/?query=content&page=3&page_size=10")
|
||||
self.assertEqual(response.status_code, 404)
|
||||
|
||||
@mock.patch("documents.index.autocomplete")
|
||||
@@ -809,7 +809,9 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):
|
||||
m.assert_called_once()
|
||||
|
||||
args, kwargs = m.call_args
|
||||
self.assertEqual(kwargs["override_filename"], "simple.pdf")
|
||||
file_path = Path(args[0])
|
||||
self.assertEqual(file_path.name, "simple.pdf")
|
||||
self.assertIn(Path(settings.SCRATCH_DIR), file_path.parents)
|
||||
self.assertIsNone(kwargs["override_title"])
|
||||
self.assertIsNone(kwargs["override_correspondent_id"])
|
||||
self.assertIsNone(kwargs["override_document_type_id"])
|
||||
@@ -834,7 +836,9 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):
|
||||
m.assert_called_once()
|
||||
|
||||
args, kwargs = m.call_args
|
||||
self.assertEqual(kwargs["override_filename"], "simple.pdf")
|
||||
file_path = Path(args[0])
|
||||
self.assertEqual(file_path.name, "simple.pdf")
|
||||
self.assertIn(Path(settings.SCRATCH_DIR), file_path.parents)
|
||||
self.assertIsNone(kwargs["override_title"])
|
||||
self.assertIsNone(kwargs["override_correspondent_id"])
|
||||
self.assertIsNone(kwargs["override_document_type_id"])
|
||||
@@ -1080,7 +1084,7 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):
|
||||
self.assertEqual(meta["archive_size"], os.stat(archive_file).st_size)
|
||||
|
||||
def test_get_metadata_invalid_doc(self):
|
||||
response = self.client.get(f"/api/documents/34576/metadata/")
|
||||
response = self.client.get("/api/documents/34576/metadata/")
|
||||
self.assertEqual(response.status_code, 404)
|
||||
|
||||
def test_get_metadata_no_archive(self):
|
||||
@@ -1145,7 +1149,7 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):
|
||||
)
|
||||
|
||||
def test_get_suggestions_invalid_doc(self):
|
||||
response = self.client.get(f"/api/documents/34676/suggestions/")
|
||||
response = self.client.get("/api/documents/34676/suggestions/")
|
||||
self.assertEqual(response.status_code, 404)
|
||||
|
||||
@mock.patch("documents.views.match_storage_paths")
|
||||
|
@@ -9,6 +9,7 @@ from django.test import override_settings
|
||||
from django.test import TestCase
|
||||
from documents import barcodes
|
||||
from documents import tasks
|
||||
from documents.consumer import ConsumerError
|
||||
from documents.tests.utils import DirectoriesMixin
|
||||
from PIL import Image
|
||||
|
||||
@@ -110,6 +111,58 @@ class TestBarcode(DirectoriesMixin, TestCase):
|
||||
img = Image.open(test_file)
|
||||
self.assertEqual(barcodes.barcode_reader(img), ["CUSTOM BARCODE"])
|
||||
|
||||
def test_barcode_reader_asn_normal(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Image containing standard ASNxxxxx barcode
|
||||
WHEN:
|
||||
- Image is scanned for barcodes
|
||||
THEN:
|
||||
- The barcode is located
|
||||
- The barcode value is correct
|
||||
"""
|
||||
test_file = os.path.join(
|
||||
self.BARCODE_SAMPLE_DIR,
|
||||
"barcode-39-asn-123.png",
|
||||
)
|
||||
img = Image.open(test_file)
|
||||
self.assertEqual(barcodes.barcode_reader(img), ["ASN00123"])
|
||||
|
||||
def test_barcode_reader_asn_invalid(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Image containing invalid ASNxxxxx barcode
|
||||
- The number portion of the ASN is not a number
|
||||
WHEN:
|
||||
- Image is scanned for barcodes
|
||||
THEN:
|
||||
- The barcode is located
|
||||
- The barcode value is correct
|
||||
"""
|
||||
test_file = os.path.join(
|
||||
self.BARCODE_SAMPLE_DIR,
|
||||
"barcode-39-asn-invalid.png",
|
||||
)
|
||||
img = Image.open(test_file)
|
||||
self.assertEqual(barcodes.barcode_reader(img), ["ASNXYZXYZ"])
|
||||
|
||||
def test_barcode_reader_asn_custom_prefix(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Image containing custom prefix barcode
|
||||
WHEN:
|
||||
- Image is scanned for barcodes
|
||||
THEN:
|
||||
- The barcode is located
|
||||
- The barcode value is correct
|
||||
"""
|
||||
test_file = os.path.join(
|
||||
self.BARCODE_SAMPLE_DIR,
|
||||
"barcode-39-asn-custom-prefix.png",
|
||||
)
|
||||
img = Image.open(test_file)
|
||||
self.assertEqual(barcodes.barcode_reader(img), ["CUSTOM-PREFIX-00123"])
|
||||
|
||||
def test_get_mime_type(self):
|
||||
tiff_file = os.path.join(
|
||||
self.SAMPLE_DIR,
|
||||
@@ -167,20 +220,26 @@ class TestBarcode(DirectoriesMixin, TestCase):
|
||||
self.BARCODE_SAMPLE_DIR,
|
||||
"patch-code-t.pdf",
|
||||
)
|
||||
pdf_file, separator_page_numbers = barcodes.scan_file_for_separating_barcodes(
|
||||
doc_barcode_info = barcodes.scan_file_for_barcodes(
|
||||
test_file,
|
||||
)
|
||||
separator_page_numbers = barcodes.get_separating_barcodes(
|
||||
doc_barcode_info.barcodes,
|
||||
)
|
||||
|
||||
self.assertEqual(pdf_file, test_file)
|
||||
self.assertEqual(doc_barcode_info.pdf_path, test_file)
|
||||
self.assertListEqual(separator_page_numbers, [0])
|
||||
|
||||
def test_scan_file_for_separating_barcodes_none_present(self):
|
||||
test_file = os.path.join(self.SAMPLE_DIR, "simple.pdf")
|
||||
pdf_file, separator_page_numbers = barcodes.scan_file_for_separating_barcodes(
|
||||
doc_barcode_info = barcodes.scan_file_for_barcodes(
|
||||
test_file,
|
||||
)
|
||||
separator_page_numbers = barcodes.get_separating_barcodes(
|
||||
doc_barcode_info.barcodes,
|
||||
)
|
||||
|
||||
self.assertEqual(pdf_file, test_file)
|
||||
self.assertEqual(doc_barcode_info.pdf_path, test_file)
|
||||
self.assertListEqual(separator_page_numbers, [])
|
||||
|
||||
def test_scan_file_for_separating_barcodes3(self):
|
||||
@@ -188,11 +247,14 @@ class TestBarcode(DirectoriesMixin, TestCase):
|
||||
self.BARCODE_SAMPLE_DIR,
|
||||
"patch-code-t-middle.pdf",
|
||||
)
|
||||
pdf_file, separator_page_numbers = barcodes.scan_file_for_separating_barcodes(
|
||||
doc_barcode_info = barcodes.scan_file_for_barcodes(
|
||||
test_file,
|
||||
)
|
||||
separator_page_numbers = barcodes.get_separating_barcodes(
|
||||
doc_barcode_info.barcodes,
|
||||
)
|
||||
|
||||
self.assertEqual(pdf_file, test_file)
|
||||
self.assertEqual(doc_barcode_info.pdf_path, test_file)
|
||||
self.assertListEqual(separator_page_numbers, [1])
|
||||
|
||||
def test_scan_file_for_separating_barcodes4(self):
|
||||
@@ -200,11 +262,14 @@ class TestBarcode(DirectoriesMixin, TestCase):
|
||||
self.BARCODE_SAMPLE_DIR,
|
||||
"several-patcht-codes.pdf",
|
||||
)
|
||||
pdf_file, separator_page_numbers = barcodes.scan_file_for_separating_barcodes(
|
||||
doc_barcode_info = barcodes.scan_file_for_barcodes(
|
||||
test_file,
|
||||
)
|
||||
separator_page_numbers = barcodes.get_separating_barcodes(
|
||||
doc_barcode_info.barcodes,
|
||||
)
|
||||
|
||||
self.assertEqual(pdf_file, test_file)
|
||||
self.assertEqual(doc_barcode_info.pdf_path, test_file)
|
||||
self.assertListEqual(separator_page_numbers, [2, 5])
|
||||
|
||||
def test_scan_file_for_separating_barcodes_upsidedown(self):
|
||||
@@ -212,14 +277,17 @@ class TestBarcode(DirectoriesMixin, TestCase):
|
||||
self.BARCODE_SAMPLE_DIR,
|
||||
"patch-code-t-middle_reverse.pdf",
|
||||
)
|
||||
pdf_file, separator_page_numbers = barcodes.scan_file_for_separating_barcodes(
|
||||
doc_barcode_info = barcodes.scan_file_for_barcodes(
|
||||
test_file,
|
||||
)
|
||||
separator_page_numbers = barcodes.get_separating_barcodes(
|
||||
doc_barcode_info.barcodes,
|
||||
)
|
||||
|
||||
self.assertEqual(pdf_file, test_file)
|
||||
self.assertEqual(doc_barcode_info.pdf_path, test_file)
|
||||
self.assertListEqual(separator_page_numbers, [1])
|
||||
|
||||
def test_scan_file_for_separating_barcodes_pillow_transcode_error(self):
|
||||
def test_scan_file_for_barcodes_pillow_transcode_error(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- A PDF containing an image which cannot be transcoded to a PIL image
|
||||
@@ -273,7 +341,7 @@ class TestBarcode(DirectoriesMixin, TestCase):
|
||||
with mock.patch("documents.barcodes.barcode_reader") as reader:
|
||||
reader.return_value = list()
|
||||
|
||||
_, _ = barcodes.scan_file_for_separating_barcodes(
|
||||
_ = barcodes.scan_file_for_barcodes(
|
||||
str(device_n_pdf.name),
|
||||
)
|
||||
|
||||
@@ -292,11 +360,14 @@ class TestBarcode(DirectoriesMixin, TestCase):
|
||||
self.BARCODE_SAMPLE_DIR,
|
||||
"barcode-fax-image.pdf",
|
||||
)
|
||||
pdf_file, separator_page_numbers = barcodes.scan_file_for_separating_barcodes(
|
||||
doc_barcode_info = barcodes.scan_file_for_barcodes(
|
||||
test_file,
|
||||
)
|
||||
separator_page_numbers = barcodes.get_separating_barcodes(
|
||||
doc_barcode_info.barcodes,
|
||||
)
|
||||
|
||||
self.assertEqual(pdf_file, test_file)
|
||||
self.assertEqual(doc_barcode_info.pdf_path, test_file)
|
||||
self.assertListEqual(separator_page_numbers, [1])
|
||||
|
||||
def test_scan_file_for_separating_qr_barcodes(self):
|
||||
@@ -304,11 +375,14 @@ class TestBarcode(DirectoriesMixin, TestCase):
|
||||
self.BARCODE_SAMPLE_DIR,
|
||||
"patch-code-t-qr.pdf",
|
||||
)
|
||||
pdf_file, separator_page_numbers = barcodes.scan_file_for_separating_barcodes(
|
||||
doc_barcode_info = barcodes.scan_file_for_barcodes(
|
||||
test_file,
|
||||
)
|
||||
separator_page_numbers = barcodes.get_separating_barcodes(
|
||||
doc_barcode_info.barcodes,
|
||||
)
|
||||
|
||||
self.assertEqual(pdf_file, test_file)
|
||||
self.assertEqual(doc_barcode_info.pdf_path, test_file)
|
||||
self.assertListEqual(separator_page_numbers, [0])
|
||||
|
||||
@override_settings(CONSUMER_BARCODE_STRING="CUSTOM BARCODE")
|
||||
@@ -317,11 +391,14 @@ class TestBarcode(DirectoriesMixin, TestCase):
|
||||
self.BARCODE_SAMPLE_DIR,
|
||||
"barcode-39-custom.pdf",
|
||||
)
|
||||
pdf_file, separator_page_numbers = barcodes.scan_file_for_separating_barcodes(
|
||||
doc_barcode_info = barcodes.scan_file_for_barcodes(
|
||||
test_file,
|
||||
)
|
||||
separator_page_numbers = barcodes.get_separating_barcodes(
|
||||
doc_barcode_info.barcodes,
|
||||
)
|
||||
|
||||
self.assertEqual(pdf_file, test_file)
|
||||
self.assertEqual(doc_barcode_info.pdf_path, test_file)
|
||||
self.assertListEqual(separator_page_numbers, [0])
|
||||
|
||||
@override_settings(CONSUMER_BARCODE_STRING="CUSTOM BARCODE")
|
||||
@@ -330,11 +407,14 @@ class TestBarcode(DirectoriesMixin, TestCase):
|
||||
self.BARCODE_SAMPLE_DIR,
|
||||
"barcode-qr-custom.pdf",
|
||||
)
|
||||
pdf_file, separator_page_numbers = barcodes.scan_file_for_separating_barcodes(
|
||||
doc_barcode_info = barcodes.scan_file_for_barcodes(
|
||||
test_file,
|
||||
)
|
||||
separator_page_numbers = barcodes.get_separating_barcodes(
|
||||
doc_barcode_info.barcodes,
|
||||
)
|
||||
|
||||
self.assertEqual(pdf_file, test_file)
|
||||
self.assertEqual(doc_barcode_info.pdf_path, test_file)
|
||||
self.assertListEqual(separator_page_numbers, [0])
|
||||
|
||||
@override_settings(CONSUMER_BARCODE_STRING="CUSTOM BARCODE")
|
||||
@@ -343,11 +423,14 @@ class TestBarcode(DirectoriesMixin, TestCase):
|
||||
self.BARCODE_SAMPLE_DIR,
|
||||
"barcode-128-custom.pdf",
|
||||
)
|
||||
pdf_file, separator_page_numbers = barcodes.scan_file_for_separating_barcodes(
|
||||
doc_barcode_info = barcodes.scan_file_for_barcodes(
|
||||
test_file,
|
||||
)
|
||||
separator_page_numbers = barcodes.get_separating_barcodes(
|
||||
doc_barcode_info.barcodes,
|
||||
)
|
||||
|
||||
self.assertEqual(pdf_file, test_file)
|
||||
self.assertEqual(doc_barcode_info.pdf_path, test_file)
|
||||
self.assertListEqual(separator_page_numbers, [0])
|
||||
|
||||
def test_scan_file_for_separating_wrong_qr_barcodes(self):
|
||||
@@ -355,13 +438,41 @@ class TestBarcode(DirectoriesMixin, TestCase):
|
||||
self.BARCODE_SAMPLE_DIR,
|
||||
"barcode-39-custom.pdf",
|
||||
)
|
||||
pdf_file, separator_page_numbers = barcodes.scan_file_for_separating_barcodes(
|
||||
doc_barcode_info = barcodes.scan_file_for_barcodes(
|
||||
test_file,
|
||||
)
|
||||
separator_page_numbers = barcodes.get_separating_barcodes(
|
||||
doc_barcode_info.barcodes,
|
||||
)
|
||||
|
||||
self.assertEqual(pdf_file, test_file)
|
||||
self.assertEqual(doc_barcode_info.pdf_path, test_file)
|
||||
self.assertListEqual(separator_page_numbers, [])
|
||||
|
||||
@override_settings(CONSUMER_BARCODE_STRING="ADAR-NEXTDOC")
|
||||
def test_scan_file_for_separating_qr_barcodes(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Input PDF with certain QR codes that aren't detected at current size
|
||||
WHEN:
|
||||
- The input file is scanned for barcodes
|
||||
THEN:
|
||||
- QR codes are detected
|
||||
"""
|
||||
test_file = os.path.join(
|
||||
self.BARCODE_SAMPLE_DIR,
|
||||
"many-qr-codes.pdf",
|
||||
)
|
||||
|
||||
doc_barcode_info = barcodes.scan_file_for_barcodes(
|
||||
test_file,
|
||||
)
|
||||
separator_page_numbers = barcodes.get_separating_barcodes(
|
||||
doc_barcode_info.barcodes,
|
||||
)
|
||||
|
||||
self.assertGreater(len(doc_barcode_info.barcodes), 0)
|
||||
self.assertListEqual(separator_page_numbers, [1])
|
||||
|
||||
def test_separate_pages(self):
|
||||
test_file = os.path.join(
|
||||
self.BARCODE_SAMPLE_DIR,
|
||||
@@ -401,7 +512,7 @@ class TestBarcode(DirectoriesMixin, TestCase):
|
||||
self.assertEqual(
|
||||
cm.output,
|
||||
[
|
||||
f"WARNING:paperless.barcodes:No pages to split on!",
|
||||
"WARNING:paperless.barcodes:No pages to split on!",
|
||||
],
|
||||
)
|
||||
|
||||
@@ -450,11 +561,14 @@ class TestBarcode(DirectoriesMixin, TestCase):
|
||||
)
|
||||
tempdir = tempfile.mkdtemp(prefix="paperless-", dir=settings.SCRATCH_DIR)
|
||||
|
||||
pdf_file, separator_page_numbers = barcodes.scan_file_for_separating_barcodes(
|
||||
doc_barcode_info = barcodes.scan_file_for_barcodes(
|
||||
test_file,
|
||||
)
|
||||
separator_page_numbers = barcodes.get_separating_barcodes(
|
||||
doc_barcode_info.barcodes,
|
||||
)
|
||||
|
||||
self.assertEqual(test_file, pdf_file)
|
||||
self.assertEqual(test_file, doc_barcode_info.pdf_path)
|
||||
self.assertTrue(len(separator_page_numbers) > 0)
|
||||
|
||||
document_list = barcodes.separate_pages(test_file, separator_page_numbers)
|
||||
@@ -559,12 +673,155 @@ class TestBarcode(DirectoriesMixin, TestCase):
|
||||
WHEN:
|
||||
- File is scanned for barcode
|
||||
THEN:
|
||||
- Scanning handle the exception without exception
|
||||
- Scanning handles the exception without exception
|
||||
"""
|
||||
test_file = os.path.join(self.SAMPLE_DIR, "password-is-test.pdf")
|
||||
pdf_file, separator_page_numbers = barcodes.scan_file_for_separating_barcodes(
|
||||
doc_barcode_info = barcodes.scan_file_for_barcodes(
|
||||
test_file,
|
||||
)
|
||||
separator_page_numbers = barcodes.get_separating_barcodes(
|
||||
doc_barcode_info.barcodes,
|
||||
)
|
||||
|
||||
self.assertEqual(doc_barcode_info.pdf_path, test_file)
|
||||
self.assertListEqual(separator_page_numbers, [])
|
||||
|
||||
def test_scan_file_for_asn_barcode(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- PDF containing an ASN barcode
|
||||
- The ASN value is 123
|
||||
WHEN:
|
||||
- File is scanned for barcodes
|
||||
THEN:
|
||||
- The ASN is located
|
||||
- The ASN integer value is correct
|
||||
"""
|
||||
test_file = os.path.join(
|
||||
self.BARCODE_SAMPLE_DIR,
|
||||
"barcode-39-asn-123.pdf",
|
||||
)
|
||||
doc_barcode_info = barcodes.scan_file_for_barcodes(
|
||||
test_file,
|
||||
)
|
||||
asn = barcodes.get_asn_from_barcodes(doc_barcode_info.barcodes)
|
||||
|
||||
self.assertEqual(doc_barcode_info.pdf_path, test_file)
|
||||
self.assertEqual(asn, 123)
|
||||
|
||||
def test_scan_file_for_asn_not_existing(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- PDF without an ASN barcode
|
||||
WHEN:
|
||||
- File is scanned for barcodes
|
||||
THEN:
|
||||
- No ASN is retrieved from the document
|
||||
"""
|
||||
test_file = os.path.join(
|
||||
self.BARCODE_SAMPLE_DIR,
|
||||
"patch-code-t.pdf",
|
||||
)
|
||||
doc_barcode_info = barcodes.scan_file_for_barcodes(
|
||||
test_file,
|
||||
)
|
||||
asn = barcodes.get_asn_from_barcodes(doc_barcode_info.barcodes)
|
||||
|
||||
self.assertEqual(doc_barcode_info.pdf_path, test_file)
|
||||
self.assertEqual(asn, None)
|
||||
|
||||
def test_scan_file_for_asn_barcode_invalid(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- PDF containing an ASN barcode
|
||||
- The ASN value is XYZXYZ
|
||||
WHEN:
|
||||
- File is scanned for barcodes
|
||||
THEN:
|
||||
- The ASN is located
|
||||
- The ASN value is not used
|
||||
"""
|
||||
test_file = os.path.join(
|
||||
self.BARCODE_SAMPLE_DIR,
|
||||
"barcode-39-asn-invalid.pdf",
|
||||
)
|
||||
doc_barcode_info = barcodes.scan_file_for_barcodes(
|
||||
test_file,
|
||||
)
|
||||
|
||||
self.assertEqual(pdf_file, test_file)
|
||||
self.assertListEqual(separator_page_numbers, [])
|
||||
asn = barcodes.get_asn_from_barcodes(doc_barcode_info.barcodes)
|
||||
|
||||
self.assertEqual(doc_barcode_info.pdf_path, test_file)
|
||||
self.assertEqual(asn, None)
|
||||
|
||||
@override_settings(CONSUMER_ASN_BARCODE_PREFIX="CUSTOM-PREFIX-")
|
||||
def test_scan_file_for_asn_custom_prefix(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- PDF containing an ASN barcode with custom prefix
|
||||
- The ASN value is 123
|
||||
WHEN:
|
||||
- File is scanned for barcodes
|
||||
THEN:
|
||||
- The ASN is located
|
||||
- The ASN integer value is correct
|
||||
"""
|
||||
test_file = os.path.join(
|
||||
self.BARCODE_SAMPLE_DIR,
|
||||
"barcode-39-asn-custom-prefix.pdf",
|
||||
)
|
||||
doc_barcode_info = barcodes.scan_file_for_barcodes(
|
||||
test_file,
|
||||
)
|
||||
asn = barcodes.get_asn_from_barcodes(doc_barcode_info.barcodes)
|
||||
|
||||
self.assertEqual(doc_barcode_info.pdf_path, test_file)
|
||||
self.assertEqual(asn, 123)
|
||||
|
||||
@override_settings(CONSUMER_ENABLE_ASN_BARCODE=True)
|
||||
def test_consume_barcode_file_asn_assignment(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- PDF containing an ASN barcode
|
||||
- The ASN value is 123
|
||||
WHEN:
|
||||
- File is scanned for barcodes
|
||||
THEN:
|
||||
- The ASN is located
|
||||
- The ASN integer value is correct
|
||||
- The ASN is provided as the override value to the consumer
|
||||
"""
|
||||
test_file = os.path.join(
|
||||
self.BARCODE_SAMPLE_DIR,
|
||||
"barcode-39-asn-123.pdf",
|
||||
)
|
||||
|
||||
dst = os.path.join(settings.SCRATCH_DIR, "barcode-39-asn-123.pdf")
|
||||
shutil.copy(test_file, dst)
|
||||
|
||||
with mock.patch("documents.consumer.Consumer.try_consume_file") as mocked_call:
|
||||
tasks.consume_file(dst)
|
||||
|
||||
args, kwargs = mocked_call.call_args
|
||||
|
||||
self.assertEqual(kwargs["override_asn"], 123)
|
||||
|
||||
@override_settings(CONSUMER_ENABLE_ASN_BARCODE=True)
|
||||
def test_asn_too_large(self):
|
||||
|
||||
src = os.path.join(
|
||||
os.path.dirname(__file__),
|
||||
"samples",
|
||||
"barcodes",
|
||||
"barcode-128-asn-too-large.pdf",
|
||||
)
|
||||
dst = os.path.join(self.dirs.scratch_dir, "barcode-128-asn-too-large.pdf")
|
||||
shutil.copy(src, dst)
|
||||
|
||||
with mock.patch("documents.consumer.Consumer._send_progress"):
|
||||
self.assertRaisesMessage(
|
||||
ConsumerError,
|
||||
"Given ASN 4294967296 is out of range [0, 4,294,967,295]",
|
||||
tasks.consume_file,
|
||||
dst,
|
||||
)
|
||||
|
@@ -1,5 +1,4 @@
|
||||
import textwrap
|
||||
import unittest
|
||||
from unittest import mock
|
||||
|
||||
from django.core.checks import Error
|
||||
|
@@ -1,5 +1,6 @@
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from unittest import mock
|
||||
@@ -27,6 +28,9 @@ def dummy_preprocess(content: str):
|
||||
|
||||
|
||||
class TestClassifier(DirectoriesMixin, TestCase):
|
||||
|
||||
SAMPLE_MODEL_FILE = os.path.join(os.path.dirname(__file__), "data", "model.pickle")
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
self.classifier = DocumentClassifier()
|
||||
@@ -213,13 +217,14 @@ class TestClassifier(DirectoriesMixin, TestCase):
|
||||
# self.classifier.train()
|
||||
# self.classifier.save()
|
||||
|
||||
@override_settings(
|
||||
MODEL_FILE=os.path.join(os.path.dirname(__file__), "data", "model.pickle"),
|
||||
)
|
||||
def test_load_and_classify(self):
|
||||
# Generate test data, train and save to the model file
|
||||
# This ensures the model file sklearn version matches
|
||||
# and eliminates a warning
|
||||
shutil.copy(
|
||||
self.SAMPLE_MODEL_FILE,
|
||||
os.path.join(self.dirs.data_dir, "classification_model.pickle"),
|
||||
)
|
||||
self.generate_test_data()
|
||||
self.classifier.train()
|
||||
self.classifier.save()
|
||||
@@ -230,9 +235,6 @@ class TestClassifier(DirectoriesMixin, TestCase):
|
||||
|
||||
self.assertCountEqual(new_classifier.predict_tags(self.doc2.content), [45, 12])
|
||||
|
||||
@override_settings(
|
||||
MODEL_FILE=os.path.join(os.path.dirname(__file__), "data", "model.pickle"),
|
||||
)
|
||||
@mock.patch("documents.classifier.pickle.load")
|
||||
def test_load_corrupt_file(self, patched_pickle_load):
|
||||
"""
|
||||
@@ -243,6 +245,10 @@ class TestClassifier(DirectoriesMixin, TestCase):
|
||||
THEN:
|
||||
- The ClassifierModelCorruptError is raised
|
||||
"""
|
||||
shutil.copy(
|
||||
self.SAMPLE_MODEL_FILE,
|
||||
os.path.join(self.dirs.data_dir, "classification_model.pickle"),
|
||||
)
|
||||
# First load is the schema version
|
||||
patched_pickle_load.side_effect = [DocumentClassifier.FORMAT_VERSION, OSError()]
|
||||
|
||||
|
@@ -4,7 +4,6 @@ import re
|
||||
import shutil
|
||||
import stat
|
||||
import tempfile
|
||||
from subprocess import CalledProcessError
|
||||
from unittest import mock
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
|
@@ -9,7 +9,6 @@ from django.test import override_settings
|
||||
from django.test import TestCase
|
||||
from documents.parsers import parse_date
|
||||
from documents.parsers import parse_date_generator
|
||||
from paperless.settings import DATE_ORDER
|
||||
|
||||
|
||||
class TestDate(TestCase):
|
||||
|
@@ -88,10 +88,10 @@ class TestArchiver(DirectoriesMixin, TestCase):
|
||||
mime_type="application/pdf",
|
||||
filename="document_01.pdf",
|
||||
)
|
||||
shutil.copy(sample_file, os.path.join(self.dirs.originals_dir, f"document.pdf"))
|
||||
shutil.copy(sample_file, os.path.join(self.dirs.originals_dir, "document.pdf"))
|
||||
shutil.copy(
|
||||
sample_file,
|
||||
os.path.join(self.dirs.originals_dir, f"document_01.pdf"),
|
||||
os.path.join(self.dirs.originals_dir, "document_01.pdf"),
|
||||
)
|
||||
|
||||
update_document_archive_file(doc2.pk)
|
||||
@@ -150,7 +150,7 @@ class TestDecryptDocuments(TestCase):
|
||||
"samples",
|
||||
"documents",
|
||||
"thumbnails",
|
||||
f"0000004.webp.gpg",
|
||||
"0000004.webp.gpg",
|
||||
),
|
||||
os.path.join(thumb_dir, f"{doc.id:07}.webp.gpg"),
|
||||
)
|
||||
|
@@ -8,6 +8,7 @@ from unittest import mock
|
||||
from zipfile import ZipFile
|
||||
|
||||
from django.core.management import call_command
|
||||
from django.core.management.base import CommandError
|
||||
from django.test import override_settings
|
||||
from django.test import TestCase
|
||||
from django.utils import timezone
|
||||
@@ -101,6 +102,10 @@ class TestExportImport(DirectoriesMixin, TestCase):
|
||||
use_filename_format=False,
|
||||
compare_checksums=False,
|
||||
delete=False,
|
||||
no_archive=False,
|
||||
no_thumbnail=False,
|
||||
split_manifest=False,
|
||||
use_folder_prefix=False,
|
||||
):
|
||||
args = ["document_exporter", self.target]
|
||||
if use_filename_format:
|
||||
@@ -109,6 +114,14 @@ class TestExportImport(DirectoriesMixin, TestCase):
|
||||
args += ["--compare-checksums"]
|
||||
if delete:
|
||||
args += ["--delete"]
|
||||
if no_archive:
|
||||
args += ["--no-archive"]
|
||||
if no_thumbnail:
|
||||
args += ["--no-thumbnail"]
|
||||
if split_manifest:
|
||||
args += ["--split-manifest"]
|
||||
if use_folder_prefix:
|
||||
args += ["--use-folder-prefix"]
|
||||
|
||||
call_command(*args)
|
||||
|
||||
@@ -438,3 +451,198 @@ class TestExportImport(DirectoriesMixin, TestCase):
|
||||
self.assertEqual(len(zip.namelist()), 14)
|
||||
self.assertIn("manifest.json", zip.namelist())
|
||||
self.assertIn("version.json", zip.namelist())
|
||||
|
||||
def test_export_target_not_exists(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Request to export documents to directory that doesn't exist
|
||||
WHEN:
|
||||
- Export command is called
|
||||
THEN:
|
||||
- Error is raised
|
||||
"""
|
||||
args = ["document_exporter", "/tmp/foo/bar"]
|
||||
|
||||
with self.assertRaises(CommandError) as e:
|
||||
|
||||
call_command(*args)
|
||||
|
||||
self.assertEqual("That path isn't a directory", str(e))
|
||||
|
||||
def test_export_target_exists_but_is_file(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Request to export documents to file instead of directory
|
||||
WHEN:
|
||||
- Export command is called
|
||||
THEN:
|
||||
- Error is raised
|
||||
"""
|
||||
|
||||
with tempfile.NamedTemporaryFile() as tmp_file:
|
||||
|
||||
args = ["document_exporter", tmp_file.name]
|
||||
|
||||
with self.assertRaises(CommandError) as e:
|
||||
|
||||
call_command(*args)
|
||||
|
||||
self.assertEqual("That path isn't a directory", str(e))
|
||||
|
||||
def test_export_target_not_writable(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Request to export documents to directory that's not writeable
|
||||
WHEN:
|
||||
- Export command is called
|
||||
THEN:
|
||||
- Error is raised
|
||||
"""
|
||||
with tempfile.TemporaryDirectory() as tmp_dir:
|
||||
|
||||
os.chmod(tmp_dir, 0o000)
|
||||
|
||||
args = ["document_exporter", tmp_dir]
|
||||
|
||||
with self.assertRaises(CommandError) as e:
|
||||
|
||||
call_command(*args)
|
||||
|
||||
self.assertEqual("That path doesn't appear to be writable", str(e))
|
||||
|
||||
def test_no_archive(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Request to export documents to directory
|
||||
WHEN:
|
||||
- Option no-archive is used
|
||||
THEN:
|
||||
- Manifest.json doesn't contain information about archive files
|
||||
- Documents can be imported again
|
||||
"""
|
||||
shutil.rmtree(os.path.join(self.dirs.media_dir, "documents"))
|
||||
shutil.copytree(
|
||||
os.path.join(os.path.dirname(__file__), "samples", "documents"),
|
||||
os.path.join(self.dirs.media_dir, "documents"),
|
||||
)
|
||||
|
||||
manifest = self._do_export()
|
||||
has_archive = False
|
||||
for element in manifest:
|
||||
if element["model"] == "documents.document":
|
||||
has_archive = (
|
||||
has_archive or document_exporter.EXPORTER_ARCHIVE_NAME in element
|
||||
)
|
||||
self.assertTrue(has_archive)
|
||||
|
||||
has_archive = False
|
||||
manifest = self._do_export(no_archive=True)
|
||||
for element in manifest:
|
||||
if element["model"] == "documents.document":
|
||||
has_archive = (
|
||||
has_archive or document_exporter.EXPORTER_ARCHIVE_NAME in element
|
||||
)
|
||||
self.assertFalse(has_archive)
|
||||
|
||||
with paperless_environment() as dirs:
|
||||
self.assertEqual(Document.objects.count(), 4)
|
||||
Document.objects.all().delete()
|
||||
self.assertEqual(Document.objects.count(), 0)
|
||||
call_command("document_importer", self.target)
|
||||
self.assertEqual(Document.objects.count(), 4)
|
||||
|
||||
def test_no_thumbnail(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Request to export documents to directory
|
||||
WHEN:
|
||||
- Option no-thumbnails is used
|
||||
THEN:
|
||||
- Manifest.json doesn't contain information about thumbnails
|
||||
- Documents can be imported again
|
||||
"""
|
||||
shutil.rmtree(os.path.join(self.dirs.media_dir, "documents"))
|
||||
shutil.copytree(
|
||||
os.path.join(os.path.dirname(__file__), "samples", "documents"),
|
||||
os.path.join(self.dirs.media_dir, "documents"),
|
||||
)
|
||||
|
||||
manifest = self._do_export()
|
||||
has_thumbnail = False
|
||||
for element in manifest:
|
||||
if element["model"] == "documents.document":
|
||||
has_thumbnail = (
|
||||
has_thumbnail
|
||||
or document_exporter.EXPORTER_THUMBNAIL_NAME in element
|
||||
)
|
||||
self.assertTrue(has_thumbnail)
|
||||
|
||||
has_thumbnail = False
|
||||
manifest = self._do_export(no_thumbnail=True)
|
||||
for element in manifest:
|
||||
if element["model"] == "documents.document":
|
||||
has_thumbnail = (
|
||||
has_thumbnail
|
||||
or document_exporter.EXPORTER_THUMBNAIL_NAME in element
|
||||
)
|
||||
self.assertFalse(has_thumbnail)
|
||||
|
||||
with paperless_environment() as dirs:
|
||||
self.assertEqual(Document.objects.count(), 4)
|
||||
Document.objects.all().delete()
|
||||
self.assertEqual(Document.objects.count(), 0)
|
||||
call_command("document_importer", self.target)
|
||||
self.assertEqual(Document.objects.count(), 4)
|
||||
|
||||
def test_split_manifest(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Request to export documents to directory
|
||||
WHEN:
|
||||
- Option split_manifest is used
|
||||
THEN:
|
||||
- Main manifest.json file doesn't contain information about documents
|
||||
- Documents can be imported again
|
||||
"""
|
||||
shutil.rmtree(os.path.join(self.dirs.media_dir, "documents"))
|
||||
shutil.copytree(
|
||||
os.path.join(os.path.dirname(__file__), "samples", "documents"),
|
||||
os.path.join(self.dirs.media_dir, "documents"),
|
||||
)
|
||||
|
||||
manifest = self._do_export(split_manifest=True)
|
||||
has_document = False
|
||||
for element in manifest:
|
||||
has_document = has_document or element["model"] == "documents.document"
|
||||
self.assertFalse(has_document)
|
||||
|
||||
with paperless_environment() as dirs:
|
||||
self.assertEqual(Document.objects.count(), 4)
|
||||
Document.objects.all().delete()
|
||||
self.assertEqual(Document.objects.count(), 0)
|
||||
call_command("document_importer", self.target)
|
||||
self.assertEqual(Document.objects.count(), 4)
|
||||
|
||||
def test_folder_prefix(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Request to export documents to directory
|
||||
WHEN:
|
||||
- Option use_folder_prefix is used
|
||||
THEN:
|
||||
- Documents can be imported again
|
||||
"""
|
||||
shutil.rmtree(os.path.join(self.dirs.media_dir, "documents"))
|
||||
shutil.copytree(
|
||||
os.path.join(os.path.dirname(__file__), "samples", "documents"),
|
||||
os.path.join(self.dirs.media_dir, "documents"),
|
||||
)
|
||||
|
||||
manifest = self._do_export(use_folder_prefix=True)
|
||||
|
||||
with paperless_environment() as dirs:
|
||||
self.assertEqual(Document.objects.count(), 4)
|
||||
Document.objects.all().delete()
|
||||
self.assertEqual(Document.objects.count(), 0)
|
||||
call_command("document_importer", self.target)
|
||||
self.assertEqual(Document.objects.count(), 4)
|
||||
|
@@ -5,10 +5,7 @@ from unittest import mock
|
||||
from django.core.management import call_command
|
||||
from django.test import TestCase
|
||||
from documents.management.commands.document_thumbnails import _process_document
|
||||
from documents.models import Correspondent
|
||||
from documents.models import Document
|
||||
from documents.models import DocumentType
|
||||
from documents.models import Tag
|
||||
from documents.tests.utils import DirectoriesMixin
|
||||
|
||||
|
||||
|
@@ -7,7 +7,6 @@ from typing import Union
|
||||
from unittest import mock
|
||||
|
||||
from django.test import override_settings
|
||||
from documents.tests.test_migration_archive_files import thumbnail_path
|
||||
from documents.tests.utils import TestMigrations
|
||||
|
||||
|
||||
|
@@ -1,14 +1,8 @@
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
from tempfile import TemporaryDirectory
|
||||
from unittest import mock
|
||||
|
||||
from django.test import override_settings
|
||||
from django.test import TestCase
|
||||
from documents.parsers import DocumentParser
|
||||
from documents.parsers import get_default_file_extension
|
||||
from documents.parsers import get_parser_class
|
||||
from documents.parsers import get_parser_class_for_mime_type
|
||||
from documents.parsers import get_supported_file_extensions
|
||||
from documents.parsers import is_file_ext_supported
|
||||
@@ -16,21 +10,18 @@ from paperless_tesseract.parsers import RasterisedDocumentParser
|
||||
from paperless_text.parsers import TextDocumentParser
|
||||
|
||||
|
||||
def fake_magic_from_file(file, mime=False):
|
||||
|
||||
if mime:
|
||||
if os.path.splitext(file)[1] == ".pdf":
|
||||
return "application/pdf"
|
||||
else:
|
||||
return "unknown"
|
||||
else:
|
||||
return "A verbose string that describes the contents of the file"
|
||||
|
||||
|
||||
@mock.patch("documents.parsers.magic.from_file", fake_magic_from_file)
|
||||
class TestParserDiscovery(TestCase):
|
||||
@mock.patch("documents.parsers.document_consumer_declaration.send")
|
||||
def test__get_parser_class_1_parser(self, m, *args):
|
||||
def test_get_parser_class_1_parser(self, m, *args):
|
||||
"""
|
||||
GIVEN:
|
||||
- Parser declared for a given mimetype
|
||||
WHEN:
|
||||
- Attempt to get parser for the mimetype
|
||||
THEN:
|
||||
- Declared parser class is returned
|
||||
"""
|
||||
|
||||
class DummyParser:
|
||||
pass
|
||||
|
||||
@@ -45,10 +36,20 @@ class TestParserDiscovery(TestCase):
|
||||
),
|
||||
)
|
||||
|
||||
self.assertEqual(get_parser_class("doc.pdf"), DummyParser)
|
||||
self.assertEqual(get_parser_class_for_mime_type("application/pdf"), DummyParser)
|
||||
|
||||
@mock.patch("documents.parsers.document_consumer_declaration.send")
|
||||
def test__get_parser_class_n_parsers(self, m, *args):
|
||||
def test_get_parser_class_n_parsers(self, m, *args):
|
||||
"""
|
||||
GIVEN:
|
||||
- Two parsers declared for a given mimetype
|
||||
- Second parser has a higher weight
|
||||
WHEN:
|
||||
- Attempt to get parser for the mimetype
|
||||
THEN:
|
||||
- Second parser class is returned
|
||||
"""
|
||||
|
||||
class DummyParser1:
|
||||
pass
|
||||
|
||||
@@ -74,30 +75,77 @@ class TestParserDiscovery(TestCase):
|
||||
),
|
||||
)
|
||||
|
||||
self.assertEqual(get_parser_class("doc.pdf"), DummyParser2)
|
||||
self.assertEqual(
|
||||
get_parser_class_for_mime_type("application/pdf"),
|
||||
DummyParser2,
|
||||
)
|
||||
|
||||
@mock.patch("documents.parsers.document_consumer_declaration.send")
|
||||
def test__get_parser_class_0_parsers(self, m, *args):
|
||||
def test_get_parser_class_0_parsers(self, m, *args):
|
||||
"""
|
||||
GIVEN:
|
||||
- No parsers are declared
|
||||
WHEN:
|
||||
- Attempt to get parser for the mimetype
|
||||
THEN:
|
||||
- No parser class is returned
|
||||
"""
|
||||
m.return_value = []
|
||||
with TemporaryDirectory() as tmpdir:
|
||||
self.assertIsNone(get_parser_class("doc.pdf"))
|
||||
self.assertIsNone(get_parser_class_for_mime_type("application/pdf"))
|
||||
|
||||
@mock.patch("documents.parsers.document_consumer_declaration.send")
|
||||
def test_get_parser_class_no_valid_parser(self, m, *args):
|
||||
"""
|
||||
GIVEN:
|
||||
- No parser declared for a given mimetype
|
||||
- Parser declared for a different mimetype
|
||||
WHEN:
|
||||
- Attempt to get parser for the given mimetype
|
||||
THEN:
|
||||
- No parser class is returned
|
||||
"""
|
||||
|
||||
def fake_get_thumbnail(self, path, mimetype, file_name):
|
||||
return os.path.join(os.path.dirname(__file__), "examples", "no-text.png")
|
||||
class DummyParser:
|
||||
pass
|
||||
|
||||
m.return_value = (
|
||||
(
|
||||
None,
|
||||
{
|
||||
"weight": 0,
|
||||
"parser": DummyParser,
|
||||
"mime_types": {"application/pdf": ".pdf"},
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
self.assertIsNone(get_parser_class_for_mime_type("image/tiff"))
|
||||
|
||||
|
||||
class TestParserAvailability(TestCase):
|
||||
def test_file_extensions(self):
|
||||
|
||||
for ext in [".pdf", ".jpe", ".jpg", ".jpeg", ".txt", ".csv"]:
|
||||
self.assertIn(ext, get_supported_file_extensions())
|
||||
self.assertEqual(get_default_file_extension("application/pdf"), ".pdf")
|
||||
self.assertEqual(get_default_file_extension("image/png"), ".png")
|
||||
self.assertEqual(get_default_file_extension("image/jpeg"), ".jpg")
|
||||
self.assertEqual(get_default_file_extension("text/plain"), ".txt")
|
||||
self.assertEqual(get_default_file_extension("text/csv"), ".csv")
|
||||
supported_mimes_and_exts = [
|
||||
("application/pdf", ".pdf"),
|
||||
("image/png", ".png"),
|
||||
("image/jpeg", ".jpg"),
|
||||
("image/tiff", ".tif"),
|
||||
("image/webp", ".webp"),
|
||||
("text/plain", ".txt"),
|
||||
("text/csv", ".csv"),
|
||||
]
|
||||
|
||||
supported_exts = get_supported_file_extensions()
|
||||
|
||||
for mime_type, ext in supported_mimes_and_exts:
|
||||
self.assertIn(ext, supported_exts)
|
||||
self.assertEqual(get_default_file_extension(mime_type), ext)
|
||||
|
||||
# Test no parser declared still returns a an extension
|
||||
self.assertEqual(get_default_file_extension("application/zip"), ".zip")
|
||||
|
||||
# Test invalid mimetype returns no extension
|
||||
self.assertEqual(get_default_file_extension("aasdasd/dgfgf"), "")
|
||||
|
||||
self.assertIsInstance(
|
||||
@@ -108,7 +156,7 @@ class TestParserAvailability(TestCase):
|
||||
get_parser_class_for_mime_type("text/plain")(logging_group=None),
|
||||
TextDocumentParser,
|
||||
)
|
||||
self.assertEqual(get_parser_class_for_mime_type("text/sdgsdf"), None)
|
||||
self.assertIsNone(get_parser_class_for_mime_type("text/sdgsdf"))
|
||||
|
||||
self.assertTrue(is_file_ext_supported(".pdf"))
|
||||
self.assertFalse(is_file_ext_supported(".hsdfh"))
|
||||
|
@@ -7,10 +7,12 @@ import urllib
|
||||
import uuid
|
||||
import zipfile
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from time import mktime
|
||||
from unicodedata import normalize
|
||||
from urllib.parse import quote
|
||||
|
||||
import pathvalidate
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
from django.db.models import Case
|
||||
@@ -29,6 +31,7 @@ from django.views.decorators.cache import cache_control
|
||||
from django.views.generic import TemplateView
|
||||
from django_filters.rest_framework import DjangoFilterBackend
|
||||
from documents.tasks import consume_file
|
||||
from langdetect import detect
|
||||
from packaging import version as packaging_version
|
||||
from paperless import version
|
||||
from paperless.db import GnuPG
|
||||
@@ -173,7 +176,7 @@ class TagViewSet(ModelViewSet):
|
||||
permission_classes = (IsAuthenticated,)
|
||||
filter_backends = (DjangoFilterBackend, OrderingFilter)
|
||||
filterset_class = TagFilterSet
|
||||
ordering_fields = ("name", "matching_algorithm", "match", "document_count")
|
||||
ordering_fields = ("color", "name", "matching_algorithm", "match", "document_count")
|
||||
|
||||
|
||||
class DocumentTypeViewSet(ModelViewSet):
|
||||
@@ -325,6 +328,13 @@ class DocumentViewSet(
|
||||
"original_filename": doc.original_filename,
|
||||
}
|
||||
|
||||
lang = "en"
|
||||
try:
|
||||
lang = detect(doc.content)
|
||||
except Exception:
|
||||
pass
|
||||
meta["lang"] = lang
|
||||
|
||||
if doc.has_archive_version:
|
||||
meta["archive_size"] = self.get_filesize(doc.archive_path)
|
||||
meta["archive_metadata"] = self.get_metadata(
|
||||
@@ -458,10 +468,19 @@ class DocumentViewSet(
|
||||
class SearchResultSerializer(DocumentSerializer):
|
||||
def to_representation(self, instance):
|
||||
doc = Document.objects.get(id=instance["id"])
|
||||
commentTerm = instance.results.q.subqueries[0]
|
||||
comments = ",".join(
|
||||
[
|
||||
str(c.comment)
|
||||
for c in Comment.objects.filter(document=instance["id"])
|
||||
if commentTerm.text in c.comment
|
||||
],
|
||||
)
|
||||
r = super().to_representation(doc)
|
||||
r["__search_hit__"] = {
|
||||
"score": instance.score,
|
||||
"highlights": instance.highlights("content", text=doc.content)
|
||||
"highlights": instance.highlights("content", text=doc.content),
|
||||
"comment_highlights": instance.highlights("content", text=comments)
|
||||
if doc
|
||||
else None,
|
||||
"rank": instance.rank,
|
||||
@@ -606,20 +625,19 @@ class PostDocumentView(GenericAPIView):
|
||||
|
||||
os.makedirs(settings.SCRATCH_DIR, exist_ok=True)
|
||||
|
||||
with tempfile.NamedTemporaryFile(
|
||||
prefix="paperless-upload-",
|
||||
dir=settings.SCRATCH_DIR,
|
||||
delete=False,
|
||||
) as f:
|
||||
f.write(doc_data)
|
||||
os.utime(f.name, times=(t, t))
|
||||
temp_filename = f.name
|
||||
temp_file_path = Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR)) / Path(
|
||||
pathvalidate.sanitize_filename(doc_name),
|
||||
)
|
||||
|
||||
temp_file_path.write_bytes(doc_data)
|
||||
|
||||
os.utime(temp_file_path, times=(t, t))
|
||||
|
||||
task_id = str(uuid.uuid4())
|
||||
|
||||
async_task = consume_file.delay(
|
||||
temp_filename,
|
||||
override_filename=doc_name,
|
||||
# Paths are not JSON friendly
|
||||
str(temp_file_path),
|
||||
override_title=title,
|
||||
override_correspondent_id=correspondent_id,
|
||||
override_document_type_id=document_type_id,
|
||||
|
File diff suppressed because it is too large.
@@ -1,878 +0,0 @@
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: paperless-ngx\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2022-11-09 21:50+0000\n"
|
||||
"PO-Revision-Date: 2022-12-09 07:39\n"
|
||||
"Last-Translator: \n"
|
||||
"Language-Team: Arabic\n"
|
||||
"Language: ar_SA\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
"Plural-Forms: nplurals=6; plural=(n==0 ? 0 : n==1 ? 1 : n==2 ? 2 : n%100>=3 && n%100<=10 ? 3 : n%100>=11 && n%100<=99 ? 4 : 5);\n"
|
||||
"X-Crowdin-Project: paperless-ngx\n"
|
||||
"X-Crowdin-Project-ID: 500308\n"
|
||||
"X-Crowdin-Language: ar\n"
|
||||
"X-Crowdin-File: /dev/src/locale/en_US/LC_MESSAGES/django.po\n"
|
||||
"X-Crowdin-File-ID: 14\n"
|
||||
|
||||
#: documents/apps.py:9
|
||||
msgid "Documents"
|
||||
msgstr "المستندات"
|
||||
|
||||
#: documents/models.py:32
|
||||
msgid "Any word"
|
||||
msgstr "أي كلمة"
|
||||
|
||||
#: documents/models.py:33
|
||||
msgid "All words"
|
||||
msgstr "كل الكلمات"
|
||||
|
||||
#: documents/models.py:34
|
||||
msgid "Exact match"
|
||||
msgstr "تطابق تام"
|
||||
|
||||
#: documents/models.py:35
|
||||
msgid "Regular expression"
|
||||
msgstr "التعابير النظامية"
|
||||
|
||||
#: documents/models.py:36
|
||||
msgid "Fuzzy word"
|
||||
msgstr "كلمة غامضة"
|
||||
|
||||
#: documents/models.py:37
|
||||
msgid "Automatic"
|
||||
msgstr "تلقائي"
|
||||
|
||||
#: documents/models.py:40 documents/models.py:367 paperless_mail/models.py:16
|
||||
#: paperless_mail/models.py:80
|
||||
msgid "name"
|
||||
msgstr "اسم"
|
||||
|
||||
#: documents/models.py:42
|
||||
msgid "match"
|
||||
msgstr "تطابق"
|
||||
|
||||
#: documents/models.py:45
|
||||
msgid "matching algorithm"
|
||||
msgstr "خوارزمية مطابقة"
|
||||
|
||||
#: documents/models.py:50
|
||||
msgid "is insensitive"
|
||||
msgstr "غير حساس"
|
||||
|
||||
#: documents/models.py:63 documents/models.py:118
|
||||
msgid "correspondent"
|
||||
msgstr "مراسل"
|
||||
|
||||
#: documents/models.py:64
|
||||
msgid "correspondents"
|
||||
msgstr "مراسلون"
|
||||
|
||||
#: documents/models.py:69
|
||||
msgid "color"
|
||||
msgstr "لون"
|
||||
|
||||
#: documents/models.py:72
|
||||
msgid "is inbox tag"
|
||||
msgstr "علامة علبة الوارد"
|
||||
|
||||
#: documents/models.py:75
|
||||
msgid "Marks this tag as an inbox tag: All newly consumed documents will be tagged with inbox tags."
|
||||
msgstr "ضع علامة على هذه السمة كعلامة علبة الوارد: سيتم وضع علامة على جميع المستندات المستهلكة حديثا مع علامات صندوق الواردات."
|
||||
|
||||
#: documents/models.py:81
msgid "tag"
msgstr "علامة"

#: documents/models.py:82 documents/models.py:156
msgid "tags"
msgstr "علامات"

#: documents/models.py:87 documents/models.py:138
msgid "document type"
msgstr "نوع المستند"

#: documents/models.py:88
msgid "document types"
msgstr "أنواع المستندات"

#: documents/models.py:93
msgid "path"
msgstr "مسار"

#: documents/models.py:99 documents/models.py:127
msgid "storage path"
msgstr "مسار التخزين"

#: documents/models.py:100
msgid "storage paths"
msgstr "مسارات التخزين"

#: documents/models.py:108
msgid "Unencrypted"
msgstr "دون تشفير"

#: documents/models.py:109
msgid "Encrypted with GNU Privacy Guard"
msgstr "مشفر باستخدام حارس خصوصية غنو"

#: documents/models.py:130
msgid "title"
msgstr "عنوان"

#: documents/models.py:142 documents/models.py:611
msgid "content"
msgstr "محتوى"

#: documents/models.py:145
msgid "The raw, text-only data of the document. This field is primarily used for searching."
msgstr "الخام, فقط النص من المستند. يستخدم هذا الحقل أساسا للبحث."

#: documents/models.py:150
msgid "mime type"
msgstr "MIME type"

#: documents/models.py:160
msgid "checksum"
msgstr "بصمة الملف"

#: documents/models.py:164
msgid "The checksum of the original document."
msgstr "بصمة الملف للمستند الأصلي."

#: documents/models.py:168
msgid "archive checksum"
msgstr "بصمة الملف للربيدة"

#: documents/models.py:173
msgid "The checksum of the archived document."
msgstr "بصمة الملف للمستند الربيدة."

#: documents/models.py:176 documents/models.py:348 documents/models.py:617
msgid "created"
msgstr "أُنشئ"

#: documents/models.py:179
msgid "modified"
msgstr "مُعدّل"

#: documents/models.py:186
msgid "storage type"
msgstr "نوع التخزين"

#: documents/models.py:194
msgid "added"
msgstr "أضيف"

#: documents/models.py:201
msgid "filename"
msgstr "اسم الملف"

#: documents/models.py:207
msgid "Current filename in storage"
msgstr "اسم الملف الحالي في التخزين"

#: documents/models.py:211
msgid "archive filename"
msgstr "اسم الربيدة"

#: documents/models.py:217
msgid "Current archive filename in storage"
msgstr "اسم ملف الربيدة الحالي في التخزين"

#: documents/models.py:221
msgid "original filename"
msgstr "اسم الملف الأصلي"

#: documents/models.py:227
msgid "The original name of the file when it was uploaded"
msgstr "اسم الملف الأصلي عند تحميله"

#: documents/models.py:231
msgid "archive serial number"
msgstr "الرقم التسلسلي للربيدة"

#: documents/models.py:237
msgid "The position of this document in your physical document archive."
msgstr "موقع هذا المستند في ربيدة المستند الفيزيائي."

#: documents/models.py:243 documents/models.py:628
msgid "document"
msgstr "مستند"

#: documents/models.py:244
msgid "documents"
msgstr "المستندات"

#: documents/models.py:331
msgid "debug"
msgstr "تصحيح الأخطاء"

#: documents/models.py:332
msgid "information"
msgstr "معلومات"

#: documents/models.py:333
msgid "warning"
msgstr "تحذير"

#: documents/models.py:334
msgid "error"
msgstr "خطأ"

#: documents/models.py:335
msgid "critical"
msgstr "الحرجة"

#: documents/models.py:338
msgid "group"
msgstr "مجموعة"

#: documents/models.py:340
msgid "message"
msgstr "رسالة"

#: documents/models.py:343
msgid "level"
msgstr "المستوى"

#: documents/models.py:352
msgid "log"
msgstr "سجل"

#: documents/models.py:353
msgid "logs"
msgstr "السجلات"

#: documents/models.py:363 documents/models.py:419
msgid "saved view"
msgstr "العرض المحفوظ"

#: documents/models.py:364
msgid "saved views"
msgstr "العروض المحفوظة"

#: documents/models.py:366 documents/models.py:637
msgid "user"
msgstr "المستخدم"

#: documents/models.py:370
msgid "show on dashboard"
msgstr "عرض على لوحة التحكم"

#: documents/models.py:373
msgid "show in sidebar"
msgstr "عرض على الشريط الجانبي"

#: documents/models.py:377
msgid "sort field"
msgstr "فرز الحقل"

#: documents/models.py:382
msgid "sort reverse"
msgstr "فرز بالعكس"

#: documents/models.py:387
msgid "title contains"
msgstr "العنوان يحتوي"

#: documents/models.py:388
msgid "content contains"
msgstr "المحتوى يحتوي"

#: documents/models.py:389
msgid "ASN is"
msgstr "ASN هو"

#: documents/models.py:390
msgid "correspondent is"
msgstr "المراسل هو"

#: documents/models.py:391
msgid "document type is"
msgstr "نوع المستند"

#: documents/models.py:392
msgid "is in inbox"
msgstr "موجود في علبة الوارد"

#: documents/models.py:393
msgid "has tag"
msgstr "لديه علامة"

#: documents/models.py:394
msgid "has any tag"
msgstr "لديه أي وسم"

#: documents/models.py:395
msgid "created before"
msgstr "أنشئت قبل"

#: documents/models.py:396
msgid "created after"
msgstr "أنشئت بعد"

#: documents/models.py:397
msgid "created year is"
msgstr "أنشئت سنة"

#: documents/models.py:398
msgid "created month is"
msgstr "أنشئت شهر"

#: documents/models.py:399
msgid "created day is"
msgstr "أنشئت يوم"

#: documents/models.py:400
msgid "added before"
msgstr "أضيف قبل"

#: documents/models.py:401
msgid "added after"
msgstr "أضيف بعد"

#: documents/models.py:402
msgid "modified before"
msgstr "عُدِّل قبل"

#: documents/models.py:403
msgid "modified after"
msgstr "عُدِّل بعد"

#: documents/models.py:404
msgid "does not have tag"
msgstr "ليس لديه علامة"

#: documents/models.py:405
msgid "does not have ASN"
msgstr "ليس لديه ASN"

#: documents/models.py:406
msgid "title or content contains"
msgstr "العنوان أو المحتوى يحتوي"

#: documents/models.py:407
msgid "fulltext query"
msgstr "استعلام كامل النص"

#: documents/models.py:408
msgid "more like this"
msgstr "أخرى مثلها"

#: documents/models.py:409
msgid "has tags in"
msgstr "لديه علامات في"

#: documents/models.py:410
msgid "ASN greater than"
msgstr "ASN أكبر من"

#: documents/models.py:411
msgid "ASN less than"
msgstr "ASN أقل من"

#: documents/models.py:412
msgid "storage path is"
msgstr "مسار التخزين"

#: documents/models.py:422
msgid "rule type"
msgstr "نوع القاعدة"

#: documents/models.py:424
msgid "value"
msgstr "قيمة"

#: documents/models.py:427
msgid "filter rule"
msgstr "تصفية القاعدة"

#: documents/models.py:428
msgid "filter rules"
msgstr "تصفية القواعد"

#: documents/models.py:536
msgid "Task ID"
msgstr "الرمز التعريفي للمهمة"

#: documents/models.py:537
msgid "Celery ID for the Task that was run"
msgstr "رمز المعرف للمهمة التي كانت تعمل"

#: documents/models.py:542
msgid "Acknowledged"
msgstr "مُعترف"

#: documents/models.py:543
msgid "If the task is acknowledged via the frontend or API"
msgstr "إذا عرف على المهمة عبر الواجهة الأمامية أو API"

#: documents/models.py:549 documents/models.py:556
msgid "Task Name"
msgstr "اسم المهمة"

#: documents/models.py:550
msgid "Name of the file which the Task was run for"
msgstr "اسم الملف الذي وكل بالمهمة"

#: documents/models.py:557
msgid "Name of the Task which was run"
msgstr "اسم المهمة التي كانت تعمل"

#: documents/models.py:562
msgid "Task Positional Arguments"
msgstr "مهمة قيمة المعاملات الموضعية"

#: documents/models.py:564
msgid "JSON representation of the positional arguments used with the task"
msgstr "تمثيل JSON لقيمة المعاملات الموضعية المستخدمة في المهمة"

#: documents/models.py:569
msgid "Task Named Arguments"
msgstr "مهمة قيمة المعامل المسمى"

#: documents/models.py:571
msgid "JSON representation of the named arguments used with the task"
msgstr "تمثيل JSON لقيمة المعاملات المسمية المستخدمة في المهمة"

#: documents/models.py:578
msgid "Task State"
msgstr "حالة المهمة"

#: documents/models.py:579
msgid "Current state of the task being run"
msgstr "الحالة الراهنة للمهمة قيد العمل"

#: documents/models.py:584
msgid "Created DateTime"
msgstr "تاريخ و وقت الإنشاء"

#: documents/models.py:585
msgid "Datetime field when the task result was created in UTC"
msgstr "حقل التاريخ والوقت عند إنشاء نتيجة المهمة في UTC"

#: documents/models.py:590
msgid "Started DateTime"
msgstr "تاريخ و وقت البداية"

#: documents/models.py:591
msgid "Datetime field when the task was started in UTC"
msgstr "حقل التاريخ والوقت عند بدء المهمة في UTC"

#: documents/models.py:596
msgid "Completed DateTime"
msgstr "التاريخ و الوقت المكتمل"

#: documents/models.py:597
msgid "Datetime field when the task was completed in UTC"
msgstr "حقل التاريخ و الوقت عند اكتمال المهمة في UTC"

#: documents/models.py:602
msgid "Result Data"
msgstr "نتائج البيانات"

#: documents/models.py:604
msgid "The data returned by the task"
msgstr "البيانات المستردة من قبل المهمة"

#: documents/models.py:613
msgid "Comment for the document"
msgstr "التعليق على المستند"

#: documents/models.py:642
msgid "comment"
msgstr "تعليق"

#: documents/models.py:643
msgid "comments"
msgstr "التعليقات"

#: documents/serialisers.py:72
#, python-format
msgid "Invalid regular expression: %(error)s"
msgstr "التعبير النظامي خاطىء: %(error)s"

#: documents/serialisers.py:193
msgid "Invalid color."
msgstr "لون خاطئ."

#: documents/serialisers.py:518
#, python-format
msgid "File type %(type)s not supported"
msgstr "نوع الملف %(type)s غير مدعوم"

#: documents/serialisers.py:599
msgid "Invalid variable detected."
msgstr "اكتشاف متغير خاطئ."

#: documents/templates/index.html:78
msgid "Paperless-ngx is loading..."
msgstr "تحميل Paperless-ngx..."

#: documents/templates/index.html:79
msgid "Still here?! Hmm, something might be wrong."
msgstr "مازلت هنا؟! همم، قد يكون هناك خطأ ما."

#: documents/templates/index.html:79
msgid "Here's a link to the docs."
msgstr "إليك رابط المستندات."

#: documents/templates/registration/logged_out.html:14
msgid "Paperless-ngx signed out"
msgstr "تسجيل الخروج Paperless-ngx"

#: documents/templates/registration/logged_out.html:59
msgid "You have been successfully logged out. Bye!"
msgstr "تم تسجيل خروجك بنجاح. مع السلامة!"

#: documents/templates/registration/logged_out.html:60
msgid "Sign in again"
msgstr "تسجيل الدخول مرة أخرى"

#: documents/templates/registration/login.html:15
msgid "Paperless-ngx sign in"
msgstr "تسجيل الدخول Paperless-ngx"

#: documents/templates/registration/login.html:61
msgid "Please sign in."
msgstr "الرجاء تسجيل الدخول."

#: documents/templates/registration/login.html:64
msgid "Your username and password didn't match. Please try again."
msgstr "اسم المستخدم وكلمة المرور غير متطابقين. حاول مرة أخرى."

#: documents/templates/registration/login.html:67
msgid "Username"
msgstr "اسم المستخدم"

#: documents/templates/registration/login.html:68
msgid "Password"
msgstr "كلمة المرور"

#: documents/templates/registration/login.html:73
msgid "Sign in"
msgstr "تسجيل الدخول"

#: paperless/settings.py:378
msgid "English (US)"
msgstr "الإنجليزية (الولايات المتحدة)"

#: paperless/settings.py:379
msgid "Belarusian"
msgstr "البيلاروسية"

#: paperless/settings.py:380
msgid "Czech"
msgstr "التشيكية"

#: paperless/settings.py:381
msgid "Danish"
msgstr "الدانماركية"

#: paperless/settings.py:382
msgid "German"
msgstr "الألمانية"

#: paperless/settings.py:383
msgid "English (GB)"
msgstr "الإنجليزية (المملكة المتحدة)"

#: paperless/settings.py:384
msgid "Spanish"
msgstr "الإسبانية"

#: paperless/settings.py:385
msgid "French"
msgstr "الفرنسية"

#: paperless/settings.py:386
msgid "Italian"
msgstr "الإيطالية"

#: paperless/settings.py:387
msgid "Luxembourgish"
msgstr "اللوكسمبرجية"

#: paperless/settings.py:388
msgid "Dutch"
msgstr "الهولندية"

#: paperless/settings.py:389
msgid "Polish"
msgstr "البولندية"

#: paperless/settings.py:390
msgid "Portuguese (Brazil)"
msgstr "البرتغالية (البرازيل)"

#: paperless/settings.py:391
msgid "Portuguese"
msgstr "البرتغالية"

#: paperless/settings.py:392
msgid "Romanian"
msgstr "الرومانية"

#: paperless/settings.py:393
msgid "Russian"
msgstr "الروسية"

#: paperless/settings.py:394
msgid "Slovenian"
msgstr "السلوفانية"

#: paperless/settings.py:395
msgid "Serbian"
msgstr "الصربية"

#: paperless/settings.py:396
msgid "Swedish"
msgstr "السويدية"

#: paperless/settings.py:397
msgid "Turkish"
msgstr "التركية"

#: paperless/settings.py:398
msgid "Chinese Simplified"
msgstr "الصينية المبسطة"

#: paperless/urls.py:161
msgid "Paperless-ngx administration"
msgstr "Paperless-ngx الإدارة"

#: paperless_mail/admin.py:29
msgid "Authentication"
msgstr "المصادقة"

#: paperless_mail/admin.py:30
msgid "Advanced settings"
msgstr "الإعدادات المتقدمة"

#: paperless_mail/admin.py:47
msgid "Filter"
msgstr "تصفية"

#: paperless_mail/admin.py:50
msgid "Paperless will only process mails that match ALL of the filters given below."
msgstr "Paperless يقوم فقط بمعالجة البُرُد التي تتطابق جميع التصفيات المقدمة أدناه."

#: paperless_mail/admin.py:64
msgid "Actions"
msgstr "إجراءات"

#: paperless_mail/admin.py:67
msgid "The action applied to the mail. This action is only performed when documents were consumed from the mail. Mails without attachments will remain entirely untouched."
msgstr "الإجراء المطبق على البريد. ينفذ هذا الإجراء فقط عندما تستهلك المستندات من البريد. ستبقى البُرٌد التي لا تحتوي على مرفقات ستبقى كما هي."

#: paperless_mail/admin.py:75
msgid "Metadata"
msgstr "البيانات الوصفية"

#: paperless_mail/admin.py:78
msgid "Assign metadata to documents consumed from this rule automatically. If you do not assign tags, types or correspondents here, paperless will still process all matching rules that you have defined."
msgstr "تعيين بيانات التعريف للمستندات المستهلكة من هذه القاعدة تِلْقائيًا. إذا لم تعين العلامات أو الأنواع أو المراسلين هنا، سيظل paperless يعالج جميع قواعد المطابقة التي حددتها."

#: paperless_mail/apps.py:8
msgid "Paperless mail"
msgstr "بريد paperless"

#: paperless_mail/models.py:8
msgid "mail account"
msgstr "حساب البريد"

#: paperless_mail/models.py:9
msgid "mail accounts"
msgstr "حساب البُرُد"

#: paperless_mail/models.py:12
msgid "No encryption"
msgstr "دون تشفير"

#: paperless_mail/models.py:13
msgid "Use SSL"
msgstr "استخدم SSL"

#: paperless_mail/models.py:14
msgid "Use STARTTLS"
msgstr "استخدم STARTTLS"

#: paperless_mail/models.py:18
msgid "IMAP server"
msgstr "خادم IMAP"

#: paperless_mail/models.py:21
msgid "IMAP port"
msgstr "منفذ IMAP"

#: paperless_mail/models.py:25
msgid "This is usually 143 for unencrypted and STARTTLS connections, and 993 for SSL connections."
msgstr "عادة ما يكون 143 للغير مشفر و اتصالات STARTTLS و 993 للاتصالات SSL."

#: paperless_mail/models.py:31
msgid "IMAP security"
msgstr "أمان IMAP"

#: paperless_mail/models.py:36
msgid "username"
msgstr "اسم المستخدم"

#: paperless_mail/models.py:38
msgid "password"
msgstr "كلمة المرور"

#: paperless_mail/models.py:41
msgid "character set"
msgstr "نوع ترميز المحارف"

#: paperless_mail/models.py:45
msgid "The character set to use when communicating with the mail server, such as 'UTF-8' or 'US-ASCII'."
msgstr "ترميز المحارف المستخدمة عند التواصل مع خادم البريد، مثل 'UTF-8' أو 'US-ASCII'."

#: paperless_mail/models.py:56
msgid "mail rule"
msgstr "قاعدة البريد"

#: paperless_mail/models.py:57
msgid "mail rules"
msgstr "قواعد البريد"

#: paperless_mail/models.py:60
msgid "Only process attachments."
msgstr "معالجة المرفقات فقط."

#: paperless_mail/models.py:61
msgid "Process all files, including 'inline' attachments."
msgstr "معالجة جميع الملفات، بما في ذلك المرفقات المضمنة."

#: paperless_mail/models.py:64
msgid "Delete"
msgstr "حذف"

#: paperless_mail/models.py:65
msgid "Move to specified folder"
msgstr "نقل إلى مجلد محدد"

#: paperless_mail/models.py:66
msgid "Mark as read, don't process read mails"
msgstr "وضع علامة كمقروءة، لا تعالج الرسائل المقروءة"

#: paperless_mail/models.py:67
msgid "Flag the mail, don't process flagged mails"
msgstr "علم الرسالة، لا تعالج الرسائل المعلمة"

#: paperless_mail/models.py:68
msgid "Tag the mail with specified tag, don't process tagged mails"
msgstr "علم الرسالة بعلامة محددة، لا تعالج الرسائل المُعلمة"

#: paperless_mail/models.py:71
msgid "Use subject as title"
msgstr "استخدم الموضوع كعنوان"

#: paperless_mail/models.py:72
msgid "Use attachment filename as title"
msgstr "استخدم اسم الملف المرفق كعنوان"

#: paperless_mail/models.py:75
msgid "Do not assign a correspondent"
msgstr "لا تعيّن مراسل"

#: paperless_mail/models.py:76
msgid "Use mail address"
msgstr "استخدم عنوان البريد"

#: paperless_mail/models.py:77
msgid "Use name (or mail address if not available)"
msgstr "استخدم الاسم (أو عنوان البريد إذا لم يكن متاحا)"

#: paperless_mail/models.py:78
msgid "Use correspondent selected below"
msgstr "استخدم المراسل المحدد أدناه"

#: paperless_mail/models.py:82
msgid "order"
msgstr "الطلب"

#: paperless_mail/models.py:88
msgid "account"
msgstr "الحساب"

#: paperless_mail/models.py:92
msgid "folder"
msgstr "مجلد"

#: paperless_mail/models.py:96
msgid "Subfolders must be separated by a delimiter, often a dot ('.') or slash ('/'), but it varies by mail server."
msgstr "يجب فصل المجلدات الفرعية باستخدام محدد، غالبا نقطة ('.') أو خط مائل ('/')، لكنها تختلف حسب خادم البريد."

#: paperless_mail/models.py:102
msgid "filter from"
msgstr "تصفية من"

#: paperless_mail/models.py:108
msgid "filter subject"
msgstr "تصفية الموضوع"

#: paperless_mail/models.py:114
msgid "filter body"
msgstr "تصفية الجسم"

#: paperless_mail/models.py:121
msgid "filter attachment filename"
msgstr "تصفية اسم الملف المرفق"

#: paperless_mail/models.py:126
msgid "Only consume documents which entirely match this filename if specified. Wildcards such as *.pdf or *invoice* are allowed. Case insensitive."
msgstr "فقط المستندات التي تتطابق تماما مع اسم هذا الملف إذا تم تحديدها. المحارف البديلة مثل *.pdf أو *الفواتير* مسموح بها. لأنها غير حساسة."

#: paperless_mail/models.py:133
msgid "maximum age"
msgstr "أقصى عُمُر"

#: paperless_mail/models.py:135
msgid "Specified in days."
msgstr "محدد بالأيام."

#: paperless_mail/models.py:139
msgid "attachment type"
msgstr "نوع المرفق"

#: paperless_mail/models.py:143
msgid "Inline attachments include embedded images, so it's best to combine this option with a filename filter."
msgstr "تتضمن المرفقات المضمنة صورا مضمنة، لذا من الأفضل دمج هذا الخِيار مع تصفية اسم الملف."

#: paperless_mail/models.py:149
msgid "action"
msgstr "إجراء"

#: paperless_mail/models.py:155
msgid "action parameter"
msgstr "إجراء المعامل"

#: paperless_mail/models.py:160
msgid "Additional parameter for the action selected above, i.e., the target folder of the move to folder action. Subfolders must be separated by dots."
msgstr "معامل إضافي للإجراء المحدد أعلاه، مثال: المجلد المستهدف للانتقال إلى إجراء مجلد. يجب أن تكون المجلدات الفرعية مفصولة بنقاط."

#: paperless_mail/models.py:168
msgid "assign title from"
msgstr "تعيين العنوان من"

#: paperless_mail/models.py:176
msgid "assign this tag"
msgstr "تعيين هذه العلامة"

#: paperless_mail/models.py:184
msgid "assign this document type"
msgstr "تعيين نوع هذا المستند"

#: paperless_mail/models.py:188
msgid "assign correspondent from"
msgstr "تعيين مراسل من"

#: paperless_mail/models.py:198
msgid "assign this correspondent"
msgstr "تعيين هذا المراسل"