Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2026-02-11 23:59:31 -06:00)

Compare commits: chore/swit ... feature-zx (6 commits)
| Author | SHA1 | Date |
| --- | --- | --- |
|  | d08b1e6c5c |  |
|  | 981036feac |  |
|  | 5067ab3fc4 |  |
|  | 15d18c06ed |  |
|  | 21e9eaa4db |  |
|  | e8e027abc0 |  |
@@ -64,8 +64,6 @@ ARG RUNTIME_PACKAGES="\
 libmagic1 \
 media-types \
 zlib1g \
-# Barcode splitter
-libzbar0 \
 poppler-utils \
 htop \
 sudo"
.github/dependabot.yml (vendored, 1 change)
@@ -69,7 +69,6 @@ updates:
 patterns:
 - "ocrmypdf"
 - "pdf2image"
-- "pyzbar"
 - "zxing-cpp"
 - "tika-client"
 - "gotenberg-client"
.github/workflows/ci-backend.yml (vendored, 2 changes)
@@ -55,7 +55,7 @@ jobs:
 run: |
 sudo apt-get update -qq
 sudo apt-get install -qq --no-install-recommends \
-unpaper tesseract-ocr imagemagick ghostscript libzbar0 poppler-utils
+unpaper tesseract-ocr imagemagick ghostscript poppler-utils
 - name: Configure ImageMagick
 run: |
 sudo cp docker/rootfs/etc/ImageMagick-6/paperless-policy.xml /etc/ImageMagick-6/policy.xml
.github/workflows/ci-docs.yml (vendored, 4 changes)
@@ -26,8 +26,8 @@ permissions:
 pages: write
 id-token: write
 env:
-DEFAULT_UV_VERSION: "0.9.x"
-DEFAULT_PYTHON_VERSION: "3.11"
+DEFAULT_UV_VERSION: "0.10.x"
+DEFAULT_PYTHON_VERSION: "3.12"
 jobs:
 build:
 name: Build Documentation
.github/workflows/ci-release.yml (vendored, 4 changes)
@@ -8,8 +8,8 @@ concurrency:
 group: release-${{ github.ref }}
 cancel-in-progress: false
 env:
-DEFAULT_UV_VERSION: "0.9.x"
-DEFAULT_PYTHON_VERSION: "3.11"
+DEFAULT_UV_VERSION: "0.10.x"
+DEFAULT_PYTHON_VERSION: "3.12"
 jobs:
 wait-for-docker:
 name: Wait for Docker Build
@@ -20,7 +20,6 @@ src/documents/admin.py:0: error: Skipping analyzing "auditlog.models": module is
 src/documents/admin.py:0: error: Skipping analyzing "treenode.admin": module is installed, but missing library stubs or py.typed marker [import-untyped]
 src/documents/barcodes.py:0: error: "Image" has no attribute "filename" [attr-defined]
 src/documents/barcodes.py:0: error: Cannot find implementation or library stub for module named "zxingcpp" [import-not-found]
-src/documents/barcodes.py:0: error: Skipping analyzing "pyzbar": module is installed, but missing library stubs or py.typed marker [import-untyped]
 src/documents/bulk_download.py:0: error: Return type "None" of "add_document" incompatible with return type "Never" in supertype "BulkArchiveStrategy" [override]
 src/documents/bulk_download.py:0: error: Return type "None" of "add_document" incompatible with return type "Never" in supertype "BulkArchiveStrategy" [override]
 src/documents/bulk_download.py:0: error: Return type "None" of "add_document" incompatible with return type "Never" in supertype "BulkArchiveStrategy" [override]
@@ -277,6 +276,8 @@ src/documents/management/commands/document_exporter.py:0: error: Skipping analyz
 src/documents/management/commands/document_exporter.py:0: error: Skipping analyzing "auditlog.models": module is installed, but missing library stubs or py.typed marker [import-untyped]
 src/documents/management/commands/document_fuzzy_match.py:0: error: Function is missing a type annotation [no-untyped-def]
 src/documents/management/commands/document_fuzzy_match.py:0: error: Function is missing a type annotation [no-untyped-def]
+src/documents/management/commands/document_importer.py:0: error: Argument 1 to "create_source_path_directory" has incompatible type "Path | None"; expected "Path" [arg-type]
+src/documents/management/commands/document_importer.py:0: error: Argument 2 to "copy_file_with_basic_stats" has incompatible type "Path | None"; expected "Path | str" [arg-type]
 src/documents/management/commands/document_importer.py:0: error: Attribute "version" already defined on line 0 [no-redef]
 src/documents/management/commands/document_importer.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
 src/documents/management/commands/document_importer.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
@@ -154,8 +154,6 @@ ARG RUNTIME_PACKAGES="\
 libmagic1 \
 media-types \
 zlib1g \
-# Barcode splitter
-libzbar0 \
 poppler-utils"

 # Install basic runtime packages.
@@ -774,7 +774,6 @@ At this time, the library utilized for detection of barcodes supports the follow
 - QR Code
 - SQ Code

-You may check for updates on the [zbar library homepage](https://github.com/mchehab/zbar).
 For usage in Paperless, the type of barcode does not matter, only the contents of it.

 For how to enable barcode usage, see [the configuration](configuration.md#barcodes).
@@ -1222,14 +1222,6 @@ using Python's `re.match()`, which anchors at the start of the filename.

 The default ignores are `[.stfolder, .stversions, .localized, @eaDir, .Spotlight-V100, .Trashes, __MACOSX]` and cannot be overridden.

-#### [`PAPERLESS_CONSUMER_BARCODE_SCANNER=<string>`](#PAPERLESS_CONSUMER_BARCODE_SCANNER) {#PAPERLESS_CONSUMER_BARCODE_SCANNER}
-
-: Sets the barcode scanner used for barcode functionality.
-
-    Currently, "PYZBAR" (the default) or "ZXING" might be selected.
-    If you have problems that your Barcodes/QR-Codes are not detected
-    (especially with bad scan quality and/or small codes), try the other one.
-
 #### [`PAPERLESS_PRE_CONSUME_SCRIPT=<filename>`](#PAPERLESS_PRE_CONSUME_SCRIPT) {#PAPERLESS_PRE_CONSUME_SCRIPT}

 : After some initial validation, Paperless can trigger an arbitrary
@@ -23,3 +23,28 @@ separating the directory ignore from the file ignore.
 Document and thumbnail encryption is no longer supported. This was previously deprecated in [paperless-ng 0.9.3](https://github.com/paperless-ngx/paperless-ngx/blob/dev/docs/changelog.md#paperless-ng-093)

 Users must decrypt their document using the `decrypt_documents` command before upgrading.
+
+## Barcode Scanner Changes
+
+Support for [pyzbar](https://github.com/NaturalHistoryMuseum/pyzbar) has been removed. The underlying libzbar library has
+seen no updates in 16 years and is largely unmaintained, and the pyzbar Python wrapper last saw a release in March 2022. In
+practice, pyzbar struggled with barcode detection reliability, particularly on skewed, low-contrast, or partially
+obscured barcodes. [zxing-cpp](https://github.com/zxing-cpp/zxing-cpp) is actively maintained, significantly more
+reliable at finding barcodes, and now ships pre-built wheels for both x86_64 and arm64, removing the need to build the library.
+
+The `CONSUMER_BARCODE_SCANNER` setting has been removed. zxing-cpp is now the only backend.
+
+### Summary
+
+| Old Setting                | New Setting | Notes                             |
+| -------------------------- | ----------- | --------------------------------- |
+| `CONSUMER_BARCODE_SCANNER` | _Removed_   | zxing-cpp is now the only backend |
+
+### Action Required
+
+- If you were already using `CONSUMER_BARCODE_SCANNER=ZXING`, simply remove the setting.
+- If you had `CONSUMER_BARCODE_SCANNER=PYZBAR` or were using the default, no functional changes are needed beyond
+  removing the setting. zxing-cpp supports all the same barcode formats and you should see improved detection
+  reliability.
+- The `libzbar0` / `libzbar-dev` system packages are no longer required and can be removed from any custom Docker
+  images or host installations.
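For orientation, decoding a page image with the zxing-cpp Python bindings looks roughly like the sketch below. This is a minimal illustration, not code from this comparison; it assumes the `zxingcpp.read_barcodes()` entry point of recent 2.x/3.x releases accepts a PIL image, so verify the exact names against the installed version.

```python
# Minimal sketch: decode every barcode in an image with zxing-cpp.
# Assumption: recent zxing-cpp bindings expose read_barcodes() and
# accept PIL images directly; check your installed version.
import zxingcpp
from PIL import Image


def read_barcodes(path: str) -> list[str]:
    """Return the decoded text of every barcode found in the image."""
    image = Image.open(path)
    return [result.text for result in zxingcpp.read_barcodes(image)]


if __name__ == "__main__":
    for value in read_barcodes("page-0.png"):
        print(value)
```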
@@ -207,13 +207,12 @@ are released, dependency support is confirmed, etc.
 - `libpq-dev` for PostgreSQL
 - `libmagic-dev` for mime type detection
 - `mariadb-client` for MariaDB compile time
-- `libzbar0` for barcode detection
 - `poppler-utils` for barcode detection

 Use this list for your preferred package management:

 ```
-python3 python3-pip python3-dev imagemagick fonts-liberation gnupg libpq-dev default-libmysqlclient-dev pkg-config libmagic-dev libzbar0 poppler-utils
+python3 python3-pip python3-dev imagemagick fonts-liberation gnupg libpq-dev default-libmysqlclient-dev pkg-config libmagic-dev poppler-utils
 ```

 These dependencies are required for OCRmyPDF, which is used for text
@@ -68,7 +68,6 @@ dependencies = [
 "python-gnupg~=0.5.4",
 "python-ipware~=3.0.0",
 "python-magic~=0.4.27",
-"pyzbar~=0.1.9",
 "rapidfuzz~=3.14.0",
 "redis[hiredis]~=5.2.1",
 "regex>=2025.9.18",
@@ -77,10 +76,11 @@ dependencies = [
 "setproctitle~=1.3.4",
 "tika-client~=0.10.0",
 "torch~=2.10.0",
+"tqdm~=4.67.1",
 "watchfiles>=1.1.1",
 "whitenoise~=6.11",
 "whoosh-reloaded>=2.7.5",
-"zxing-cpp~=2.3.0",
+"zxing-cpp~=3.0.0",
 ]

 optional-dependencies.mariadb = [
@@ -149,6 +149,7 @@ typing = [
 "types-pytz",
 "types-redis",
 "types-setuptools",
+"types-tqdm",
 ]

 [tool.uv]
@@ -170,10 +171,6 @@ psycopg-c = [
 { url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-trixie-3.3.0/psycopg_c-3.3.0-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
 { url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-trixie-3.3.0/psycopg_c-3.3.0-cp312-cp312-linux_aarch64.whl", marker = "sys_platform == 'linux' and platform_machine == 'aarch64' and python_version == '3.12'" },
 ]
-zxing-cpp = [
-{ url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
-{ url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_aarch64.whl", marker = "sys_platform == 'linux' and platform_machine == 'aarch64' and python_version == '3.12'" },
-]

 torch = [
 { index = "pytorch-cpu" },
@@ -28,8 +28,6 @@ from documents.utils import maybe_override_pixel_limit
 from paperless.config import BarcodeConfig

 if TYPE_CHECKING:
-    from collections.abc import Callable
-
     from PIL import Image

 logger = logging.getLogger("paperless.barcodes")
@@ -262,26 +260,6 @@ class BarcodePlugin(ConsumeTaskPlugin):

         return barcodes

-    @staticmethod
-    def read_barcodes_pyzbar(image: Image.Image) -> list[str]:
-        barcodes = []
-
-        from pyzbar import pyzbar
-
-        # Decode the barcode image
-        detected_barcodes = pyzbar.decode(image)
-
-        # Traverse through all the detected barcodes in image
-        for barcode in detected_barcodes:
-            if barcode.data:
-                decoded_barcode = barcode.data.decode("utf-8")
-                barcodes.append(decoded_barcode)
-                logger.debug(
-                    f"Barcode of type {barcode.type} found: {decoded_barcode}",
-                )
-
-        return barcodes
-
     def detect(self) -> None:
         """
         Scan all pages of the PDF as images, updating barcodes and the pages
@@ -294,14 +272,6 @@ class BarcodePlugin(ConsumeTaskPlugin):
         # No op if not a TIFF
         self.convert_from_tiff_to_pdf()

-        # Choose the library for reading
-        if settings.CONSUMER_BARCODE_SCANNER == "PYZBAR":
-            reader: Callable[[Image.Image], list[str]] = self.read_barcodes_pyzbar
-            logger.debug("Scanning for barcodes using PYZBAR")
-        else:
-            reader = self.read_barcodes_zxing
-            logger.debug("Scanning for barcodes using ZXING")
-
         try:
             # Read number of pages from pdf
             with Pdf.open(self.pdf_file) as pdf:
@@ -349,7 +319,7 @@ class BarcodePlugin(ConsumeTaskPlugin):
                 )

                 # Detect barcodes
-                for barcode_value in reader(page):
+                for barcode_value in self.read_barcodes_zxing(page):
                     self.barcodes.append(
                         Barcode(current_page_number, barcode_value, self.settings),
                     )
@@ -1,14 +1,10 @@
 import logging
 import multiprocessing

+import tqdm
 from django import db
 from django.conf import settings
 from django.core.management.base import BaseCommand
-from rich.progress import BarColumn
-from rich.progress import Progress
-from rich.progress import TaskProgressColumn
-from rich.progress import TextColumn
-from rich.progress import TimeRemainingColumn

 from documents.management.commands.mixins import MultiProcessMixin
 from documents.management.commands.mixins import ProgressBarMixin
@@ -79,24 +75,20 @@ class Command(MultiProcessMixin, ProgressBarMixin, BaseCommand):
         try:
             logging.getLogger().handlers[0].level = logging.ERROR

-            with Progress(
-                TextColumn("[progress.description]{task.description}"),
-                BarColumn(),
-                TaskProgressColumn(),
-                TimeRemainingColumn(),
-                disable=self.no_progress_bar,
-            ) as progress:
-                task = progress.add_task("Archiving documents", total=len(document_ids))
             if self.process_count == 1:
                 for doc_id in document_ids:
                     update_document_content_maybe_archive_file(doc_id)
-                    progress.update(task, advance=1)
             else: # pragma: no cover
                 with multiprocessing.Pool(self.process_count) as pool:
-                    for _ in pool.imap_unordered(
-                        update_document_content_maybe_archive_file,
-                        document_ids,
-                    ):
-                        progress.update(task, advance=1)
+                    list(
+                        tqdm.tqdm(
+                            pool.imap_unordered(
+                                update_document_content_maybe_archive_file,
+                                document_ids,
+                            ),
+                            total=len(document_ids),
+                            disable=self.no_progress_bar,
+                        ),
+                    )
         except KeyboardInterrupt:
             self.stdout.write(self.style.NOTICE("Aborting..."))
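The same progress pattern recurs in several of the management-command diffs that follow: the lazy `pool.imap_unordered()` iterator is wrapped in `tqdm.tqdm` so the bar advances as results arrive, and `list()` drains it to force completion. A standalone sketch of the idea, with a hypothetical worker and inputs that are not taken from the repository:

```python
# Standalone sketch of the tqdm + imap_unordered pattern (hypothetical
# worker and inputs, for reference only).
import multiprocessing

import tqdm


def _square(value: int) -> int:
    # Stand-in for the real per-document worker.
    return value * value


if __name__ == "__main__":
    inputs = list(range(100))
    with multiprocessing.Pool(processes=4) as pool:
        results = list(
            tqdm.tqdm(
                pool.imap_unordered(_square, inputs),
                total=len(inputs),  # needed because the iterator has no len()
            ),
        )
    print(sum(results))
```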
@@ -6,6 +6,7 @@ import tempfile
 from pathlib import Path
 from typing import TYPE_CHECKING

+import tqdm
 from allauth.mfa.models import Authenticator
 from allauth.socialaccount.models import SocialAccount
 from allauth.socialaccount.models import SocialApp
@@ -23,11 +24,6 @@ from django.utils import timezone
 from filelock import FileLock
 from guardian.models import GroupObjectPermission
 from guardian.models import UserObjectPermission
-from rich.progress import BarColumn
-from rich.progress import Progress
-from rich.progress import TaskProgressColumn
-from rich.progress import TextColumn
-from rich.progress import TimeRemainingColumn

 if TYPE_CHECKING:
     from django.db.models import QuerySet
@@ -313,18 +309,11 @@ class Command(CryptMixin, BaseCommand):
         document_manifest = manifest_dict["documents"]

         # 3. Export files from each document
-        with Progress(
-            TextColumn("[progress.description]{task.description}"),
-            BarColumn(),
-            TaskProgressColumn(),
-            TimeRemainingColumn(),
-            disable=self.no_progress_bar,
-        ) as progress:
-            task = progress.add_task(
-                "Exporting documents",
-                total=len(document_manifest),
-            )
-            for index, document_dict in enumerate(document_manifest):
+        for index, document_dict in tqdm.tqdm(
+            enumerate(document_manifest),
+            total=len(document_manifest),
+            disable=self.no_progress_bar,
+        ):
             document = document_map[document_dict["pk"]]

             # 3.1. generate a unique filename
@@ -345,9 +334,7 @@ class Command(CryptMixin, BaseCommand):
             )

             if self.split_manifest:
-                manifest_name = base_name.with_name(
-                    f"{base_name.stem}-manifest.json",
-                )
+                manifest_name = base_name.with_name(f"{base_name.stem}-manifest.json")
                 if self.use_folder_prefix:
                     manifest_name = Path("json") / manifest_name
                 manifest_name = (self.target / manifest_name).resolve()
@@ -370,7 +357,6 @@ class Command(CryptMixin, BaseCommand):
                 content,
                 manifest_name,
             )
-            progress.update(task, advance=1)

             # These were exported already
             if self.split_manifest:
@@ -3,13 +3,9 @@ import multiprocessing
 from typing import Final

 import rapidfuzz
+import tqdm
 from django.core.management import BaseCommand
 from django.core.management import CommandError
-from rich.progress import BarColumn
-from rich.progress import Progress
-from rich.progress import TaskProgressColumn
-from rich.progress import TextColumn
-from rich.progress import TimeRemainingColumn

 from documents.management.commands.mixins import MultiProcessMixin
 from documents.management.commands.mixins import ProgressBarMixin
@@ -110,25 +106,19 @@ class Command(MultiProcessMixin, ProgressBarMixin, BaseCommand):
             work_pkgs.append(_WorkPackage(first_doc, second_doc))

         # Don't spin up a pool of 1 process
-        with Progress(
-            TextColumn("[progress.description]{task.description}"),
-            BarColumn(),
-            TaskProgressColumn(),
-            TimeRemainingColumn(),
-            disable=self.no_progress_bar,
-        ) as progress:
-            task = progress.add_task("Fuzzy matching documents", total=len(work_pkgs))
-            if self.process_count == 1:
-                results = []
-                for work in work_pkgs:
-                    results.append(_process_and_match(work))
-                    progress.update(task, advance=1)
-            else: # pragma: no cover
-                with multiprocessing.Pool(processes=self.process_count) as pool:
-                    results = []
-                    for result in pool.imap_unordered(_process_and_match, work_pkgs):
-                        results.append(result)
-                        progress.update(task, advance=1)
+        if self.process_count == 1:
+            results = []
+            for work in tqdm.tqdm(work_pkgs, disable=self.no_progress_bar):
+                results.append(_process_and_match(work))
+        else: # pragma: no cover
+            with multiprocessing.Pool(processes=self.process_count) as pool:
+                results = list(
+                    tqdm.tqdm(
+                        pool.imap_unordered(_process_and_match, work_pkgs),
+                        total=len(work_pkgs),
+                        disable=self.no_progress_bar,
+                    ),
+                )

         # Check results
         messages = []
@@ -5,10 +5,10 @@ import tempfile
 from collections.abc import Generator
 from contextlib import contextmanager
 from pathlib import Path
-from typing import TYPE_CHECKING
 from zipfile import ZipFile
 from zipfile import is_zipfile

+import tqdm
 from django.conf import settings
 from django.contrib.auth.models import Permission
 from django.contrib.auth.models import User
@@ -23,11 +23,6 @@ from django.db import transaction
 from django.db.models.signals import m2m_changed
 from django.db.models.signals import post_save
 from filelock import FileLock
-from rich.progress import BarColumn
-from rich.progress import Progress
-from rich.progress import TaskProgressColumn
-from rich.progress import TextColumn
-from rich.progress import TimeRemainingColumn

 from documents.file_handling import create_source_path_directory
 from documents.management.commands.mixins import CryptMixin
@@ -370,18 +365,7 @@ class Command(CryptMixin, BaseCommand):
             filter(lambda r: r["model"] == "documents.document", self.manifest),
         )

-        with Progress(
-            TextColumn("[progress.description]{task.description}"),
-            BarColumn(),
-            TaskProgressColumn(),
-            TimeRemainingColumn(),
-            disable=self.no_progress_bar,
-        ) as progress:
-            task = progress.add_task(
-                "Importing documents",
-                total=len(manifest_documents),
-            )
-            for record in manifest_documents:
+        for record in tqdm.tqdm(manifest_documents, disable=self.no_progress_bar):
             document = Document.objects.get(pk=record["pk"])

             doc_file = record[EXPORTER_FILE_NAME]
@@ -426,8 +410,6 @@ class Command(CryptMixin, BaseCommand):
             )

             if archive_path:
-                if TYPE_CHECKING:
-                    assert document.archive_path is not None
                 create_source_path_directory(document.archive_path)
                 # TODO: this assumes that the export is valid and
                 # archive_filename is present on all documents with
@@ -435,7 +417,6 @@ class Command(CryptMixin, BaseCommand):
                 copy_file_with_basic_stats(archive_path, document.archive_path)

             document.save()
-            progress.update(task, advance=1)

     def decrypt_secret_fields(self) -> None:
         """
@@ -1,12 +1,8 @@
 import logging

+import tqdm
 from django.core.management.base import BaseCommand
 from django.db.models.signals import post_save
-from rich.progress import BarColumn
-from rich.progress import Progress
-from rich.progress import TaskProgressColumn
-from rich.progress import TextColumn
-from rich.progress import TimeRemainingColumn

 from documents.management.commands.mixins import ProgressBarMixin
 from documents.models import Document
@@ -22,15 +18,8 @@ class Command(ProgressBarMixin, BaseCommand):
         self.handle_progress_bar_mixin(**options)
         logging.getLogger().handlers[0].level = logging.ERROR

-        documents = Document.objects.all()
-        with Progress(
-            TextColumn("[progress.description]{task.description}"),
-            BarColumn(),
-            TaskProgressColumn(),
-            TimeRemainingColumn(),
-            disable=self.no_progress_bar,
-        ) as progress:
-            task = progress.add_task("Renaming documents", total=documents.count())
-            for document in documents:
-                post_save.send(Document, instance=document, created=False)
-                progress.update(task, advance=1)
+        for document in tqdm.tqdm(
+            Document.objects.all(),
+            disable=self.no_progress_bar,
+        ):
+            post_save.send(Document, instance=document, created=False)
@@ -1,11 +1,7 @@
 import logging

+import tqdm
 from django.core.management.base import BaseCommand
-from rich.progress import BarColumn
-from rich.progress import Progress
-from rich.progress import TaskProgressColumn
-from rich.progress import TextColumn
-from rich.progress import TimeRemainingColumn

 from documents.classifier import load_classifier
 from documents.management.commands.mixins import ProgressBarMixin
@@ -88,15 +84,7 @@ class Command(ProgressBarMixin, BaseCommand):

         classifier = load_classifier()

-        with Progress(
-            TextColumn("[progress.description]{task.description}"),
-            BarColumn(),
-            TaskProgressColumn(),
-            TimeRemainingColumn(),
-            disable=self.no_progress_bar,
-        ) as progress:
-            task = progress.add_task("Retagging documents", total=documents.count())
-            for document in documents:
+        for document in tqdm.tqdm(documents, disable=self.no_progress_bar):
             if options["correspondent"]:
                 set_correspondent(
                     sender=None,
@@ -146,4 +134,3 @@ class Command(ProgressBarMixin, BaseCommand):
                     stdout=self.stdout,
                     style_func=self.style,
                 )
-                progress.update(task, advance=1)
@@ -2,13 +2,9 @@ import logging
 import multiprocessing
 import shutil

+import tqdm
 from django import db
 from django.core.management.base import BaseCommand
-from rich.progress import BarColumn
-from rich.progress import Progress
-from rich.progress import TaskProgressColumn
-from rich.progress import TextColumn
-from rich.progress import TimeRemainingColumn

 from documents.management.commands.mixins import MultiProcessMixin
 from documents.management.commands.mixins import ProgressBarMixin
@@ -74,19 +70,15 @@ class Command(MultiProcessMixin, ProgressBarMixin, BaseCommand):
         # with postgres.
         db.connections.close_all()

-        with Progress(
-            TextColumn("[progress.description]{task.description}"),
-            BarColumn(),
-            TaskProgressColumn(),
-            TimeRemainingColumn(),
-            disable=self.no_progress_bar,
-        ) as progress:
-            task = progress.add_task("Generating thumbnails", total=len(ids))
-            if self.process_count == 1:
-                for doc_id in ids:
-                    _process_document(doc_id)
-                    progress.update(task, advance=1)
-            else: # pragma: no cover
-                with multiprocessing.Pool(processes=self.process_count) as pool:
-                    for _ in pool.imap_unordered(_process_document, ids):
-                        progress.update(task, advance=1)
+        if self.process_count == 1:
+            for doc_id in ids:
+                _process_document(doc_id)
+        else: # pragma: no cover
+            with multiprocessing.Pool(processes=self.process_count) as pool:
+                list(
+                    tqdm.tqdm(
+                        pool.imap_unordered(_process_document, ids),
+                        total=len(ids),
+                        disable=self.no_progress_bar,
+                    ),
+                )
@@ -1,12 +1,7 @@
 from auditlog.models import LogEntry
 from django.core.management.base import BaseCommand
 from django.db import transaction
-from rich.console import Console
-from rich.progress import BarColumn
-from rich.progress import Progress
-from rich.progress import TaskProgressColumn
-from rich.progress import TextColumn
-from rich.progress import TimeRemainingColumn
+from tqdm import tqdm

 from documents.management.commands.mixins import ProgressBarMixin

@@ -23,22 +18,8 @@ class Command(BaseCommand, ProgressBarMixin):

     def handle(self, **options):
         self.handle_progress_bar_mixin(**options)
-        console = Console()
         with transaction.atomic():
-            log_entries = LogEntry.objects.all()
-            with Progress(
-                TextColumn("[progress.description]{task.description}"),
-                BarColumn(),
-                TaskProgressColumn(),
-                TimeRemainingColumn(),
-                console=console,
-                disable=self.no_progress_bar,
-            ) as progress:
-                task = progress.add_task(
-                    "Pruning audit logs",
-                    total=log_entries.count(),
-                )
-                for log_entry in log_entries:
+            for log_entry in tqdm(LogEntry.objects.all(), disable=self.no_progress_bar):
                 model_class = log_entry.content_type.model_class()
                 # use global_objects for SoftDeleteModel
                 objects = (
@@ -51,9 +32,8 @@ class Command(BaseCommand, ProgressBarMixin):
                     and not objects.filter(pk=log_entry.object_id).exists()
                 ):
                     log_entry.delete()
-                    console.print(
+                    tqdm.write(
                         self.style.NOTICE(
                             f"Deleted audit log entry for {model_class.__name__} #{log_entry.object_id}",
                         ),
                     )
-                    progress.update(task, advance=1)
src/documents/migrations/0011_optimize_integer_field_sizes.py (new file, 227 lines)
@@ -0,0 +1,227 @@
+# Generated by Django 5.2.11 on 2026-02-09 16:37
+
+import django.core.validators
+from django.db import migrations
+from django.db import models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("documents", "0010_alter_document_content_length"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="correspondent",
+            name="matching_algorithm",
+            field=models.PositiveSmallIntegerField(
+                choices=[
+                    (0, "None"),
+                    (1, "Any word"),
+                    (2, "All words"),
+                    (3, "Exact match"),
+                    (4, "Regular expression"),
+                    (5, "Fuzzy word"),
+                    (6, "Automatic"),
+                ],
+                default=1,
+                verbose_name="matching algorithm",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="documenttype",
+            name="matching_algorithm",
+            field=models.PositiveSmallIntegerField(
+                choices=[
+                    (0, "None"),
+                    (1, "Any word"),
+                    (2, "All words"),
+                    (3, "Exact match"),
+                    (4, "Regular expression"),
+                    (5, "Fuzzy word"),
+                    (6, "Automatic"),
+                ],
+                default=1,
+                verbose_name="matching algorithm",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="savedviewfilterrule",
+            name="rule_type",
+            field=models.PositiveSmallIntegerField(
+                choices=[
+                    (0, "title contains"),
+                    (1, "content contains"),
+                    (2, "ASN is"),
+                    (3, "correspondent is"),
+                    (4, "document type is"),
+                    (5, "is in inbox"),
+                    (6, "has tag"),
+                    (7, "has any tag"),
+                    (8, "created before"),
+                    (9, "created after"),
+                    (10, "created year is"),
+                    (11, "created month is"),
+                    (12, "created day is"),
+                    (13, "added before"),
+                    (14, "added after"),
+                    (15, "modified before"),
+                    (16, "modified after"),
+                    (17, "does not have tag"),
+                    (18, "does not have ASN"),
+                    (19, "title or content contains"),
+                    (20, "fulltext query"),
+                    (21, "more like this"),
+                    (22, "has tags in"),
+                    (23, "ASN greater than"),
+                    (24, "ASN less than"),
+                    (25, "storage path is"),
+                    (26, "has correspondent in"),
+                    (27, "does not have correspondent in"),
+                    (28, "has document type in"),
+                    (29, "does not have document type in"),
+                    (30, "has storage path in"),
+                    (31, "does not have storage path in"),
+                    (32, "owner is"),
+                    (33, "has owner in"),
+                    (34, "does not have owner"),
+                    (35, "does not have owner in"),
+                    (36, "has custom field value"),
+                    (37, "is shared by me"),
+                    (38, "has custom fields"),
+                    (39, "has custom field in"),
+                    (40, "does not have custom field in"),
+                    (41, "does not have custom field"),
+                    (42, "custom fields query"),
+                    (43, "created to"),
+                    (44, "created from"),
+                    (45, "added to"),
+                    (46, "added from"),
+                    (47, "mime type is"),
+                ],
+                verbose_name="rule type",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="storagepath",
+            name="matching_algorithm",
+            field=models.PositiveSmallIntegerField(
+                choices=[
+                    (0, "None"),
+                    (1, "Any word"),
+                    (2, "All words"),
+                    (3, "Exact match"),
+                    (4, "Regular expression"),
+                    (5, "Fuzzy word"),
+                    (6, "Automatic"),
+                ],
+                default=1,
+                verbose_name="matching algorithm",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="tag",
+            name="matching_algorithm",
+            field=models.PositiveSmallIntegerField(
+                choices=[
+                    (0, "None"),
+                    (1, "Any word"),
+                    (2, "All words"),
+                    (3, "Exact match"),
+                    (4, "Regular expression"),
+                    (5, "Fuzzy word"),
+                    (6, "Automatic"),
+                ],
+                default=1,
+                verbose_name="matching algorithm",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="workflow",
+            name="order",
+            field=models.SmallIntegerField(default=0, verbose_name="order"),
+        ),
+        migrations.AlterField(
+            model_name="workflowaction",
+            name="order",
+            field=models.PositiveSmallIntegerField(default=0, verbose_name="order"),
+        ),
+        migrations.AlterField(
+            model_name="workflowaction",
+            name="type",
+            field=models.PositiveSmallIntegerField(
+                choices=[
+                    (1, "Assignment"),
+                    (2, "Removal"),
+                    (3, "Email"),
+                    (4, "Webhook"),
+                    (5, "Password removal"),
+                ],
+                default=1,
+                verbose_name="Workflow Action Type",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="workflowrun",
+            name="type",
+            field=models.PositiveSmallIntegerField(
+                choices=[
+                    (1, "Consumption Started"),
+                    (2, "Document Added"),
+                    (3, "Document Updated"),
+                    (4, "Scheduled"),
+                ],
+                null=True,
+                verbose_name="workflow trigger type",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="workflowtrigger",
+            name="matching_algorithm",
+            field=models.PositiveSmallIntegerField(
+                choices=[
+                    (0, "None"),
+                    (1, "Any word"),
+                    (2, "All words"),
+                    (3, "Exact match"),
+                    (4, "Regular expression"),
+                    (5, "Fuzzy word"),
+                ],
+                default=0,
+                verbose_name="matching algorithm",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="workflowtrigger",
+            name="schedule_offset_days",
+            field=models.SmallIntegerField(
+                default=0,
+                help_text="The number of days to offset the schedule trigger by.",
+                verbose_name="schedule offset days",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="workflowtrigger",
+            name="schedule_recurring_interval_days",
+            field=models.PositiveSmallIntegerField(
+                default=1,
+                help_text="The number of days between recurring schedule triggers.",
+                validators=[django.core.validators.MinValueValidator(1)],
+                verbose_name="schedule recurring delay in days",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="workflowtrigger",
+            name="type",
+            field=models.PositiveSmallIntegerField(
+                choices=[
+                    (1, "Consumption Started"),
+                    (2, "Document Added"),
+                    (3, "Document Updated"),
+                    (4, "Scheduled"),
+                ],
+                default=1,
+                verbose_name="Workflow Trigger Type",
+            ),
+        ),
+    ]
@@ -67,7 +67,7 @@ class MatchingModel(ModelWithOwner):

     match = models.CharField(_("match"), max_length=256, blank=True)

-    matching_algorithm = models.PositiveIntegerField(
+    matching_algorithm = models.PositiveSmallIntegerField(
         _("matching algorithm"),
         choices=MATCHING_ALGORITHMS,
         default=MATCH_ANY,
@@ -547,7 +547,7 @@ class SavedViewFilterRule(models.Model):
         verbose_name=_("saved view"),
     )

-    rule_type = models.PositiveIntegerField(_("rule type"), choices=RULE_TYPES)
+    rule_type = models.PositiveSmallIntegerField(_("rule type"), choices=RULE_TYPES)

     value = models.CharField(_("value"), max_length=255, blank=True, null=True)

@@ -1102,7 +1102,7 @@ class WorkflowTrigger(models.Model):
         MODIFIED = "modified", _("Modified")
         CUSTOM_FIELD = "custom_field", _("Custom Field")

-    type = models.PositiveIntegerField(
+    type = models.PositiveSmallIntegerField(
         _("Workflow Trigger Type"),
         choices=WorkflowTriggerType.choices,
         default=WorkflowTriggerType.CONSUMPTION,
@@ -1148,7 +1148,7 @@ class WorkflowTrigger(models.Model):

     match = models.CharField(_("match"), max_length=256, blank=True)

-    matching_algorithm = models.PositiveIntegerField(
+    matching_algorithm = models.PositiveSmallIntegerField(
         _("matching algorithm"),
         choices=WorkflowTriggerMatching.choices,
         default=WorkflowTriggerMatching.NONE,
@@ -1249,7 +1249,7 @@ class WorkflowTrigger(models.Model):
         help_text=_("JSON-encoded custom field query expression."),
     )

-    schedule_offset_days = models.IntegerField(
+    schedule_offset_days = models.SmallIntegerField(
         _("schedule offset days"),
         default=0,
         help_text=_(
@@ -1265,7 +1265,7 @@ class WorkflowTrigger(models.Model):
         ),
     )

-    schedule_recurring_interval_days = models.PositiveIntegerField(
+    schedule_recurring_interval_days = models.PositiveSmallIntegerField(
         _("schedule recurring delay in days"),
         default=1,
         validators=[MinValueValidator(1)],
@@ -1410,13 +1410,13 @@ class WorkflowAction(models.Model):
             _("Password removal"),
         )

-    type = models.PositiveIntegerField(
+    type = models.PositiveSmallIntegerField(
         _("Workflow Action Type"),
         choices=WorkflowActionType.choices,
         default=WorkflowActionType.ASSIGNMENT,
     )

-    order = models.PositiveIntegerField(_("order"), default=0)
+    order = models.PositiveSmallIntegerField(_("order"), default=0)

     assign_title = models.TextField(
         _("assign title"),
@@ -1658,7 +1658,7 @@ class WorkflowAction(models.Model):
 class Workflow(models.Model):
     name = models.CharField(_("name"), max_length=256, unique=True)

-    order = models.IntegerField(_("order"), default=0)
+    order = models.SmallIntegerField(_("order"), default=0)

     triggers = models.ManyToManyField(
         WorkflowTrigger,
@@ -1688,7 +1688,7 @@ class WorkflowRun(SoftDeleteModel):
         verbose_name=_("workflow"),
     )

-    type = models.PositiveIntegerField(
+    type = models.PositiveSmallIntegerField(
         _("workflow trigger type"),
         choices=WorkflowTrigger.WorkflowTriggerType.choices,
         null=True,
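For context on this batch of model and migration changes: Django's `SmallIntegerField`/`PositiveSmallIntegerField` map to a 2-byte `smallint` column on most backends (roughly -32768..32767 and 0..32767), versus the 4-byte column behind `IntegerField`/`PositiveIntegerField`, which is more than enough for enum-style choices and ordering values. A hypothetical illustration of the pattern, not taken from this diff:

```python
# Hypothetical model showing enum-style choices stored in a 2-byte
# smallint column instead of a 4-byte integer.
from django.db import models


class Ticket(models.Model):
    class Priority(models.IntegerChoices):
        LOW = 1, "Low"
        NORMAL = 2, "Normal"
        HIGH = 3, "High"

    # PositiveSmallIntegerField covers 0..32767, plenty for a handful
    # of choices, and halves the storage of PositiveIntegerField.
    priority = models.PositiveSmallIntegerField(
        choices=Priority.choices,
        default=Priority.NORMAL,
    )
```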
@@ -8,11 +8,7 @@ from typing import Final
 from celery import states
 from django.conf import settings
 from django.utils import timezone
-from rich.progress import BarColumn
-from rich.progress import Progress
-from rich.progress import TaskProgressColumn
-from rich.progress import TextColumn
-from rich.progress import TimeRemainingColumn
+from tqdm import tqdm

 from documents.models import Document
 from documents.models import PaperlessTask
@@ -96,19 +92,7 @@ def check_sanity(*, progress=False, scheduled=True) -> SanityCheckMessages:
     if logo_file in present_files:
         present_files.remove(logo_file)

-    documents = Document.global_objects.all()
-    with Progress(
-        TextColumn("[progress.description]{task.description}"),
-        BarColumn(),
-        TaskProgressColumn(),
-        TimeRemainingColumn(),
-        disable=not progress,
-    ) as progress_bar:
-        task = progress_bar.add_task(
-            "Checking document sanity",
-            total=documents.count(),
-        )
-        for doc in documents:
+    for doc in tqdm(Document.global_objects.all(), disable=not progress):
         # Check sanity of the thumbnail
         thumbnail_path: Final[Path] = Path(doc.thumbnail_path).resolve()
         if not thumbnail_path.exists() or not thumbnail_path.is_file():
@@ -119,10 +103,7 @@ def check_sanity(*, progress=False, scheduled=True) -> SanityCheckMessages:
         try:
             _ = thumbnail_path.read_bytes()
         except OSError as e:
-            messages.error(
-                doc.pk,
-                f"Cannot read thumbnail file of document: {e}",
-            )
+            messages.error(doc.pk, f"Cannot read thumbnail file of document: {e}")

         # Check sanity of the original file
         # TODO: extract method
@@ -135,10 +116,7 @@ def check_sanity(*, progress=False, scheduled=True) -> SanityCheckMessages:
         try:
             checksum = hashlib.md5(source_path.read_bytes()).hexdigest()
         except OSError as e:
-            messages.error(
-                doc.pk,
-                f"Cannot read original file of document: {e}",
-            )
+            messages.error(doc.pk, f"Cannot read original file of document: {e}")
         else:
             if checksum != doc.checksum:
                 messages.error(
@@ -161,10 +139,7 @@ def check_sanity(*, progress=False, scheduled=True) -> SanityCheckMessages:
         elif doc.has_archive_version:
             archive_path: Final[Path] = Path(doc.archive_path).resolve()
             if not archive_path.exists() or not archive_path.is_file():
-                messages.error(
-                    doc.pk,
-                    "Archived version of document does not exist.",
-                )
+                messages.error(doc.pk, "Archived version of document does not exist.")
             else:
                 if archive_path in present_files:
                     present_files.remove(archive_path)
@@ -188,8 +163,6 @@ def check_sanity(*, progress=False, scheduled=True) -> SanityCheckMessages:
         if not doc.content:
             messages.info(doc.pk, "Document contains no OCR data")

-            progress_bar.update(task, advance=1)
-
     for extra_file in present_files:
         messages.warning(None, f"Orphaned file in media dir: {extra_file}")

@@ -8,6 +8,7 @@ from pathlib import Path
 from tempfile import TemporaryDirectory
 from tempfile import mkstemp

+import tqdm
 from celery import Task
 from celery import shared_task
 from celery import states
@@ -18,11 +19,6 @@ from django.db import transaction
 from django.db.models.signals import post_save
 from django.utils import timezone
 from filelock import FileLock
-from rich.progress import BarColumn
-from rich.progress import Progress
-from rich.progress import TaskProgressColumn
-from rich.progress import TextColumn
-from rich.progress import TimeRemainingColumn
 from whoosh.writing import AsyncWriter

 from documents import index
@@ -87,20 +83,9 @@ def index_reindex(*, progress_bar_disable=False) -> None:

     ix = index.open_index(recreate=True)

-    with (
-        AsyncWriter(ix) as writer,
-        Progress(
-            TextColumn("[progress.description]{task.description}"),
-            BarColumn(),
-            TaskProgressColumn(),
-            TimeRemainingColumn(),
-            disable=progress_bar_disable,
-        ) as progress,
-    ):
-        task = progress.add_task("Reindexing documents", total=documents.count())
-        for document in documents:
+    with AsyncWriter(ix) as writer:
+        for document in tqdm.tqdm(documents, disable=progress_bar_disable):
             index.update_document(writer, document)
-            progress.update(task, advance=1)


 @shared_task
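The reindex task above trades the rich Progress block for a plain tqdm wrapper. A minimal sketch of the same pattern, with handle() as a hypothetical stand-in for the per-document work:

import tqdm

def handle(item) -> None:
    # hypothetical per-item work
    pass

def process_all(items, *, progress_bar_disable: bool = False) -> None:
    # tqdm wraps any iterable; disable=True suppresses the bar entirely,
    # which is what the task relies on for non-interactive runs.
    for item in tqdm.tqdm(items, disable=progress_bar_disable):
        handle(item)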
@@ -4,7 +4,6 @@ from contextlib import contextmanager
 from pathlib import Path
 from unittest import mock

-import pytest
 from django.conf import settings
 from django.test import TestCase
 from django.test import override_settings
@@ -25,13 +24,6 @@ from documents.tests.utils import FileSystemAssertsMixin
 from documents.tests.utils import SampleDirMixin
 from paperless.models import ApplicationConfiguration

-try:
-    import zxingcpp  # noqa: F401
-
-    HAS_ZXING_LIB = True
-except ImportError:
-    HAS_ZXING_LIB = False
-

 class GetReaderPluginMixin:
     @contextmanager
@@ -48,7 +40,6 @@ class GetReaderPluginMixin:
         reader.cleanup()


-@override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR")
 class TestBarcode(
     DirectoriesMixin,
     FileSystemAssertsMixin,
@@ -606,7 +597,6 @@ class TestBarcode(
         self.assertDictEqual(separator_page_numbers, {0: False})


-@override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR")
 class TestBarcodeNewConsume(
     DirectoriesMixin,
     FileSystemAssertsMixin,
@@ -784,36 +774,12 @@ class TestAsnBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes

         self.assertEqual(document.archive_serial_number, 123)

-    @override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR")
     def test_scan_file_for_qrcode_without_upscale(self) -> None:
         """
         GIVEN:
             - A printed and scanned PDF document with a rather small QR code
         WHEN:
             - ASN barcode detection is run with default settings
-            - pyzbar is used for detection, as zxing would behave differently, and detect the QR code
-        THEN:
-            - ASN is not detected
-        """
-
-        test_file = self.BARCODE_SAMPLE_DIR / "barcode-qr-asn-000123-upscale-dpi.pdf"
-
-        with self.get_reader(test_file) as reader:
-            reader.detect()
-            self.assertEqual(len(reader.barcodes), 0)
-
-    @override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR")
-    @override_settings(CONSUMER_BARCODE_DPI=600)
-    @override_settings(CONSUMER_BARCODE_UPSCALE=1.5)
-    def test_scan_file_for_qrcode_with_upscale(self) -> None:
-        """
-        GIVEN:
-            - A printed and scanned PDF document with a rather small QR code
-        WHEN:
-            - ASN barcode detection is run with 600dpi and an upscale factor of 1.5 and pyzbar
-            - pyzbar is used for detection, as zxing would behave differently.
-              Upscaling is a workaround for detection problems with pyzbar,
-              when you cannot switch to zxing (aarch64 build problems of zxing)
         THEN:
             - ASN 123 is detected
         """
@@ -825,23 +791,24 @@ class TestAsnBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
             self.assertEqual(len(reader.barcodes), 1)
             self.assertEqual(reader.asn, 123)

+    @override_settings(CONSUMER_BARCODE_DPI=600)
+    @override_settings(CONSUMER_BARCODE_UPSCALE=1.5)
+    def test_scan_file_for_qrcode_with_upscale(self) -> None:
+        """
+        GIVEN:
+            - A printed and scanned PDF document with a rather small QR code
+        WHEN:
+            - ASN barcode detection is run with 600dpi and an upscale factor of 1.5
+        THEN:
+            - ASN 123 is detected
+        """
+
+        test_file = self.BARCODE_SAMPLE_DIR / "barcode-qr-asn-000123-upscale-dpi.pdf"
+
+        with self.get_reader(test_file) as reader:
+            reader.detect()
+            self.assertEqual(len(reader.barcodes), 1)
+            self.assertEqual(reader.asn, 123)

-@pytest.mark.skipif(
-    not HAS_ZXING_LIB,
-    reason="No zxingcpp",
-)
-@override_settings(CONSUMER_BARCODE_SCANNER="ZXING")
-class TestBarcodeZxing(TestBarcode):
-    pass
-
-
-@pytest.mark.skipif(
-    not HAS_ZXING_LIB,
-    reason="No zxingcpp",
-)
-@override_settings(CONSUMER_BARCODE_SCANNER="ZXING")
-class TestAsnBarcodesZxing(TestAsnBarcode):
-    pass
-

 class TestTagBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, TestCase):
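With the PYZBAR scanner gone, these tests exercise only the zxing-cpp reader. A hedged sketch of decoding an ASN-style QR code with the zxingcpp bindings, assuming read_barcodes() accepts a PIL image and that the "ASN" prefix convention applies (the helper itself is hypothetical):

import zxingcpp
from PIL import Image

def read_asn(image_path: str, prefix: str = "ASN") -> int | None:
    # Decode every barcode in the image and return the first ASN-style value,
    # e.g. "ASN000123" -> 123; the prefix handling here is illustrative only.
    image = Image.open(image_path)
    for result in zxingcpp.read_barcodes(image):
        if result.text.startswith(prefix):
            return int(result.text[len(prefix):])
    return None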
@@ -167,17 +167,6 @@ def settings_values_check(app_configs, **kwargs):
             )
         return msgs

-    def _barcode_scanner_validate():
-        """
-        Validates the barcode scanner type
-        """
-        msgs = []
-        if settings.CONSUMER_BARCODE_SCANNER not in ["PYZBAR", "ZXING"]:
-            msgs.append(
-                Error(f'Invalid Barcode Scanner "{settings.CONSUMER_BARCODE_SCANNER}"'),
-            )
-        return msgs
-
     def _email_certificate_validate():
         msgs = []
         # Existence checks
@@ -195,7 +184,6 @@ def settings_values_check(app_configs, **kwargs):
     return (
         _ocrmypdf_settings_check()
         + _timezone_validate()
-        + _barcode_scanner_validate()
         + _email_certificate_validate()
     )

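The removed _barcode_scanner_validate helper was the only consumer of the deleted CONSUMER_BARCODE_SCANNER setting. For reference, a minimal sketch of a standalone Django system check of the same shape (the check name and allowed set are hypothetical):

from django.conf import settings
from django.core.checks import Error, register

@register()
def example_setting_check(app_configs, **kwargs):
    # Hypothetical check mirroring the removed helper: emit an Error when a
    # settings value falls outside an allowed set.
    value = getattr(settings, "CONSUMER_BARCODE_SCANNER", None)
    if value is not None and value not in {"ZXING"}:
        return [Error(f'Invalid Barcode Scanner "{value}"')]
    return []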
@@ -0,0 +1,50 @@
+# Generated by Django 5.2.11 on 2026-02-09 16:37
+
+import django.core.validators
+from django.db import migrations
+from django.db import models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("paperless", "0006_applicationconfiguration_barcode_tag_split"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="applicationconfiguration",
+            name="barcode_dpi",
+            field=models.PositiveSmallIntegerField(
+                null=True,
+                validators=[django.core.validators.MinValueValidator(1)],
+                verbose_name="Sets the barcode DPI",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="applicationconfiguration",
+            name="barcode_max_pages",
+            field=models.PositiveSmallIntegerField(
+                null=True,
+                validators=[django.core.validators.MinValueValidator(1)],
+                verbose_name="Sets the maximum pages for barcode",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="applicationconfiguration",
+            name="image_dpi",
+            field=models.PositiveSmallIntegerField(
+                null=True,
+                validators=[django.core.validators.MinValueValidator(1)],
+                verbose_name="Sets image DPI fallback value",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="applicationconfiguration",
+            name="pages",
+            field=models.PositiveSmallIntegerField(
+                null=True,
+                validators=[django.core.validators.MinValueValidator(1)],
+                verbose_name="Do OCR from page 1 to this value",
+            ),
+        ),
+    ]
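The migration above narrows four ApplicationConfiguration counters to PositiveSmallIntegerField. A minimal sketch of applying it programmatically, assuming a configured Django environment (the function name is hypothetical; this is equivalent to running manage.py migrate paperless):

from django.core.management import call_command

def apply_paperless_migrations() -> None:
    # Applies the AlterField operations above once the new field definitions ship.
    call_command("migrate", "paperless", interactive=False)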
@@ -105,7 +105,7 @@ class ApplicationConfiguration(AbstractSingletonModel):
     Settings for the Tesseract based OCR parser
     """

-    pages = models.PositiveIntegerField(
+    pages = models.PositiveSmallIntegerField(
         verbose_name=_("Do OCR from page 1 to this value"),
         null=True,
         validators=[MinValueValidator(1)],
@@ -134,7 +134,7 @@ class ApplicationConfiguration(AbstractSingletonModel):
         choices=ArchiveFileChoices.choices,
     )

-    image_dpi = models.PositiveIntegerField(
+    image_dpi = models.PositiveSmallIntegerField(
         verbose_name=_("Sets image DPI fallback value"),
         null=True,
         validators=[MinValueValidator(1)],
@@ -254,14 +254,14 @@ class ApplicationConfiguration(AbstractSingletonModel):
     )

     # PAPERLESS_CONSUMER_BARCODE_DPI
-    barcode_dpi = models.PositiveIntegerField(
+    barcode_dpi = models.PositiveSmallIntegerField(
         verbose_name=_("Sets the barcode DPI"),
         null=True,
         validators=[MinValueValidator(1)],
     )

     # PAPERLESS_CONSUMER_BARCODE_MAX_PAGES
-    barcode_max_pages = models.PositiveIntegerField(
+    barcode_max_pages = models.PositiveSmallIntegerField(
         verbose_name=_("Sets the maximum pages for barcode"),
         null=True,
         validators=[MinValueValidator(1)],
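These hunks swap PositiveIntegerField for PositiveSmallIntegerField on DPI and page-count settings; on common database backends that is a 0 to 32767 column instead of 0 to 2147483647, which comfortably covers the values these fields hold, and MinValueValidator(1) still rejects zero. A minimal stand-alone sketch of the narrowed field shape (the model and app label are hypothetical):

from django.core.validators import MinValueValidator
from django.db import models

class ExampleConfig(models.Model):
    # Hypothetical stand-in for ApplicationConfiguration: a nullable
    # small-integer DPI value that must be at least 1 when set.
    barcode_dpi = models.PositiveSmallIntegerField(
        null=True,
        validators=[MinValueValidator(1)],
    )

    class Meta:
        app_label = "example"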
@@ -1106,11 +1106,6 @@ CONSUMER_BARCODE_STRING: Final[str] = os.getenv(
     "PATCHT",
 )

-CONSUMER_BARCODE_SCANNER: Final[str] = os.getenv(
-    "PAPERLESS_CONSUMER_BARCODE_SCANNER",
-    "PYZBAR",
-).upper()
-
 CONSUMER_ENABLE_ASN_BARCODE: Final[bool] = __get_boolean(
     "PAPERLESS_CONSUMER_ENABLE_ASN_BARCODE",
 )
@@ -187,31 +187,6 @@ class TestTimezoneSettingsChecks(DirectoriesMixin, TestCase):
         self.assertIn('Timezone "TheMoon\\MyCrater"', msg.msg)


-class TestBarcodeSettingsChecks(DirectoriesMixin, TestCase):
-    @override_settings(CONSUMER_BARCODE_SCANNER="Invalid")
-    def test_barcode_scanner_invalid(self) -> None:
-        msgs = settings_values_check(None)
-        self.assertEqual(len(msgs), 1)
-
-        msg = msgs[0]
-
-        self.assertIn('Invalid Barcode Scanner "Invalid"', msg.msg)
-
-    @override_settings(CONSUMER_BARCODE_SCANNER="")
-    def test_barcode_scanner_empty(self) -> None:
-        msgs = settings_values_check(None)
-        self.assertEqual(len(msgs), 1)
-
-        msg = msgs[0]
-
-        self.assertIn('Invalid Barcode Scanner ""', msg.msg)
-
-    @override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR")
-    def test_barcode_scanner_valid(self) -> None:
-        msgs = settings_values_check(None)
-        self.assertEqual(len(msgs), 0)
-
-
 class TestEmailCertSettingsChecks(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     @override_settings(EMAIL_CERTIFICATE_FILE=Path("/tmp/not_actually_here.pem"))
     def test_not_valid_file(self) -> None:
@@ -5,6 +5,7 @@ from pathlib import Path

 import faiss
 import llama_index.core.settings as llama_settings
+import tqdm
 from celery import states
 from django.conf import settings
 from django.utils import timezone
@@ -21,11 +22,6 @@ from llama_index.core.storage.docstore import SimpleDocumentStore
 from llama_index.core.storage.index_store import SimpleIndexStore
 from llama_index.core.text_splitter import TokenTextSplitter
 from llama_index.vector_stores.faiss import FaissVectorStore
-from rich.progress import BarColumn
-from rich.progress import Progress
-from rich.progress import TaskProgressColumn
-from rich.progress import TextColumn
-from rich.progress import TimeRemainingColumn

 from documents.models import Document
 from documents.models import PaperlessTask
@@ -180,18 +176,9 @@ def update_llm_index(*, progress_bar_disable=False, rebuild=False) -> str:
         embed_model = get_embedding_model()
         llama_settings.Settings.embed_model = embed_model
         storage_context = get_or_create_storage_context(rebuild=True)
-        with Progress(
-            TextColumn("[progress.description]{task.description}"),
-            BarColumn(),
-            TaskProgressColumn(),
-            TimeRemainingColumn(),
-            disable=progress_bar_disable,
-        ) as progress:
-            task = progress.add_task("Building document nodes", total=documents.count())
-            for document in documents:
+        for document in tqdm.tqdm(documents, disable=progress_bar_disable):
             document_nodes = build_document_node(document)
             nodes.extend(document_nodes)
-                progress.update(task, advance=1)

         index = VectorStoreIndex(
             nodes=nodes,
@@ -209,15 +196,7 @@ def update_llm_index(*, progress_bar_disable=False, rebuild=False) -> str:
             for node in index.docstore.get_nodes(all_node_ids)
         }

-        with Progress(
-            TextColumn("[progress.description]{task.description}"),
-            BarColumn(),
-            TaskProgressColumn(),
-            TimeRemainingColumn(),
-            disable=progress_bar_disable,
-        ) as progress:
-            task = progress.add_task("Updating index nodes", total=documents.count())
-            for document in documents:
+        for document in tqdm.tqdm(documents, disable=progress_bar_disable):
             doc_id = str(document.id)
             document_modified = document.modified.isoformat()

@@ -226,7 +205,6 @@ def update_llm_index(*, progress_bar_disable=False, rebuild=False) -> str:
                 node_modified = node.metadata.get("modified")

                 if node_modified == document_modified:
-                    progress.update(task, advance=1)
                     continue

                 # Again, delete from docstore, FAISS IndexFlatL2 are append-only
@@ -235,7 +213,6 @@ def update_llm_index(*, progress_bar_disable=False, rebuild=False) -> str:
             else:
                 # New document, add it
                 nodes.extend(build_document_node(document))
-                progress.update(task, advance=1)

     if nodes:
         msg = "LLM index updated successfully."
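The incremental update path above skips documents whose stored node metadata still matches the document's modification timestamp. A minimal sketch of that comparison (the function name is hypothetical):

from datetime import datetime

def needs_reindex(node_metadata: dict, document_modified: datetime) -> bool:
    # Mirrors the loop above: a node is stale when its recorded "modified"
    # value differs from the document's current isoformat() timestamp.
    return node_metadata.get("modified") != document_modified.isoformat()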
@@ -76,7 +76,6 @@ def test_update_llm_index(
     mock_queryset = MagicMock()
     mock_queryset.exists.return_value = True
     mock_queryset.__iter__.return_value = iter([real_document])
-    mock_queryset.count.return_value = 1
     mock_all.return_value = mock_queryset
     indexing.update_llm_index(rebuild=True)

@@ -98,7 +97,6 @@ def test_update_llm_index_removes_meta(
     mock_queryset = MagicMock()
     mock_queryset.exists.return_value = True
     mock_queryset.__iter__.return_value = iter([real_document])
-    mock_queryset.count.return_value = 1
     mock_all.return_value = mock_queryset
     indexing.update_llm_index(rebuild=True)

@@ -131,7 +129,6 @@ def test_update_llm_index_partial_update(
     mock_queryset = MagicMock()
     mock_queryset.exists.return_value = True
     mock_queryset.__iter__.return_value = iter([real_document, doc2])
-    mock_queryset.count.return_value = 2
     mock_all.return_value = mock_queryset

     indexing.update_llm_index(rebuild=True)
@@ -152,7 +149,6 @@ def test_update_llm_index_partial_update(
     mock_queryset = MagicMock()
     mock_queryset.exists.return_value = True
     mock_queryset.__iter__.return_value = iter([updated_document, doc2, doc3])
-    mock_queryset.count.return_value = 3
     mock_all.return_value = mock_queryset

     # assert logs "Updating LLM index with %d new nodes and removing %d old nodes."
@@ -0,0 +1,144 @@
+# Generated by Django 5.2.11 on 2026-02-09 16:37
+
+from django.db import migrations
+from django.db import models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("paperless_mail", "0001_initial"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="mailaccount",
+            name="account_type",
+            field=models.PositiveSmallIntegerField(
+                choices=[(1, "IMAP"), (2, "Gmail OAuth"), (3, "Outlook OAuth")],
+                default=1,
+                verbose_name="account type",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="mailaccount",
+            name="imap_port",
+            field=models.PositiveIntegerField(
+                blank=True,
+                help_text="This is usually 143 for unencrypted and STARTTLS connections, and 993 for SSL connections.",
+                null=True,
+                verbose_name="IMAP port",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="mailaccount",
+            name="imap_security",
+            field=models.PositiveSmallIntegerField(
+                choices=[(1, "No encryption"), (2, "Use SSL"), (3, "Use STARTTLS")],
+                default=2,
+                verbose_name="IMAP security",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="mailrule",
+            name="action",
+            field=models.PositiveSmallIntegerField(
+                choices=[
+                    (1, "Delete"),
+                    (2, "Move to specified folder"),
+                    (3, "Mark as read, don't process read mails"),
+                    (4, "Flag the mail, don't process flagged mails"),
+                    (5, "Tag the mail with specified tag, don't process tagged mails"),
+                ],
+                default=3,
+                verbose_name="action",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="mailrule",
+            name="assign_correspondent_from",
+            field=models.PositiveSmallIntegerField(
+                choices=[
+                    (1, "Do not assign a correspondent"),
+                    (2, "Use mail address"),
+                    (3, "Use name (or mail address if not available)"),
+                    (4, "Use correspondent selected below"),
+                ],
+                default=1,
+                verbose_name="assign correspondent from",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="mailrule",
+            name="assign_title_from",
+            field=models.PositiveSmallIntegerField(
+                choices=[
+                    (1, "Use subject as title"),
+                    (2, "Use attachment filename as title"),
+                    (3, "Do not assign title from rule"),
+                ],
+                default=1,
+                verbose_name="assign title from",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="mailrule",
+            name="attachment_type",
+            field=models.PositiveSmallIntegerField(
+                choices=[
+                    (1, "Only process attachments."),
+                    (2, "Process all files, including 'inline' attachments."),
+                ],
+                default=1,
+                help_text="Inline attachments include embedded images, so it's best to combine this option with a filename filter.",
+                verbose_name="attachment type",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="mailrule",
+            name="consumption_scope",
+            field=models.PositiveSmallIntegerField(
+                choices=[
+                    (1, "Only process attachments."),
+                    (
+                        2,
+                        "Process full Mail (with embedded attachments in file) as .eml",
+                    ),
+                    (
+                        3,
+                        "Process full Mail (with embedded attachments in file) as .eml + process attachments as separate documents",
+                    ),
+                ],
+                default=1,
+                verbose_name="consumption scope",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="mailrule",
+            name="maximum_age",
+            field=models.PositiveSmallIntegerField(
+                default=30,
+                help_text="Specified in days.",
+                verbose_name="maximum age",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="mailrule",
+            name="order",
+            field=models.SmallIntegerField(default=0, verbose_name="order"),
+        ),
+        migrations.AlterField(
+            model_name="mailrule",
+            name="pdf_layout",
+            field=models.PositiveSmallIntegerField(
+                choices=[
+                    (0, "System default"),
+                    (1, "Text, then HTML"),
+                    (2, "HTML, then text"),
+                    (3, "HTML only"),
+                    (4, "Text only"),
+                ],
+                default=0,
+                verbose_name="pdf layout",
+            ),
+        ),
+    ]
@@ -24,7 +24,7 @@ class MailAccount(document_models.ModelWithOwner):

     imap_server = models.CharField(_("IMAP server"), max_length=256)

-    imap_port = models.IntegerField(
+    imap_port = models.PositiveIntegerField(
         _("IMAP port"),
         blank=True,
         null=True,
@@ -34,7 +34,7 @@ class MailAccount(document_models.ModelWithOwner):
         ),
     )

-    imap_security = models.PositiveIntegerField(
+    imap_security = models.PositiveSmallIntegerField(
         _("IMAP security"),
         choices=ImapSecurity.choices,
         default=ImapSecurity.SSL,
@@ -56,7 +56,7 @@ class MailAccount(document_models.ModelWithOwner):
         ),
     )

-    account_type = models.PositiveIntegerField(
+    account_type = models.PositiveSmallIntegerField(
         _("account type"),
         choices=MailAccountType.choices,
         default=MailAccountType.IMAP,
@@ -142,7 +142,7 @@ class MailRule(document_models.ModelWithOwner):

     name = models.CharField(_("name"), max_length=256)

-    order = models.IntegerField(_("order"), default=0)
+    order = models.SmallIntegerField(_("order"), default=0)

     account = models.ForeignKey(
         MailAccount,
@@ -215,13 +215,13 @@ class MailRule(document_models.ModelWithOwner):
         ),
     )

-    maximum_age = models.PositiveIntegerField(
+    maximum_age = models.PositiveSmallIntegerField(
         _("maximum age"),
         default=30,
         help_text=_("Specified in days."),
     )

-    attachment_type = models.PositiveIntegerField(
+    attachment_type = models.PositiveSmallIntegerField(
         _("attachment type"),
         choices=AttachmentProcessing.choices,
         default=AttachmentProcessing.ATTACHMENTS_ONLY,
@@ -231,19 +231,19 @@ class MailRule(document_models.ModelWithOwner):
         ),
     )

-    consumption_scope = models.PositiveIntegerField(
+    consumption_scope = models.PositiveSmallIntegerField(
         _("consumption scope"),
         choices=ConsumptionScope.choices,
         default=ConsumptionScope.ATTACHMENTS_ONLY,
     )

-    pdf_layout = models.PositiveIntegerField(
+    pdf_layout = models.PositiveSmallIntegerField(
         _("pdf layout"),
         choices=PdfLayout.choices,
         default=PdfLayout.DEFAULT,
     )

-    action = models.PositiveIntegerField(
+    action = models.PositiveSmallIntegerField(
         _("action"),
         choices=MailAction.choices,
         default=MailAction.MARK_READ,
@@ -262,7 +262,7 @@ class MailRule(document_models.ModelWithOwner):
         ),
     )

-    assign_title_from = models.PositiveIntegerField(
+    assign_title_from = models.PositiveSmallIntegerField(
         _("assign title from"),
         choices=TitleSource.choices,
         default=TitleSource.FROM_SUBJECT,
@@ -282,7 +282,7 @@ class MailRule(document_models.ModelWithOwner):
         verbose_name=_("assign this document type"),
     )

-    assign_correspondent_from = models.PositiveIntegerField(
+    assign_correspondent_from = models.PositiveSmallIntegerField(
         _("assign correspondent from"),
         choices=CorrespondentSource.choices,
         default=CorrespondentSource.FROM_NOTHING,
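The MailRule and MailAccount hunks above keep the same integer choices while narrowing the column type, so Django's generated get_<field>_display() helpers still resolve the labels. A hedged usage sketch (describe_rule is a hypothetical helper):

def describe_rule(rule) -> str:
    # rule is a MailRule instance; get_action_display() maps the stored small
    # integer back to its human-readable label.
    return f"{rule.name}: {rule.get_action_display()} (order {rule.order})"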
uv.lock (generated, 104 changed lines)
@@ -3073,7 +3073,6 @@ dependencies = [
     { name = "python-gnupg", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "python-ipware", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "python-magic", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
-    { name = "pyzbar", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "rapidfuzz", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "redis", extra = ["hiredis"], marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "regex", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -3083,12 +3082,11 @@ dependencies = [
     { name = "tika-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "torch", version = "2.10.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform == 'darwin'" },
     { name = "torch", version = "2.10.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform == 'linux'" },
+    { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "watchfiles", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "whitenoise", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "whoosh-reloaded", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
-    { name = "zxing-cpp", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version != '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version != '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or sys_platform == 'darwin'" },
-    { name = "zxing-cpp", version = "2.3.0", source = { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_aarch64.whl" }, marker = "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'" },
-    { name = "zxing-cpp", version = "2.3.0", source = { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_x86_64.whl" }, marker = "python_full_version == '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'linux'" },
+    { name = "zxing-cpp", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
 ]

 [package.optional-dependencies]
@@ -3165,6 +3163,7 @@ typing = [
     { name = "types-pytz", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "types-redis", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "types-setuptools", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+    { name = "types-tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
 ]

 [package.metadata]
@@ -3226,7 +3225,6 @@ requires-dist = [
     { name = "python-gnupg", specifier = "~=0.5.4" },
     { name = "python-ipware", specifier = "~=3.0.0" },
     { name = "python-magic", specifier = "~=0.4.27" },
-    { name = "pyzbar", specifier = "~=0.1.9" },
     { name = "rapidfuzz", specifier = "~=3.14.0" },
     { name = "redis", extras = ["hiredis"], specifier = "~=5.2.1" },
     { name = "regex", specifier = ">=2025.9.18" },
@@ -3235,12 +3233,11 @@ requires-dist = [
     { name = "setproctitle", specifier = "~=1.3.4" },
     { name = "tika-client", specifier = "~=0.10.0" },
     { name = "torch", specifier = "~=2.10.0", index = "https://download.pytorch.org/whl/cpu" },
+    { name = "tqdm", specifier = "~=4.67.1" },
     { name = "watchfiles", specifier = ">=1.1.1" },
     { name = "whitenoise", specifier = "~=6.11" },
     { name = "whoosh-reloaded", specifier = ">=2.7.5" },
-    { name = "zxing-cpp", marker = "(python_full_version != '3.12.*' and platform_machine == 'aarch64') or (python_full_version != '3.12.*' and platform_machine == 'x86_64') or (platform_machine != 'aarch64' and platform_machine != 'x86_64') or sys_platform != 'linux'", specifier = "~=2.3.0" },
-    { name = "zxing-cpp", marker = "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_aarch64.whl" },
-    { name = "zxing-cpp", marker = "python_full_version == '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'linux'", url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_x86_64.whl" },
+    { name = "zxing-cpp", specifier = "~=3.0.0" },
 ]
 provides-extras = ["mariadb", "postgres", "webserver"]

@@ -3301,6 +3298,7 @@ typing = [
     { name = "types-pytz" },
     { name = "types-redis" },
     { name = "types-setuptools" },
+    { name = "types-tqdm" },
 ]

 [[package]]
@@ -4279,14 +4277,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" },
 ]

-[[package]]
-name = "pyzbar"
-version = "0.1.9"
-source = { registry = "https://pypi.org/simple" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/6d/24/81ebe6a1c00760471a3028a23cbe0b94e5fa2926e5ba47adc895920887bc/pyzbar-0.1.9-py2.py3-none-any.whl", hash = "sha256:4559628b8192feb25766d954b36a3753baaf5c97c03135aec7e4a026036b475d", size = 32560, upload-time = "2022-03-15T14:53:40.637Z" },
-]
-
 [[package]]
 name = "qrcode"
 version = "8.2"
@@ -5233,6 +5223,10 @@ wheels = [
     { url = "https://download.pytorch.org/whl/cpu/torch-2.10.0-1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:0826ac8e409551e12b2360ac18b4161a838cbd111933e694752f351191331d09" },
     { url = "https://download.pytorch.org/whl/cpu/torch-2.10.0-1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:7fbbf409143a4fe0812a40c0b46a436030a7e1d14fe8c5234dfbe44df47f617e" },
     { url = "https://download.pytorch.org/whl/cpu/torch-2.10.0-1-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:b39cafff7229699f9d6e172cac74d85fd71b568268e439e08d9c540e54732a3e" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.10.0-2-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:7417ef370d7c3969dd509dae8d5c7daeb945af335ab76dd38358ba30a91251c1" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.10.0-2-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:90821a3194b8806d9fa9fdaa9308c1bc73df0c26808274b14129a97c99f35794" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.10.0-2-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:358bd7125cbec6e692d60618a5eec7f55a51b29e3652a849fd42af021d818023" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.10.0-2-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:470de4176007c2700735e003a830828a88d27129032a3add07291da07e2a94e8" },
     { url = "https://download.pytorch.org/whl/cpu/torch-2.10.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:2d16abfce6c92584ceeb00c3b2665d5798424dd9ed235ea69b72e045cd53ae97" },
     { url = "https://download.pytorch.org/whl/cpu/torch-2.10.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:4584ab167995c0479f6821e3dceaf199c8166c811d3adbba5d8eedbbfa6764fd" },
     { url = "https://download.pytorch.org/whl/cpu/torch-2.10.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:45a1c5057629444aeb1c452c18298fa7f30f2f7aeadd4dc41f9d340980294407" },
@@ -5580,6 +5574,18 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/2b/7f/016dc5cc718ec6ccaa84fb73ed409ef1c261793fd5e637cdfaa18beb40a9/types_setuptools-80.10.0.20260124-py3-none-any.whl", hash = "sha256:efed7e044f01adb9c2806c7a8e1b6aa3656b8e382379b53d5f26ee3db24d4c01", size = 64333, upload-time = "2026-01-24T03:18:38.344Z" },
 ]

+[[package]]
+name = "types-tqdm"
+version = "4.67.3.20260205"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "types-requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/53/46/790b9872523a48163bdda87d47849b4466017640e5259d06eed539340afd/types_tqdm-4.67.3.20260205.tar.gz", hash = "sha256:f3023682d4aa3bbbf908c8c6bb35f35692d319460d9bbd3e646e8852f3dd9f85", size = 17597, upload-time = "2026-02-05T04:03:19.721Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/cc/da/7f761868dbaa328392356fab30c18ab90d14cce86b269e7e63328f29d4a3/types_tqdm-4.67.3.20260205-py3-none-any.whl", hash = "sha256:85c31731e81dc3c5cecc34c6c8b2e5166fafa722468f58840c2b5ac6a8c5c173", size = 23894, upload-time = "2026-02-05T04:03:18.48Z" },
+]
+
 [[package]]
 name = "types-webencodings"
 version = "0.5.0.20251108"
@@ -6225,50 +6231,28 @@ wheels = [

 [[package]]
 name = "zxing-cpp"
-version = "2.3.0"
+version = "3.0.0"
 source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
-    "python_full_version >= '3.12' and sys_platform == 'darwin'",
-    "python_full_version == '3.11.*' and sys_platform == 'darwin'",
-    "python_full_version < '3.11' and sys_platform == 'darwin'",
-    "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or (python_full_version >= '3.13' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.13' and platform_machine == 'x86_64' and sys_platform == 'linux')",
-    "python_full_version == '3.11.*' and sys_platform == 'linux'",
-    "python_full_version < '3.11' and sys_platform == 'linux'",
-]
-sdist = { url = "https://files.pythonhosted.org/packages/d9/f2/b781bf6119abe665069777e3c0f154752cf924fe8a55fca027243abbc555/zxing_cpp-2.3.0.tar.gz", hash = "sha256:3babedb67a4c15c9de2c2b4c42d70af83a6c85780c1b2d9803ac64c6ae69f14e", size = 1172666, upload-time = "2025-01-01T21:54:05.856Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/f1/c6/ac2a12cdc2b1c296804fc6a65bf112b607825ca7f47742a5aca541134711/zxing_cpp-3.0.0.tar.gz", hash = "sha256:703353304de24d947bd68044fac4e062953a7b64029de6941ba8ffeb4476b60d", size = 1197544, upload-time = "2026-02-10T12:50:11.252Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/31/93/3e830a3dd44a9f7d11219883bc6f131ca68da2a5ad48690d9645e19c3b55/zxing_cpp-2.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4e1ffcdd8e44a344cbf32bb0435e1fbe67241337c0a0f22452c2b8f7c16dc75e", size = 1694502, upload-time = "2025-01-01T21:53:06.339Z" },
-    { url = "https://files.pythonhosted.org/packages/d7/4c/6bf1551c9b0097e13bcc54b82828e66719c021afd3ef05fd3d7650e0e768/zxing_cpp-2.3.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfde95506d3fec439705dbc8771ace025d049dce324861ddbf74be3ab0fabd36", size = 991445, upload-time = "2025-01-01T21:53:08.204Z" },
-    { url = "https://files.pythonhosted.org/packages/64/6c/1bf6e40fadcb73958f672385c5186b062485c818cecc32b36ddf5666da1e/zxing_cpp-2.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd3f175f7b57cfbdea56afdb5335eaebaadeebc06e20a087d9aa3f99637c4aa5", size = 982960, upload-time = "2025-01-01T21:53:10.136Z" },
-    { url = "https://files.pythonhosted.org/packages/ab/60/d420be9446b25a65064a665603bd24295e143e2bafde500bfc952a07fbee/zxing_cpp-2.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6ef0548f4247480da988ce1dad4d9c5b8d7cb2871538894fb9615c9ac0bb8656", size = 1697594, upload-time = "2025-01-01T21:53:17.292Z" },
-    { url = "https://files.pythonhosted.org/packages/3e/34/ea057223cc34e63b1ff27b2794bcddfa58a1a64af7314882291255b56980/zxing_cpp-2.3.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfc1095dc3303ed24be2622916e199a071bae19b19d432a0ce7ca993f95879ec", size = 991930, upload-time = "2025-01-01T21:53:18.808Z" },
-    { url = "https://files.pythonhosted.org/packages/2e/d3/75a6d6485e704527c5e18f825f6bd6b5e5129f56c3526f28142911b48410/zxing_cpp-2.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64e5a4ff5168142d8b33ca648978c8ec4125c50b33aa1521e0c5344c6ffacef7", size = 983751, upload-time = "2025-01-01T21:53:21.757Z" },
-    { url = "https://files.pythonhosted.org/packages/94/d2/e4552dc7d341ccf6242410a13bf95cbd37d7bf194a482d400729b5934b87/zxing_cpp-2.3.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2f457c0aa53c1de263e34cac9917ef647bfb9adcc9e3d4f42a8a1fc02558e1a6", size = 1698659, upload-time = "2025-01-01T21:53:36.692Z" },
-    { url = "https://files.pythonhosted.org/packages/0e/6c/00252c1b3545c13d68922b67cb7c555f739b3a1755cc2a694fd8705ecae2/zxing_cpp-2.3.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:899955e0091fa0e159b9eb429e43d0a23e2be4a5347c9629c858844f02024b4b", size = 992014, upload-time = "2025-01-01T21:53:39.621Z" },
-    { url = "https://files.pythonhosted.org/packages/95/30/3143bf75944d65c9432349a79b97f9414965a44875ec9eeb5745592b4ecd/zxing_cpp-2.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dec2805c0e9dec0d7707c97ca5196f98d2730d2dfcea80442807123b9f8ec850", size = 984542, upload-time = "2025-01-01T21:53:41.01Z" },
-    { url = "https://files.pythonhosted.org/packages/3d/46/ef7c69bea44a7c64d4a740679dd18c59616d610fb468c057d8bfbda5f063/zxing_cpp-2.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:3da0fbf0d93ef85663def561e8f7880447970710ea6b1768dfc05550a9ee3e00", size = 1698948, upload-time = "2025-01-01T21:53:46.768Z" },
-    { url = "https://files.pythonhosted.org/packages/49/2e/8ed22a7b3743a8aa6a588366e34c44056d118ea7614b6bdbc44817ab4a7f/zxing_cpp-2.3.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0b36f3be2e6d928bea9bd529f173ef41092061f0f46d27f591c87486f9a7366", size = 992070, upload-time = "2025-01-01T21:53:48.258Z" },
-    { url = "https://files.pythonhosted.org/packages/ce/5e/5784ad14f8514e4321f3a828dccc00ebcf70202f6ef967174d26bcb65568/zxing_cpp-2.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ba641ca5a0f19b97d7bc6a0212e61dab267a2b1a52a84946d02bdcd859ec318", size = 984869, upload-time = "2025-01-01T21:53:51.256Z" },
-]
-
-[[package]]
-name = "zxing-cpp"
-version = "2.3.0"
-source = { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_aarch64.whl" }
-resolution-markers = [
-    "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'",
-]
-wheels = [
-    { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_aarch64.whl", hash = "sha256:cfe600ed871ac540733fea3dac15c345b1ef61b703dd73ab0b618d29a491e611" },
-]
-
-[[package]]
-name = "zxing-cpp"
-version = "2.3.0"
-source = { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_x86_64.whl" }
-resolution-markers = [
-    "python_full_version == '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'linux'",
-]
-wheels = [
-    { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_x86_64.whl", hash = "sha256:15c6b1b6975a2a7d3dc679a05f6aed435753e39a105f37bed11098d00e0b5e79" },
+    { url = "https://files.pythonhosted.org/packages/ac/84/689a748f08635ff1543265905532cbe6dfaa299350cfd6591e4456da3014/zxing_cpp-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:63bcc80e7a6c741f1948381bb1b9c36082400624a217e3306aebb1e2bec21f6f", size = 910995, upload-time = "2026-02-10T12:49:22.189Z" },
+    { url = "https://files.pythonhosted.org/packages/28/3d/f3c23181697a2407e2079dc122ba8c266b46842e3ffc810d510716a95759/zxing_cpp-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0b30e2f4b081a85fe5f09ba34cb17486d607625f2ddeb0c80d5212d2872e5530", size = 865029, upload-time = "2026-02-10T12:49:24.719Z" },
+    { url = "https://files.pythonhosted.org/packages/1e/48/1e56b02edfda18d557abea7cf5790a7a0aade06191f7c2bbce4a4efab0fd/zxing_cpp-3.0.0-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dd640c33a06da8b15e36a8e0c3c8236531fea13a95d7eaa8deb91ccb5d76c4e7", size = 993311, upload-time = "2026-02-10T12:49:26.487Z" },
+    { url = "https://files.pythonhosted.org/packages/db/47/78fe46ee99e4f6b67467a96ca61e75e907d2e469f63bbd92127b91008c02/zxing_cpp-3.0.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:630adc04f3a7916054a91c71d7dd55568e798289be5f16186a17ea05555eb60f", size = 1070707, upload-time = "2026-02-10T12:49:27.746Z" },
+    { url = "https://files.pythonhosted.org/packages/e6/9c/25ddd83cd109a97a0382fe807a8b0904b3eefcf42d22df6aa6ae6a5e2b86/zxing_cpp-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c171e9b37f596293d1134e74c3285a8b7cf06ef72e2ad39c4a7d54b1aa939782", size = 912816, upload-time = "2026-02-10T12:49:33.174Z" },
+    { url = "https://files.pythonhosted.org/packages/32/cc/e2e0d68e60fb132c31c728e24dc529cbb5579bfa1365c64b62290aefe317/zxing_cpp-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e712d958155408c8e902ea91d8feb3f4edfa41fd207ef85ca9e59f3f0c7060ad", size = 866684, upload-time = "2026-02-10T12:49:34.913Z" },
+    { url = "https://files.pythonhosted.org/packages/96/f9/538488cacaea1e3e989cf87c389d075e2139ee50fab786de7e59b64f9411/zxing_cpp-3.0.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e4f62174643de2012bde470bf2048d8a29b5d93bb23bbdc6c075e7e92dbd5794", size = 994390, upload-time = "2026-02-10T12:49:36.294Z" },
+    { url = "https://files.pythonhosted.org/packages/51/c1/3eab6fa0b1c6e83a23ce94727e1551ca49a6edabe4691adaa8d03ff742a2/zxing_cpp-3.0.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:156b363a0aae0b2472c58628346b5223ebb72935f0fa5def3d7ab4a7211c3e88", size = 1071503, upload-time = "2026-02-10T12:49:38.575Z" },
+    { url = "https://files.pythonhosted.org/packages/7b/7f/32b4cc8545da72061d360aca9d96c51738d48e2f3a8eebe06a47f4103dd6/zxing_cpp-3.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8b76fac77c94545c5a6e2e6184a121c09409fff29f9c7557e350c16b78025d74", size = 914798, upload-time = "2026-02-10T12:49:43.556Z" },
+    { url = "https://files.pythonhosted.org/packages/df/21/5ba18d19383fe5f044fefa79640f4234665bc77057cf3d584e5eb979685f/zxing_cpp-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bf58043c543d3440f1cbef6bfa9e5ad7139c39c90955d1f294f4778f0cd1ec0", size = 867437, upload-time = "2026-02-10T12:49:45.424Z" },
+    { url = "https://files.pythonhosted.org/packages/8a/2a/94d98c5b728e1dfeec3a343f2581bf7f372ca448cefff50076cab0c6e0c4/zxing_cpp-3.0.0-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:548cc0e767f24193038031c76f60f2de0965ab5b05106dff6095bcae89607748", size = 995650, upload-time = "2026-02-10T12:49:47.222Z" },
+    { url = "https://files.pythonhosted.org/packages/39/0f/03f09d048b7dde279a5bed8839ffbb21f7e8995747afa17970791c0356ff/zxing_cpp-3.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cfdf7a393541f4cd7c7c9329ec5d56b49a5cfc91bf24cdc53ec301d41c2afd68", size = 1074289, upload-time = "2026-02-10T12:49:48.804Z" },
+    { url = "https://files.pythonhosted.org/packages/a0/c4/c4f276e43c4df74896b7cac2a3e5deabaf743e8256ee6736380d64f7295b/zxing_cpp-3.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:26ee52319b545a0db5adc19c682d5bd7efa210456daff0293f5cc78311c52d90", size = 914828, upload-time = "2026-02-10T12:49:53.306Z" },
+    { url = "https://files.pythonhosted.org/packages/52/7e/971bb37b9091b02fd12f7c13745335a77a8e9e907abc3e0530ff9c4e6b32/zxing_cpp-3.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c4d44e63c0cb06df1d7ab636018b3e7139d5b010c22a5dcb18f3badfa29e1e1c", size = 867410, upload-time = "2026-02-10T12:49:55.061Z" },
+    { url = "https://files.pythonhosted.org/packages/8e/df/cbf7e3ad2ca5f80f71df39c99fb7061f39fb390a9cab031dab2be361c8be/zxing_cpp-3.0.0-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d9e9f7404b9b33abf863ccb243f6b0e99c4818028894dfdd8fb41e142fcdad65", size = 996406, upload-time = "2026-02-10T12:49:56.42Z" },
+    { url = "https://files.pythonhosted.org/packages/a3/ac/ae87a5ed87a7623e18a986e4394c3e12a5fa0f4fa55dae3be7f5ca6ef392/zxing_cpp-3.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0a96c8eaf1adff4c5aaf99c74d2b5ce3d542d44c21f964ac3f70eaaefcdc141e", size = 1074221, upload-time = "2026-02-10T12:49:57.971Z" },
+    { url = "https://files.pythonhosted.org/packages/7a/06/8ecd68d8a9e9bb7166808480a1c09ab059c9974b5c54a40640d4e4e1d814/zxing_cpp-3.0.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:af13fcbbe24ca4285bda83309f50954107ddf7d12686c332a838f4eaf88ff619", size = 915701, upload-time = "2026-02-10T12:50:01.942Z" },
+    { url = "https://files.pythonhosted.org/packages/f5/38/76f89b42fff2fae62595b3adc88b72e6eb1460acb9c43a8ed4c2455297df/zxing_cpp-3.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1b74a6b3608d035818d6d4fa9545875acae92635028b8927e3922175cb4fe19b", size = 868123, upload-time = "2026-02-10T12:50:03.222Z" },
+    { url = "https://files.pythonhosted.org/packages/0a/3b/b76d979f74f09a7d764fe4c22583ba8322ef0f347e3193eceb1461b84913/zxing_cpp-3.0.0-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:27901910b14e2d6a6f8eba585249d02ac23660de1a6fef3dc3a283bb017c41d0", size = 997309, upload-time = "2026-02-10T12:50:04.835Z" },
+    { url = "https://files.pythonhosted.org/packages/f8/e4/dd9ce2a725c83c15b1bc45b3d4e6be30f9528bcb9a4749002e1c4c8dca51/zxing_cpp-3.0.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:489fc0ab4af893e1b10b58b70c34db80fbbaf6e5c27c216e8f3f2367cf18a45d", size = 1074223, upload-time = "2026-02-10T12:50:06.622Z" },
 ]