Compare commits

..

10 Commits

Author SHA1 Message Date
shamoon
a759f974c4 Merge branch 'dev' into feature-better-asn-operations 2026-01-29 11:24:18 -08:00
GitHub Actions
a367b8ad1c Auto translate strings 2026-01-29 16:07:32 +00:00
Christoph Schober
d16d3fb618 Feature: support split documents based on tag barcodes (#11645) 2026-01-29 08:05:33 -08:00
Trenton H
5577f70c69 Chore: Enables pylance pytest integration, swaps around some test markers (#11930) 2026-01-28 23:06:11 +00:00
Philipp Defner
4d9aa2e943 Expose port to host (#11918)
---------

Co-authored-by: shamoon <4887959+shamoon@users.noreply.github.com>
2026-01-28 21:51:10 +00:00
shamoon
0a40c0de0c Merge branch 'dev' into feature-better-asn-operations 2026-01-27 17:03:40 -08:00
shamoon
5fe46cac55 Mas testing 2026-01-24 20:05:25 -08:00
shamoon
c0c2202564 skip_asn_if_exists 2026-01-24 20:05:25 -08:00
shamoon
d65d9a2b88 "Handoff" ASN when merging or editing PDFs 2026-01-24 20:05:25 -08:00
shamoon
8e12f3e93c First, release ASNs before document replacement (and restore if needed) 2026-01-24 20:05:25 -08:00
52 changed files with 1307 additions and 2167 deletions

View File

@@ -3,6 +3,7 @@
"dockerComposeFile": "docker-compose.devcontainer.sqlite-tika.yml", "dockerComposeFile": "docker-compose.devcontainer.sqlite-tika.yml",
"service": "paperless-development", "service": "paperless-development",
"workspaceFolder": "/usr/src/paperless/paperless-ngx", "workspaceFolder": "/usr/src/paperless/paperless-ngx",
"forwardPorts": [4200, 8000],
"containerEnv": { "containerEnv": {
"UV_CACHE_DIR": "/usr/src/paperless/paperless-ngx/.uv-cache" "UV_CACHE_DIR": "/usr/src/paperless/paperless-ngx/.uv-cache"
}, },

View File

@@ -33,7 +33,7 @@
"label": "Start: Frontend Angular", "label": "Start: Frontend Angular",
"description": "Start the Frontend Angular Dev Server", "description": "Start the Frontend Angular Dev Server",
"type": "shell", "type": "shell",
"command": "pnpm start", "command": "pnpm exec ng serve --host 0.0.0.0",
"isBackground": true, "isBackground": true,
"options": { "options": {
"cwd": "${workspaceFolder}/src-ui" "cwd": "${workspaceFolder}/src-ui"

View File

@@ -8,11 +8,6 @@ echo "${log_prefix} Apply database migrations..."
cd "${PAPERLESS_SRC_DIR}" cd "${PAPERLESS_SRC_DIR}"
if [[ "${PAPERLESS_MIGRATION_MODE:-0}" == "1" ]]; then
echo "${log_prefix} Migration mode enabled, skipping migrations."
exit 0
fi
# The whole migrate, with flock, needs to run as the right user # The whole migrate, with flock, needs to run as the right user
if [[ -n "${USER_IS_NON_ROOT}" ]]; then if [[ -n "${USER_IS_NON_ROOT}" ]]; then
exec s6-setlock -n "${data_dir}/migration_lock" python3 manage.py migrate --skip-checks --no-input exec s6-setlock -n "${data_dir}/migration_lock" python3 manage.py migrate --skip-checks --no-input

View File

@@ -9,15 +9,7 @@ echo "${log_prefix} Running Django checks"
cd "${PAPERLESS_SRC_DIR}" cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then if [[ -n "${USER_IS_NON_ROOT}" ]]; then
if [[ "${PAPERLESS_MIGRATION_MODE:-0}" == "1" ]]; then
python3 manage_migration.py check
else
python3 manage.py check python3 manage.py check
fi
else else
if [[ "${PAPERLESS_MIGRATION_MODE:-0}" == "1" ]]; then
s6-setuidgid paperless python3 manage_migration.py check
else
s6-setuidgid paperless python3 manage.py check s6-setuidgid paperless python3 manage.py check
fi
fi fi

View File

@@ -13,14 +13,8 @@ if [[ -n "${PAPERLESS_FORCE_SCRIPT_NAME}" ]]; then
export GRANIAN_URL_PATH_PREFIX=${PAPERLESS_FORCE_SCRIPT_NAME} export GRANIAN_URL_PATH_PREFIX=${PAPERLESS_FORCE_SCRIPT_NAME}
fi fi
if [[ "${PAPERLESS_MIGRATION_MODE:-0}" == "1" ]]; then
app_module="paperless.migration_asgi:application"
else
app_module="paperless.asgi:application"
fi
if [[ -n "${USER_IS_NON_ROOT}" ]]; then if [[ -n "${USER_IS_NON_ROOT}" ]]; then
exec granian --interface asginl --ws --loop uvloop "${app_module}" exec granian --interface asginl --ws --loop uvloop "paperless.asgi:application"
else else
exec s6-setuidgid paperless granian --interface asginl --ws --loop uvloop "${app_module}" exec s6-setuidgid paperless granian --interface asginl --ws --loop uvloop "paperless.asgi:application"
fi fi

View File

@@ -805,6 +805,27 @@ See the relevant settings [`PAPERLESS_CONSUMER_ENABLE_TAG_BARCODE`](configuratio
and [`PAPERLESS_CONSUMER_TAG_BARCODE_MAPPING`](configuration.md#PAPERLESS_CONSUMER_TAG_BARCODE_MAPPING) and [`PAPERLESS_CONSUMER_TAG_BARCODE_MAPPING`](configuration.md#PAPERLESS_CONSUMER_TAG_BARCODE_MAPPING)
for more information. for more information.
#### Splitting on Tag Barcodes
By default, tag barcodes only assign tags to documents without splitting them. However,
you can enable document splitting on tag barcodes by setting
[`PAPERLESS_CONSUMER_TAG_BARCODE_SPLIT`](configuration.md#PAPERLESS_CONSUMER_TAG_BARCODE_SPLIT)
to `true`.
When enabled, documents will be split at pages containing tag barcodes, similar to how
ASN barcodes work. Key features:
- The page with the tag barcode is **retained** in the resulting document
- **Each split document extracts its own tags** - only tags on pages within that document are assigned
- Multiple tag barcodes can trigger multiple splits in the same document
- Works seamlessly with ASN barcodes - each split document gets its own ASN and tags
This is useful for batch scanning where you place tag barcode pages between different
documents to both separate and categorize them in a single operation.
**Example:** A 6-page scan with TAG:invoice on page 3 and TAG:receipt on page 5 will create
three documents: pages 1-2 (no tags), pages 3-4 (tagged "invoice"), and pages 5-6 (tagged "receipt").
## Automatic collation of double-sided documents {#collate} ## Automatic collation of double-sided documents {#collate}
!!! note !!! note

View File

@@ -1557,6 +1557,20 @@ assigns or creates tags if a properly formatted barcode is detected.
Please refer to the Python regex documentation for more information. Please refer to the Python regex documentation for more information.
#### [`PAPERLESS_CONSUMER_TAG_BARCODE_SPLIT=<bool>`](#PAPERLESS_CONSUMER_TAG_BARCODE_SPLIT) {#PAPERLESS_CONSUMER_TAG_BARCODE_SPLIT}
: Enables splitting of documents on tag barcodes, similar to how ASN barcodes work.
When enabled, documents will be split into separate PDFs at pages containing
tag barcodes that match the configured `PAPERLESS_CONSUMER_TAG_BARCODE_MAPPING`
patterns. The page with the tag barcode will be retained in the new document.
Each split document will have the detected tags assigned to it.
This only has an effect if `PAPERLESS_CONSUMER_ENABLE_TAG_BARCODE` is also enabled.
Defaults to false.
## Audit Trail ## Audit Trail
#### [`PAPERLESS_AUDIT_LOG_ENABLED=<bool>`](#PAPERLESS_AUDIT_LOG_ENABLED) {#PAPERLESS_AUDIT_LOG_ENABLED} #### [`PAPERLESS_AUDIT_LOG_ENABLED=<bool>`](#PAPERLESS_AUDIT_LOG_ENABLED) {#PAPERLESS_AUDIT_LOG_ENABLED}

View File

@@ -33,6 +33,8 @@
"**/coverage.json": true "**/coverage.json": true
}, },
"python.defaultInterpreterPath": ".venv/bin/python3", "python.defaultInterpreterPath": ".venv/bin/python3",
"python.analysis.inlayHints.pytestParameters": true,
"python.testing.pytestEnabled": true,
}, },
"extensions": { "extensions": {
"recommendations": ["ms-python.python", "charliermarsh.ruff", "editorconfig.editorconfig"], "recommendations": ["ms-python.python", "charliermarsh.ruff", "editorconfig.editorconfig"],

View File

@@ -66,6 +66,7 @@
#PAPERLESS_CONSUMER_BARCODE_DPI=300 #PAPERLESS_CONSUMER_BARCODE_DPI=300
#PAPERLESS_CONSUMER_ENABLE_TAG_BARCODE=false #PAPERLESS_CONSUMER_ENABLE_TAG_BARCODE=false
#PAPERLESS_CONSUMER_TAG_BARCODE_MAPPING={"TAG:(.*)": "\\g<1>"} #PAPERLESS_CONSUMER_TAG_BARCODE_MAPPING={"TAG:(.*)": "\\g<1>"}
#PAPERLESS_CONSUMER_TAG_BARCODE_SPLIT=false
#PAPERLESS_CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED=false #PAPERLESS_CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED=false
#PAPERLESS_CONSUMER_COLLATE_DOUBLE_SIDED_SUBDIR_NAME=double-sided #PAPERLESS_CONSUMER_COLLATE_DOUBLE_SIDED_SUBDIR_NAME=double-sided
#PAPERLESS_CONSUMER_COLLATE_DOUBLE_SIDED_TIFF_SUPPORT=false #PAPERLESS_CONSUMER_COLLATE_DOUBLE_SIDED_TIFF_SUPPORT=false

View File

@@ -49,8 +49,6 @@ dependencies = [
"flower~=2.0.1", "flower~=2.0.1",
"gotenberg-client~=0.13.1", "gotenberg-client~=0.13.1",
"httpx-oauth~=0.16", "httpx-oauth~=0.16",
"ijson",
"ijson~=3.3",
"imap-tools~=1.11.0", "imap-tools~=1.11.0",
"jinja2~=3.1.5", "jinja2~=3.1.5",
"langdetect~=1.0.9", "langdetect~=1.0.9",
@@ -74,7 +72,6 @@ dependencies = [
"rapidfuzz~=3.14.0", "rapidfuzz~=3.14.0",
"redis[hiredis]~=5.2.1", "redis[hiredis]~=5.2.1",
"regex>=2025.9.18", "regex>=2025.9.18",
"rich~=14.1.0",
"scikit-learn~=1.7.0", "scikit-learn~=1.7.0",
"sentence-transformers>=4.1", "sentence-transformers>=4.1",
"setproctitle~=1.3.4", "setproctitle~=1.3.4",

View File

@@ -10412,60 +10412,67 @@
<context context-type="linenumber">269</context> <context context-type="linenumber">269</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="8880243885140172279" datatype="html">
<source>Split on Tag Barcodes</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/data/paperless-config.ts</context>
<context context-type="linenumber">276</context>
</context-group>
</trans-unit>
<trans-unit id="7011909364081812031" datatype="html"> <trans-unit id="7011909364081812031" datatype="html">
<source>AI Enabled</source> <source>AI Enabled</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/data/paperless-config.ts</context> <context context-type="sourcefile">src/app/data/paperless-config.ts</context>
<context context-type="linenumber">276</context> <context context-type="linenumber">283</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="8028880048909383956" datatype="html"> <trans-unit id="8028880048909383956" datatype="html">
<source>Consider privacy implications when enabling AI features, especially if using a remote model.</source> <source>Consider privacy implications when enabling AI features, especially if using a remote model.</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/data/paperless-config.ts</context> <context context-type="sourcefile">src/app/data/paperless-config.ts</context>
<context context-type="linenumber">280</context> <context context-type="linenumber">287</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="8131374115579345652" datatype="html"> <trans-unit id="8131374115579345652" datatype="html">
<source>LLM Embedding Backend</source> <source>LLM Embedding Backend</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/data/paperless-config.ts</context> <context context-type="sourcefile">src/app/data/paperless-config.ts</context>
<context context-type="linenumber">284</context> <context context-type="linenumber">291</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="6647708571891295756" datatype="html"> <trans-unit id="6647708571891295756" datatype="html">
<source>LLM Embedding Model</source> <source>LLM Embedding Model</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/data/paperless-config.ts</context> <context context-type="sourcefile">src/app/data/paperless-config.ts</context>
<context context-type="linenumber">292</context> <context context-type="linenumber">299</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="4234495692726214397" datatype="html"> <trans-unit id="4234495692726214397" datatype="html">
<source>LLM Backend</source> <source>LLM Backend</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/data/paperless-config.ts</context> <context context-type="sourcefile">src/app/data/paperless-config.ts</context>
<context context-type="linenumber">299</context> <context context-type="linenumber">306</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="7935234833834000002" datatype="html"> <trans-unit id="7935234833834000002" datatype="html">
<source>LLM Model</source> <source>LLM Model</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/data/paperless-config.ts</context> <context context-type="sourcefile">src/app/data/paperless-config.ts</context>
<context context-type="linenumber">307</context> <context context-type="linenumber">314</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="1980550530387803165" datatype="html"> <trans-unit id="1980550530387803165" datatype="html">
<source>LLM API Key</source> <source>LLM API Key</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/data/paperless-config.ts</context> <context context-type="sourcefile">src/app/data/paperless-config.ts</context>
<context context-type="linenumber">314</context> <context context-type="linenumber">321</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="6126617860376156501" datatype="html"> <trans-unit id="6126617860376156501" datatype="html">
<source>LLM Endpoint</source> <source>LLM Endpoint</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/data/paperless-config.ts</context> <context context-type="sourcefile">src/app/data/paperless-config.ts</context>
<context context-type="linenumber">321</context> <context context-type="linenumber">328</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="4416413576346763682" datatype="html"> <trans-unit id="4416413576346763682" datatype="html">

View File

@@ -271,6 +271,13 @@ export const PaperlessConfigOptions: ConfigOption[] = [
config_key: 'PAPERLESS_CONSUMER_TAG_BARCODE_MAPPING', config_key: 'PAPERLESS_CONSUMER_TAG_BARCODE_MAPPING',
category: ConfigCategory.Barcode, category: ConfigCategory.Barcode,
}, },
{
key: 'barcode_tag_split',
title: $localize`Split on Tag Barcodes`,
type: ConfigOptionType.Boolean,
config_key: 'PAPERLESS_CONSUMER_TAG_BARCODE_SPLIT',
category: ConfigCategory.Barcode,
},
{ {
key: 'ai_enabled', key: 'ai_enabled',
title: $localize`AI Enabled`, title: $localize`AI Enabled`,
@@ -352,6 +359,7 @@ export interface PaperlessConfig extends ObjectWithId {
barcode_max_pages: number barcode_max_pages: number
barcode_enable_tag: boolean barcode_enable_tag: boolean
barcode_tag_mapping: object barcode_tag_mapping: object
barcode_tag_split: boolean
ai_enabled: boolean ai_enabled: boolean
llm_embedding_backend: string llm_embedding_backend: string
llm_embedding_model: string llm_embedding_model: string

View File

@@ -16,6 +16,7 @@ from pikepdf import Pdf
from documents.converters import convert_from_tiff_to_pdf from documents.converters import convert_from_tiff_to_pdf
from documents.data_models import ConsumableDocument from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides from documents.data_models import DocumentMetadataOverrides
from documents.models import Document
from documents.models import Tag from documents.models import Tag
from documents.plugins.base import ConsumeTaskPlugin from documents.plugins.base import ConsumeTaskPlugin
from documents.plugins.base import StopConsumeTaskError from documents.plugins.base import StopConsumeTaskError
@@ -60,6 +61,20 @@ class Barcode:
""" """
return self.value.startswith(self.settings.barcode_asn_prefix) return self.value.startswith(self.settings.barcode_asn_prefix)
@property
def is_tag(self) -> bool:
"""
Returns True if the barcode value matches any configured tag mapping pattern,
False otherwise.
Note: This does NOT exclude ASN or separator barcodes - they can also be used
as tags if they match a tag mapping pattern (e.g., {"ASN12.*": "JOHN"}).
"""
for regex in self.settings.barcode_tag_mapping:
if re.match(regex, self.value, flags=re.IGNORECASE):
return True
return False
class BarcodePlugin(ConsumeTaskPlugin): class BarcodePlugin(ConsumeTaskPlugin):
NAME: str = "BarcodePlugin" NAME: str = "BarcodePlugin"
@@ -115,6 +130,24 @@ class BarcodePlugin(ConsumeTaskPlugin):
self._tiff_conversion_done = False self._tiff_conversion_done = False
self.barcodes: list[Barcode] = [] self.barcodes: list[Barcode] = []
def _apply_detected_asn(self, detected_asn: int) -> None:
"""
Apply a detected ASN to metadata if allowed.
"""
if (
self.metadata.skip_asn_if_exists
and Document.global_objects.filter(
archive_serial_number=detected_asn,
).exists()
):
logger.info(
f"Found ASN in barcode {detected_asn} but skipping because it already exists.",
)
return
logger.info(f"Found ASN in barcode: {detected_asn}")
self.metadata.asn = detected_asn
def run(self) -> None: def run(self) -> None:
# Some operations may use PIL, override pixel setting if needed # Some operations may use PIL, override pixel setting if needed
maybe_override_pixel_limit() maybe_override_pixel_limit()
@@ -126,8 +159,14 @@ class BarcodePlugin(ConsumeTaskPlugin):
self.detect() self.detect()
# try reading tags from barcodes # try reading tags from barcodes
# If tag splitting is enabled, skip this on the original document - let each split document extract its own tags
# However, if we're processing a split document (original_path is set), extract tags
if ( if (
self.settings.barcode_enable_tag self.settings.barcode_enable_tag
and (
not self.settings.barcode_tag_split
or self.input_doc.original_path is not None
)
and (tags := self.tags) is not None and (tags := self.tags) is not None
and len(tags) > 0 and len(tags) > 0
): ):
@@ -186,13 +225,8 @@ class BarcodePlugin(ConsumeTaskPlugin):
# Update/overwrite an ASN if possible # Update/overwrite an ASN if possible
# After splitting, as otherwise each split document gets the same ASN # After splitting, as otherwise each split document gets the same ASN
if ( if self.settings.barcode_enable_asn and (located_asn := self.asn) is not None:
self.settings.barcode_enable_asn self._apply_detected_asn(located_asn)
and not self.metadata.skip_asn
and (located_asn := self.asn) is not None
):
logger.info(f"Found ASN in barcode: {located_asn}")
self.metadata.asn = located_asn
def cleanup(self) -> None: def cleanup(self) -> None:
self.temp_dir.cleanup() self.temp_dir.cleanup()
@@ -432,16 +466,25 @@ class BarcodePlugin(ConsumeTaskPlugin):
for bc in self.barcodes for bc in self.barcodes
if bc.is_separator and (not retain or (retain and bc.page > 0)) if bc.is_separator and (not retain or (retain and bc.page > 0))
} # as below, dont include the first page if retain is enabled } # as below, dont include the first page if retain is enabled
if not self.settings.barcode_enable_asn:
return separator_pages
# add the page numbers of the ASN barcodes # add the page numbers of the ASN barcodes
# (except for first page, that might lead to infinite loops). # (except for first page, that might lead to infinite loops).
return { if self.settings.barcode_enable_asn:
separator_pages = {
**separator_pages, **separator_pages,
**{bc.page: True for bc in self.barcodes if bc.is_asn and bc.page != 0}, **{bc.page: True for bc in self.barcodes if bc.is_asn and bc.page != 0},
} }
# add the page numbers of the TAG barcodes if splitting is enabled
# (except for first page, that might lead to infinite loops).
if self.settings.barcode_tag_split and self.settings.barcode_enable_tag:
separator_pages = {
**separator_pages,
**{bc.page: True for bc in self.barcodes if bc.is_tag and bc.page != 0},
}
return separator_pages
def separate_pages(self, pages_to_split_on: dict[int, bool]) -> list[Path]: def separate_pages(self, pages_to_split_on: dict[int, bool]) -> list[Path]:
""" """
Separate the provided pdf file on the pages_to_split_on. Separate the provided pdf file on the pages_to_split_on.

View File

@@ -7,7 +7,6 @@ from pathlib import Path
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
from typing import Literal from typing import Literal
from celery import chain
from celery import chord from celery import chord
from celery import group from celery import group
from celery import shared_task from celery import shared_task
@@ -38,6 +37,42 @@ if TYPE_CHECKING:
logger: logging.Logger = logging.getLogger("paperless.bulk_edit") logger: logging.Logger = logging.getLogger("paperless.bulk_edit")
@shared_task(bind=True)
def restore_archive_serial_numbers_task(
self,
backup: dict[int, int],
*args,
**kwargs,
) -> None:
restore_archive_serial_numbers(backup)
def release_archive_serial_numbers(doc_ids: list[int]) -> dict[int, int]:
"""
Clears ASNs on documents that are about to be replaced so new documents
can be assigned ASNs without uniqueness collisions. Returns a backup map
of doc_id -> previous ASN for potential restoration.
"""
qs = Document.objects.filter(
id__in=doc_ids,
archive_serial_number__isnull=False,
).only("pk", "archive_serial_number")
backup = dict(qs.values_list("pk", "archive_serial_number"))
qs.update(archive_serial_number=None)
logger.info(f"Released archive serial numbers for documents {list(backup.keys())}")
return backup
def restore_archive_serial_numbers(backup: dict[int, int]) -> None:
"""
Restores ASNs using the provided backup map, intended for
rollback when replacement consumption fails.
"""
for doc_id, asn in backup.items():
Document.objects.filter(pk=doc_id).update(archive_serial_number=asn)
logger.info(f"Restored archive serial numbers for documents {list(backup.keys())}")
def set_correspondent( def set_correspondent(
doc_ids: list[int], doc_ids: list[int],
correspondent: Correspondent, correspondent: Correspondent,
@@ -386,6 +421,7 @@ def merge(
merged_pdf = pikepdf.new() merged_pdf = pikepdf.new()
version: str = merged_pdf.pdf_version version: str = merged_pdf.pdf_version
handoff_asn: int | None = None
# use doc_ids to preserve order # use doc_ids to preserve order
for doc_id in doc_ids: for doc_id in doc_ids:
doc = qs.get(id=doc_id) doc = qs.get(id=doc_id)
@@ -401,6 +437,8 @@ def merge(
version = max(version, pdf.pdf_version) version = max(version, pdf.pdf_version)
merged_pdf.pages.extend(pdf.pages) merged_pdf.pages.extend(pdf.pages)
affected_docs.append(doc.id) affected_docs.append(doc.id)
if handoff_asn is None and doc.archive_serial_number is not None:
handoff_asn = doc.archive_serial_number
except Exception as e: except Exception as e:
logger.exception( logger.exception(
f"Error merging document {doc.id}, it will not be included in the merge: {e}", f"Error merging document {doc.id}, it will not be included in the merge: {e}",
@@ -426,6 +464,8 @@ def merge(
DocumentMetadataOverrides.from_document(metadata_document) DocumentMetadataOverrides.from_document(metadata_document)
) )
overrides.title = metadata_document.title + " (merged)" overrides.title = metadata_document.title + " (merged)"
if metadata_document.archive_serial_number is not None:
handoff_asn = metadata_document.archive_serial_number
else: else:
overrides = DocumentMetadataOverrides() overrides = DocumentMetadataOverrides()
else: else:
@@ -433,8 +473,11 @@ def merge(
if user is not None: if user is not None:
overrides.owner_id = user.id overrides.owner_id = user.id
# Avoid copying or detecting ASN from merged PDFs to prevent collision if not delete_originals:
overrides.skip_asn = True overrides.skip_asn_if_exists = True
if delete_originals and handoff_asn is not None:
overrides.asn = handoff_asn
logger.info("Adding merged document to the task queue.") logger.info("Adding merged document to the task queue.")
@@ -447,10 +490,18 @@ def merge(
) )
if delete_originals: if delete_originals:
backup = release_archive_serial_numbers(affected_docs)
logger.info( logger.info(
"Queueing removal of original documents after consumption of merged document", "Queueing removal of original documents after consumption of merged document",
) )
chain(consume_task, delete.si(affected_docs)).delay() try:
consume_task.apply_async(
link=[delete.si(affected_docs)],
link_error=[restore_archive_serial_numbers_task.s(backup)],
)
except Exception:
restore_archive_serial_numbers(backup)
raise
else: else:
consume_task.delay() consume_task.delay()
@@ -494,6 +545,8 @@ def split(
overrides.title = f"{doc.title} (split {idx + 1})" overrides.title = f"{doc.title} (split {idx + 1})"
if user is not None: if user is not None:
overrides.owner_id = user.id overrides.owner_id = user.id
if not delete_originals:
overrides.skip_asn_if_exists = True
logger.info( logger.info(
f"Adding split document with pages {split_doc} to the task queue.", f"Adding split document with pages {split_doc} to the task queue.",
) )
@@ -508,10 +561,20 @@ def split(
) )
if delete_originals: if delete_originals:
backup = release_archive_serial_numbers([doc.id])
logger.info( logger.info(
"Queueing removal of original document after consumption of the split documents", "Queueing removal of original document after consumption of the split documents",
) )
chord(header=consume_tasks, body=delete.si([doc.id])).delay() try:
chord(
header=consume_tasks,
body=delete.si([doc.id]),
).apply_async(
link_error=[restore_archive_serial_numbers_task.s(backup)],
)
except Exception:
restore_archive_serial_numbers(backup)
raise
else: else:
group(consume_tasks).delay() group(consume_tasks).delay()
@@ -614,7 +677,10 @@ def edit_pdf(
) )
if user is not None: if user is not None:
overrides.owner_id = user.id overrides.owner_id = user.id
if not delete_original:
overrides.skip_asn_if_exists = True
if delete_original and len(pdf_docs) == 1:
overrides.asn = doc.archive_serial_number
for idx, pdf in enumerate(pdf_docs, start=1): for idx, pdf in enumerate(pdf_docs, start=1):
filepath: Path = ( filepath: Path = (
Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR)) Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
@@ -633,7 +699,17 @@ def edit_pdf(
) )
if delete_original: if delete_original:
chord(header=consume_tasks, body=delete.si([doc.id])).delay() backup = release_archive_serial_numbers([doc.id])
try:
chord(
header=consume_tasks,
body=delete.si([doc.id]),
).apply_async(
link_error=[restore_archive_serial_numbers_task.s(backup)],
)
except Exception:
restore_archive_serial_numbers(backup)
raise
else: else:
group(consume_tasks).delay() group(consume_tasks).delay()

View File

@@ -690,7 +690,7 @@ class ConsumerPlugin(
pk=self.metadata.storage_path_id, pk=self.metadata.storage_path_id,
) )
if self.metadata.asn is not None and not self.metadata.skip_asn: if self.metadata.asn is not None:
document.archive_serial_number = self.metadata.asn document.archive_serial_number = self.metadata.asn
if self.metadata.owner_id: if self.metadata.owner_id:
@@ -832,8 +832,8 @@ class ConsumerPreflightPlugin(
""" """
Check that if override_asn is given, it is unique and within a valid range Check that if override_asn is given, it is unique and within a valid range
""" """
if self.metadata.skip_asn or self.metadata.asn is None: if self.metadata.asn is None:
# if skip is set or ASN is None # if ASN is None
return return
# Validate the range is above zero and less than uint32_t max # Validate the range is above zero and less than uint32_t max
# otherwise, Whoosh can't handle it in the index # otherwise, Whoosh can't handle it in the index

View File

@@ -30,7 +30,7 @@ class DocumentMetadataOverrides:
change_users: list[int] | None = None change_users: list[int] | None = None
change_groups: list[int] | None = None change_groups: list[int] | None = None
custom_fields: dict | None = None custom_fields: dict | None = None
skip_asn: bool = False skip_asn_if_exists: bool = False
def update(self, other: "DocumentMetadataOverrides") -> "DocumentMetadataOverrides": def update(self, other: "DocumentMetadataOverrides") -> "DocumentMetadataOverrides":
""" """
@@ -50,8 +50,8 @@ class DocumentMetadataOverrides:
self.storage_path_id = other.storage_path_id self.storage_path_id = other.storage_path_id
if other.owner_id is not None: if other.owner_id is not None:
self.owner_id = other.owner_id self.owner_id = other.owner_id
if other.skip_asn: if other.skip_asn_if_exists:
self.skip_asn = True self.skip_asn_if_exists = True
# merge # merge
if self.tag_ids is None: if self.tag_ids is None:

View File

@@ -0,0 +1,191 @@
%PDF-1.3
%“Œ‹ž ReportLab Generated PDF document (opensource)
1 0 obj
<<
/F1 2 0 R /F2 4 0 R
>>
endobj
2 0 obj
<<
/BaseFont /Helvetica /Encoding /WinAnsiEncoding /Name /F1 /Subtype /Type1 /Type /Font
>>
endobj
3 0 obj
<<
/Contents 15 0 R /MediaBox [ 0 0 612 792 ] /Parent 14 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
4 0 obj
<<
/BaseFont /Helvetica-Bold /Encoding /WinAnsiEncoding /Name /F2 /Subtype /Type1 /Type /Font
>>
endobj
5 0 obj
<<
/BitsPerComponent 8 /ColorSpace /DeviceRGB /Filter [ /ASCII85Decode /FlateDecode ] /Height 290 /Length 3461 /Subtype /Image
/Type /XObject /Width 290
>>
stream
Gb"0M0bW:r$j4o4s3aL9.o/:sRKC1+V[Po_hnP="8Wk>jOsEV^,Y=.E8Wk>jOsEV^,Y=.E8Wk>jOsEV^,Y=.E8Wk>jOsEV^,Y=.EM=P;M`ictLh5:'u?=R'nd;IE]5Hh=BmqB2p^7X$0Q$9UiFPkD[m)hEDD7]3!20S(%m5Eepo,>73Ncpo[qg"0,Gt5J@p\hbEY.UOcVYbgK@oqO7DN/KYR@egT:O\Y"S^Nmne(=m!@M;\.mreAj`lssm2RjQmR*'f[]=0V/jtsN_^"C8&k'PptV(jd(Ymp-?-DiQUlg??aR5p7DE%a+(Q2+a1De[G>Bl&EKZ&,I(pUY]E@qJJG)r-?G9P(rih-1dREuNfk?>O(#o=aSKd[6HOfEV(Z'2t=fFn_3AbT-'E'3sZfj1^M@pYhQ7E1%B!q_i'CLMJZ]APP)MgR*7.Y/pg53RP?TA*/3L-50YH7,u"@RJ5[/9Q6C5NVbVGhM5l%_.?@umb=+S+0N]gQT<I'De%pX\0_kok!\7DNLBP"RS7[g'92lIB&8;Y13$pgHd09iRG2p8o0-ECM)-sFC\FmSgqH^TpYhQ7S=01ZZYsF;p79@=&(b@ObfogMI4I+_mo8Ft\0_l%B"lm`>FE$MV_[_Y246E[o=\bnb0967Q$FISai'U8mksuCAo?M*bkl?R-I0h_YM$B?F8J^DhM5l%EG"?[c+]I2gNP.=5$X;.1Gdp(p8uQo^/LHoiL3GZR2\raAeSG3ICLU;>is%i\_.+PGos32"IH[hA8X<AA_r2X1;RO>4IM[5E1-IZRS7[g)c,U.'3s[J\0_kok/NUqf`[Xe+0N]gQauWsDDo=BhM5l%_.@LHR@?oiRJ5[/9Q6C=:Zc7&>ipIE-50YH`fmsd"IFD+`\u,TrPk$]NL;edD'YO[I2buE1hPl,[ZP+_p2)p[e!QQPfLD$lgUH]`:1Im2@iJ!ODVrHt3K9FeNGTr/\U>Dmjtp]41q&NWk4WXSRF@Oke(@-QRG54@A56WH:1G57Ao?MGP<"Vj3K7l$RCR_b:ZaKGk$5CC.+u(L[aFc^qb6b_]O]p>fgaTjmPE\no9+M@B,b.F]?bTVcV*tKS8EA]mlo3K5;1^!EOO9f^ACUurOc[u`n<i5qsH8rp[aPr)eU*qn%6nfhp4shD4GHb^$e/6I6TC<[rJk(otL;sp\ha8ho=>=fDB;AhhFPj09^)KAJ38&9VV?L8MpH&M<8.ldJV05RX^_no.Ld9qHZg`b02a-m$D"%2.\6nf;,`[G2:]5WQ\V2c@4Gh=&YtOF%n^mA_13^REE`2l0OaBG;Wq]1Y8G/?Zt8UPc;l3PKnX1F]VM=136/NqdnAb9ps/J2<jIo?$A/;.Po\PZX7lf=5<1=SU;dP(A-q:-FpT?Fn1s1>L9Q0S)iGGeB)@_DF)%_Cm',a;^\2o]*8-oZUsS%9V$PXmM>H\bU0m00m3&T\6I=`1RmI^`mi+Cibh&sc>8Yj)cJ,VM7Wri3jVEGD+pLJ-LMZAlc^]d[kW$rRCHJJs-pTpHZ/#)PI^.2/_/Gu9ldRQT$2WWCT5#pBp+rKo47:$?VC&L8X%rrR4!(5rE?5)8Xe^PcTIWmmak?b:!t:GHfiH*GJBI/CQ^$TfeZFd^AG<;?^!=gc(929pYE$LqO43ODYD;<\aOu!e^l'@EjKDMb^K5$WP0]nZLJ^%HY#u61\3[enc_V2NHb$M)gp)%RGYQ;01^D,]VFZHi02I1r6C:L6.0i7*Bj-$T6+]-GAcILP+EW]kd`YIUbagAF!G%Ro\=[]cb7.BSXK;E)u5)]kJfT0mL;AEbfoP2a;6*b2r;r'Dt$>2Aq&o4^*)[NnW'2fK24Nao/eo%"\I%"GP'Z0I+"FNhmnk&i,4+8D*45U9lOt5(Y:H%gNYJ4S)E#I0<Sr*[ddmG2Slep?X1q4Cu`XmCk?Fi^UTlGfuB5df`]o]IW7MlZ]->RZO*cDrSi.cAfFP.AeSDgqSi-Obr20;bpKqYoS`%'Rr(9URn[j=
kSMi,2qrR42k/aZcu)c8-TRTajWn-2h:0V>:?H.K8QTXcol?4Z\QM\UQ.esGSE+3uQBQEeG#L%A3LQAu,[ID*eB:EYk%6VF=)'\eEfuWs=\dD1g.f8NjCE.oPB<XE;_KLYR@E:`?)cZ0b=PIkAiWFaqb5fI-eXjPMG'4JbhVF=>A92cbB:e#8i1-tFRQ=g8G;/Vi_h'@1H2o><Z37\Ea<[a&ri:uh0UX]P'smD\5\=)b`2&(Pm5@E>ZY116t>@KpYJMpA7)Ji/leW#F/+)#V*VC?f+jW%d?qJl]slE4fpD#^99j27h!!U!Boq])FiC1L1hLXTfG0bKqW+XE:1[0q:;4>9@iJ\8WL&g=b[jOE:;4>9@iJ\8WL&g=b[jOE:;4>9@iJ\8WL&g=b[jOE:;4>9@iJ\8WL&g=b[jOE:;4>9@iJ\8WL&g=b[jOE:;4>9@iJ\8WL&g=b[jOE:;4>9@iJ\8WL&g=b[jOE:;4>9@iJ\8WL&g=b[jOE:;4>9@iJ\8WL&g=b[jOE:;4>9j"6dh3D"AU^/LHoRCY]P]2P,]<+kV\Q$K"$)s"^pPrVEYk.Xc^pR/TYm^lDcP>l2_4-b)`W>jp44-_ftFlpD:RJ3,\612?`R?LT_mQ6\ZT;`dj^,qT?8Tj10;jmBJ\j>br;jihKBC7jHH(V&TjM!^@3D"AU^/LHoRCY]P]2P,]<+kV\Q$K"$)s"`VbpKshhJ;heIJ#,$L#d]nmrG`@m^r4^I;<3g8o>f_?gbP]CkDQP]k60U=20o&8FDiA/iT9X^3d':\+\@Uj;*pUjhAp_-FiO$C\FlYoddS,jF4Z.EjH)?]D%bBCL@$4DBZPtm^q7jK)=uLB&D<D^QMelm[*f'2k/a>H`u,3p=6A-(6\RV^<=bJ\F89ip8rc9/%LApI_"ofZO-'3pR6MG?i<T7+h:tJ]8a.RlgO*ANA"r%CuY<'3^MfLff,D1riT#Cpi?)Q-Eb+a'/[FnIC"drn*1%805'0Yiqg8J60$/A2k.>VY"m@=Eq[a)Y.q"N1qoK.Z\e#:l3*)"BA[ObqR\dSisd?'T6oD_R;-aleNSse-CL'A2.`f0WDraO2OS)NhURji-Dsc/e(A2o3I+\)VOF#I[81:r8`o)>9poa:.b-_B9dZ9lG;Ws3af/8:1cCb4:>XNcW@"N@mF0]uOu[eh;l6"R9!qH)P=aot>tp`%E[oU'ND1afPBSlqWl_5>q`ONacB7HTe^^)FjdQ)cmKTR7qbD9Vk'+?_^P9A:.ET;&?(LdsY0!m+DK&4Rmo3A$I[=j@CUb=RP3b9\eX>=VRf")l#,`aD:3C^AGI]'8L:b8NahC\ZSbZQoafjZ@E([G)<**^]QYZ/-\/Us$loWbJRG[+pr#4u-V^2.7F`lhj\L&UoOsEV^,Y=.E8Wk>jOsEV^,Y=.E8Wk>jOsEV^,Y=.E8Wk>jOsEV^,Y=.E8Wk>j''C?8XL9P~>endstream
endobj
6 0 obj
<<
/Contents 16 0 R /MediaBox [ 0 0 612 792 ] /Parent 14 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ] /XObject <<
/FormXob.1ec20b3a96e40a35266a0a8a0d634adb 5 0 R
>>
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
7 0 obj
<<
/Contents 17 0 R /MediaBox [ 0 0 612 792 ] /Parent 14 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
8 0 obj
<<
/BitsPerComponent 8 /ColorSpace /DeviceRGB /Filter [ /ASCII85Decode /FlateDecode ] /Height 290 /Length 3500 /Subtype /Image
/Type /XObject /Width 290
>>
stream
Gb"0M0s\bV$q&G/J($uOju)%+%1&)hn*)-NT`"8nzzzzzzzzzzzz!!'grT65NupmP_`]63jRce!oT8TqIFGMi(@D>9Q18%Wp<?-h,WY=WoE>BeutHu8YIA4O7SpKc+sL9F0lZs.b3omCWORUeq#Fn]1ff7pJ#G-kItht;A6pmP_`]63jRce!oT8TqIF@N3&*FmnO#.ck97`6:E%D03I(`QWUU&i9D1[aFc>'f5%G8^-ObfLFJ><m7)c-S_r'@N/VA=YXu(T>\r;M/@@JB>r)?I1e@5,du+nSeX'Eoh!BoPLr@VHWJ@\f-`;Z:LY8Kmo_Ad?D#0[5)F,u]k>=.H$p;]q^?<O]sAA.1XE_eQ`;S-5/&?Y1Gd@ifpA]ho00l8'f.Yl]\/XO/+Ys=-5A<mcb.qtW[m[)^*XRN1XE_eQ`;S-5/&?Y1Gd@ifpA]ho00l8'f.Yl]\/XO/+Ys=-5A<mcb.qtW[m[)^*XRN1XE_eQ`;S-5/&?Y1Gd@ifpA]ho00l8'u%mtD9P\Mk\;?)Y=XF$F&s;:;^o<38E=PaiQL$,`lqD>Xu6pgRT0&;GI9.]Q(k==7(su_^<Bl"bY4ksC*SkE8VJg=<uWqo.D"5(jD.ZPbM:XfbZ'J&2A5hS<;84m[4sJ&U8s8A^*XT/b[#)09Vprf,E]0$KeILK)`(DA]%T^9CJs-7XQ[gn40.j^hT+6D_O"EQQ.^@^iQJlpY=XF$Z_AtVn#XBmGopCW$=@C6=(^>mKeN$]^*XT/b_iRI^9\/Rk'_VO.X[X!?($+R'u%ohpmP1W1+Tpkqp$[=RJ65/WUOJ"FCk0:<VS?<j(hQObH0pMloV9;A_tJZUr&I$d?WC/<oM67:LY8MbP\bnpIT2]CRMpqmllSFHnFsAk1qDiNNZpmg:[;.[dgcL?^l83`&>>qq.oTiPM!n,14O/tI1k<0>3<$5]2)lT?d&ATH1smHj(k't2X`iUD'W$A9g"p/H/<t\qlZj@Rs6j=o=XsBpK^R_2t:^YkBZgdm^o&GDrTG<ch$SRh02"nhScaWT'+q-]C1'g]SU874jU`9GMi(XGn\LNHCf>Qm_8!9o-U&'oK;S+h0mmRk"Rt-k]u$5])/Y.baWi8dIY"AkBe/\3oGMcAUk_L);rMA#.X2i!H.gHJ/`tUi5T+.\FGmdDZ"(U:O*o%bOL")-=7^7DrE74n%8HFqc1)_qsI.l2X9/9=Xr<QpJLXbCr,l%R=&l$]nNdl^@1KblrVkln%1COg8K?+B;p:9h+-/%Z3B-0BC`H-pD2%Pq7aJ%Z<q/N^@0A.CSU;LS>Ge)G9:D2aqfB^S]TJQh-2j3jnnI0b'oU-pqAhRYDp-&E0eZ@h0kOd.U2CjG:$Z9F`64iQ1)?^./R#Qi;;q9^,G95_HAAGGP@N9hjJ)bTkmPnBP&2>q6mNRbVk[p.ML'C@j^(Kp6jTgZ9`&rR;L1/gVQ-1gJBf,9Jj)8R=&5kB4`+*#*k$W[P<ta$iA.a6eS+fdEFL\nnhg-R;F>k<$n'e`_=)ulnbsWAV8,n1Y\;=[tT6B[\7M6R:p1O1\nJ`cce;3%4W%9Cu];YgRj<\hRi@\^S#q;\'`3BG@'2DFDp_.g3E)3$iGVE:#8>Yn(i8??dQL.gM#W\4"p(2\i4mRD7k)U"b&c3-?#Z=p[5]00Bh9RD7&iiSJV&)h4)':2Vu(;!l(CTPIJrZHZrfS**lhrDLp#UDVjQe.sCQCb]ds]kIE*doS^q;DVjQe.sCQCb]ds]kIE*doS^q;DVjQe.sCQCb]ds]kIE*doS^q;DVjQe.sCQCb]ds]kIE*doS^q;DVjQe.sCQCb]dt()PtR=ZX#Ne^?[k]n7>Yqk"X@5,N$b[n+t<ZI$k_`GnY>faEOuZ]=tTY?Y5"1hF(X2o%i[0Y4&I/QW`::2c81eHoLr:lT;0:AQJTg:"6Qqhp&n(qT^R<R2*G]'6W]`GI-bM^9\/RAqb0[6sVnFh<b$An#XBm=lGi/;:ghU2uC>T40.j^<qt
fOe?pOYc+`ZCc7440'u"rJJT(G.c#rLLN&!'U(Z28lDQ`kTc7&8cJ+:35jlX/Sk);&Kn/'u_;f8c8DpBd&!e9aR3p#M8s5o7q0CTe8X&Eo=qesb.o)aF3]fP9;])UoO1,&,5hlB[nY5<._..[Lin\%!Fk.:TTN&!'U(Z28lDQ`kTc7&;D]rq>1..d;(9\b4QZdNohWf>5rbj0%"E=9M)9$`?o2DU%CYHQ'd/bh(O4X[8`a;i@8^*XN&i6/4oS>^0IF"$YVRS;Lg0=0)JU8j3sU!2h<13!]9bY$3<W\uVf19[n'`%Ca>.m58[g;k8V]Y5^+\)>H2oUMjp,BG:)qO1+5JhOIYF/#[obb<8HCGKl;^<B3qM%\S,H5f%lH95tRk08_qgJBXiTC$Zs\'m6IhOH"!%41W;fe.Jp4)JKic&!(f:bk8-m;f,6dl(gpS1(WO-1g`]/pDV'D.D_QM%\Q>1-_DuEi6Cq2J1g9.'X4-oCLWfGBu>fA*2$m'&-5<5G.=`Vmk,5B&9%+Ymi#No@Ya?H95tRk08_qgJBXiTC$Zs\'m6IhOI,Nj6S(_kfW7]G@i>d]6GuK^GF1_VGb-dpCd3^o5%kcjh#ajEPF<U-Dj\TMt[kY47d8t.cn9e06+`_cR,Me^5M^upH.t_@OgKOGV='O1X@DF;SJ(`')+KZCgnmU]6GuK^GF1_VGb-dpCd5$SD?,d0=+!u_ELRn>rts0m[M:a=eTY?+/Q$@*@YXq:#sL!:q!ThdT+nZPdC66nmtiM>M)I1WbY,IfmOP01+SS@m%\[Q[3Of"^576*(!7<c;7c&HO`GX&7)$kPAIJA`?$5O*3P02R?Y5"tKmf2g\osm>h)CHLZU3?^5"\m^4&XAlS&gq!Tkn-ZV5pa>F_*aPG"-*$7)!s@;;.u'G3*prREq=mOkD[UDr,o,2X7_Vq-@@iZY!i\p.aV;G9<Z@\ntMtf9c<7fbp3+'D^eH7qn`9gQg[hANjmQ7V:OG^3THMg8NbLj`c-@c^LDeff,%3hL1VHlF(!o?!la#AnPZJ:#qdf+/Ot.D-)2<Qhd`9)4>mdq<$L'BqoS#Q/D7G5&5=2B&?"jH1t1iW7uLWGC>n*R[oSo2j&%8I1k:217u7s3aHUt;K^6R.'Y9Ko@YXqe('1+<S+m?'"_$sT=r*&?#B@7Fj6C(Yq%-lfdj/QeV5_Wf=ZqQ]2CDV]tug9D>7"Oc'p,d.jaf?/$.4ML+cQY]SR95;DOlX_E(t>pel7ZRjbNl-1fe?XOG^S03-W:M%[Eu17u7s3aHUt;K^6R.'Y9Ko@YXqe('1+<S+m?PTA#EbWE/3Y5NP"]T:Lk9Zk"(]B\*gf?O1@?-T1h40tujrH@#0O4)QPb.KOBlIp1.c2/npc(rQFZ`C8-G29flda_%6]JI1bg2GTfq^>apUs(p,X02DEh7SfseP+,u1V;r+DqE82-sb)nbWE/3Y5NP"]T:Lk9Zk"(]B\*gf?F9qzzzzzzzzzzzz5k,pqe-_`~>endstream
endobj
9 0 obj
<<
/Contents 18 0 R /MediaBox [ 0 0 612 792 ] /Parent 14 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ] /XObject <<
/FormXob.1280b7d13f0587f75dbba24117c3e12a 8 0 R
>>
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
10 0 obj
<<
/Contents 19 0 R /MediaBox [ 0 0 612 792 ] /Parent 14 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
11 0 obj
<<
/Contents 20 0 R /MediaBox [ 0 0 612 792 ] /Parent 14 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
12 0 obj
<<
/PageMode /UseNone /Pages 14 0 R /Type /Catalog
>>
endobj
13 0 obj
<<
/Author (anonymous) /CreationDate (D:20251216142815+00'00') /Creator (anonymous) /Keywords () /ModDate (D:20251216142815+00'00') /Producer (ReportLab PDF Library - \(opensource\))
/Subject (unspecified) /Title (untitled) /Trapped /False
>>
endobj
14 0 obj
<<
/Count 6 /Kids [ 3 0 R 6 0 R 7 0 R 9 0 R 10 0 R 11 0 R ] /Type /Pages
>>
endobj
15 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 180
>>
stream
GarW2_$YcZ&4#]5`B.*sm?lG7-r1F@qZF9DA`.IipA]su.E!o]Gjlb!W-Y8MVLYd\SdH+*Db)WVcj08*lF:SRr1h[EQATgu\mTX&mM6\8TBJjRrY+[@;2SB>n%<o^ecds*8b94pS?3+.GJiRY?$tJ'kE;+M?S1+H*dP`*I]"P+I/q7'3Jd~>endstream
endobj
16 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 247
>>
stream
GarW49oHkR%#46B/)I%IW6)D&Ib[t)(Vt`^F7>Vna$l[3>H5Gfa')=U#ta'/k95LEWOSG3"r7^/'n\C!!F0.=g]gEL*]\ARI\tm.$t5=QeJN,a-U84$:2lt0pl4os1D+HLYKtgFoDtT3?g;TJZ"p1Ms8-mnFZu9hhtp+9>=dda($9HW?h<;`raPb)D!6nUI04P3H\NHB@T2`*R;q_bb$8O:'I:6:edBMuW3U[l<LGI>h[8WZ3d+9:~>endstream
endobj
17 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 174
>>
stream
GarW05mkIo$q9nR`F#VQ:"lUo>6R=/74qEVW"iqLlKJ$aC5XZ78-0=Y@o+._YHbl4Yh1ZiKj;Fh4GOo\Q)*Tmo(5G_$S2VM\7.0Q<EDUIL"miXU&6:llV)t3kO76]f1&V`o<RW_$L^=Zj5S/(^?3g%MR/&;IQVW!7L'9`%ko%``W~>endstream
endobj
18 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 244
>>
stream
GarW4b6l*?&4Q?hMRuh(23Tpmkt^cRET&,M[qV:g6EQRMs2I6!U^T$](X?G+!r#.NZmDs+Qu<6UTF4R17*n#s2&gSmk2Mk.09@0r[k9DgRu9WjKt]k!Ic1n'J.q+%HiE61]07.7.f1^O]tp[&Fn>&8hUMD]+;spqhR>>LX`Bap0=GTNaa\,&7Zqt_h]FP:T4bD(VQ.g\Pm&EGSuJ6sK$-Os9'08g6q!hekXZP?.6B5f2_i<IDu~>endstream
endobj
19 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 177
>>
stream
GarW05mkI_&4Q=V`F#VQSZ/`o[HbngaBTj`WC0EX,i7E\>AQT[REn!?T0[2$]@m:4_<Sts*7B@qAH=gO.+uD8j8"aV?VhBDBuOM(;)a,J%_4qBA,/pbTJI0<]mhonfJ__#oZJ*o;$A%?:)bSCqJ5Nag.!$oHGKA(4Iu=r,7iHH@ok]?~>endstream
endobj
20 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 167
>>
stream
GarWpYmu@>'Lq&PV`:G4BJ1Rt7[P0S\M$3?0c4(6nGPe/#`385LHkTp@/;7gD`sVT>qS[,<_0MjcjQ72dr'n'riIp[%YeJCl<.DN]-CUV%s0VJJ"dSm@n9+>F4SmfNcSuChM%!&%Rn8_]8SB3ren*ZdT2U-SH5HD>?EhY~>endstream
endobj
xref
0 21
0000000000 65535 f
0000000061 00000 n
0000000102 00000 n
0000000209 00000 n
0000000404 00000 n
0000000516 00000 n
0000004168 00000 n
0000004426 00000 n
0000004621 00000 n
0000008312 00000 n
0000008570 00000 n
0000008766 00000 n
0000008962 00000 n
0000009032 00000 n
0000009294 00000 n
0000009386 00000 n
0000009657 00000 n
0000009995 00000 n
0000010260 00000 n
0000010595 00000 n
0000010863 00000 n
trailer
<<
/ID
[<93a746516153ebd4bfbd42147dac7019><93a746516153ebd4bfbd42147dac7019>]
% ReportLab generated PDF document -- digest (opensource)
/Info 13 0 R
/Root 12 0 R
/Size 21
>>
startxref
11121
%%EOF

View File

@@ -0,0 +1,251 @@
%PDF-1.3
%“Œ‹ž ReportLab Generated PDF document (opensource)
1 0 obj
<<
/F1 2 0 R /F2 3 0 R
>>
endobj
2 0 obj
<<
/BaseFont /Helvetica /Encoding /WinAnsiEncoding /Name /F1 /Subtype /Type1 /Type /Font
>>
endobj
3 0 obj
<<
/BaseFont /Helvetica-Bold /Encoding /WinAnsiEncoding /Name /F2 /Subtype /Type1 /Type /Font
>>
endobj
4 0 obj
<<
/BitsPerComponent 8 /ColorSpace /DeviceRGB /Filter [ /ASCII85Decode /FlateDecode ] /Height 290 /Length 3565 /Subtype /Image
/Type /XObject /Width 290
>>
stream
Gb"0M0s\bV$q&G/J(%!d.nqi3%"dr>?4mOts8EB+zzzzzzzzzzzz!*oM+-e25^Cnl29l&b?1]i8(4Z]3i/UdM`^pJP:nc8L!XP7Hikhb/*W3nrlS:-,3JIP95Ol>35>46_jpqn5s1WO%T@mA*+\n+T^cV9UuF\!%<d^>DS+Prd[`h+g#,qgL3K<U+CqG<>&Nmea[s=/iNchX.++@iJ&&?TXb*R+\CBNRSd!GW)BQZVP'MW@t^MCeA2LT>ice?YncUPfCp2NGYK*fZ(6HNL>25gIQatNFu0.1]'#;OnrN+c_`O2p1\=fou/h\9khi<e(@.TMetSGd[8^cZ&R6LgNYWQ0scpZqhe?bhn8-AB4gkNX0g9qf;'UUff/Bqk*_Wcmj`02nhmC;AbOAq^3O=&hn)K"NI4GM;OMt)So:3gg%VYt7O)9(MH"3F]]YbsB(ip=[*ctHU`u)W2t-S)R@/Kl.W%$525s,Xp<@ruH"L[G]1O$%Hqc(gR=4[uDk<BMpTDq6,@?Xl<+b%#kF6*rp:[N6dE'q8,*gh?n+l+6R=4NRD)Cf@WUOIo8^2V6q[HopI=+a=I$mo2oA9.h_j>Wj?]th(k*4B.GJ2o'A_tJZUr&HY,rP-sml,W^p[L,?n7>?dc$S(>%:A7GHqc)<R8[#ThXu?8Q['Z[P6t[ZPgm?i^#/<jhT+6D_O!8/2=Fi>2gh%fnMR<8APPhAHe8-(?flca9e(OY;c<5DomXfFGopCWMT3kt`d*!#p[L,?B[!PkSCa*Q+((4h`ls=[H$u+TnMR:ZbcGKJf9^b)?^$Aeb_jDcI@9j"25t95XD"-UQ['Z[PD[B(MT0piRJ65/WG'MWBUm\@GopCWMT3kt`d*!#p[L,?B[!PkSCa*QT>id:gK2Uh2l/78:Lb>9k4;`pb^!,*1]'#;Tk$?Z<QpslV(o;P@iJ&&o3'&VGuGV;q_YaTA3QVcba^Vg0saYc)bT8a=iJU1;]7Ih1q')g5+bj!S2[9!ZVP'M9'5_J`QWU+)Dh7I[\74qT2j+*2l/78:Lb>9k4;`pb^!,*1]+i^b,/"tD*6S:4RYJu^>DlOD*6S:4RYJu^>DlOD*6S:4RYJu^>DlOD*6S:4RYJu^>DlOD*6S:4RYJu^>DlOD*6S:4RYJu^>DlOD*6S:4RYJu^>DlOD*6S:4RYJu^>DlOD*6S:4RYJu^>DlOD*6S:4>5EnIDCiW$uk5["5N[p!&sm4^l?);E8rSRh"]%m2uYSJNP>nZ4+DCq2sL42I@5NP.]F?D<I3LL[A0.XIIAYb)W+P@hC#_+R@/Kl.]F?D<I3LL[A0.XIIAYb)W+P@hC#_+R@/Kl.]F?D<I3LL[A0.XIIAYb)W+P@hC#_+R@/Kl.]F?D<I3LL[A0.XIIAYb)W+P@hC#_+R@/Kl.]F?D<I3LL[A0.XIIAYb)W+P@hC#]7`QV:oW@c?\lC;T2V2]COdZWlH[=t#SSQ"pN[.2C1>#Lof[P5%gF/jYQ2SRj89<,>J(1q`4::6/2`]"iN8Tg?Q_T]1.WV03Cpgta&WQnCGR+^"MeNQSEhe])7eQHVp1V5]Gg.,l;RuOeN3_O<uAln.pq^?</Z]UPQB;mK,m-:*U+/Os82U[cR/@C2"?(Fp]j6J1;F(6\,k*Zn]Rad@F2`IW)AY(p0Y=\-&GB77D?fmairL25rHoF!]nn"CQdB?./aEM]8.UV>+'E+BLNdNcEVXe;i3RP$%mDsKm@k(jJR1oOfe;eo82l?.WSiP3%8aXTCF(6\,k*Zn]Rad@F2`IW)AY(morkEmYot@DJjn#L]gNe59mc3'j^2HC'b3)cC\+BIQh8d"^IG]e-Q-ZMfDWg]+]8pmFqn<N9/"]nVh9P>5G9>^kpO*uQ=$Eg6]:IRJmQSA`me\o,Y'aRKFuunrh0j\Jh=kb7?2;.um<2gn]DMBs]C4KN](1-sg[2YfGPCXpGMfm'FQN+o\(c1Vn*]5jn%N^-l,r0hDn#?7hJLcP6!rcB2`JI/^Lm3;l:\
Y34S.^TGOa;/SmQmP\!^mch"WN$Rb@;50B"h\baTR/`EtEJIe>#BAtF9hj1G)%B$[ZpK\O^,Df0q=J'I?UeX(6EH03A2n)CO=4hph+DDTVO\a`u'27)II?c$[BR5[(=Mn`ltr9?qbbrkU[`cn]Bb\tDn[ATgh_uqm2NGlQ/0;Y,U">dfPDnc-&V4,=*1V\of^AKX\R`#=b^Q!0Gr%Fn8_uqm2NGlQ/0;Y,U">dfPDnc-&V4,=*1V\of^AKX\R`#=b^Q!0Gr%Fn8_uqm2NGlQ/0;Y,-.D:`8P^-25mE:I'Hs9-=k"U!+0/NUuD';?92q?HmbkTAjKbS+T<*/rPj"K:*;NuH*?1#go^O06V;<CSq[4a?8nnfO=R5`[H^<HAq[tPN-hGdSR1?1%F(/gBB;dJe6N5'aE85\MDFgWkYrb?8tQ[.^pk=_($qeI+^nmt#BSEe7`.WtI:1j8!(I(hVkPqQ/&m&t-ib*VpOF/g`tWmo^/e(Bp=oD.NJ=2%Y5ZbNBB.pqH1c)IjnWD[AY9k.8=bibHo?27BpcMj9PY'cY`2lF7XV+R<W22to;17rJ[Flc8]3r<\2]CKMshjiuSQR[.OfD'YmpbL1.=jd#IdUXS0P6t[ZR5ZW7hc6+]jD.ZPb<5B.7H;!PPDsra.X[XshQ1C<=)"t&Y=XF$o?<[=NUJ>k2Y&`)n7A2:Z_HtqV9)3@-5F8=1N-;N^8j*DaOd6)Q[.`<MT0k*-6)cL<;85p]NNYWY0pi*?]th(k*\.Y)W+P`CufA0YqP^m<5,]]c24;_p:WODo?:1JZc^PiUjDaJ4#?KP<5,]]c24;_p:WODo?:1JZc^PiUjDaJ4#?KP<5,]]c24;_p:WODo?:1JZc^PiUjDaJ4#?KP<5,]]c24;_p:WODo?:1JZc^PiUjDaJ4#?KP<5,]]c24;_p:WODo?:1JZc^PiUjDaJ40oCXXMU;aF`SebNYK4OWA^!I$=DrFc&I*4;YW#Fg/-'EI$mnl2tH?,iQIa+6TM4:APV(:aL<CJ06=-&jfH=c3H]Bfhi8Dbbc*LbZO1(%eBNu#4&Yi/VqOtm.X[X_2`L<p8E%a^C-t"^n7>ANgrQrLKeIMI%HX\.QT920,N"Lq^<EQ7Pd:^(k1%Y8)V[f2=6HoIGMTp%\+/:B)V\(Dqp$\JFn9Y1`cl'1CdZ>240*VXc-(AOhjpaO%4/@LXQ!ME0B[2"GG`A<Pg$kJMKdmhS@A%ER2XgV1Af?HhKAF1hSsbPC2?8ZloV7mf?MKa)rI3cYmi"cn#YMS2_X?2I!;I-2O)G"Z\it_]UZJ$h3f29:bqpn.c,=>Eo+f[B6s<K9VK]-:$6Fn]kd&]2pL]Ng6%FOml-fP:%b["45Vm$6Y]+Tb.QVRr\i<=k3dWGbRUTmq'bi`e+q`tn!^l[9f"l,G3#FBqb?6l;nZu7c8X#7OP5hX1@I/m^J4C(\NA\*ZU8It50COqFT+2K]B8O/e'&<$*Vh,TIae,I<:?9fo1_#&Uitgobkk,tDc=>bm`mc9m#]q`O#$/_S\InlY.6XO1]m^XzzzzzzzzzzzzDZ'feLL1)~>endstream
endobj
5 0 obj
<<
/Contents 19 0 R /MediaBox [ 0 0 612 792 ] /Parent 18 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ] /XObject <<
/FormXob.969a0a278dab8164403e924542d002c7 4 0 R
>>
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
6 0 obj
<<
/Contents 20 0 R /MediaBox [ 0 0 612 792 ] /Parent 18 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
7 0 obj
<<
/BitsPerComponent 8 /ColorSpace /DeviceRGB /Filter [ /ASCII85Decode /FlateDecode ] /Height 290 /Length 3730 /Subtype /Image
/Type /XObject /Width 290
>>
stream
Gb"0M_/\Be$q&G0^ZfP<ejT+dJ>D]&P!ICnq2'o`LkpkCLkpkCLkpkCLkpkCLkpkCLkpkCLkpkCLkpkCLkpkCLkpkCLkpkCLkpkCLkpkCLkpkCLkpkCLkpkC#aKo*\@Q^OSZ$ER1V!=bkN^34G+uR=bi%p$SuXBnfno!;15!4-HEODUB.'_6aUIUOlaaTISXl]s-Sr:0Y35)mF`sLjS]p$[]:Q&Rf@c#GFu"]^ms;.>>\>FagW9ll^?`>@Eh/c"DC76Mql],Ib&qm%\t.&!Z&R?j^GbOE'AIa`o5!sc1U2qk1;>a94ql[q/b<cLmPD,-f.t*!mlhE>GV/N&oBnE>NRor#2X`gCgNY=AC\-sMZ&R?j^GbOE'AIa`o5!scUY@ukb,dZtkV<g_bkpUm,[?a[B*hjU6C2s>?'BplZgKB<b=q+Tn"XZ]E+RUk-$5O]?06QW9bOVQ]!I&B1SN")O02'kQ!&<sct+PGRJ5)d8[pPAc4[_4KJ)j[\g@_bB&-TVQ^Q33hFH0CiQA5`9B\,E]$1'8RNu.+F_;"dAjVi0+'k(`0!gIOhRhW_kVDV%CQd%UG&bcRfD?"_k'_>G'"Z:>Y'KJ^^S/i`O02&@QeB>8mp5B@FIDmN2+d!e]=@":lhuPjo00`4$!h-Zf=Z=@hp>rk8(T#[bO"5WpTF60]$1'8RO!!C?!R!XomT8pGop=UKJ'ThCYl\[Ds/tFUMH%>ATi(<He8-(?06QW9bP%]4ko1gb*@5o?'BpoF3cajVK-8)8(T#[bM9F`SuGt_pX(iikVDmKULu&Xk'_>G'"Z:MgZY'JQ$FHWPCtQ0cSg*HEb4)tf-I'5gp<!3WEbkcd5=MZ1A>U[B.@!FI<80r]$*=lkaBSYGop=UKJ%>l2L;6@A_q'/)nO7;hq=!\oBK#nX>@*jfCqu2S6'?4At=cq\@uUMkI`hh1[@_N_t?/+4m1@`Qs"'2)m8XtkW,il`6:7UgMhSe7*g.$->(hsfS8^=-@3t<1GQ5]`&:lX1XU8MSV`bW-FVn0Sr''1l:fF'1U.D@pIt4L=(buSS\"9g$Sq1hZP/nmh8e>q4YlKEZP/nmh8e>q4YlKEZP/nmh8e>q4YlKEZP/nmh8e>q4YlKEZP/nmh8e>q4YlKEZP/nmh8e>q4YlKEZP/nmh8e>q4YlKEZP/nmh8e>q4YlKEZP/nmh8e>q4YlKEZP/nmNTnOXoArC<7fK\![JXSJCAH$seFJqpW9#[j:s32]TpEGE67m[iKNeA\pj;0WLN'q"ON]k7-=$$-j4_68?%QVTfe_T5G9>H1FmfqgF3gEaf=Upi[]b%E+IRTM9Xrs8a0g?O]*-72ZY./JmQRf@leWpYk+4^LY'9V[D.!#i5r/3$R;fjOO)+X(G6sGCB?u7sh0iP_f=ah=bWU;">jq+@g:lu\Jn4<&1Z<Y(+4=]G/_kcL2orVBZar&"RJ5CdoA,aTK=n6HX)#V<H(TLJhgJ'TaLf$i\i*f]pY"0ih3W59]W4,pCVuE_T7&l[I7ZB=]=P>[2I)[Eg"5iOQZRkmX#m@t_j,M>I6HJ98N4`&]!Jt+>ab?>fCRgbFG%6,40*/7Q$K!M4l:ZZ2orVBZar&"RJ5CdoA,aTK=n6HX)#V<GotiXFRPAlY+&^0HTFs0O/o=]IGLc@h<[2Wds7[9>[:@slSb$oB%P_-c4fU*lDeT+\#EUk]?^+lSnK3I=&o./:$&t>Fge4>7/OL*WnQ'A.bDqJZTXj,g;h`De\5p"\ScS]mHHa\5-D&gp2ukIFgLCM:ZmT.EHP9hYM3$ZRpBZR3`!,DX/i7Kh555KloNnMH(o^X)nIACmca(3]&?8XH^EO#&*`Ae%A$K1Y#MuVaj'^EY$Tl@3>LLFV0eQa]FX8l[1/^+&[#*EGKJ,,OKMpEi&MP28u6m$gq40^lEq(m;\?03\p)9@qiIsegYb(,iC]qnQe]4al0>YcSl66(<4[:qc23U*?JFBu(#_c]H^EO#&*`Ae%A$K1Y#MuVaj'_T
gceTc9^P6Z<$X7[40]52>oU?qkq_QOd\S.Uak;Mf\EMYnm7<RH2j#^-a\9uTCr8]nL2KMmbdrXQWCUWAG%)CC\i+XldUh!(Ue=-3PHtqWE7)(egQ=+pDB`@9P*qi2fS5<e%Gd%dR<92-;N]2am)1eeFD^5bUs#j/8=,3E->+^7iM(*T[j,0jh*ehR,VqGBYmhLT)nI'S1@$:8V';>LfnjRahf+!d4Yr:$&RTWeXR:GdAL=B#^?E:5h=n"o9N"<,d=&:2moc!FUl+rAqdO^*ZS"%*(%F`e`Tk0M?"T6Lif!5_eU>cTbTp3,(Yon.qjqqu8ZIS"\h@+Y4mOke<"WJZ?``[Ben$2-iqLN\dhA/$Fk1]AleV0Wqc%F.%l1?SXQsKM]B>[XWS0lbA7!kjf?LIBcSt;k[?3Yo]WSTDRpGKSDr.C,,CPZ!?"[G%]A>^.]MI'IqB5C@cFRn:]mIJ=T;mnC60<<\FkuD1pO)Qr<?(1Tbj.=U]%QcEq<!+*2UVhuf6,YeZg6H=c21IWU^o5al`BDaf=U48pR0Z'cYk%&p7KrJ@TU-Z6[*V-oT`aPf4ADm\o0r[F5?JR?CTPX@TU-Z6[*V-oT`aPf4ADm\o0r[F5?JR?CTPX@TU-Z6[*V-oT`aPf4ADm\o0r[F5?JR?CTPX@TU-Z6[*V-oT`aPf4ADm\o0r[F5?JR?CTPX@TU-Z6[*V-oT`aPf1":I0ekIU7CY+bk+,&p1GLU?8O^-E]B5YTf/#-91J($F1:u\>8CoXP0f)Bi\2(\YSOh?g0ekIU7CY+bk+,&p1GLU?8O^-E]B5YTf/#-91J($F1:u\>8CoXP0f)Bi\2(\YSOh?g0ekIU7CY+bk+,&p1GLU?8O^-E]B5YTf/',Dm$``@G,6D((A8nW\TUBd\gAVh?)\+8KmcKnXrWY2Y),\]="8f<@3<p_CIdHeCEeqpQ&6\gfS=S1g(?@2RYYKtV'<trp*!_PG!sWe-6ej`W<fe`I7RGB*Kd\2M+D5[.Y;JF+4?Ei8*$ae,*LWEMBH-ULNMY3&NT0GLh(\*,01Pn+^MX%7*`@j,+2`9#dT/486;r7\MCd7meJJ,V0f[!dK\C.mTj+1f?HJ9-)l"Tp-dUWqpiB\bNugNkIW")mbOI*)e?^pGBK<Z2tq]dUscs2'5<*c=8/jMZg6bFmugY]GC(NNOT"J$eR`p[h"+g79)!4!U^aV:h7L8BY+'dP92b$2m>829p8`OAQe?M&cZ_r1h7Gh42TU>in*2[?DXR<Q8YpdC-di4PYO5\%B%P9jh^,7Dn+JTC%?fCe<l^K*B,a!.m##=sS^h_VlIeX&'>#HV?"\:bWHu##e&8C[B%L0+=/qj/oJEg'P1atYa-EZ;)a-p^MI^n$eT%GNfg\aN]-<YKB6@IYHFD5M`3)XCf3'+qWh+)"1pJQofl^?9Q)\I94\b:#A9ONoEM_sR7Si3M-?@@(hQ>OFjh.acpE<i8jI2FJ>IVr$H!NAt6b;R)2(s6aDP*3c\o-A[FOcRj?Li!i4tGf=h7O9_W[HE2jP@Cq<mL$BGotI7:Teub]64"PSC_oT5,>6N>k=,&lh4IAYNt>!867sbjsBKImB^h>f?O2SY'P-59AZf*BB?e/S+e(3p#MR.]N_kkb'@8O7moa56psF26psF26psF26psF26psF26psF26psF26psF26psF26psF26psF26psF26psF26psF26psF26psEGnK3us@Di~>endstream
endobj
8 0 obj
<<
/Contents 21 0 R /MediaBox [ 0 0 612 792 ] /Parent 18 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ] /XObject <<
/FormXob.f6e745526a61cd8416256ca62679b160 7 0 R
>>
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
9 0 obj
<<
/Contents 22 0 R /MediaBox [ 0 0 612 792 ] /Parent 18 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
10 0 obj
<<
/BitsPerComponent 8 /ColorSpace /DeviceRGB /Filter [ /ASCII85Decode /FlateDecode ] /Height 290 /Length 3461 /Subtype /Image
/Type /XObject /Width 290
>>
stream
Gb"0M5n_s,&44e"s3f#4<j$Hd!m(E/-.0*upFcLHzzzzzzzzzzzzE;=RG<*-'*MpC^#6b;aX;I--E/ipJ*%I(UQ96HL\]BJ$F2Pg460@a(fm%SP_foYY!]Mqt3Z]!,rA4*rso28kjc7V/eP7?cjcV&DG3kOV2:,\pF4tkGdlKk6d45#_`qn,m0WO%N.m:8Sqn+0F_V9U\S\!%8ZD((geFuEndZd3)"W@rGbCs$4!T>`]d?gQe*Pf:j1Mf#9(fYt0GMj\u/gIQatMe>s(1]&`3OnrN'c_`7*p/u2Vou/PT9j,^,Ze.b4Md8H7ZC'<XZ&R6L]6H5F0sf28)lYK6nSH?WAfoq('rfEkH(T$Cbj9[XBk*:/8OY,8Dr0ZH4#?cj<SCR9CJp;;m$mbFk0D2bGEk'%q3k9Y1Ag3^^:?&+mp2cL7D:)`WQ`)%T5X/Am!DfJA!*rd74PW^hWL@i1]-$/g0!EO;N],gS+la@cHO"HPr9ERRT-(<qrW=@\MLS\?)<NL^*XSobeA@pDskj9HWOokOfq>F.^O'MkF4Ohqqb?,=QF6qiQIaSReL+VDtl*Fa4I-(QT6?Ln%7H6n7B=pY=XF$F%U3(GJ1cMbBtC=P6t[ZPgmA7H@Ni!q*)M>'u%m@WHmCore<)tI=+a=I$mo2o=jPh%:A8ppmP1W18gr1IFn?8^<H?A26$(V:.`_-Y4jl&RIB0O;W5fhP+),Nmr.tVhoF?E_O!8/2=J06hsb3kO0;-o@V3U=]Y;VanMR:JbcFp:f9c:s^*XSQR2(YOqD-sKf;)k1ZXRo\?fq7\RT(hM<\2P^bBtC=PD[?'MT'jhRIB0O;R)TlYrJp?o6l!\.Mafj(?b@%^5oHh`F<Guik3"Lp[L,?=NmjKSCa,'5)X:Q[]enb2e=_McXIhck3'(.c?W>(1]&`3Tk&U9X6?'4AMLO;@N.r%kHg&kGuGJ7q^f1LA:?_CkaXSC0sf28)bT8UZ\m%R;\h1d/@M6_IQjgrS2Y"6Zd3)"9'5<CMU$KG(,Ph=[\75\5-*n;2e=_McXIhck3'(.c?W>(1]*^>b$$4b/tQA@4R[^HI@mF]]6JMHo=XfhpO/Vgn)E"hch$!@h=l3;]SgD=4jSJIFPl71o.HV?oK;SjdX@acbe&+CdIY$F8b[Y)1@O5U7F1S^-f+NBb.NXF(Hp)kT6/uR.J#0`?1^8JIF<P9Vp)btG1#oo=NC+92uYSJNP>m@j"CMipr`Xrr\D9K^C5)A0*C#)b5aL#1AlXJ?_-a^qS$TLB&C3mWl91IT'qls;.jLu]1T*BGEncgPM/lHB&C3mWl91IT'qls;.jLu]1T*BGEncgPM/lHB&C3mWl91IT'qls;.jLu]1T*BGEncgPM/lHB&C3mWl91IT'qls;.jLu]1T*BGEncgPM/lHB&C3mWl91IT'qls;.jLu]1T*BGEncgPM2/fXD_E6B349YHfs51pA)9lSb:7.kj7TiI.+`&ffibEWO'<a'qJX$S*unl458ra.Ws<$YG/A(F7[*:DJ/io?)>jCk#HMHF`+p;hX,-OYH-/Uhn4ilY0P;dcF]Y7o6$<Zl^Lu'g"!Y/hKcT:ID2j^B79A$3kIF3h`L:M[a3Q*lE"o17;#]bk'Q:IcM%(F8tGY"X6Ap8gjJS'gY(sT\'TE,nnr(ihaI8$gTRp!cC:)C4&br%_CseqPmqonXM!;Uau9o]q=^k*Yk)Jgh9K!6M=A]t?Y+k\2_4??VXdH7?bZIg)sk4?H+,SpD^,,Mm8qtKkKV/]SZ-uMiKnJtap'qG<o34;jdQOjI/?o%fQjgoDJ(HV7/5luYHl!iS!A1Z;u%cWYPWL-9(i+"8nq\i]8MoV3d^;ml;COOqVGMRFOf&ck?""5Rb.<%,,M?cSia]uPgm@[G8D]6FnXVdeYT)(ot@t.l)M)QcEAiH26ZK)77pXP4a2?t.B.T@mk.MLlKl(QX%Q%/l[*i<dT'r+SR5YpC0of0Mj1A*Hg^gs;H!,_hYe@"ccXN$^;=
qMqbf#[Mc5"-:i>uT3N5u9n)>&=(6k;:?VW*$ik%1h[ftl*hj9O![J2@LnSLS&g%XU(;D#V@;g_eGDYAK=WDnfEiY:8cPad).:+FfR]/[d="m'@RQ]k%Lpb#>3D*b_cpZ82K/\[-AHs9Ebp3es#</^5)DOk.[WGfD(Y:K82r,PP\gHBpiTjQuLF6_NJiTgjL3kq1`hf]N;=6S93jdc1UHhb0=^WnLqe@48Of+n.)IQ==?m2j!Ed'*&a3V@7`n9'G6SR9[ADnCe./+i0*E__&;4a^Ne8oN`PA_pBfpLV>th\K+SGuJ#)QPmPc>=_WR9m(ScB)M4:+[qeXe^])9n]SCq'P6-#=8-:G0AdOCP^-25h3T"oHs9-=k"Tu@?Y96,D';?92q?H,RISJ0KbS+T<*/g*`\63b;NuH*?/:hb^D<=2Aa]K]I&U"V^Lj`$?fpO@;dK2sV9XP7@TU:fcSqc5ntesLrL1^.I$pUF:3GYkPree"Md?-65Mr!rb*OT#p1kGUnn!9a3RsBJ<U/r&/9^-ur-WRc.po=+frnQFb*Vr"k#>Er<2m32Ze1NroCL5+Y'd3HB>8&`.pqH%Ro<Vg;PhZfNNZuK0=,SkYBACkDC=<G'u%o+M8%P@kI[:4)cZSfq^2/C=.Sf'nnh4^bNt_^CL)(sgW?ojKeKcNU9A^iq/.S;"r4kr^59ST<TgubHlH&[1A`Ep>$%9`G4KKHiQLSWk\FIH^>$c=6%ptuDkaFXebO)1hZah0beAqJelCnFSU;AUUb_hm.Q;i/o6l!*oPJ%,XE*pa1>g-]6/NFncRLl'/b=C<)t[uUnZ6d&f.u7([mAme(=+HhIkC8?HlM]p@I^F>o@GbTa1"@l()G4G2h*FNI=*"+D.2t9Eq@m<hoE2lh\I!R.Q;i/o6l!*oPJ%,XE*pa1>g-]6/NHBZ\m7#^73Gke+.B(hZah0b_jn<^73Gke+.B(hZah0b_jn<^73Gke+.B(hZah0b_jn<^73Gke+.B(hZah0b_jn<^73Gke+.B(hZah0b_jn<^73Gke+.B(hZah0b_jn<^73Gke+.B(hZah0b_jn<^73Gke+.B(hZah0b_jn<^73Gke+.B(hZah0b_jn<^73Gke+.CS`ATcl]'#RI\URqX`ATcds*QUcpWr03fLG&tT5UUircro-gZ[Glo?Hp_5HiCB)kG`5^6tSikOhtoUN?=Y2q)jJ]KV6jFDh-SApWm;f4V@JcL!jR9[E\/qCK)&\c(]\l+):MT>`]L\,Uh34nm:gUO4#O;Y0and()GRhQKN<a<rjKD[W*T<9F.Je;eoSoD5oB\Nsf`Y=\-&-T/)G$_859k5CauNdOarqR4b]6"SoV8aXSH26Up+SJtPjrL25rHrBS4[a;QNA_sU^mDpY4YAXf>)cYtuRad@F;Y0and()GRhQKN<a<rjKD[W*T<9=>azzzzzzzzzzzz!'^D_bZ%t[~>endstream
endobj
11 0 obj
<<
/Contents 23 0 R /MediaBox [ 0 0 612 792 ] /Parent 18 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ] /XObject <<
/FormXob.e28945bebb594f11c83bbac90aba8c31 10 0 R
>>
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
12 0 obj
<<
/Contents 24 0 R /MediaBox [ 0 0 612 792 ] /Parent 18 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
13 0 obj
<<
/BitsPerComponent 8 /ColorSpace /DeviceRGB /Filter [ /ASCII85Decode /FlateDecode ] /Height 290 /Length 3621 /Subtype /Image
/Type /XObject /Width 290
>>
stream
Gb"0M0s^+9$q&Fqs/,gl02gI,?@ebho'Q]Jl/07UWiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiDfkGE/^Sf@`Q543?FKH_.8gf@`Q543?FKH_.8gf@`Q543?FKH_.8gf@`Q543?FKH_.8gf@`Q543?FKH_.8gf@`Q543?FKH_.8gf@`Q543?FKH_.8gf@`Q543?FKH_.8gf@`Q543?FKH_.8gf@`Q543<T:L6DAacFP95js7CoqI1,a\g1nM,du\.@M@)!,\H0a[c.'l'@468B'(AIiVI!scakB6js7CoqI1,a\g1nM,du\.@M@)!,\H0a[c.'l'@468B'(AIiVI!scakB6js7CoqI1,a\g1nME8+I(13<%Nhn1a$pTDq6,-FLk.l0-Qo?B?QF71Sq,iY7l\2&-s52X"/1\\'#7O#!,Ke"r<n+4dc1NW"7D'\,0<4F])o02'E1Ged.dRGF`8VJZN2t-@_4#:sH.W$HjRT-dPF7VTQ]JDA<Y0oc`Y"=<t1H4dobr[_&mj`0"igNiV13<%Nhn1a$pTDq6,-FLk.l0-Qo?B?QF71Sq,iY7lE1(!:o7#<g2n7*+E(qglbV$&D^[(h;>ipCC-5Bct(Y?=Wp&5CQo0$?B$8/ZCCYnN+rs"hc*j/'<QeB>8mruHpK?JWQf;+#Am,FK.]AC<=A\Nq8;ScpfER"Adn)*%Yhnq=Ni%hpSk5DIWDfD-5iL3_bR9a(hIb4HU\BO\d9e*Zs0!9K7lhqc-k'OTb'oA=nM=YbUn)9^7,\C(&cHBe5LMKeMQs#P20:2JUY";&kAjt^l>ipCC-5DB*'oDO&RIB+'<$Z'V,gOSE^5$K%OdK:63%pWo*j/('@V/'g]NjTG\0_rIR2(SMEgcCTRT(PE<[>u6A\KOs%;u'#Sb'E<B'(AITk&U9X4TG994DL.@N.r%kHg&:o=?2Zjs7C/A:?_CfK$-.@V/AE)bT8UZ\k=F\g1nM,dsCWIQjgb3Nl(bAkleS9'5<CMOj@M,\H0a[\75\5-*<rY=i>::1>/7k3'(.Sb'E<B'(AITk&U9X4TG994DMYgfLq<12l1*7*kJ]j6h8c12l1*7*kJ]j6h8c12l1*7*kJ]j6h8c12l1*7*kJ]j6h8c12l1*7*kJ]j6h8c12l1*7*kJ]j6h8c12l1*7*kJ]j6h8c12l1*7*kJ]j6h8c12l1*7*kJ]j6h8c12l1*7*kI*\o,9?f"\;Df"\;Df"\;Df"\;Df"\;Df"\;Df"\;Df"\;Df"\<Cp\`Pjh^%qZF.'R_PuNV/.Qu\Lg<n3igq-1Y`-7K<mr@`.B4gR;,urs:9$d7AG-j%]pTBZ/nQi!SHX&YdR@/D'8X9(RddoW)*UV(p]rRr$HroS.*nQEqB'.rM;C0&XoR)^87lrMJmjb:Lhc5!:a0h\5RCRO,</L&n4l>;RdE]tk]1T*AGEnbLc."ZGm"e<RSTWoRXhW`Gk*nHl;K6>T\9[DDGII>0Njr'D=Vku4qWj\h^:>I@'mJ5_S%&BCB:Ei-LeG^XFlDJb^Y[Z+RlfV-EG&ReH0_YX[e]'G\5_6d(N/++l^_n,\K`^AGLhJ;\,L\dRr@_N9?!HEiT@8WhV%%\45s.XB3i#mlZ;O]qHH$/Ei%K8ZdQjs;0Kn23KYiBcC:(WIZ:WXcH_8OWHR."[..pr*H?6tj@ZiOAu1JoU[3fCF!=\cSMr'9r#0-:S=L>(;XM,"C#den46#UsaH^N'bt8qi7bJAdk<uIP4)\0Rpbg3S3'/L.V:p4$d`/IfG/W&oOXnu-S!>lE\XNG6UN8LphGmJ@4M14IHaoZ4ZeUS*GEl,hcgJIA*j0/;/@Qkc>p\%HZTE*>L[/)'bO%iG=%Tgkm*;*hiHePME][1U<Dr3H]CY03\=Stlgl@VCPuKikm_&?:Y%QiI[j1!?-.U;I4)_iglWH6km7;M(8[\0VcC=:]H</=34&Xp8
;XHTYoB(D0Ss?Y;Ng+aQ<'/Fo]_U0hRT//*.%->Y'\l*6G@jY0C3"'YZ]r[+)[^I,`B1E(E&BY`bumYNV%!SeOlDqb;n/9dg?'q4ZdOS[Rn3(plmZ16:2P'<P$)Kd_+g_MBD*%B-E":49?F9BU#77q]JFmA=-HS;S>Basg/)`%.GgIUKr`=pD9c$7.r?]EM%oF&T#0%=e2CqgR?]ZEgdI$DAl%B#Eb_)MPfu`=$>q',40t93l`5*_bh8Da2V@gYE&Vi]VQ()KrX7N+S=QWoK:W#DG;ElEWnLkD^VJ[LfpSiuf_I1>2fj/>WXX.N$f'!sSZ-@VSc8b81M:@#g0j*Y^tm5fRAr0tq(H\GF*=G78_NQ-gq8J&/+4sePah.#2X&$&Rs;:1Y*)Dfq&XF+0&4*r2_4>HWK`n@iATo:c'rsanue-J\bR:aK.brlM)QsR.]JZ%n)61g_TeQpZgt8^9"Fa=DM'Rs]SSQtmjb<$S+#;okebT6GdHtDP"N:\*c?T_gq8J&/+4sePah.#2X&$&Rs;:1Y*)Dfq4>O->a^k$GP6E&U7Dq/Eb2uZ:#;@QVo5`CRI>QfpEnTCYq&o<Hf*ntM0;MXb*DeZ2gK>9SJ&PTA\Rn5hOA2V`3*@5msUDo-JBoR.p(k[hrMJ-G"V,DQ208qH#KFJ'"`0q^@t@YRqO1PB*FWiRqV!0gtDND[j)'CR@@mQc.q"K*4I-rQ^O=VkKVf8qAcsh"dbXHB]mXqg%l\t,6aF%RqV!0gtDND[j)'CR@@mQc.q"K*4I-rQ^O=VkKVf8qAcsh"dbXHB]mXqg%l\t,6aF%RqV!0gtDND[j)'CR@@mQc.q"K*4I/H/+[u@c*aft?=b>Bh0TdDi&]&hZ#a5_,3@X-'"?d[bI+Tr?=b>Bh0TdDi&]&hZ#a5_,3@X-'"?d[bI+Tr?=b>Bh0TdDi&]&hZ#a5_,3@X-'"?d[bI+Tr?=b>Bh0TdDi&]&hZ#a5_,3@X-'"?d[bI+Tr?=b>Bh0TdDi&]&hZ#a5_,3@X-'"=O(9l,HWFN$+_O4)&'[O9tGf*4b0MJMc(V2`$&:VM1Z%?AjOfAo(e)fsc*I.pqd*2P0gaa971k-dGKm_(M<8lo8]pF'n&D0tj0HYm_dY.,hI;;.Dlp>ij[g>FAbED>;bZn]&Wa`m3]_,7f'R_2.)@rC%bn(qi4UNFI4H&pCngpSTf^"3HYl&'9IT#<OObhmi&F)>mbEg^]8if:NeP&#\sQciW4&pM9BP?]'^]3OKeP.2b)S)i;]hW6NEPf+S^h^$eXpF_fCVBkScq^/j3\9GL-2'm64gc*?__7e$'Xso2hikA!e%IRLSF%o6]&bf"?D/eraR;5FSNjs"jCP-u#b?Ru1mI-jpS(JIBR;;BF-sP=qR5_B$/i2,?Y>Ma4P;7&c='T@?a^:fZ@4,XC`3-Vo>a)olftpM,]STp6RCOYoNO"8/P^*qSi\GRLrYf>U4"9bJG/\%TRf#%c(3WU<:$db\bVf33PV)6tba[6"Q^MX[f-jkU8XVNB.kWX0A5uW0<E3%!<E3%!<E3%!<E3%!<E3%!<E3%!<E3%!<E3%!<E3%!<E3%!<E3%!<E3%!<E3%!<E3%!<E3%!<E3%!EW-4.C'pL~>endstream
endobj
14 0 obj
<<
/Contents 25 0 R /MediaBox [ 0 0 612 792 ] /Parent 18 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ] /XObject <<
/FormXob.3d82fbc7fb155441d517c93ed9ec525d 13 0 R
>>
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
15 0 obj
<<
/Contents 26 0 R /MediaBox [ 0 0 612 792 ] /Parent 18 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
16 0 obj
<<
/PageMode /UseNone /Pages 18 0 R /Type /Catalog
>>
endobj
17 0 obj
<<
/Author (anonymous) /CreationDate (D:20251216142815+00'00') /Creator (anonymous) /Keywords () /ModDate (D:20251216142815+00'00') /Producer (ReportLab PDF Library - \(opensource\))
/Subject (unspecified) /Title (untitled) /Trapped /False
>>
endobj
18 0 obj
<<
/Count 8 /Kids [ 5 0 R 6 0 R 8 0 R 9 0 R 11 0 R 12 0 R 14 0 R 15 0 R ] /Type /Pages
>>
endobj
19 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 261
>>
stream
GarW495=S`'SZ;W'k^[])"5]poB8qH=%/@qU>)&lKpEPq`^ToUj7jAr2hAmFc3IcS3]u1M!)nO?K#&(3$W`[Iq8D'E927i,D^h:d1ndTQ!o?uOgOi!M5;;R#n;G^&c9s)+r7(.`dIamA_=.6`V?\;0&JO?e+g!YX&8_+8XHlF=W/Ia'Xk$Qrk@3aA0PQh(F/A?*p4>/mR.MH!>TZ1)N#/`,?ZD:[7;^/hZ>/d(i^,*:I4^^bZ6b[:`%6eRJG9?mY#W4~>endstream
endobj
20 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 170
>>
stream
GarWp5mr90&-_"(^Z$9?6d9G5GL7PscscphYcOP./#`K?e<CYVG]S-!4U-ndo=#d^"Qqk!%AX+SVZcQY+jo#mgm_ZobJ9C)NY.>i9A^4+L=D=F(B57X3>:A[Gj.'WC58euaPELhe*k>#kjh012B&[7,F!'.Fi#)NkQ>n`2Yd~>endstream
endobj
21 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 242
>>
stream
GarW44U]+\&;KrWMK`$TRZ#Ejh%W'h#K7+SBoIXGUUC)N@r5(2Nk9o[Sql>31)AEh'u8=l^EAVi3IoO'Kiqi%7%;@\=.b]qJX4AdRf].g4aIOJ0IN7E2uH2"iZoh[jaDCb(OO.Q\4>L2't0AZX'_QXW`mKcn@qdY_Po+mf_[DIA3%@^p+P5rOHsf;mM@n1YBPg%,*^&!jN2t$Q>3PS1:d?;-!$J*A]<=N-^O:%Yn7OC#!>ZE~>endstream
endobj
22 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 167
>>
stream
GarW05mkI_&4Q=V`F#VQSZ,K\70UM]Mn-VGWC30XdbspQV83pXpp557*eT4-nNT1`#^08Rf8pba[K^cm7B44=jRnT)'K?Y!_o-'Xd$KGh6f*pL>lGBg3YUISI%0sL[)4K&0mV"rL1P41m3aEpO(O<Vc0Dlh-D:2:fWR1#~>endstream
endobj
23 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 262
>>
stream
GarW4bA+pK&4Q?iMRuQnE&;l#G'=F5<i)ToMf'h0Bh5HsNV`ZtS4DsU`6?#NEt=)Bqa\Qt%BqQhD$_Iq%Nq#cQoDYrG0fuGMnbKo"e&rWO(kiNfrkChKK6k2NT$;R4&2:j^[W8tSl$l)65/&U0l<?jnk)ku9ZPMRnim&[l+^EodQsF:`OoA=e=DZtn;OAr-`M2OWqp>'q3C45Kp)<'Bl(2:?1Sq>(.;"Lm=KDj<!OUS8E['-\TP@MP40+;#6Xi,0^;^6~>endstream
endobj
24 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 167
>>
stream
GarW0YmS?5&-_rY`KY,2[kYm/Lm<SM2O6iq`P>d;U4n)8L^>CNbM-K67PO4$pIc#YA[N="7;k`*bg6HG-3H1I:Oc@?^rETqVk2S`hAuD3XjG$hrY$;o0o!@8O*,PX[%Xbdo219TR[PYjYB"V@;M6R`QhNDP7g@'?7l*!B~>endstream
endobj
25 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 245
>>
stream
GarW4]*cD?&4QKpMHd)Lgt?4sg)58p(5c>K-G(3oLqu4Qh_c`/,/_X?Kl\)c+FR*G9`ZLXdk-+bF:O1[#^VPk(.0^d>^]N8"Y:8_eceNuo\pFCTl8;ADtoB8^e%/BH9Q'/leU)Ketm'gGAhG8d]3*X.^/,jZXEc>aFc1K9PN;qHF6sh^&:Uu&B+.aq>DL)I_A*B[q/R0b:Eqd>ihB1o8K'3Lb)J@(88&BB3;G3Vi%7"GQK1T:[J~>endstream
endobj
26 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 172
>>
stream
GapQh0E=F,0U\H3T\pNYT^QKk?tc>IP,;W#U1^23ihPEM_?CT3!/hd>6k,goQl7B?"*$l7Jjr"7HE)RNVF!h>6AQD_Ah8\RZl9o#.%!<n4-^Rok+rh0.sU3QjVR#*Q=T.@Tpg_1b8?ts3Bstq0s=Iu-oF$".&@bPK)uhhh*O"$~>endstream
endobj
xref
0 27
0000000000 65535 f
0000000061 00000 n
0000000102 00000 n
0000000209 00000 n
0000000321 00000 n
0000004077 00000 n
0000004335 00000 n
0000004530 00000 n
0000008451 00000 n
0000008709 00000 n
0000008904 00000 n
0000012557 00000 n
0000012817 00000 n
0000013013 00000 n
0000016826 00000 n
0000017086 00000 n
0000017282 00000 n
0000017352 00000 n
0000017614 00000 n
0000017720 00000 n
0000018072 00000 n
0000018333 00000 n
0000018666 00000 n
0000018924 00000 n
0000019277 00000 n
0000019535 00000 n
0000019871 00000 n
trailer
<<
/ID
[<11a4452bbe31319e89fba7a743537da0><11a4452bbe31319e89fba7a743537da0>]
% ReportLab generated PDF document -- digest (opensource)
/Info 17 0 R
/Root 16 0 R
/Size 27
>>
startxref
20134
%%EOF

View File

@@ -0,0 +1,181 @@
%PDF-1.3
%“Ś‹ž ReportLab Generated PDF document (opensource)
1 0 obj
<<
/F1 2 0 R /F2 4 0 R
>>
endobj
2 0 obj
<<
/BaseFont /Helvetica /Encoding /WinAnsiEncoding /Name /F1 /Subtype /Type1 /Type /Font
>>
endobj
3 0 obj
<<
/Contents 15 0 R /MediaBox [ 0 0 612 792 ] /Parent 14 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
4 0 obj
<<
/BaseFont /Helvetica-Bold /Encoding /WinAnsiEncoding /Name /F2 /Subtype /Type1 /Type /Font
>>
endobj
5 0 obj
<<
/BitsPerComponent 8 /ColorSpace /DeviceRGB /Filter [ /ASCII85Decode /FlateDecode ] /Height 290 /Length 3461 /Subtype /Image
/Type /XObject /Width 290
>>
stream
Gb"0M0bW:r$j4o4s3aL9.o/:sRKC1+V[Po_hnP="8Wk>jOsEV^,Y=.E8Wk>jOsEV^,Y=.E8Wk>jOsEV^,Y=.E8Wk>jOsEV^,Y=.EM=P;M`ictLh5:'u?=R'nd;IE]5Hh=BmqB2p^7X$0Q$9UiFPkD[m)hEDD7]3!20S(%m5Eepo,>73Ncpo[qg"0,Gt5J@p\hbEY.UOcVYbgK@oqO7DN/KYR@egT:O\Y"S^Nmne(=m!@M;\.mreAj`lssm2RjQmR*'f[]=0V/jtsN_^"C8&k'PptV(jd(Ymp-?-DiQUlg??aR5p7DE%a+(Q2+a1De[G>Bl&EKZ&,I(pUY]E@qJJG)r-?G9P(rih-1dREuNfk?>O(#o=aSKd[6HOfEV(Z'2t=fFn_3AbT-'E'3sZfj1^M@pYhQ7E1%B!q_i'CLMJZ]APP)MgR*7.Y/pg53RP?TA*/3L-50YH7,u"@RJ5[/9Q6C5NVbVGhM5l%_.?@umb=+S+0N]gQT<I'De%pX\0_kok!\7DNLBP"RS7[g'92lIB&8;Y13$pgHd09iRG2p8o0-ECM)-sFC\FmSgqH^TpYhQ7S=01ZZYsF;p79@=&(b@ObfogMI4I+_mo8Ft\0_l%B"lm`>FE$MV_[_Y246E[o=\bnb0967Q$FISai'U8mksuCAo?M*bkl?R-I0h_YM$B?F8J^DhM5l%EG"?[c+]I2gNP.=5$X;.1Gdp(p8uQo^/LHoiL3GZR2\raAeSG3ICLU;>is%i\_.+PGos32"IH[hA8X<AA_r2X1;RO>4IM[5E1-IZRS7[g)c,U.'3s[J\0_kok/NUqf`[Xe+0N]gQauWsDDo=BhM5l%_.@LHR@?oiRJ5[/9Q6C=:Zc7&>ipIE-50YH`fmsd"IFD+`\u,TrPk$]NL;edD'YO[I2buE1hPl,[ZP+_p2)p[e!QQPfLD$lgUH]`:1Im2@iJ!ODVrHt3K9FeNGTr/\U>Dmjtp]41q&NWk4WXSRF@Oke(@-QRG54@A56WH:1G57Ao?MGP<"Vj3K7l$RCR_b:ZaKGk$5CC.+u(L[aFc^qb6b_]O]p>fgaTjmPE\no9+M@B,b.F]?bTVcV*tKS8EA]mlo3K5;1^!EOO9f^ACUurOc[u`n<i5qsH8rp[aPr)eU*qn%6nfhp4shD4GHb^$e/6I6TC<[rJk(otL;sp\ha8ho=>=fDB;AhhFPj09^)KAJ38&9VV?L8MpH&M<8.ldJV05RX^_no.Ld9qHZg`b02a-m$D"%2.\6nf;,`[G2:]5WQ\V2c@4Gh=&YtOF%n^mA_13^REE`2l0OaBG;Wq]1Y8G/?Zt8UPc;l3PKnX1F]VM=136/NqdnAb9ps/J2<jIo?$A/;.Po\PZX7lf=5<1=SU;dP(A-q:-FpT?Fn1s1>L9Q0S)iGGeB)@_DF)%_Cm',a;^\2o]*8-oZUsS%9V$PXmM>H\bU0m00m3&T\6I=`1RmI^`mi+Cibh&sc>8Yj)cJ,VM7Wri3jVEGD+pLJ-LMZAlc^]d[kW$rRCHJJs-pTpHZ/#)PI^.2/_/Gu9ldRQT$2WWCT5#pBp+rKo47:$?VC&L8X%rrR4!(5rE?5)8Xe^PcTIWmmak?b:!t:GHfiH*GJBI/CQ^$TfeZFd^AG<;?^!=gc(929pYE$LqO43ODYD;<\aOu!e^l'@EjKDMb^K5$WP0]nZLJ^%HY#u61\3[enc_V2NHb$M)gp)%RGYQ;01^D,]VFZHi02I1r6C:L6.0i7*Bj-$T6+]-GAcILP+EW]kd`YIUbagAF!G%Ro\=[]cb7.BSXK;E)u5)]kJfT0mL;AEbfoP2a;6*b2r;r'Dt$>2Aq&o4^*)[NnW'2fK24Nao/eo%"\I%"GP'Z0I+"FNhmnk&i,4+8D*45U9lOt5(Y:H%gNYJ4S)E#I0<Sr*[ddmG2Slep?X1q4Cu`XmCk?Fi^UTlGfuB5df`]o]IW7MlZ]->RZO*cDrSi.cAfFP.AeSDgqSi-Obr20;bpKqYoS`%'Rr(9URn[j=
kSMi,2qrR42k/aZcu)c8-TRTajWn-2h:0V>:?H.K8QTXcol?4Z\QM\UQ.esGSE+3uQBQEeG#L%A3LQAu,[ID*eB:EYk%6VF=)'\eEfuWs=\dD1g.f8NjCE.oPB<XE;_KLYR@E:`?)cZ0b=PIkAiWFaqb5fI-eXjPMG'4JbhVF=>A92cbB:e#8i1-tFRQ=g8G;/Vi_h'@1H2o><Z37\Ea<[a&ri:uh0UX]P'smD\5\=)b`2&(Pm5@E>ZY116t>@KpYJMpA7)Ji/leW#F/+)#V*VC?f+jW%d?qJl]slE4fpD#^99j27h!!U!Boq])FiC1L1hLXTfG0bKqW+XE:1[0q:;4>9@iJ\8WL&g=b[jOE:;4>9@iJ\8WL&g=b[jOE:;4>9@iJ\8WL&g=b[jOE:;4>9@iJ\8WL&g=b[jOE:;4>9@iJ\8WL&g=b[jOE:;4>9@iJ\8WL&g=b[jOE:;4>9@iJ\8WL&g=b[jOE:;4>9@iJ\8WL&g=b[jOE:;4>9@iJ\8WL&g=b[jOE:;4>9j"6dh3D"AU^/LHoRCY]P]2P,]<+kV\Q$K"$)s"^pPrVEYk.Xc^pR/TYm^lDcP>l2_4-b)`W>jp44-_ftFlpD:RJ3,\612?`R?LT_mQ6\ZT;`dj^,qT?8Tj10;jmBJ\j>br;jihKBC7jHH(V&TjM!^@3D"AU^/LHoRCY]P]2P,]<+kV\Q$K"$)s"`VbpKshhJ;heIJ#,$L#d]nmrG`@m^r4^I;<3g8o>f_?gbP]CkDQP]k60U=20o&8FDiA/iT9X^3d':\+\@Uj;*pUjhAp_-FiO$C\FlYoddS,jF4Z.EjH)?]D%bBCL@$4DBZPtm^q7jK)=uLB&D<D^QMelm[*f'2k/a>H`u,3p=6A-(6\RV^<=bJ\F89ip8rc9/%LApI_"ofZO-'3pR6MG?i<T7+h:tJ]8a.RlgO*ANA"r%CuY<'3^MfLff,D1riT#Cpi?)Q-Eb+a'/[FnIC"drn*1%805'0Yiqg8J60$/A2k.>VY"m@=Eq[a)Y.q"N1qoK.Z\e#:l3*)"BA[ObqR\dSisd?'T6oD_R;-aleNSse-CL'A2.`f0WDraO2OS)NhURji-Dsc/e(A2o3I+\)VOF#I[81:r8`o)>9poa:.b-_B9dZ9lG;Ws3af/8:1cCb4:>XNcW@"N@mF0]uOu[eh;l6"R9!qH)P=aot>tp`%E[oU'ND1afPBSlqWl_5>q`ONacB7HTe^^)FjdQ)cmKTR7qbD9Vk'+?_^P9A:.ET;&?(LdsY0!m+DK&4Rmo3A$I[=j@CUb=RP3b9\eX>=VRf")l#,`aD:3C^AGI]'8L:b8NahC\ZSbZQoafjZ@E([G)<**^]QYZ/-\/Us$loWbJRG[+pr#4u-V^2.7F`lhj\L&UoOsEV^,Y=.E8Wk>jOsEV^,Y=.E8Wk>jOsEV^,Y=.E8Wk>jOsEV^,Y=.E8Wk>j''C?8XL9P~>endstream
endobj
6 0 obj
<<
/BitsPerComponent 8 /ColorSpace /DeviceRGB /Filter [ /ASCII85Decode /FlateDecode ] /Height 290 /Length 3802 /Subtype /Image
/Type /XObject /Width 290
>>
stream
Gb"0M0s\bV$q&G/J(%!dMEdPCi+mj":+lnVm=5/<WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!Wi@Pk5O39*E(tLG]=Iff*nLVJA9YDM]C4p&D/`3m8%Z>/INI)Ff49*4S%J.PEG]YhG"90TqgKU<#1mC0[%"\rkAb?X9m0%=\bggsf*9i;GI3jOn)n\-E(tLG]=Iff*nLVJA9YE8`&<k(gpb*Wakb1/R$fb8%41WA/9eF9?C-M:3>:.D17um52nS3pR@'BuYmi#Nq(-`rCL3?aR5kr8:bnZhE]:VmFd\Vb1U.B8oD'q]ZYNm6M4$@;gJBYtcZ1tVk&m)ZR5;)W-1gp`GI)'kQ\h+j'AH>=T?hO:]B47(R$fb8%41WA/9eF9?C-M:3S\hTaNT%`ls#mgH!Qj4iL3_VB"i'SXj#S5Y0?c^9e%nSh_k:3Ao:pVk'a`d'q(KYRXH\B2m4b'5$X;/bfk:U^6P+Uf9LV'Y";&ok.>6_b08,rb.J8:U`qVd?*eH\k2pethM5lEOdZ$Jfs`_Xo=?^G__nF7b^A%/>FE$I?((HGRT*g0^2*GEbhTk6bkl?R._*^*2DS2[>]!0Q26#Lh]@t>"V/Wgs&_Eh1bNrsg1R5i-`^F'q$8/ZcNQb?/<uVfm9e*ZsXu8%6.9p^``&4OP1:u8]9Vl^90bN$5K$3Lq,;YV%e](!^P6rqKX,Y>&$<J/[ED7pmbcWiZ;^ksg9Z/ffi%hpua@rFL[4r9FUr&DmY.GX@o.%8oqf#7Z>ab&l]9,+WK$1e;f;-9Kq6%=KRI_o<bkl?R.U8D1]"=(bYCas1&(dVoQ2.Sp)k$:I\BOhh9e'D9n%-,n3Nn%X]FWVi_Njr"],R10._*_"E(qfeRI@`!OZBUsbIeA;Ur&DmY.GX@o.%8oqf#7Z>ab&l]9,+WK$1gQ`&9B[J%dp!1M:0/cGVUr^Y&/&R@-&K1NOnOOnltfDpHSNR@'BuZ&Qs\p3^pnB$S?=S;D)nI^/(*1Ga)!B')d',P-gVhjg+&1GL[u@N1_Bm.oefbaaNX3>:/gr*mu2B4g:"bflL-7ckM6^,[u*B4>Ju`&9Bcf^m@UR5kp:F-LSfP;;UFZP/nmh8e@Go9=MJSt5(-mlp0RT;Tmpna&[,H1u=QrB+ZJM.\1scb#7Mn)l.k:-:VcH/<u)I6UMqGN.;4cL^Jame_:P]G&UdFIm[uGMg_Fk+-a?U@Z%p\GL`H1@N/f':n=Ba5-L]P^+XJS`i1S:$;MiroS*pl:LFDl#5ujWh/;NTDmtjV<UY?s64Ii<iVbPrh'2PDPcVimf2NimT-?ZjSkeNk&.$\8acWCDsgZ+T&fmCroS*pl:LFDl#5ujMT'u_giql&bY?$P;DN;PIuS56'/O\CEN,2Hgs'3dg<mj_gdGBs,r9c:f=5u\0f(a#QV,BDh>B.5*DOc%9uNl+135C(NGD#t1NO4LWKbW^c!TMbdDX8a6sM2f1O&HB\99\`1H,o49$,<5r&(Vt:!CgU`2-?eF#ST.CI]$oEt`PmPg%q-p$8So50Gf:pTDB`o>IdeE>qlek2kH"\9ab!<@2U9$J2rG]*_Wc'>)E*D,B2[Q[;fml?Qi?RCE\U>k<").U5,4lm`[X6%s)\mTkOIkD<JUi\m`Sc-*f$E:l>uX%_:Q's;hmG879P-[(c3gRuD@0DDWdeap_^13"74\E;o0<Od@HfThA1$_3&jA>\@(5+gZOeMCiI)k!\8S'PaAEV7PDfLBI&mcO,oI9\pV-FT&)MS&\3Frsf3S;D)nVW)JWTA@Q<1M:/9RAm]ccY)ulouriV7V4.<k4@8S>Wm-T[ZQu8]D6ht+'6dYM/?e#ibgrPh8e&%B4;gJWOSGDBD58Lk*jNL;.^ci]A-QNk'`Js\0a"UPhCf^/pC@,Gork5Y0"m.A[\;FbFMXNhFHgfCGKs$R;FA@EbCZ,pJJN]R`sPaAfJn)gf`b$
I?^nE-8Tk1fnT&MG?28"+/LKjaikc:[\:@WSQ0Ra8*PnH135sRD(+jnB9et\;7bbUbhQ'-)p5eJ=lndoPchMC1O#):L@Qs@<k5d?5H\YegJCf8HhVng_9Mj7>gLR;Y3f#4pO$#Xc20A'ccXM8m8&-(Hre).q__X)b0@*V:OMna<l*&X2-eJMc*$G0I.r"h_9Mj7>gLR;Y3f#4pO$#Xc20A'ccXM8m8&-(Hre).q__X)b0@*V:OMna<l*&X2-eJMc*$G0I.r"h_9Mj7>gLR;Y3f#4pO$#Xc!(Lam41+kr*hH\<<DUC:i=#EY;3iTbZ'jXq_j\.n,BAHml<1(-/cKHjlX2T.cYm$9N;D/DaV)2m_?p>Shj;F!q+f>DSH"O1;;qq<0`"22O'^"ri;"H3AXY]i4]Z^k.CXCj*T'F<=-0R6b3$\^WQ>C1K.9Tmb:QU^AG)h^?<^>R?[Y^;Wd)d.\>tgaipnG-K6rIU<LfO<BSmXPF5]n9Z1ep7@BK(X*Ce:-SrBgR#'LiM_Zr0<q0ER:M+dX0bLl\(M-q@XQ$d.T$6P9@j5fC0$:i^=iGI<4IOmQ`^JSd?'KWFZ\mqWGqu_,NJY.S\g[/jB,/a8o42H7)\[31FA_2^c75FOk/kiM2C@EAkb?;ESQ.f(c'+Q$D+ldkX3Ae3htM2[Q('R.m5)#,bfo6L>\<*[bZ+I5Ca1b-3>Il`\N)Ir4ql\lb^<1)AQE]o9XW3b2DR(\;fS4jRkn"11U2q`bi_r1B'&1!<;T@*,;\pO'pi(63A]2L'ALUPHqqXl<c62V[ElcL1,GP$ELE)]1K3ZsZ&QuOk>Y\ujlXcdqf"VpE1(!K.&do)^!E<0'A'RBbNrt28Yrr_IK$mI\=OF?AlfpD.`G]^oB4B:orR(,]@p(Z.IMMS5AqmOB*FXTeV95ZES(*cGr'G'/%Eg+2O%0u]AD_sRu-H6_.@etTqkTAofdl^9O,mN0!hSf-'dJmp\FDmid+XER9aYXW>'Qic_!-0f<^(PltSmZV7>i>rk+Z/Se\EPgf`b$b^$bCqb5e^`3[V2RIbHepR/OFM.`*C1[!jTmk\qk'@/-eB?n\3hIkg`-D"4TcCC6E][+MJ9K]B2S2i6hH(TptR!;ZB3HJLZo0*hs0_)5bF6:,?k'\Ro@H(GOk0/+]bkk)h_Sie'c'e0DRJ3,Z"m%9odCu(bY'2\4p<1.m[D?G"]NO3>2j8$lgq-f1>is8"'s;a:\b=4[bI,/tcFd<=H8h%'^YHG+)dMOLRh`)M1V*5&^!!h"A^7qkiHdGHCVSZ:>T6r1baT?MG;Hf'bIu*,_.Cp)=lFZcPA@qg]3H:[k00;0Y'2\4p<1.m[D?G"]NO3>2q,]'Pg">YiEL/<n%'jG?PBoAA3QW.[DBQBR]([0gk(^bI+HgPpTA;+qDE7#9'5_J<u]r,Y)2Gq`%<C*cY$O#E::@bWa?FKTO^6YFg]'!l^Fc$:#(>`q0^cD#5>99UA?`e$VKRQ=]ZQt]<7"Uf>K6hREoMOD/esT-E:Dd\"I7qT67QX^$D,cIOSW0->m.mkKFrd'7J+eqg&o70@t:-Njsq[k+-q6M49jt3HI^G6soQ2^>?fQbOqC9,As6ZH"UK&io]?KcJ0!jdFZ%;Y;^ImE]n('Ln!UDhV$MM\9YWV1O$c3oJQ+(lV60I>gJg"i[4MjGP:\VDI0L/bT1[:IEakNH4r4jf5p)7\;@5cWiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!WiE)!Wf$BiN/nET~>endstream
endobj
7 0 obj
<<
/Contents 16 0 R /MediaBox [ 0 0 612 792 ] /Parent 14 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ] /XObject <<
/FormXob.1ec20b3a96e40a35266a0a8a0d634adb 5 0 R /FormXob.bbe0a4b6628a0e8125ab26a62825893b 6 0 R
>>
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
8 0 obj
<<
/Contents 17 0 R /MediaBox [ 0 0 612 792 ] /Parent 14 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
9 0 obj
<<
/BitsPerComponent 8 /ColorSpace /DeviceRGB /Filter [ /ASCII85Decode /FlateDecode ] /Height 290 /Length 3500 /Subtype /Image
/Type /XObject /Width 290
>>
stream
Gb"0M0s\bV$q&G/J($uOju)%+%1&)hn*)-NT`"8nzzzzzzzzzzzz!!'grT65NupmP_`]63jRce!oT8TqIFGMi(@D>9Q18%Wp<?-h,WY=WoE>BeutHu8YIA4O7SpKc+sL9F0lZs.b3omCWORUeq#Fn]1ff7pJ#G-kItht;A6pmP_`]63jRce!oT8TqIF@N3&*FmnO#.ck97`6:E%D03I(`QWUU&i9D1[aFc>'f5%G8^-ObfLFJ><m7)c-S_r'@N/VA=YXu(T>\r;M/@@JB>r)?I1e@5,du+nSeX'Eoh!BoPLr@VHWJ@\f-`;Z:LY8Kmo_Ad?D#0[5)F,u]k>=.H$p;]q^?<O]sAA.1XE_eQ`;S-5/&?Y1Gd@ifpA]ho00l8'f.Yl]\/XO/+Ys=-5A<mcb.qtW[m[)^*XRN1XE_eQ`;S-5/&?Y1Gd@ifpA]ho00l8'f.Yl]\/XO/+Ys=-5A<mcb.qtW[m[)^*XRN1XE_eQ`;S-5/&?Y1Gd@ifpA]ho00l8'u%mtD9P\Mk\;?)Y=XF$F&s;:;^o<38E=PaiQL$,`lqD>Xu6pgRT0&;GI9.]Q(k==7(su_^<Bl"bY4ksC*SkE8VJg=<uWqo.D"5(jD.ZPbM:XfbZ'J&2A5hS<;84m[4sJ&U8s8A^*XT/b[#)09Vprf,E]0$KeILK)`(DA]%T^9CJs-7XQ[gn40.j^hT+6D_O"EQQ.^@^iQJlpY=XF$Z_AtVn#XBmGopCW$=@C6=(^>mKeN$]^*XT/b_iRI^9\/Rk'_VO.X[X!?($+R'u%ohpmP1W1+Tpkqp$[=RJ65/WUOJ"FCk0:<VS?<j(hQObH0pMloV9;A_tJZUr&I$d?WC/<oM67:LY8MbP\bnpIT2]CRMpqmllSFHnFsAk1qDiNNZpmg:[;.[dgcL?^l83`&>>qq.oTiPM!n,14O/tI1k<0>3<$5]2)lT?d&ATH1smHj(k't2X`iUD'W$A9g"p/H/<t\qlZj@Rs6j=o=XsBpK^R_2t:^YkBZgdm^o&GDrTG<ch$SRh02"nhScaWT'+q-]C1'g]SU874jU`9GMi(XGn\LNHCf>Qm_8!9o-U&'oK;S+h0mmRk"Rt-k]u$5])/Y.baWi8dIY"AkBe/\3oGMcAUk_L);rMA#.X2i!H.gHJ/`tUi5T+.\FGmdDZ"(U:O*o%bOL")-=7^7DrE74n%8HFqc1)_qsI.l2X9/9=Xr<QpJLXbCr,l%R=&l$]nNdl^@1KblrVkln%1COg8K?+B;p:9h+-/%Z3B-0BC`H-pD2%Pq7aJ%Z<q/N^@0A.CSU;LS>Ge)G9:D2aqfB^S]TJQh-2j3jnnI0b'oU-pqAhRYDp-&E0eZ@h0kOd.U2CjG:$Z9F`64iQ1)?^./R#Qi;;q9^,G95_HAAGGP@N9hjJ)bTkmPnBP&2>q6mNRbVk[p.ML'C@j^(Kp6jTgZ9`&rR;L1/gVQ-1gJBf,9Jj)8R=&5kB4`+*#*k$W[P<ta$iA.a6eS+fdEFL\nnhg-R;F>k<$n'e`_=)ulnbsWAV8,n1Y\;=[tT6B[\7M6R:p1O1\nJ`cce;3%4W%9Cu];YgRj<\hRi@\^S#q;\'`3BG@'2DFDp_.g3E)3$iGVE:#8>Yn(i8??dQL.gM#W\4"p(2\i4mRD7k)U"b&c3-?#Z=p[5]00Bh9RD7&iiSJV&)h4)':2Vu(;!l(CTPIJrZHZrfS**lhrDLp#UDVjQe.sCQCb]ds]kIE*doS^q;DVjQe.sCQCb]ds]kIE*doS^q;DVjQe.sCQCb]ds]kIE*doS^q;DVjQe.sCQCb]ds]kIE*doS^q;DVjQe.sCQCb]dt()PtR=ZX#Ne^?[k]n7>Yqk"X@5,N$b[n+t<ZI$k_`GnY>faEOuZ]=tTY?Y5"1hF(X2o%i[0Y4&I/QW`::2c81eHoLr:lT;0:AQJTg:"6Qqhp&n(qT^R<R2*G]'6W]`GI-bM^9\/RAqb0[6sVnFh<b$An#XBm=lGi/;:ghU2uC>T40.j^<qt
fOe?pOYc+`ZCc7440'u"rJJT(G.c#rLLN&!'U(Z28lDQ`kTc7&8cJ+:35jlX/Sk);&Kn/'u_;f8c8DpBd&!e9aR3p#M8s5o7q0CTe8X&Eo=qesb.o)aF3]fP9;])UoO1,&,5hlB[nY5<._..[Lin\%!Fk.:TTN&!'U(Z28lDQ`kTc7&;D]rq>1..d;(9\b4QZdNohWf>5rbj0%"E=9M)9$`?o2DU%CYHQ'd/bh(O4X[8`a;i@8^*XN&i6/4oS>^0IF"$YVRS;Lg0=0)JU8j3sU!2h<13!]9bY$3<W\uVf19[n'`%Ca>.m58[g;k8V]Y5^+\)>H2oUMjp,BG:)qO1+5JhOIYF/#[obb<8HCGKl;^<B3qM%\S,H5f%lH95tRk08_qgJBXiTC$Zs\'m6IhOH"!%41W;fe.Jp4)JKic&!(f:bk8-m;f,6dl(gpS1(WO-1g`]/pDV'D.D_QM%\Q>1-_DuEi6Cq2J1g9.'X4-oCLWfGBu>fA*2$m'&-5<5G.=`Vmk,5B&9%+Ymi#No@Ya?H95tRk08_qgJBXiTC$Zs\'m6IhOI,Nj6S(_kfW7]G@i>d]6GuK^GF1_VGb-dpCd3^o5%kcjh#ajEPF<U-Dj\TMt[kY47d8t.cn9e06+`_cR,Me^5M^upH.t_@OgKOGV='O1X@DF;SJ(`')+KZCgnmU]6GuK^GF1_VGb-dpCd5$SD?,d0=+!u_ELRn>rts0m[M:a=eTY?+/Q$@*@YXq:#sL!:q!ThdT+nZPdC66nmtiM>M)I1WbY,IfmOP01+SS@m%\[Q[3Of"^576*(!7<c;7c&HO`GX&7)$kPAIJA`?$5O*3P02R?Y5"tKmf2g\osm>h)CHLZU3?^5"\m^4&XAlS&gq!Tkn-ZV5pa>F_*aPG"-*$7)!s@;;.u'G3*prREq=mOkD[UDr,o,2X7_Vq-@@iZY!i\p.aV;G9<Z@\ntMtf9c<7fbp3+'D^eH7qn`9gQg[hANjmQ7V:OG^3THMg8NbLj`c-@c^LDeff,%3hL1VHlF(!o?!la#AnPZJ:#qdf+/Ot.D-)2<Qhd`9)4>mdq<$L'BqoS#Q/D7G5&5=2B&?"jH1t1iW7uLWGC>n*R[oSo2j&%8I1k:217u7s3aHUt;K^6R.'Y9Ko@YXqe('1+<S+m?'"_$sT=r*&?#B@7Fj6C(Yq%-lfdj/QeV5_Wf=ZqQ]2CDV]tug9D>7"Oc'p,d.jaf?/$.4ML+cQY]SR95;DOlX_E(t>pel7ZRjbNl-1fe?XOG^S03-W:M%[Eu17u7s3aHUt;K^6R.'Y9Ko@YXqe('1+<S+m?PTA#EbWE/3Y5NP"]T:Lk9Zk"(]B\*gf?O1@?-T1h40tujrH@#0O4)QPb.KOBlIp1.c2/npc(rQFZ`C8-G29flda_%6]JI1bg2GTfq^>apUs(p,X02DEh7SfseP+,u1V;r+DqE82-sb)nbWE/3Y5NP"]T:Lk9Zk"(]B\*gf?F9qzzzzzzzzzzzz5k,pqe-_`~>endstream
endobj
10 0 obj
<<
/Contents 18 0 R /MediaBox [ 0 0 612 792 ] /Parent 14 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ] /XObject <<
/FormXob.1280b7d13f0587f75dbba24117c3e12a 9 0 R
>>
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
11 0 obj
<<
/Contents 19 0 R /MediaBox [ 0 0 612 792 ] /Parent 14 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
12 0 obj
<<
/PageMode /UseNone /Pages 14 0 R /Type /Catalog
>>
endobj
13 0 obj
<<
/Author (anonymous) /CreationDate (D:20251216142815+00'00') /Creator (anonymous) /Keywords () /ModDate (D:20251216142815+00'00') /Producer (ReportLab PDF Library - \(opensource\))
/Subject (unspecified) /Title (untitled) /Trapped /False
>>
endobj
14 0 obj
<<
/Count 5 /Kids [ 3 0 R 7 0 R 8 0 R 10 0 R 11 0 R ] /Type /Pages
>>
endobj
15 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 164
>>
stream
GarW05mkI_&4Q=V`ET>Mc.&b?;-+rnF+G1',/ca*pUAWf>EIgiipknB9RY?:(LXAhG(PtB're&F\nA'4a%aIp+-h"W@9D(-_!#HM*Z:^`FOHNUN-;cL_m8qodCk0^Akq$V4PUhHkMQ7^Ejj*/Mgoa@anH4$%uMYhOT~>endstream
endobj
16 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 306
>>
stream
GasbT4\rsL&4$!fMAlpjH?ZXMZl9aE"XK0lYb%&R$>PaP&&cf78Q/KO<^N*hq5!cWA-.^JqQm+$jrVMM!b1Zf5UJAHTA[O5a#Wj7rghI[YR$H&I1;qf=XrnMp^cl'VsjlS&LTtc"96#@mAfhO_96j,pMI1=[Fq,J31>/c9Cp=j48@sD.G@[NB&N,!B3"QR<e[.?4qhBfJFW&(KAC_sj$gSZhqF0T(5sE#WDeG4=a!P:El$187`l8mPLohVAbL$Y+1h)kXj-Qc=,3AkKqKh@36/9mF(-YNS\uo1I$tLMOSf*X#]ps~>endstream
endobj
17 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 185
>>
stream
GarW0:CDb>&4c2<MVk["e;d_gmE_c)_gI#HRqtN;"kVtu-`3[N_p#oKpB[c<lE@1;N3PAV-jDn$krJuTE8kQlWoro5dLb%\m(;?dMjquqaV%j/=&\ojFOWl2lj!LDQGM.KJ9B`,kY59<$TNAI\j,Z/r8d0V5!390b?H4<mLFNoiYn5PLd#8q4Ic~>endstream
endobj
18 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 244
>>
stream
GarW4b6l*?&4Q?hMRuh(23Tpmkt^cRET&,M[qV:g6EQRMs2I6!U^T$](X?G+!r#.NZmDs+Qu<6UTF4R17*n#s2&gSmk2Mk.09@0r[k9DgRu9WjKt]k!Ic1n'J.q+%HiE61]07.7.f1^O]tp[&Fn>&8hUMD]+;spqhR>>LX`Bap0=GTNaa\,&7Zqt_h]FP:T4bD(VQ.g\Pm&EGSuJ6sK$-Os9'08g6q!hekXZP?.6B5f2_i<IDu~>endstream
endobj
19 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 164
>>
stream
GapQh0E=F,0U\H3T\pNYT^QKk?tc>IP,;W#U1^23ihPEM_?CT3!/hd>6k,goQl7B?"*$l7Jjr"7HE)RrVF!h>6AQD_Ah8\RZl9o#.%!<n#GicFAepUZ\E#$(k,.:+.(JE9TULV0b8?ts/Mf%77fd-ZP$/#A!1Zb>/H~>endstream
endobj
xref
0 20
0000000000 65535 f
0000000061 00000 n
0000000102 00000 n
0000000209 00000 n
0000000404 00000 n
0000000516 00000 n
0000004168 00000 n
0000008161 00000 n
0000008467 00000 n
0000008662 00000 n
0000012353 00000 n
0000012612 00000 n
0000012808 00000 n
0000012878 00000 n
0000013140 00000 n
0000013226 00000 n
0000013481 00000 n
0000013878 00000 n
0000014154 00000 n
0000014489 00000 n
trailer
<<
/ID
[<3a098456c5603f47bfc8fb47cd3d7621><3a098456c5603f47bfc8fb47cd3d7621>]
% ReportLab generated PDF document -- digest (opensource)
/Info 13 0 R
/Root 12 0 R
/Size 20
>>
startxref
14744
%%EOF

View File

@@ -67,6 +67,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase):
"barcode_max_pages": None, "barcode_max_pages": None,
"barcode_enable_tag": None, "barcode_enable_tag": None,
"barcode_tag_mapping": None, "barcode_tag_mapping": None,
"barcode_tag_split": None,
"ai_enabled": False, "ai_enabled": False,
"llm_embedding_backend": None, "llm_embedding_backend": None,
"llm_embedding_model": None, "llm_embedding_model": None,

View File

@@ -822,6 +822,35 @@ class TestTagBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
yield reader yield reader
reader.cleanup() reader.cleanup()
@override_settings(
CONSUMER_ENABLE_TAG_BARCODE=True,
CONSUMER_TAG_BARCODE_MAPPING={"TAG:(.*)": "\\g<1>"},
)
def test_barcode_without_tag_match(self):
"""
GIVEN:
- Barcode that does not match any TAG mapping pattern
- TAG mapping configured for "TAG:" prefix only
WHEN:
- is_tag property is checked on an ASN barcode
THEN:
- Returns False
"""
test_file = self.BARCODE_SAMPLE_DIR / "barcode-39-asn-123.pdf"
with self.get_reader(test_file) as reader:
reader.detect()
self.assertGreater(
len(reader.barcodes),
0,
"Should have detected at least one barcode",
)
asn_barcode = reader.barcodes[0]
self.assertFalse(
asn_barcode.is_tag,
f"ASN barcode '{asn_barcode.value}' should not match TAG: pattern",
)
@override_settings(CONSUMER_ENABLE_TAG_BARCODE=True) @override_settings(CONSUMER_ENABLE_TAG_BARCODE=True)
def test_scan_file_without_matching_barcodes(self): def test_scan_file_without_matching_barcodes(self):
""" """
@@ -928,3 +957,163 @@ class TestTagBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
# expect error to be caught and logged only # expect error to be caught and logged only
tags = reader.metadata.tag_ids tags = reader.metadata.tag_ids
self.assertEqual(tags, None) self.assertEqual(tags, None)
@override_settings(
CONSUMER_ENABLE_TAG_BARCODE=True,
CONSUMER_TAG_BARCODE_SPLIT=True,
CONSUMER_TAG_BARCODE_MAPPING={"TAG:(.*)": "\\g<1>"},
)
def test_split_on_tag_barcodes(self):
"""
GIVEN:
- PDF containing barcodes with TAG: prefix
- Tag barcode splitting is enabled with TAG: mapping
WHEN:
- File is processed
THEN:
- Splits should occur at pages with TAG barcodes
- Tags should NOT be assigned when tag splitting is enabled (they're assigned during re-consumption)
"""
test_file = self.BARCODE_SAMPLE_DIR / "split-by-tag-basic.pdf"
with self.get_reader(test_file) as reader:
reader.detect()
separator_page_numbers = reader.get_separation_pages()
self.assertDictEqual(separator_page_numbers, {1: True, 3: True})
tags = reader.metadata.tag_ids
self.assertIsNone(tags)
@override_settings(
CONSUMER_ENABLE_TAG_BARCODE=True,
CONSUMER_TAG_BARCODE_SPLIT=False,
CONSUMER_TAG_BARCODE_MAPPING={"TAG:(.*)": "\\g<1>"},
)
def test_no_split_when_tag_split_disabled(self):
"""
GIVEN:
- PDF containing TAG barcodes (TAG:invoice, TAG:receipt)
- Tag barcode splitting is disabled
WHEN:
- File is processed
THEN:
- No separation pages are identified
- Tags are still extracted and assigned
"""
test_file = self.BARCODE_SAMPLE_DIR / "split-by-tag-basic.pdf"
with self.get_reader(test_file) as reader:
reader.run()
separator_page_numbers = reader.get_separation_pages()
self.assertDictEqual(separator_page_numbers, {})
tags = reader.metadata.tag_ids
self.assertEqual(len(tags), 2)
@override_settings(
CONSUMER_ENABLE_BARCODES=True,
CONSUMER_ENABLE_TAG_BARCODE=True,
CONSUMER_TAG_BARCODE_SPLIT=True,
CONSUMER_TAG_BARCODE_MAPPING={"TAG:(.*)": "\\g<1>"},
CELERY_TASK_ALWAYS_EAGER=True,
OCR_MODE="skip",
)
def test_consume_barcode_file_tag_split_and_assignment(self):
"""
GIVEN:
- PDF containing TAG barcodes on pages 2 and 4 (TAG:invoice, TAG:receipt)
- Tag barcode splitting is enabled
WHEN:
- File is consumed
THEN:
- PDF is split into 3 documents at barcode pages
- Each split document has the appropriate TAG barcodes extracted and assigned
- Document 1: page 1 (no tags)
- Document 2: pages 2-3 with TAG:invoice
- Document 3: pages 4-5 with TAG:receipt
"""
test_file = self.BARCODE_SAMPLE_DIR / "split-by-tag-basic.pdf"
dst = settings.SCRATCH_DIR / "split-by-tag-basic.pdf"
shutil.copy(test_file, dst)
with mock.patch("documents.tasks.ProgressManager", DummyProgressManager):
result = tasks.consume_file(
ConsumableDocument(
source=DocumentSource.ConsumeFolder,
original_file=dst,
),
None,
)
self.assertEqual(result, "Barcode splitting complete!")
documents = Document.objects.all().order_by("id")
self.assertEqual(documents.count(), 3)
doc1 = documents[0]
self.assertEqual(doc1.tags.count(), 0)
doc2 = documents[1]
self.assertEqual(doc2.tags.count(), 1)
self.assertEqual(doc2.tags.first().name, "invoice")
doc3 = documents[2]
self.assertEqual(doc3.tags.count(), 1)
self.assertEqual(doc3.tags.first().name, "receipt")
@override_settings(
CONSUMER_ENABLE_TAG_BARCODE=True,
CONSUMER_TAG_BARCODE_SPLIT=True,
CONSUMER_TAG_BARCODE_MAPPING={"ASN(.*)": "ASN_\\g<1>", "TAG:(.*)": "\\g<1>"},
)
def test_split_by_mixed_asn_tag_backwards_compat(self):
"""
GIVEN:
- PDF with mixed ASN and TAG barcodes
- Mapping that treats ASN barcodes as tags (backwards compatibility)
- ASN12345 on page 1, TAG:personal on page 3, ASN13456 on page 5, TAG:business on page 7
WHEN:
- File is consumed
THEN:
- Both ASN and TAG barcodes trigger splits
- Split points are at pages 3, 5, and 7 (page 1 never splits)
- 4 separate documents are produced
"""
test_file = self.BARCODE_SAMPLE_DIR / "split-by-tag-mixed-asn.pdf"
with self.get_reader(test_file) as reader:
reader.detect()
separator_pages = reader.get_separation_pages()
self.assertDictEqual(separator_pages, {2: True, 4: True, 6: True})
document_list = reader.separate_pages(separator_pages)
self.assertEqual(len(document_list), 4)
@override_settings(
CONSUMER_ENABLE_TAG_BARCODE=True,
CONSUMER_TAG_BARCODE_SPLIT=True,
CONSUMER_TAG_BARCODE_MAPPING={"TAG:(.*)": "\\g<1>"},
)
def test_split_by_tag_multiple_per_page(self):
"""
GIVEN:
- PDF with multiple TAG barcodes on same page
- TAG:invoice and TAG:expense on page 2, TAG:receipt on page 4
WHEN:
- File is processed
THEN:
- Pages with barcodes trigger splits
- Split points at pages 2 and 4
- 3 separate documents are produced
"""
test_file = self.BARCODE_SAMPLE_DIR / "split-by-tag-multiple-per-page.pdf"
with self.get_reader(test_file) as reader:
reader.detect()
separator_pages = reader.get_separation_pages()
self.assertDictEqual(separator_pages, {1: True, 3: True})
document_list = reader.separate_pages(separator_pages)
self.assertEqual(len(document_list), 3)

View File

@@ -603,23 +603,21 @@ class TestPDFActions(DirectoriesMixin, TestCase):
expected_filename, expected_filename,
) )
self.assertEqual(consume_file_args[1].title, None) self.assertEqual(consume_file_args[1].title, None)
self.assertTrue(consume_file_args[1].skip_asn) # No metadata_document_id, delete_originals False, so ASN should be None
self.assertIsNone(consume_file_args[1].asn)
# With metadata_document_id overrides # With metadata_document_id overrides
result = bulk_edit.merge(doc_ids, metadata_document_id=metadata_document_id) result = bulk_edit.merge(doc_ids, metadata_document_id=metadata_document_id)
consume_file_args, _ = mock_consume_file.call_args consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(consume_file_args[1].title, "B (merged)") self.assertEqual(consume_file_args[1].title, "B (merged)")
self.assertEqual(consume_file_args[1].created, self.doc2.created) self.assertEqual(consume_file_args[1].created, self.doc2.created)
self.assertTrue(consume_file_args[1].skip_asn)
self.assertEqual(result, "OK") self.assertEqual(result, "OK")
@mock.patch("documents.bulk_edit.delete.si") @mock.patch("documents.bulk_edit.delete.si")
@mock.patch("documents.tasks.consume_file.s") @mock.patch("documents.tasks.consume_file.s")
@mock.patch("documents.bulk_edit.chain")
def test_merge_and_delete_originals( def test_merge_and_delete_originals(
self, self,
mock_chain,
mock_consume_file, mock_consume_file,
mock_delete_documents, mock_delete_documents,
): ):
@@ -633,6 +631,12 @@ class TestPDFActions(DirectoriesMixin, TestCase):
- Document deletion task should be called - Document deletion task should be called
""" """
doc_ids = [self.doc1.id, self.doc2.id, self.doc3.id] doc_ids = [self.doc1.id, self.doc2.id, self.doc3.id]
self.doc1.archive_serial_number = 101
self.doc2.archive_serial_number = 102
self.doc3.archive_serial_number = 103
self.doc1.save()
self.doc2.save()
self.doc3.save()
result = bulk_edit.merge(doc_ids, delete_originals=True) result = bulk_edit.merge(doc_ids, delete_originals=True)
self.assertEqual(result, "OK") self.assertEqual(result, "OK")
@@ -643,7 +647,8 @@ class TestPDFActions(DirectoriesMixin, TestCase):
mock_consume_file.assert_called() mock_consume_file.assert_called()
mock_delete_documents.assert_called() mock_delete_documents.assert_called()
mock_chain.assert_called_once() consume_sig = mock_consume_file.return_value
consume_sig.apply_async.assert_called_once()
consume_file_args, _ = mock_consume_file.call_args consume_file_args, _ = mock_consume_file.call_args
self.assertEqual( self.assertEqual(
@@ -651,7 +656,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
expected_filename, expected_filename,
) )
self.assertEqual(consume_file_args[1].title, None) self.assertEqual(consume_file_args[1].title, None)
self.assertTrue(consume_file_args[1].skip_asn) self.assertEqual(consume_file_args[1].asn, 101)
delete_documents_args, _ = mock_delete_documents.call_args delete_documents_args, _ = mock_delete_documents.call_args
self.assertEqual( self.assertEqual(
@@ -659,6 +664,92 @@ class TestPDFActions(DirectoriesMixin, TestCase):
doc_ids, doc_ids,
) )
self.doc1.refresh_from_db()
self.doc2.refresh_from_db()
self.doc3.refresh_from_db()
self.assertIsNone(self.doc1.archive_serial_number)
self.assertIsNone(self.doc2.archive_serial_number)
self.assertIsNone(self.doc3.archive_serial_number)
@mock.patch("documents.bulk_edit.delete.si")
@mock.patch("documents.tasks.consume_file.s")
def test_merge_and_delete_originals_restore_on_failure(
self,
mock_consume_file,
mock_delete_documents,
):
"""
GIVEN:
- Existing documents
WHEN:
- Merge action with deleting documents is called with 1 document
- Error occurs when queuing consume file task
THEN:
- Archive serial numbers are restored
"""
doc_ids = [self.doc1.id]
self.doc1.archive_serial_number = 111
self.doc1.save()
sig = mock.Mock()
sig.apply_async.side_effect = Exception("boom")
mock_consume_file.return_value = sig
with self.assertRaises(Exception):
bulk_edit.merge(doc_ids, delete_originals=True)
self.doc1.refresh_from_db()
self.assertEqual(self.doc1.archive_serial_number, 111)
@mock.patch("documents.bulk_edit.delete.si")
@mock.patch("documents.tasks.consume_file.s")
def test_merge_and_delete_originals_metadata_handoff(
self,
mock_consume_file,
mock_delete_documents,
):
"""
GIVEN:
- Existing documents with ASNs
WHEN:
- Merge with delete_originals=True and metadata_document_id set
THEN:
- Handoff ASN uses metadata document ASN
"""
doc_ids = [self.doc1.id, self.doc2.id]
self.doc1.archive_serial_number = 101
self.doc2.archive_serial_number = 202
self.doc1.save()
self.doc2.save()
result = bulk_edit.merge(
doc_ids,
metadata_document_id=self.doc2.id,
delete_originals=True,
)
self.assertEqual(result, "OK")
consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(consume_file_args[1].asn, 202)
def test_restore_archive_serial_numbers_task(self):
"""
GIVEN:
- Existing document with no archive serial number
WHEN:
- Restore archive serial number task is called with backup data
THEN:
- Document archive serial number is restored
"""
self.doc1.archive_serial_number = 444
self.doc1.save()
Document.objects.filter(pk=self.doc1.id).update(archive_serial_number=None)
backup = {self.doc1.id: 444}
bulk_edit.restore_archive_serial_numbers_task(backup)
self.doc1.refresh_from_db()
self.assertEqual(self.doc1.archive_serial_number, 444)
@mock.patch("documents.tasks.consume_file.s") @mock.patch("documents.tasks.consume_file.s")
def test_merge_with_archive_fallback(self, mock_consume_file): def test_merge_with_archive_fallback(self, mock_consume_file):
""" """
@@ -727,6 +818,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
self.assertEqual(mock_consume_file.call_count, 2) self.assertEqual(mock_consume_file.call_count, 2)
consume_file_args, _ = mock_consume_file.call_args consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(consume_file_args[1].title, "B (split 2)") self.assertEqual(consume_file_args[1].title, "B (split 2)")
self.assertIsNone(consume_file_args[1].asn)
self.assertEqual(result, "OK") self.assertEqual(result, "OK")
@@ -751,6 +843,8 @@ class TestPDFActions(DirectoriesMixin, TestCase):
""" """
doc_ids = [self.doc2.id] doc_ids = [self.doc2.id]
pages = [[1, 2], [3]] pages = [[1, 2], [3]]
self.doc2.archive_serial_number = 200
self.doc2.save()
result = bulk_edit.split(doc_ids, pages, delete_originals=True) result = bulk_edit.split(doc_ids, pages, delete_originals=True)
self.assertEqual(result, "OK") self.assertEqual(result, "OK")
@@ -768,6 +862,42 @@ class TestPDFActions(DirectoriesMixin, TestCase):
doc_ids, doc_ids,
) )
self.doc2.refresh_from_db()
self.assertIsNone(self.doc2.archive_serial_number)
@mock.patch("documents.bulk_edit.delete.si")
@mock.patch("documents.tasks.consume_file.s")
@mock.patch("documents.bulk_edit.chord")
def test_split_restore_on_failure(
self,
mock_chord,
mock_consume_file,
mock_delete_documents,
):
"""
GIVEN:
- Existing documents
WHEN:
- Split action with deleting documents is called with 1 document and 2 page groups
- Error occurs when queuing chord task
THEN:
- Archive serial numbers are restored
"""
doc_ids = [self.doc2.id]
pages = [[1, 2]]
self.doc2.archive_serial_number = 222
self.doc2.save()
sig = mock.Mock()
sig.apply_async.side_effect = Exception("boom")
mock_chord.return_value = sig
result = bulk_edit.split(doc_ids, pages, delete_originals=True)
self.assertEqual(result, "OK")
self.doc2.refresh_from_db()
self.assertEqual(self.doc2.archive_serial_number, 222)
@mock.patch("documents.tasks.consume_file.delay") @mock.patch("documents.tasks.consume_file.delay")
@mock.patch("pikepdf.Pdf.save") @mock.patch("pikepdf.Pdf.save")
def test_split_with_errors(self, mock_save_pdf, mock_consume_file): def test_split_with_errors(self, mock_save_pdf, mock_consume_file):
@@ -968,10 +1098,49 @@ class TestPDFActions(DirectoriesMixin, TestCase):
mock_chord.return_value.delay.return_value = None mock_chord.return_value.delay.return_value = None
doc_ids = [self.doc2.id] doc_ids = [self.doc2.id]
operations = [{"page": 1}, {"page": 2}] operations = [{"page": 1}, {"page": 2}]
self.doc2.archive_serial_number = 250
self.doc2.save()
result = bulk_edit.edit_pdf(doc_ids, operations, delete_original=True) result = bulk_edit.edit_pdf(doc_ids, operations, delete_original=True)
self.assertEqual(result, "OK") self.assertEqual(result, "OK")
mock_chord.assert_called_once() mock_chord.assert_called_once()
consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(consume_file_args[1].asn, 250)
self.doc2.refresh_from_db()
self.assertIsNone(self.doc2.archive_serial_number)
@mock.patch("documents.bulk_edit.delete.si")
@mock.patch("documents.tasks.consume_file.s")
@mock.patch("documents.bulk_edit.chord")
def test_edit_pdf_restore_on_failure(
self,
mock_chord,
mock_consume_file,
mock_delete_documents,
):
"""
GIVEN:
- Existing document
WHEN:
- edit_pdf is called with delete_original=True
- Error occurs when queuing chord task
THEN:
- Archive serial numbers are restored
"""
doc_ids = [self.doc2.id]
operations = [{"page": 1}]
self.doc2.archive_serial_number = 333
self.doc2.save()
sig = mock.Mock()
sig.apply_async.side_effect = Exception("boom")
mock_chord.return_value = sig
with self.assertRaises(Exception):
bulk_edit.edit_pdf(doc_ids, operations, delete_original=True)
self.doc2.refresh_from_db()
self.assertEqual(self.doc2.archive_serial_number, 333)
@mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay") @mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
def test_edit_pdf_with_update_document(self, mock_update_document): def test_edit_pdf_with_update_document(self, mock_update_document):

View File

@@ -14,6 +14,7 @@ from django.test import override_settings
from django.utils import timezone from django.utils import timezone
from guardian.core import ObjectPermissionChecker from guardian.core import ObjectPermissionChecker
from documents.barcodes import BarcodePlugin
from documents.consumer import ConsumerError from documents.consumer import ConsumerError
from documents.data_models import DocumentMetadataOverrides from documents.data_models import DocumentMetadataOverrides
from documents.data_models import DocumentSource from documents.data_models import DocumentSource
@@ -412,14 +413,6 @@ class TestConsumer(
self.assertEqual(document.archive_serial_number, 123) self.assertEqual(document.archive_serial_number, 123)
self._assert_first_last_send_progress() self._assert_first_last_send_progress()
def testMetadataOverridesSkipAsnPropagation(self):
overrides = DocumentMetadataOverrides()
incoming = DocumentMetadataOverrides(skip_asn=True)
overrides.update(incoming)
self.assertTrue(overrides.skip_asn)
def testOverrideTitlePlaceholders(self): def testOverrideTitlePlaceholders(self):
c = Correspondent.objects.create(name="Correspondent Name") c = Correspondent.objects.create(name="Correspondent Name")
dt = DocumentType.objects.create(name="DocType Name") dt = DocumentType.objects.create(name="DocType Name")
@@ -1271,3 +1264,46 @@ class PostConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):
r"sample\.pdf: Error while executing post-consume script: Command '\[.*\]' returned non-zero exit status \d+\.", r"sample\.pdf: Error while executing post-consume script: Command '\[.*\]' returned non-zero exit status \d+\.",
): ):
consumer.run_post_consume_script(doc) consumer.run_post_consume_script(doc)
class TestMetadataOverrides(TestCase):
def test_update_skip_asn_if_exists(self):
base = DocumentMetadataOverrides()
incoming = DocumentMetadataOverrides(skip_asn_if_exists=True)
base.update(incoming)
self.assertTrue(base.skip_asn_if_exists)
class TestBarcodeApplyDetectedASN(TestCase):
"""
GIVEN:
- Existing Documents with ASN 123
WHEN:
- A BarcodePlugin which detected an ASN
THEN:
- If skip_asn_if_exists is set, and ASN exists, do not set ASN
- If skip_asn_if_exists is set, and ASN does not exist, set ASN
"""
def test_apply_detected_asn_skips_existing_when_flag_set(self):
doc = Document.objects.create(
checksum="X1",
title="D1",
archive_serial_number=123,
)
metadata = DocumentMetadataOverrides(skip_asn_if_exists=True)
plugin = BarcodePlugin(
input_doc=mock.Mock(),
metadata=metadata,
status_mgr=mock.Mock(),
base_tmp_dir=tempfile.gettempdir(),
task_id="test-task",
)
plugin._apply_detected_asn(123)
self.assertIsNone(plugin.metadata.asn)
doc.hard_delete()
plugin._apply_detected_asn(123)
self.assertEqual(plugin.metadata.asn, 123)

View File

@@ -2,7 +2,7 @@ msgid ""
msgstr "" msgstr ""
"Project-Id-Version: paperless-ngx\n" "Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n" "Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2026-01-27 18:56+0000\n" "POT-Creation-Date: 2026-01-29 16:06+0000\n"
"PO-Revision-Date: 2022-02-17 04:17\n" "PO-Revision-Date: 2022-02-17 04:17\n"
"Last-Translator: \n" "Last-Translator: \n"
"Language-Team: English\n" "Language-Team: English\n"
@@ -1786,35 +1786,39 @@ msgstr ""
msgid "Sets the tag barcode mapping" msgid "Sets the tag barcode mapping"
msgstr "" msgstr ""
#: paperless/models.py:287 #: paperless/models.py:284
msgid "Enables AI features" msgid "Enables splitting on tag barcodes"
msgstr "" msgstr ""
#: paperless/models.py:293 #: paperless/models.py:293
msgid "Enables AI features"
msgstr ""
#: paperless/models.py:299
msgid "Sets the LLM embedding backend" msgid "Sets the LLM embedding backend"
msgstr "" msgstr ""
#: paperless/models.py:301 #: paperless/models.py:307
msgid "Sets the LLM embedding model" msgid "Sets the LLM embedding model"
msgstr "" msgstr ""
#: paperless/models.py:308 #: paperless/models.py:314
msgid "Sets the LLM backend" msgid "Sets the LLM backend"
msgstr "" msgstr ""
#: paperless/models.py:316 #: paperless/models.py:322
msgid "Sets the LLM model" msgid "Sets the LLM model"
msgstr "" msgstr ""
#: paperless/models.py:323 #: paperless/models.py:329
msgid "Sets the LLM API key" msgid "Sets the LLM API key"
msgstr "" msgstr ""
#: paperless/models.py:330 #: paperless/models.py:336
msgid "Sets the LLM endpoint, optional" msgid "Sets the LLM endpoint, optional"
msgstr "" msgstr ""
#: paperless/models.py:337 #: paperless/models.py:343
msgid "paperless application settings" msgid "paperless application settings"
msgstr "" msgstr ""

View File

@@ -3,11 +3,6 @@ import os
import sys import sys
if __name__ == "__main__": if __name__ == "__main__":
try:
from paperless_migration.detect import choose_settings_module
os.environ.setdefault("DJANGO_SETTINGS_MODULE", choose_settings_module())
except Exception:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless.settings") os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless.settings")
from django.core.management import execute_from_command_line from django.core.management import execute_from_command_line

View File

@@ -1,13 +0,0 @@
#!/usr/bin/env python3
import os
import sys
if __name__ == "__main__":
os.environ.setdefault(
"DJANGO_SETTINGS_MODULE",
"paperless_migration.settings",
)
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)

View File

@@ -1,18 +1,12 @@
import os import os
try:
from paperless_migration.detect import choose_settings_module
os.environ.setdefault("DJANGO_SETTINGS_MODULE", choose_settings_module())
except Exception:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless.settings")
from django.core.asgi import get_asgi_application from django.core.asgi import get_asgi_application
# Fetch Django ASGI application early to ensure AppRegistry is populated # Fetch Django ASGI application early to ensure AppRegistry is populated
# before importing consumers and AuthMiddlewareStack that may import ORM # before importing consumers and AuthMiddlewareStack that may import ORM
# models. # models.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless.settings")
django_asgi_app = get_asgi_application() django_asgi_app = get_asgi_application()
from channels.auth import AuthMiddlewareStack # noqa: E402 from channels.auth import AuthMiddlewareStack # noqa: E402

View File

@@ -116,6 +116,7 @@ class BarcodeConfig(BaseConfig):
barcode_max_pages: int = dataclasses.field(init=False) barcode_max_pages: int = dataclasses.field(init=False)
barcode_enable_tag: bool = dataclasses.field(init=False) barcode_enable_tag: bool = dataclasses.field(init=False)
barcode_tag_mapping: dict[str, str] = dataclasses.field(init=False) barcode_tag_mapping: dict[str, str] = dataclasses.field(init=False)
barcode_tag_split: bool = dataclasses.field(init=False)
def __post_init__(self) -> None: def __post_init__(self) -> None:
app_config = self._get_config_instance() app_config = self._get_config_instance()
@@ -153,6 +154,9 @@ class BarcodeConfig(BaseConfig):
self.barcode_tag_mapping = ( self.barcode_tag_mapping = (
app_config.barcode_tag_mapping or settings.CONSUMER_TAG_BARCODE_MAPPING app_config.barcode_tag_mapping or settings.CONSUMER_TAG_BARCODE_MAPPING
) )
self.barcode_tag_split = (
app_config.barcode_tag_split or settings.CONSUMER_TAG_BARCODE_SPLIT
)
@dataclasses.dataclass @dataclasses.dataclass

View File

@@ -1,7 +0,0 @@
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless_migration.settings")
application = get_asgi_application()

View File

@@ -0,0 +1,21 @@
# Generated by Django 5.1.7 on 2025-12-15 21:30
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
("paperless", "0005_applicationconfiguration_ai_enabled_and_more"),
]
operations = [
migrations.AddField(
model_name="applicationconfiguration",
name="barcode_tag_split",
field=models.BooleanField(
null=True,
verbose_name="Enables splitting on tag barcodes",
),
),
]

View File

@@ -279,6 +279,12 @@ class ApplicationConfiguration(AbstractSingletonModel):
null=True, null=True,
) )
# PAPERLESS_CONSUMER_TAG_BARCODE_SPLIT
barcode_tag_split = models.BooleanField(
verbose_name=_("Enables splitting on tag barcodes"),
null=True,
)
""" """
AI related settings AI related settings
""" """

View File

@@ -1149,6 +1149,10 @@ CONSUMER_TAG_BARCODE_MAPPING = dict(
), ),
) )
CONSUMER_TAG_BARCODE_SPLIT: Final[bool] = __get_boolean(
"PAPERLESS_CONSUMER_TAG_BARCODE_SPLIT",
)
CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED: Final[bool] = __get_boolean( CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED: Final[bool] = __get_boolean(
"PAPERLESS_CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED", "PAPERLESS_CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED",
) )

View File

@@ -9,15 +9,10 @@ https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
import os import os
try:
from paperless_migration.detect import choose_settings_module
os.environ.setdefault("DJANGO_SETTINGS_MODULE", choose_settings_module())
except Exception:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless.settings")
from django.core.wsgi import get_wsgi_application from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless.settings")
application = get_wsgi_application() application = get_wsgi_application()
import logging # noqa: E402 import logging # noqa: E402

View File

@@ -1,6 +0,0 @@
from django.apps import AppConfig
class PaperlessMigrationConfig(AppConfig):
default_auto_field = "django.db.models.BigAutoField"
name = "paperless_migration"

View File

@@ -1,28 +0,0 @@
"""ASGI application for migration mode with WebSocket support."""
from __future__ import annotations
import os
from channels.auth import AuthMiddlewareStack
from channels.routing import ProtocolTypeRouter
from channels.routing import URLRouter
from channels.security.websocket import AllowedHostsOriginValidator
from django.core.asgi import get_asgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless_migration.settings")
# Initialize Django ASGI application early to ensure settings are loaded
django_asgi_app = get_asgi_application()
# Import routing after Django is initialized
from paperless_migration.routing import websocket_urlpatterns # noqa: E402
application = ProtocolTypeRouter(
{
"http": django_asgi_app,
"websocket": AllowedHostsOriginValidator(
AuthMiddlewareStack(URLRouter(websocket_urlpatterns)),
),
},
)

View File

@@ -1,245 +0,0 @@
"""WebSocket consumers for migration operations."""
from __future__ import annotations
import json
import logging
import os
import shutil
import tempfile
from pathlib import Path
from typing import Any
from channels.generic.websocket import AsyncWebsocketConsumer
from django.conf import settings
from paperless_migration.services.importer import ImportService
from paperless_migration.services.transform import TransformService
logger = logging.getLogger(__name__)
class MigrationConsumerBase(AsyncWebsocketConsumer):
"""Base consumer with common authentication and messaging logic."""
async def connect(self) -> None:
"""Authenticate and accept or reject the connection."""
user = self.scope.get("user")
session = self.scope.get("session", {})
if not user or not user.is_authenticated:
logger.warning("WebSocket connection rejected: not authenticated")
await self.close(code=4001)
return
if not user.is_superuser:
logger.warning("WebSocket connection rejected: not superuser")
await self.close(code=4003)
return
if not session.get("migration_code_ok"):
logger.warning("WebSocket connection rejected: migration code not verified")
await self.close(code=4002)
return
await self.accept()
logger.info("WebSocket connection accepted for user: %s", user.username)
async def disconnect(self, close_code: int) -> None:
"""Handle disconnection."""
logger.debug("WebSocket disconnected with code: %d", close_code)
async def receive(self, text_data: str | None = None, **kwargs: Any) -> None:
"""Handle incoming messages - triggers the operation."""
if text_data is None:
return
try:
data = json.loads(text_data)
except json.JSONDecodeError:
await self.send_error("Invalid JSON message")
return
action = data.get("action")
if action == "start":
await self.run_operation()
else:
await self.send_error(f"Unknown action: {action}")
async def run_operation(self) -> None:
"""Override in subclasses to run the specific operation."""
raise NotImplementedError
async def send_message(self, msg_type: str, **kwargs: Any) -> None:
"""Send a typed JSON message to the client."""
await self.send(text_data=json.dumps({"type": msg_type, **kwargs}))
async def send_log(self, message: str, level: str = "info") -> None:
"""Send a log message."""
await self.send_message("log", message=message, level=level)
async def send_progress(
self,
current: int,
total: int | None = None,
label: str = "",
) -> None:
"""Send a progress update."""
await self.send_message(
"progress",
current=current,
total=total,
label=label,
)
async def send_stats(self, stats: dict[str, Any]) -> None:
"""Send statistics update."""
await self.send_message("stats", **stats)
async def send_complete(
self,
duration: float,
*,
success: bool,
**kwargs: Any,
) -> None:
"""Send completion message."""
await self.send_message(
"complete",
success=success,
duration=duration,
**kwargs,
)
async def send_error(self, message: str) -> None:
"""Send an error message."""
await self.send_message("error", message=message)
class TransformConsumer(MigrationConsumerBase):
"""WebSocket consumer for transform operations."""
async def run_operation(self) -> None:
"""Run the transform operation."""
input_path = Path(settings.MIGRATION_EXPORT_PATH)
output_path = Path(settings.MIGRATION_TRANSFORMED_PATH)
frequency = settings.MIGRATION_PROGRESS_FREQUENCY
if not input_path.exists():
await self.send_error(f"Export file not found: {input_path}")
return
if output_path.exists():
await self.send_error(
f"Output file already exists: {output_path}. "
"Delete it first to re-run transform.",
)
return
await self.send_log("Starting transform operation...")
service = TransformService(
input_path=input_path,
output_path=output_path,
update_frequency=frequency,
)
try:
async for update in service.run_async():
match update["type"]:
case "progress":
await self.send_progress(
current=update["completed"],
label=f"{update['completed']:,} rows processed",
)
if update.get("stats"):
await self.send_stats({"transformed": update["stats"]})
case "complete":
await self.send_complete(
success=True,
duration=update["duration"],
total_processed=update["total_processed"],
stats=update["stats"],
speed=update["speed"],
)
case "error":
await self.send_error(update["message"])
case "log":
await self.send_log(
update["message"],
update.get("level", "info"),
)
except Exception as exc:
logger.exception("Transform operation failed")
await self.send_error(f"Transform failed: {exc}")
class ImportConsumer(MigrationConsumerBase):
"""WebSocket consumer for import operations."""
async def run_operation(self) -> None:
"""Run the import operation (wipe, migrate, import)."""
export_path = Path(settings.MIGRATION_EXPORT_PATH)
transformed_path = Path(settings.MIGRATION_TRANSFORMED_PATH)
imported_marker = Path(settings.MIGRATION_IMPORTED_PATH)
source_dir = export_path.parent
if not export_path.exists():
await self.send_error("Export file not found. Upload or re-check export.")
return
if not transformed_path.exists():
await self.send_error("Transformed file not found. Run transform first.")
return
await self.send_log("Preparing import operation...")
# Backup original manifest and swap in transformed version
backup_path: Path | None = None
try:
backup_fd, backup_name = tempfile.mkstemp(
prefix="manifest.v2.",
suffix=".json",
dir=source_dir,
)
os.close(backup_fd)
backup_path = Path(backup_name)
shutil.copy2(export_path, backup_path)
shutil.copy2(transformed_path, export_path)
await self.send_log("Manifest files prepared")
except Exception as exc:
await self.send_error(f"Failed to prepare import manifest: {exc}")
return
service = ImportService(
source_dir=source_dir,
imported_marker=imported_marker,
)
try:
async for update in service.run_async():
match update["type"]:
case "phase":
await self.send_log(f"Phase: {update['phase']}", level="info")
case "log":
await self.send_log(
update["message"],
update.get("level", "info"),
)
case "complete":
await self.send_complete(
success=update["success"],
duration=update["duration"],
)
case "error":
await self.send_error(update["message"])
except Exception as exc:
logger.exception("Import operation failed")
await self.send_error(f"Import failed: {exc}")
finally:
# Restore original manifest
if backup_path and backup_path.exists():
try:
shutil.move(str(backup_path), str(export_path))
except Exception as exc:
logger.warning("Failed to restore backup manifest: %s", exc)

View File

@@ -1,150 +0,0 @@
"""Lightweight detection to decide if we should boot migration mode."""
from __future__ import annotations
import logging
import os
import sqlite3
from pathlib import Path
from typing import Any
logger = logging.getLogger(__name__)
BASE_DIR = Path(__file__).resolve().parent.parent
_DOC_EXISTS_QUERY = "SELECT 1 FROM documents_document LIMIT 1;"
def _get_db_config() -> dict[str, Any]:
data_dir = Path(os.getenv("PAPERLESS_DATA_DIR", BASE_DIR.parent / "data")).resolve()
if not os.getenv("PAPERLESS_DBHOST"):
return {
"ENGINE": "sqlite",
"NAME": data_dir / "db.sqlite3",
}
engine = "mariadb" if os.getenv("PAPERLESS_DBENGINE") == "mariadb" else "postgres"
cfg = {
"ENGINE": engine,
"HOST": os.getenv("PAPERLESS_DBHOST"),
"PORT": os.getenv("PAPERLESS_DBPORT"),
"NAME": os.getenv("PAPERLESS_DBNAME", "paperless"),
"USER": os.getenv("PAPERLESS_DBUSER", "paperless"),
"PASSWORD": os.getenv("PAPERLESS_DBPASS", "paperless"),
}
return cfg
def _probe_sqlite(path: Path) -> bool:
if not path.exists():
return False
try:
conn = sqlite3.connect(path, timeout=1)
cur = conn.cursor()
cur.execute(_DOC_EXISTS_QUERY)
cur.fetchone()
return True
except sqlite3.Error:
return False
finally:
try:
conn.close()
except Exception:
pass
def _probe_postgres(cfg: dict[str, Any]) -> bool:
try:
import psycopg
except ImportError: # pragma: no cover
logger.debug("psycopg not installed; skipping postgres probe")
return False
try:
conn = psycopg.connect(
host=cfg["HOST"],
port=cfg["PORT"],
dbname=cfg["NAME"],
user=cfg["USER"],
password=cfg["PASSWORD"],
connect_timeout=2,
)
with conn, conn.cursor() as cur:
cur.execute(_DOC_EXISTS_QUERY)
cur.fetchone()
return True
except Exception:
return False
finally:
try:
conn.close()
except Exception:
pass
def _probe_mariadb(cfg: dict[str, Any]) -> bool:
try:
import MySQLdb # type: ignore
except ImportError: # pragma: no cover
logger.debug("mysqlclient not installed; skipping mariadb probe")
return False
try:
conn = MySQLdb.connect(
host=cfg["HOST"],
port=int(cfg["PORT"] or 3306),
user=cfg["USER"],
passwd=cfg["PASSWORD"],
db=cfg["NAME"],
connect_timeout=2,
)
cur = conn.cursor()
cur.execute("SELECT 1 FROM documents_document LIMIT 1;")
cur.fetchone()
return True
except Exception:
return False
finally:
try:
conn.close()
except Exception:
pass
def is_v2_database() -> bool:
cfg = _get_db_config()
if cfg["ENGINE"] == "sqlite":
return _probe_sqlite(cfg["NAME"])
if cfg["ENGINE"] == "postgres":
return _probe_postgres(cfg)
if cfg["ENGINE"] == "mariadb":
return _probe_mariadb(cfg)
return False
def choose_settings_module() -> str:
# ENV override
toggle = os.getenv("PAPERLESS_MIGRATION_MODE")
if toggle is not None:
chosen = (
"paperless_migration.settings"
if str(toggle).lower() in ("1", "true", "yes", "on")
else "paperless.settings"
)
os.environ["PAPERLESS_MIGRATION_MODE"] = "1" if "migration" in chosen else "0"
return chosen
# Auto-detect via DB probe
if is_v2_database():
logger.warning("Detected v2 schema; booting migration mode.")
os.environ["PAPERLESS_MIGRATION_MODE"] = "1"
return "paperless_migration.settings"
os.environ["PAPERLESS_MIGRATION_MODE"] = "0"
return "paperless.settings"
if __name__ == "__main__": # pragma: no cover
logger.info(
"v2 database detected" if is_v2_database() else "v2 database not detected",
)

View File

@@ -1,13 +0,0 @@
"""WebSocket URL routing for migration operations."""
from __future__ import annotations
from django.urls import path
from paperless_migration.consumers import ImportConsumer
from paperless_migration.consumers import TransformConsumer
websocket_urlpatterns = [
path("ws/migration/transform/", TransformConsumer.as_asgi()),
path("ws/migration/import/", ImportConsumer.as_asgi()),
]

View File

@@ -1,186 +0,0 @@
"""Import service for loading transformed data into v3 database."""
from __future__ import annotations
import subprocess
import sys
import time
from dataclasses import dataclass
from pathlib import Path
from typing import TYPE_CHECKING
from typing import TypedDict
if TYPE_CHECKING:
from collections.abc import AsyncGenerator
from collections.abc import Generator
class ProgressUpdate(TypedDict, total=False):
"""Progress update message structure."""
type: str
phase: str
message: str
level: str
success: bool
duration: float
return_code: int
@dataclass
class ImportService:
"""Service for importing transformed data into v3 database.
This service orchestrates the three-phase import process:
1. Wipe the existing database
2. Run Django migrations for v3 schema
3. Import the transformed data
"""
source_dir: Path
imported_marker: Path
manage_path: Path | None = None
def __post_init__(self) -> None:
if self.manage_path is None:
# Default to manage.py in the src directory
self.manage_path = (
Path(__file__).resolve().parent.parent.parent / "manage.py"
)
def _get_env(self) -> dict[str, str]:
"""Get environment variables for subprocess calls."""
import os
env = os.environ.copy()
env["DJANGO_SETTINGS_MODULE"] = "paperless.settings"
env["PAPERLESS_MIGRATION_MODE"] = "0"
return env
def _run_command(
self,
args: list[str],
label: str,
) -> Generator[ProgressUpdate, None, int]:
"""Run a command and yield log lines. Returns the return code."""
yield {"type": "log", "message": f"Running: {label}", "level": "info"}
process = subprocess.Popen(
args,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
bufsize=1,
text=True,
env=self._get_env(),
)
try:
if process.stdout:
for line in process.stdout:
yield {
"type": "log",
"message": line.rstrip(),
"level": "info",
}
process.wait()
return process.returncode
finally:
if process.poll() is None:
process.kill()
def run_sync(self) -> Generator[ProgressUpdate, None, None]:
"""Run the import synchronously, yielding progress updates.
This orchestrates:
1. Database wipe
2. Django migrations
3. Document import
"""
start_time = time.perf_counter()
# Phase 1: Wipe database
yield {"type": "phase", "phase": "wipe"}
wipe_cmd = [
sys.executable,
"-m",
"paperless_migration.services.wipe_db",
]
wipe_code = yield from self._run_command(wipe_cmd, "Database wipe")
if wipe_code != 0:
yield {
"type": "error",
"message": f"Database wipe failed with code {wipe_code}",
}
return
yield {"type": "log", "message": "Database wipe complete", "level": "info"}
# Phase 2: Run migrations
yield {"type": "phase", "phase": "migrate"}
migrate_cmd = [
sys.executable,
str(self.manage_path),
"migrate",
"--noinput",
]
migrate_code = yield from self._run_command(migrate_cmd, "Django migrations")
if migrate_code != 0:
yield {
"type": "error",
"message": f"Migrations failed with code {migrate_code}",
}
return
yield {"type": "log", "message": "Migrations complete", "level": "info"}
# Phase 3: Import data
yield {"type": "phase", "phase": "import"}
import_cmd = [
sys.executable,
str(self.manage_path),
"document_importer",
str(self.source_dir),
"--data-only",
]
import_code = yield from self._run_command(import_cmd, "Document import")
if import_code != 0:
yield {
"type": "error",
"message": f"Import failed with code {import_code}",
}
return
# Mark import as complete
try:
self.imported_marker.parent.mkdir(parents=True, exist_ok=True)
self.imported_marker.write_text("ok\n", encoding="utf-8")
except Exception as exc:
yield {
"type": "log",
"message": f"Warning: Could not write import marker: {exc}",
"level": "warning",
}
end_time = time.perf_counter()
duration = end_time - start_time
yield {
"type": "complete",
"success": True,
"duration": duration,
}
async def run_async(self) -> AsyncGenerator[ProgressUpdate, None]:
"""Run the import asynchronously, yielding progress updates.
This wraps the synchronous implementation to work with async consumers.
"""
import asyncio
for update in self.run_sync():
yield update
# Yield control to the event loop
await asyncio.sleep(0)

View File

@@ -1,173 +0,0 @@
"""Transform service for converting v2 exports to v3 format."""
from __future__ import annotations
import json
import time
from collections import Counter
from collections.abc import AsyncGenerator
from collections.abc import Callable
from collections.abc import Generator
from dataclasses import dataclass
from dataclasses import field
from typing import TYPE_CHECKING
from typing import Any
from typing import TypedDict
import ijson
if TYPE_CHECKING:
from pathlib import Path
class FixtureObject(TypedDict):
"""Structure of a Django fixture object."""
model: str
pk: int
fields: dict[str, Any]
class ProgressUpdate(TypedDict, total=False):
"""Progress update message structure."""
type: str
completed: int
stats: dict[str, int]
message: str
level: str
duration: float
total_processed: int
speed: float
TransformFn = Callable[[FixtureObject], FixtureObject]
def transform_documents_document(obj: FixtureObject) -> FixtureObject:
"""Transform a documents.document fixture object for v3 schema."""
fields: dict[str, Any] = obj["fields"]
fields.pop("storage_type", None)
content: Any = fields.get("content")
fields["content_length"] = len(content) if isinstance(content, str) else 0
return obj
# Registry of model-specific transforms
TRANSFORMS: dict[str, TransformFn] = {
"documents.document": transform_documents_document,
}
@dataclass
class TransformService:
"""Service for transforming v2 exports to v3 format.
This service processes JSON fixtures incrementally using ijson for
memory-efficient streaming, and yields progress updates suitable
for WebSocket transmission.
"""
input_path: Path
output_path: Path
update_frequency: int = 100
_stats: Counter[str] = field(default_factory=Counter, init=False)
_total_processed: int = field(default=0, init=False)
def validate(self) -> str | None:
"""Validate preconditions for transform. Returns error message or None."""
if not self.input_path.exists():
return f"Input file not found: {self.input_path}"
if self.output_path.exists():
return f"Output file already exists: {self.output_path}"
if self.input_path.resolve() == self.output_path.resolve():
return "Input and output paths cannot be the same file"
return None
def _process_fixture(self, obj: FixtureObject) -> FixtureObject:
"""Apply any registered transforms to a fixture object."""
model: str = obj["model"]
transform: TransformFn | None = TRANSFORMS.get(model)
if transform:
obj = transform(obj)
self._stats[model] += 1
return obj
def run_sync(self) -> Generator[ProgressUpdate, None, None]:
"""Run the transform synchronously, yielding progress updates.
This is the core implementation that processes the JSON file
and yields progress updates at regular intervals.
"""
error = self.validate()
if error:
yield {"type": "error", "message": error}
return
self._stats.clear()
self._total_processed = 0
start_time = time.perf_counter()
yield {"type": "log", "message": "Opening input file...", "level": "info"}
try:
with (
self.input_path.open("rb") as infile,
self.output_path.open("w", encoding="utf-8") as outfile,
):
outfile.write("[\n")
first = True
for i, obj in enumerate(ijson.items(infile, "item")):
fixture: FixtureObject = obj
fixture = self._process_fixture(fixture)
self._total_processed += 1
if not first:
outfile.write(",\n")
first = False
json.dump(fixture, outfile, ensure_ascii=False)
# Yield progress at configured frequency
if i > 0 and i % self.update_frequency == 0:
yield {
"type": "progress",
"completed": self._total_processed,
"stats": dict(self._stats),
}
outfile.write("\n]\n")
except Exception as exc:
# Clean up partial output on error
if self.output_path.exists():
self.output_path.unlink()
yield {"type": "error", "message": str(exc)}
return
end_time = time.perf_counter()
duration = end_time - start_time
speed = self._total_processed / duration if duration > 0 else 0
yield {
"type": "complete",
"duration": duration,
"total_processed": self._total_processed,
"stats": dict(self._stats),
"speed": speed,
}
async def run_async(self) -> AsyncGenerator[ProgressUpdate, None]:
"""Run the transform asynchronously, yielding progress updates.
This wraps the synchronous implementation to work with async consumers.
The actual I/O is done synchronously since ijson doesn't support async,
but we yield control periodically to keep the event loop responsive.
"""
import asyncio
for update in self.run_sync():
yield update
# Yield control to the event loop periodically
await asyncio.sleep(0)

View File

@@ -1,115 +0,0 @@
"""Database wipe service for migration import process.
This module can be run as a script via:
python -m paperless_migration.services.wipe_db
It uses the paperless_migration settings to wipe all tables
before running v3 migrations.
"""
from __future__ import annotations
import logging
import sys
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from django.db.backends.base.base import BaseDatabaseWrapper
logger = logging.getLogger(__name__)
def _get_target_tables(connection: BaseDatabaseWrapper) -> list[str]:
"""Get list of tables to drop that exist in the database."""
from django.apps import apps
from django.db.migrations.recorder import MigrationRecorder
model_tables = {
model._meta.db_table for model in apps.get_models(include_auto_created=True)
}
model_tables.add(MigrationRecorder.Migration._meta.db_table)
existing_tables = set(connection.introspection.table_names())
return sorted(model_tables & existing_tables)
def _drop_sqlite_tables(connection: BaseDatabaseWrapper) -> int:
"""Drop tables for SQLite database. Returns count of tables dropped."""
tables = _get_target_tables(connection)
with connection.cursor() as cursor:
cursor.execute("PRAGMA foreign_keys=OFF;")
for table in tables:
cursor.execute(f'DROP TABLE IF EXISTS "{table}";')
cursor.execute("PRAGMA foreign_keys=ON;")
return len(tables)
def _drop_postgres_tables(connection: BaseDatabaseWrapper) -> int:
"""Drop tables for PostgreSQL database. Returns count of tables dropped."""
tables = _get_target_tables(connection)
if not tables:
return 0
with connection.cursor() as cursor:
for table in tables:
cursor.execute(f'DROP TABLE IF EXISTS "{table}" CASCADE;')
return len(tables)
def _drop_mysql_tables(connection: BaseDatabaseWrapper) -> int:
"""Drop tables for MySQL/MariaDB database. Returns count of tables dropped."""
tables = _get_target_tables(connection)
with connection.cursor() as cursor:
cursor.execute("SET FOREIGN_KEY_CHECKS=0;")
for table in tables:
cursor.execute(f"DROP TABLE IF EXISTS `{table}`;")
cursor.execute("SET FOREIGN_KEY_CHECKS=1;")
return len(tables)
def wipe_database() -> tuple[bool, str]:
"""Wipe all application tables from the database.
Returns:
Tuple of (success: bool, message: str)
"""
from django.db import connection
vendor = connection.vendor
logger.info("Wiping database for vendor: %s", vendor)
try:
match vendor:
case "sqlite":
count = _drop_sqlite_tables(connection)
case "postgresql":
count = _drop_postgres_tables(connection)
case "mysql":
count = _drop_mysql_tables(connection)
case _:
return False, f"Unsupported database vendor: {vendor}"
message = f"Dropped {count} tables from {vendor} database"
logger.info(message)
return True, message
except Exception as exc:
message = f"Failed to wipe database: {exc}"
logger.exception(message)
return False, message
def main() -> int:
"""Entry point when run as a script."""
import os
import django
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless_migration.settings")
django.setup()
success, message = wipe_database()
print(message) # noqa: T201
return 0 if success else 1
if __name__ == "__main__":
sys.exit(main())

View File

@@ -1,245 +0,0 @@
"""Settings for migration-mode Django instance."""
from __future__ import annotations
import logging
import os
from pathlib import Path
from typing import Any
from dotenv import load_dotenv
BASE_DIR = Path(__file__).resolve().parent.parent
DEBUG = os.getenv("PAPERLESS_DEBUG", "false").lower() == "true"
ALLOWED_HOSTS = ["*"]
# Tap paperless.conf if it's available: load the first existing candidate
# path as dotenv-style environment variables, then stop looking.
for path in [
    os.getenv("PAPERLESS_CONFIGURATION_PATH"),
    "../paperless.conf",
    "/etc/paperless.conf",
    "/usr/local/etc/paperless.conf",
]:
    if path and Path(path).exists():
        load_dotenv(path)
        break
def __get_path(
key: str,
default: str | Path,
) -> Path:
if key in os.environ:
return Path(os.environ[key]).resolve()
return Path(default).resolve()
DATA_DIR = __get_path("PAPERLESS_DATA_DIR", BASE_DIR.parent / "data")
EXPORT_DIR = __get_path("PAPERLESS_EXPORT_DIR", BASE_DIR.parent / "export")
def _parse_redis_url() -> str:
"""Parse Redis URL from environment with sensible defaults."""
return os.getenv("PAPERLESS_REDIS_URL", "redis://localhost:6379")
def _parse_db_settings() -> dict[str, dict[str, Any]]:
    """Build the Django DATABASES mapping from PAPERLESS_* env vars.

    Defaults to SQLite under DATA_DIR. When PAPERLESS_DBHOST is set, the
    "default" alias switches to PostgreSQL (or MySQL/MariaDB when
    PAPERLESS_DBENGINE is "mariadb") and the original SQLite configuration
    is preserved under a secondary "sqlite" alias.

    Returns:
        A DATABASES-style dict with at least a "default" entry.
    """
    databases: dict[str, dict[str, Any]] = {
        "default": {
            "ENGINE": "django.db.backends.sqlite3",
            "NAME": DATA_DIR / "db.sqlite3",
            "OPTIONS": {},
        },
    }
    if os.getenv("PAPERLESS_DBHOST"):
        # Keep the SQLite config around before repointing "default" at the
        # server-backed database.
        databases["sqlite"] = databases["default"].copy()
        databases["default"] = {
            "HOST": os.getenv("PAPERLESS_DBHOST"),
            "NAME": os.getenv("PAPERLESS_DBNAME", "paperless"),
            "USER": os.getenv("PAPERLESS_DBUSER", "paperless"),
            "PASSWORD": os.getenv("PAPERLESS_DBPASS", "paperless"),
            "OPTIONS": {},
        }
        if os.getenv("PAPERLESS_DBPORT"):
            databases["default"]["PORT"] = os.getenv("PAPERLESS_DBPORT")
        if os.getenv("PAPERLESS_DBENGINE") == "mariadb":
            engine = "django.db.backends.mysql"
            options = {
                "read_default_file": "/etc/mysql/my.cnf",
                "charset": "utf8mb4",
                "ssl_mode": os.getenv("PAPERLESS_DBSSLMODE", "PREFERRED"),
                # NOTE(review): these values are None when the env vars are
                # unset — presumably the MySQL backend tolerates that; verify.
                "ssl": {
                    "ca": os.getenv("PAPERLESS_DBSSLROOTCERT"),
                    "cert": os.getenv("PAPERLESS_DBSSLCERT"),
                    "key": os.getenv("PAPERLESS_DBSSLKEY"),
                },
            }
        else:
            engine = "django.db.backends.postgresql"
            options = {
                "sslmode": os.getenv("PAPERLESS_DBSSLMODE", "prefer"),
                "sslrootcert": os.getenv("PAPERLESS_DBSSLROOTCERT"),
                "sslcert": os.getenv("PAPERLESS_DBSSLCERT"),
                "sslkey": os.getenv("PAPERLESS_DBSSLKEY"),
            }
        databases["default"]["ENGINE"] = engine
        databases["default"]["OPTIONS"].update(options)
    if os.getenv("PAPERLESS_DB_TIMEOUT") is not None:
        timeout = int(os.getenv("PAPERLESS_DB_TIMEOUT"))
        if databases["default"]["ENGINE"] == "django.db.backends.sqlite3":
            # Pure-SQLite setup: timeout is the DB file lock timeout.
            databases["default"]["OPTIONS"].update({"timeout": timeout})
        else:
            # Server-backed default: connect timeout there, and the lock
            # timeout on the retained SQLite alias (only exists here, since
            # a non-SQLite default implies PAPERLESS_DBHOST was set).
            databases["default"]["OPTIONS"].update({"connect_timeout": timeout})
            databases["sqlite"]["OPTIONS"].update({"timeout": timeout})
    return databases
DATABASES = _parse_db_settings()
SECRET_KEY = os.getenv("PAPERLESS_SECRET_KEY")
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
},
{
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
},
{
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
},
{
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
},
]
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_TZ = True
CSRF_TRUSTED_ORIGINS: list[str] = []
INSTALLED_APPS = [
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"channels",
"allauth",
"allauth.account",
"allauth.socialaccount",
"allauth.mfa",
"paperless_migration",
]
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
"allauth.account.middleware.AccountMiddleware",
]
ROOT_URLCONF = "paperless_migration.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [
BASE_DIR / "paperless_migration" / "templates",
BASE_DIR / "documents" / "templates",
],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
},
]
# ASGI application for Channels
ASGI_APPLICATION = "paperless_migration.asgi.application"
# Channel layers configuration using Redis
REDIS_URL = _parse_redis_url()
CHANNEL_LAYERS = {
"default": {
"BACKEND": "channels_redis.core.RedisChannelLayer",
"CONFIG": {
"hosts": [REDIS_URL],
"capacity": 1500,
"expiry": 10,
},
},
}
# Keep WSGI for compatibility
WSGI_APPLICATION = "paperless_migration.wsgi.application"
AUTHENTICATION_BACKENDS = [
"django.contrib.auth.backends.ModelBackend",
"allauth.account.auth_backends.AuthenticationBackend",
]
STATIC_URL = "/static/"
STATICFILES_DIRS = [
BASE_DIR / ".." / "static",
BASE_DIR / "static",
BASE_DIR / "documents" / "static",
]
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
LOGIN_URL = "/accounts/login/"
LOGIN_REDIRECT_URL = "/migration/"
LOGOUT_REDIRECT_URL = "/accounts/login/?loggedout=1"
ACCOUNT_ADAPTER = "allauth.account.adapter.DefaultAccountAdapter"
ACCOUNT_AUTHENTICATED_LOGIN_REDIRECTS = False
SOCIALACCOUNT_ADAPTER = "allauth.socialaccount.adapter.DefaultSocialAccountAdapter"
SOCIALACCOUNT_ENABLED = False
SESSION_ENGINE = "django.contrib.sessions.backends.db"
MIGRATION_EXPORT_PATH = __get_path(
"PAPERLESS_MIGRATION_EXPORT_PATH",
EXPORT_DIR / "manifest.json",
)
MIGRATION_TRANSFORMED_PATH = __get_path(
"PAPERLESS_MIGRATION_TRANSFORMED_PATH",
EXPORT_DIR / "manifest.v3.json",
)
MIGRATION_IMPORTED_PATH = Path(EXPORT_DIR / "import.completed").resolve()
# Progress update frequency (rows between WebSocket updates)
MIGRATION_PROGRESS_FREQUENCY = int(
os.getenv("PAPERLESS_MIGRATION_PROGRESS_FREQUENCY", "100"),
)
# One-time access code required for migration logins; stable across autoreload
_code = os.getenv("PAPERLESS_MIGRATION_ACCESS_CODE")
if not _code:
    import secrets

    _code = secrets.token_urlsafe(12)
    # Store in the environment so Django's autoreloader child process
    # reuses the same code instead of generating a fresh one.
    os.environ["PAPERLESS_MIGRATION_ACCESS_CODE"] = _code
MIGRATION_ACCESS_CODE = _code
# Log the code exactly once per process tree; the env flag survives the
# autoreloader re-importing this settings module.
if os.environ.get("PAPERLESS_MIGRATION_CODE_LOGGED") != "1":
    logging.getLogger(__name__).warning(
        "Migration one-time access code: %s",
        MIGRATION_ACCESS_CODE,
    )
    os.environ["PAPERLESS_MIGRATION_CODE_LOGGED"] = "1"

View File

@@ -1,77 +0,0 @@
{% load i18n static %}
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<meta name="author" content="Paperless-ngx project and contributors">
<meta name="robots" content="noindex,nofollow">
<meta name="color-scheme" content="light">
<title>{% translate "Paperless-ngx sign in" %}</title>
<link href="{% static 'bootstrap.min.css' %}" rel="stylesheet">
<link href="{% static 'base.css' %}" rel="stylesheet">
<style>
:root, body, .form-control, .form-floating {
color-scheme: light;
--bs-body-bg: #f5f5f5;
--bs-body-color: #212529;
--bs-body-color-rgb: 33, 37, 41;
--bs-border-color: #dee2e6;
--bs-link-color: #17541f;
--bs-link-color-rgb: 23, 84, 31;
}
@media (prefers-color-scheme: dark) { :root { color-scheme: light; } }
body {
min-height: 100vh;
background:
radial-gradient(circle at 20% 20%, #eef5ef, #f7fbf7),
linear-gradient(120deg, rgba(23, 84, 31, 0.05) 0%, rgba(0,0,0,0) 30%),
linear-gradient(300deg, rgba(15, 54, 20, 0.06) 0%, rgba(0,0,0,0) 40%);
}
</style>
</head>
<body class="d-flex align-items-center justify-content-center text-center p-3">
<main class="w-100" style="max-width: 360px;">
<form class="form-accounts p-4 rounded-4" id="form-account" method="post">
{% csrf_token %}
{% include "paperless-ngx/snippets/svg_logo.html" with extra_attrs="width='240' class='logo mb-3'" %}
<p class="text-uppercase fw-semibold mb-1 text-secondary small" style="letter-spacing: 0.12rem;">{% translate "Migration Mode" %}</p>
{% for message in messages %}
<div class="alert alert-{{ message.level_tag }} mb-2" role="alert">{{ message }}</div>
{% endfor %}
<p class="mb-3">{% translate "Login with a superuser account to proceed." %}</p>
{% if form.errors %}
<div class="alert alert-danger" role="alert">
{% for field, errors in form.errors.items %}
{% for error in errors %}
{{ error }}
{% endfor %}
{% endfor %}
</div>
{% endif %}
{% translate "Username" as i18n_username %}
{% translate "Password" as i18n_password %}
<div class="form-floating form-stacked-top">
<input type="text" name="login" id="inputUsername" placeholder="{{ i18n_username }}" class="form-control" autocorrect="off" autocapitalize="none" required autofocus>
<label for="inputUsername">{{ i18n_username }}</label>
</div>
<div class="form-floating form-stacked-middle">
<input type="password" name="password" id="inputPassword" placeholder="{{ i18n_password }}" class="form-control" required>
<label for="inputPassword">{{ i18n_password }}</label>
</div>
<div class="form-floating form-stacked-bottom">
<input type="text" name="code" id="inputCode" placeholder="One-time code" class="form-control" required>
<label for="inputCode">One-time code</label>
</div>
<p class="mt-2 small fst-italic">{% translate "Code can be found in the startup logs." %}</p>
<div class="d-grid mt-3">
<button class="btn btn-lg btn-primary" type="submit">{% translate "Sign in" %}</button>
</div>
</form>
</main>
</body>
</html>

View File

@@ -1,558 +0,0 @@
<!doctype html>
{% load static %}
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>Paperless-ngx Migration Mode</title>
<link rel="stylesheet" href="{% static 'bootstrap.min.css' %}" />
<link rel="stylesheet" href="{% static 'base.css' %}" />
<style>
:root, .form-control {
color-scheme: light;
--bs-body-bg: #f5f5f5;
--bs-body-color: #212529;
--bs-body-color-rgb: 33, 37, 41;
--bs-border-color: #dee2e6;
--bs-link-color: var(--pngx-primary);
--bs-link-color-rgb: 23, 84, 31;
}
@media (prefers-color-scheme: dark) { :root { color-scheme: light; } }
.btn-primary:disabled {
--bs-btn-disabled-bg: #4d7352;
--bs-btn-disabled-border-color: #4d7352;
}
body {
background:
radial-gradient(circle at 20% 20%, #eef5ef, #f7fbf7),
linear-gradient(120deg, rgba(23, 84, 31, 0.05) 0%, rgba(0,0,0,0) 30%),
linear-gradient(300deg, rgba(15, 54, 20, 0.06) 0%, rgba(0,0,0,0) 40%);
min-height: 100vh;
}
svg.logo .text {
fill: #161616 !important;
}
.hero-card,
.card-step {
background: #fff;
backdrop-filter: blur(6px);
border: 1px solid rgba(23, 84, 31, 0.08);
box-shadow: 0 16px 40px rgba(0, 0, 0, 0.06);
border-radius: 18px;
}
.status-dot {
width: 10px;
height: 10px;
border-radius: 50%;
display: inline-block;
}
.card-step {
border-radius: 16px;
transition: transform 0.15s ease, box-shadow 0.15s ease;
}
.card-step.done-step {
opacity: 0.4;
}
.path-pill {
background: rgba(23, 84, 31, 0.08);
color: var(--bs-body-color);
border-radius: 12px;
padding: 0.4rem 0.75rem;
font-size: 0.9rem;
}
.step-rail {
position: relative;
height: 4px;
background: rgba(23, 84, 31, 0.12);
border-radius: 999px;
}
.step-rail .fill {
position: absolute;
left: 0;
top: 0;
bottom: 0;
width: calc({{ export_exists|yesno:'33,0' }}% + {{ transformed_exists|yesno:'33,0' }}% + {{ imported_exists|yesno:'34,0' }}%);
max-width: 100%;
background: linear-gradient(90deg, #17541f, #2c7a3c);
border-radius: 999px;
transition: width 0.3s ease;
}
.step-chip {
width: 38px;
height: 38px;
border-radius: 50%;
display: grid;
place-items: center;
font-weight: 700;
background: #fff;
border: 2px solid rgba(23, 84, 31, 0.25);
color: #17541f;
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.08);
}
.step-chip.done {
background: #17541f;
color: #fff;
border-color: #17541f;
}
.console-log {
background: #0f1a12;
color: #d1e7d6;
border-radius: 12px;
min-height: 180px;
max-height: 400px;
padding: 12px;
font-size: 0.85rem;
font-family: 'Consolas', 'Monaco', monospace;
overflow: auto;
white-space: pre-wrap;
word-break: break-word;
}
.console-log .log-error { color: #ff6b6b; }
.console-log .log-warning { color: #ffd93d; }
.console-log .log-success { color: #6bcb77; }
.console-log .log-info { color: #4d96ff; }
.progress-bar-container {
height: 24px;
background: rgba(23, 84, 31, 0.1);
border-radius: 12px;
overflow: hidden;
margin-bottom: 0.5rem;
}
.progress-bar-fill {
height: 100%;
background: linear-gradient(90deg, #17541f, #2c7a3c);
border-radius: 12px;
transition: width 0.3s ease;
display: flex;
align-items: center;
justify-content: center;
color: white;
font-size: 0.75rem;
font-weight: 600;
min-width: fit-content;
padding: 0 8px;
}
.stats-grid {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(120px, 1fr));
gap: 0.5rem;
margin-top: 0.5rem;
}
.stat-item {
background: rgba(23, 84, 31, 0.05);
border-radius: 8px;
padding: 0.5rem;
text-align: center;
}
.stat-value {
font-size: 1.25rem;
font-weight: 700;
color: #17541f;
}
.stat-label {
font-size: 0.75rem;
color: #666;
}
.ws-status {
display: inline-flex;
align-items: center;
gap: 0.5rem;
padding: 0.25rem 0.75rem;
border-radius: 999px;
font-size: 0.8rem;
font-weight: 500;
}
.ws-status.connected { background: #d4edda; color: #155724; }
.ws-status.disconnected { background: #f8d7da; color: #721c24; }
.ws-status.connecting { background: #fff3cd; color: #856404; }
</style>
</head>
<body class="pb-4">
<div class="container py-4">
<div class="row justify-content-center mb-4">
<div class="col-lg-9">
<div class="hero-card p-4">
<div class="d-flex flex-wrap align-items-center justify-content-between gap-3">
<div class="d-flex align-items-center gap-3">
{% include "paperless-ngx/snippets/svg_logo.html" with extra_attrs="width='280' class='logo'" %}
<div class="ps-2">
<p class="text-uppercase fw-semibold mb-1 text-secondary" style="letter-spacing: 0.12rem;">Migration Mode</p>
<h1 class="h3 mb-2 text-primary">Paperless-ngx v2 to v3</h1>
<p class="text-muted mb-0">Migrate your data from Paperless-ngx version 2 to version 3.</p>
</div>
</div>
<div class="text-end">
<span class="badge bg-success-subtle text-success border border-success-subtle px-3 py-2">Online</span>
</div>
</div>
<div class="mt-4">
<div class="d-flex justify-content-between align-items-center mb-2">
<div class="d-flex align-items-center gap-2">
<span class="step-chip {% if export_exists %}done{% endif %}">1</span>
<div>
<div class="fw-semibold mb-0">Export</div>
<small class="text-muted">v2 data</small>
</div>
</div>
<div class="d-flex align-items-center gap-2">
<span class="step-chip {% if transformed_exists %}done{% endif %}">2</span>
<div>
<div class="fw-semibold mb-0">Transform</div>
<small class="text-muted">to v3 schema</small>
</div>
</div>
<div class="d-flex align-items-center gap-2">
<span class="step-chip {% if imported_exists %}done{% endif %}">3</span>
<div>
<div class="fw-semibold mb-0">Import</div>
<small class="text-muted">into v3</small>
</div>
</div>
</div>
<div class="step-rail">
<div class="fill"></div>
</div>
</div>
{% if messages %}
<div class="mt-4">
{% for message in messages %}
<div class="alert alert-{{ message.level_tag }} mb-2" role="alert">{{ message }}</div>
{% endfor %}
</div>
{% endif %}
<div class="row g-3 mt-2">
<div class="col-md-6">
<div class="d-flex align-items-center gap-2">
<span class="status-dot bg-{{ export_exists|yesno:'success,danger' }}"></span>
<div>
<div class="fw-semibold">Export file</div>
<div class="small text-muted">{{ export_exists|yesno:"Ready,Missing" }}</div>
</div>
</div>
<div class="path-pill mt-2 text-truncate" title="{{ export_path }}">{{ export_path }}</div>
</div>
<div class="col-md-6">
<div class="d-flex align-items-center gap-2">
<span class="status-dot bg-{{ transformed_exists|yesno:'success,warning' }}"></span>
<div>
<div class="fw-semibold">Transformed file</div>
<div class="small text-muted">{{ transformed_exists|yesno:"Ready,Pending" }}</div>
</div>
</div>
<div class="path-pill mt-2 text-truncate" title="{{ transformed_path }}">{{ transformed_path }}</div>
</div>
</div>
</div>
</div>
</div>
<div class="row gy-4 justify-content-center">
<div class="col-lg-3 col-md-4">
<div class="card card-step h-100 {% if export_exists %}done-step{% endif %}">
<div class="card-body d-flex flex-column gap-3">
<div>
<p class="text-uppercase text-muted mb-1 fw-semibold" style="letter-spacing: 0.08rem;">Step 1</p>
<h3 class="h5 mb-1">Export (v2)</h3>
<p class="small text-muted mb-0">Generate and upload the v2 export file.</p>
</div>
<div class="mt-auto d-grid gap-2">
<form method="post" enctype="multipart/form-data" class="d-flex gap-2 align-items-center">
{% csrf_token %}
<input class="form-control form-control-sm" type="file" name="export_file" accept=".json" {% if export_exists %}disabled{% endif %} required>
<button class="btn btn-outline-secondary btn-sm" type="submit" name="action" value="upload" {% if export_exists %}disabled aria-disabled="true"{% endif %}>Upload</button>
</form>
<form method="post">
{% csrf_token %}
<button class="btn btn-primary w-100" type="submit" name="action" value="check" {% if export_exists %}disabled aria-disabled="true"{% endif %}>Re-check export</button>
</form>
</div>
</div>
</div>
</div>
<div class="col-lg-3 col-md-4">
<div class="card card-step h-100 {% if transformed_exists %}done-step{% endif %}">
<div class="card-body d-flex flex-column gap-3">
<div>
<p class="text-uppercase text-muted mb-1 fw-semibold" style="letter-spacing: 0.08rem;">Step 2</p>
<h3 class="h5 mb-1">Transform</h3>
<p class="small text-muted mb-0">Convert the export into the v3-ready structure.</p>
</div>
<div class="mt-auto d-grid gap-2">
<form method="post">
{% csrf_token %}
<button
class="btn btn-outline-primary w-100"
type="submit"
name="action"
value="transform"
id="btn-transform"
{% if not export_exists or transformed_exists %}disabled aria-disabled="true"{% endif %}
>
Transform export
</button>
</form>
{% if transformed_exists %}
<form method="post">
{% csrf_token %}
<button class="btn btn-outline-danger btn-sm w-100" type="submit" name="action" value="reset_transform">
Reset transform
</button>
</form>
{% endif %}
</div>
</div>
</div>
</div>
<div class="col-lg-3 col-md-4">
<div class="card card-step h-100 {% if imported_exists %}done-step{% endif %}">
<div class="card-body d-flex flex-column gap-3">
<div>
<p class="text-uppercase text-muted mb-1 fw-semibold" style="letter-spacing: 0.08rem;">Step 3</p>
<h3 class="h5 mb-1">Import (v3)</h3>
<p class="small text-muted mb-0">Load the transformed data into your v3 instance.</p>
</div>
<div class="mt-auto">
<form method="post">
{% csrf_token %}
<button
class="btn btn-outline-secondary w-100"
type="submit"
name="action"
value="import"
id="btn-import"
{% if not transformed_exists or imported_exists %}disabled aria-disabled="true"{% endif %}
>
Import transformed data
</button>
</form>
</div>
</div>
</div>
</div>
</div>
<div class="row justify-content-center mt-4">
<div class="col-lg-9">
{% if not export_exists %}
<div class="alert alert-info mb-3">
<div class="fw-semibold mb-1">Export file not found</div>
<div class="small">
Run the v2 export from your Paperless instance, e.g.:
<code>docker run --rm ghcr.io/paperless-ngx/paperless-ngx:2.20.6 document_exporter --data-only</code>
(see <a href="https://docs.paperless-ngx.com/administration/#exporter" target="_blank" rel="noopener noreferrer">documentation</a>). Once the <code>manifest.json</code> is in place, upload it or (especially for larger files) place it directly at the expected location and click "Re-check export".
<p class="mt-2 mb-0 text-danger fst-italic">Warning: The export must be generated with Paperless-ngx version 2.20.6</p>
</div>
</div>
{% endif %}
<div class="card card-step">
<div class="card-body">
<div class="d-flex justify-content-between align-items-center mb-2">
<div class="fw-semibold">Migration console</div>
<span id="ws-status" class="ws-status disconnected">
<span class="status-dot"></span>
<span class="status-text">Ready</span>
</span>
</div>
<div id="progress-container" class="mb-3" style="display: none;">
<div class="progress-bar-container">
<div id="progress-bar" class="progress-bar-fill" style="width: 0%;">
<span id="progress-text">0 rows</span>
</div>
</div>
<div id="stats-container" class="stats-grid"></div>
</div>
<div id="migration-log" class="console-log">Ready to begin migration...</div>
</div>
</div>
</div>
</div>
</div>
<script>
(function() {
const logEl = document.getElementById('migration-log');
const wsStatusEl = document.getElementById('ws-status');
const progressContainer = document.getElementById('progress-container');
const progressBar = document.getElementById('progress-bar');
const progressText = document.getElementById('progress-text');
const statsContainer = document.getElementById('stats-container');
function setWsStatus(status, text) {
wsStatusEl.className = 'ws-status ' + status;
wsStatusEl.querySelector('.status-text').textContent = text;
}
function appendLog(message, level) {
const line = document.createElement('div');
line.className = 'log-' + (level || 'info');
line.textContent = message;
logEl.appendChild(line);
logEl.scrollTop = logEl.scrollHeight;
}
function clearLog() {
logEl.innerHTML = '';
}
function updateProgress(current, total, label) {
progressContainer.style.display = 'block';
const pct = total ? Math.min(100, (current / total) * 100) : 0;
progressBar.style.width = (total ? pct : 100) + '%';
progressText.textContent = label || (current.toLocaleString() + ' rows');
}
function updateStats(stats) {
if (!stats || Object.keys(stats).length === 0) {
statsContainer.innerHTML = '';
return;
}
let html = '';
for (const [key, value] of Object.entries(stats)) {
const label = key.replace('documents.', '').replace('_', ' ');
html += '<div class="stat-item">' +
'<div class="stat-value">' + (typeof value === 'number' ? value.toLocaleString() : value) + '</div>' +
'<div class="stat-label">' + label + '</div>' +
'</div>';
}
statsContainer.innerHTML = html;
}
// Render a duration in seconds as "12.3s" for sub-minute values,
// otherwise as minutes plus whole seconds, e.g. "4m 5s".
function formatDuration(seconds) {
  if (seconds >= 60) {
    const wholeMinutes = Math.floor(seconds / 60);
    const remainder = (seconds % 60).toFixed(0);
    return wholeMinutes + 'm ' + remainder + 's';
  }
  return seconds.toFixed(1) + 's';
}
function startWebSocket(action) {
const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
const wsUrl = protocol + '//' + window.location.host + '/ws/migration/' + action + '/';
clearLog();
appendLog('Connecting to ' + action + ' service...', 'info');
setWsStatus('connecting', 'Connecting...');
progressContainer.style.display = 'none';
statsContainer.innerHTML = '';
const ws = new WebSocket(wsUrl);
ws.onopen = function() {
setWsStatus('connected', 'Connected');
appendLog('Connected. Starting ' + action + '...', 'success');
ws.send(JSON.stringify({ action: 'start' }));
};
ws.onmessage = function(event) {
try {
const data = JSON.parse(event.data);
switch (data.type) {
case 'log':
appendLog(data.message, data.level || 'info');
break;
case 'progress':
updateProgress(data.current, data.total, data.label);
break;
case 'stats':
if (data.transformed) {
updateStats(data.transformed);
} else {
updateStats(data);
}
break;
case 'complete':
const status = data.success ? 'success' : 'error';
const msg = data.success
? 'Completed successfully in ' + formatDuration(data.duration)
: 'Operation failed';
appendLog(msg, status);
if (data.total_processed) {
appendLog('Total processed: ' + data.total_processed.toLocaleString() + ' rows', 'info');
}
if (data.speed) {
appendLog('Speed: ' + Math.round(data.speed).toLocaleString() + ' rows/sec', 'info');
}
if (data.stats) {
updateStats(data.stats);
}
setWsStatus('disconnected', 'Complete');
ws.close();
if (data.success) {
setTimeout(function() { window.location.reload(); }, 1500);
}
break;
case 'error':
appendLog('Error: ' + data.message, 'error');
setWsStatus('disconnected', 'Error');
break;
default:
appendLog(JSON.stringify(data), 'info');
}
} catch (e) {
appendLog('Received: ' + event.data, 'info');
}
};
ws.onerror = function(error) {
appendLog('WebSocket error occurred', 'error');
setWsStatus('disconnected', 'Error');
};
ws.onclose = function(event) {
if (event.code !== 1000) {
const reason = event.code === 4001 ? 'Not authenticated'
: event.code === 4002 ? 'Migration code not verified'
: event.code === 4003 ? 'Superuser access required'
: 'Connection closed (code: ' + event.code + ')';
appendLog(reason, 'error');
}
setWsStatus('disconnected', 'Disconnected');
};
}
// Check if we should auto-start a WebSocket action
{% if ws_action %}
startWebSocket('{{ ws_action }}');
{% endif %}
// Expose for manual triggering if needed
window.startMigrationWs = startWebSocket;
})();
</script>
</body>
</html>

View File

@@ -1,21 +0,0 @@
"""URL configuration for migration mode."""
from __future__ import annotations
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.urls import include
from django.urls import path
from paperless_migration import views
urlpatterns = [
    # Custom login that additionally requires the one-time access code;
    # registered under allauth's "account_login" name so redirects land here.
    path("accounts/login/", views.migration_login, name="account_login"),
    path("accounts/", include("allauth.urls")),
    path("migration/", views.migration_home, name="migration_home"),
    # Redirect root to migration home
    path("", views.migration_home, name="home"),
]
# Serve static assets directly when running in DEBUG mode.
if settings.DEBUG:
    urlpatterns += staticfiles_urlpatterns()

View File

@@ -1,132 +0,0 @@
"""Views for migration mode web interface."""
from __future__ import annotations
from pathlib import Path
from typing import TYPE_CHECKING
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import authenticate
from django.contrib.auth import login
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseForbidden
from django.shortcuts import redirect
from django.shortcuts import render
from django.views.decorators.http import require_http_methods
if TYPE_CHECKING:
from django.http import HttpRequest
from django.http import HttpResponse
def _check_migration_access(request: HttpRequest) -> HttpResponse | None:
    """Validate migration-mode access for a request.

    Returns:
        An HTTP 403 response describing the failure, or None when the
        session has a verified access code and the user is a superuser.
    """
    code_verified = request.session.get("migration_code_ok")
    if not code_verified:
        return HttpResponseForbidden("Access code required")
    if request.user.is_superuser:
        return None
    return HttpResponseForbidden("Superuser access required")
@login_required
@require_http_methods(["GET", "POST"])
def migration_home(request: HttpRequest) -> HttpResponse:
    """Main migration dashboard view.

    GET renders the dashboard with the current state of the three migration
    artifacts (export file, transformed file, imported marker). POST handles
    the dashboard actions ("check", "upload", "transform", "import",
    "reset_transform") and then redirects back (POST/redirect/GET).
    """
    error_response = _check_migration_access(request)
    if error_response:
        return error_response
    export_path = Path(settings.MIGRATION_EXPORT_PATH)
    transformed_path = Path(settings.MIGRATION_TRANSFORMED_PATH)
    imported_marker = Path(settings.MIGRATION_IMPORTED_PATH)
    if request.method == "POST":
        action = request.POST.get("action")
        if action == "check":
            # No-op action: the redirect re-renders the page, refreshing the
            # file-existence flags in the context below.
            messages.success(request, "Checked export paths.")
        elif action == "upload":
            upload = request.FILES.get("export_file")
            if not upload:
                messages.error(request, "No file selected.")
            else:
                try:
                    export_path.parent.mkdir(parents=True, exist_ok=True)
                    # Stream the upload to disk chunk-by-chunk to avoid
                    # holding large export files fully in memory.
                    with export_path.open("wb") as dest:
                        for chunk in upload.chunks():
                            dest.write(chunk)
                    messages.success(request, f"Uploaded to {export_path}.")
                except Exception as exc:
                    messages.error(request, f"Failed to save file: {exc}")
        elif action == "transform":
            # Re-running the transform invalidates any previous import.
            if imported_marker.exists():
                imported_marker.unlink()
            # Signal to start WebSocket connection for transform
            request.session["start_ws_action"] = "transform"
            messages.info(request, "Starting transform via WebSocket...")
        elif action == "import":
            # Signal to start WebSocket connection for import
            request.session["start_ws_action"] = "import"
            messages.info(request, "Starting import via WebSocket...")
        elif action == "reset_transform":
            if transformed_path.exists():
                try:
                    transformed_path.unlink()
                    messages.success(request, "Transformed file deleted.")
                except Exception as exc:
                    messages.error(request, f"Failed to delete transformed file: {exc}")
            # Best-effort cleanup of the import marker so step 3 resets too.
            if imported_marker.exists():
                try:
                    imported_marker.unlink()
                except Exception:
                    pass
        else:
            messages.error(request, "Unknown action.")
        return redirect("migration_home")
    # One-shot flag set by the POST actions above; tells the template to
    # open the corresponding WebSocket on this render, then clears itself.
    ws_action = request.session.pop("start_ws_action", None)
    context = {
        "export_path": export_path,
        "export_exists": export_path.exists(),
        "transformed_path": transformed_path,
        "transformed_exists": transformed_path.exists(),
        "imported_exists": imported_marker.exists(),
        "ws_action": ws_action,
    }
    return render(request, "paperless_migration/migration_home.html", context)
@require_http_methods(["GET", "POST"])
def migration_login(request: HttpRequest) -> HttpResponse:
    """Migration-specific login view requiring access code.

    On top of the username/password check, the submitted one-time code must
    match settings.MIGRATION_ACCESS_CODE and the user must be a superuser.
    A successful login marks the session as code-verified, which
    _check_migration_access relies on for subsequent requests.
    """
    if request.method == "POST":
        username = request.POST.get("login", "")
        password = request.POST.get("password", "")
        code = request.POST.get("code", "")
        # NOTE(review): plain != is not a constant-time comparison; consider
        # secrets.compare_digest if timing attacks are a concern here.
        if not code or code != settings.MIGRATION_ACCESS_CODE:
            messages.error(request, "One-time code is required.")
            return redirect("account_login")
        user = authenticate(request, username=username, password=password)
        if user is None:
            messages.error(request, "Invalid username or password.")
            return redirect("account_login")
        if not user.is_superuser:
            messages.error(request, "Superuser access required.")
            return redirect("account_login")
        login(request, user)
        # Flag consumed by _check_migration_access on later requests.
        request.session["migration_code_ok"] = True
        return redirect(settings.LOGIN_REDIRECT_URL)
    return render(request, "account/login.html")

View File

@@ -1,7 +0,0 @@
"""WSGI entrypoint for the migration-mode Django instance."""
import os

from django.core.wsgi import get_wsgi_application

# Fall back to the migration settings module unless the environment
# already specifies one.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless_migration.settings")
application = get_wsgi_application()

View File

@@ -12,6 +12,9 @@ from paperless_tika.parsers import TikaDocumentParser
reason="No Gotenberg/Tika servers to test with", reason="No Gotenberg/Tika servers to test with",
) )
@pytest.mark.django_db() @pytest.mark.django_db()
@pytest.mark.live
@pytest.mark.gotenberg
@pytest.mark.tika
class TestTikaParserAgainstServer: class TestTikaParserAgainstServer:
""" """
This test case tests the Tika parsing against a live tika server, This test case tests the Tika parsing against a live tika server,

View File

@@ -128,6 +128,8 @@ class TestTikaParser:
request = httpx_mock.get_request() request = httpx_mock.get_request()
assert request is not None
expected_field_name = "pdfa" expected_field_name = "pdfa"
content_type = request.headers["Content-Type"] content_type = request.headers["Content-Type"]

89
uv.lock generated
View File

@@ -1305,7 +1305,7 @@ name = "exceptiongroup"
version = "1.3.1" version = "1.3.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux')" }, { name = "typing-extensions", marker = "(python_full_version < '3.13' and platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or (python_full_version < '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.12' and platform_machine == 'x86_64' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'darwin')" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" }
wheels = [ wheels = [
@@ -1933,82 +1933,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
] ]
[[package]]
name = "ijson"
version = "3.4.0.post0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/2d/30/7ab4b9e88e7946f6beef419f74edcc541df3ea562c7882257b4eaa82417d/ijson-3.4.0.post0.tar.gz", hash = "sha256:9aa02dc70bb245670a6ca7fba737b992aeeb4895360980622f7e568dbf23e41e", size = 67216, upload-time = "2025-10-10T05:29:25.62Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b5/15/4f4921ed9ab94032fd0b03ecb211ff9dbd5cc9953463f5b5c4ddeab406fc/ijson-3.4.0.post0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8f904a405b58a04b6ef0425f1babbc5c65feb66b0a4cc7f214d4ad7de106f77d", size = 88244, upload-time = "2025-10-10T05:27:42.001Z" },
{ url = "https://files.pythonhosted.org/packages/af/d6/b85d4da1752362a789bc3e0fc4b55e812a374a50d2fe1c06cab2e2bcb170/ijson-3.4.0.post0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a07dcc1a8a1ddd76131a7c7528cbd12951c2e34eb3c3d63697b905069a2d65b1", size = 59880, upload-time = "2025-10-10T05:27:44.791Z" },
{ url = "https://files.pythonhosted.org/packages/c3/96/e1027e6d0efb5b9192bdc9f0af5633c20a56999cce4cf7ad35427f823138/ijson-3.4.0.post0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ab3be841b8c430c1883b8c0775eb551f21b5500c102c7ee828afa35ddd701bdd", size = 59939, upload-time = "2025-10-10T05:27:45.66Z" },
{ url = "https://files.pythonhosted.org/packages/e3/71/b9ca0a19afb2f36be35c6afa2c4d1c19950dc45f6a50b483b56082b3e165/ijson-3.4.0.post0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:43059ae0d657b11c5ddb11d149bc400c44f9e514fb8663057e9b2ea4d8d44c1f", size = 125894, upload-time = "2025-10-10T05:27:46.551Z" },
{ url = "https://files.pythonhosted.org/packages/02/1b/f7356de078d85564829c5e2a2a31473ee0ad1876258ceecf550b582e57b7/ijson-3.4.0.post0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0d3e82963096579d1385c06b2559570d7191e225664b7fa049617da838e1a4a4", size = 132385, upload-time = "2025-10-10T05:27:48Z" },
{ url = "https://files.pythonhosted.org/packages/57/7b/08f86eed5df0849b673260dd2943b6a7367a55b5a4b6e73ddbfbdf4206f1/ijson-3.4.0.post0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:461ce4e87a21a261b60c0a68a2ad17c7dd214f0b90a0bec7e559a66b6ae3bd7e", size = 129567, upload-time = "2025-10-10T05:27:49.188Z" },
{ url = "https://files.pythonhosted.org/packages/96/e1/69672d95b1a16e7c6bf89cef6c892b228cc84b484945a731786a425700d2/ijson-3.4.0.post0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:890cf6610c9554efcb9765a93e368efeb5bb6135f59ce0828d92eaefff07fde5", size = 132821, upload-time = "2025-10-10T05:27:50.342Z" },
{ url = "https://files.pythonhosted.org/packages/0b/15/9ed4868e2e92db2454508f7ea1282bec0b039bd344ac0cbac4a2de16786d/ijson-3.4.0.post0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6793c29a5728e7751a7df01be58ba7da9b9690c12bf79d32094c70a908fa02b9", size = 127757, upload-time = "2025-10-10T05:27:51.203Z" },
{ url = "https://files.pythonhosted.org/packages/5b/aa/08a308d3aaa6e98511f3100f8a1e4e8ff8c853fa4ec3f18b71094ac36bbe/ijson-3.4.0.post0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a56b6674d7feec0401c91f86c376f4e3d8ff8129128a8ad21ca43ec0b1242f79", size = 130439, upload-time = "2025-10-10T05:27:52.123Z" },
{ url = "https://files.pythonhosted.org/packages/a7/ac/3d57249d4acba66a33eaef794edb5b2a2222ca449ae08800f8abe9286645/ijson-3.4.0.post0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b473112e72c0c506da425da3278367b6680f340ecc093084693a1e819d28435", size = 88278, upload-time = "2025-10-10T05:27:55.403Z" },
{ url = "https://files.pythonhosted.org/packages/12/fb/2d068d23d1a665f500282ceb6f2473952a95fc7107d739fd629b4ab41959/ijson-3.4.0.post0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:043f9b7cf9cc744263a78175e769947733710d2412d25180df44b1086b23ebd5", size = 59898, upload-time = "2025-10-10T05:27:56.361Z" },
{ url = "https://files.pythonhosted.org/packages/26/3d/8b14589dfb0e5dbb7bcf9063e53d3617c041cf315ff3dfa60945382237ce/ijson-3.4.0.post0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b55e49045f4c8031f3673f56662fd828dc9e8d65bd3b03a9420dda0d370e64ba", size = 59945, upload-time = "2025-10-10T05:27:57.581Z" },
{ url = "https://files.pythonhosted.org/packages/77/57/086a75094397d4b7584698a540a279689e12905271af78cdfc903bf9eaf8/ijson-3.4.0.post0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:11f13b73194ea2a5a8b4a2863f25b0b4624311f10db3a75747b510c4958179b0", size = 131318, upload-time = "2025-10-10T05:27:58.453Z" },
{ url = "https://files.pythonhosted.org/packages/df/35/7f61e9ce4a9ff1306ec581eb851f8a660439126d92ee595c6dc8084aac97/ijson-3.4.0.post0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:659acb2843433e080c271ecedf7d19c71adde1ee5274fc7faa2fec0a793f9f1c", size = 137990, upload-time = "2025-10-10T05:27:59.328Z" },
{ url = "https://files.pythonhosted.org/packages/59/bf/590bbc3c3566adce5e2f43ba5894520cbaf19a3e7f38c1250926ba67eee4/ijson-3.4.0.post0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:deda4cfcaafa72ca3fa845350045b1d0fef9364ec9f413241bb46988afbe6ee6", size = 134416, upload-time = "2025-10-10T05:28:00.317Z" },
{ url = "https://files.pythonhosted.org/packages/24/c1/fb719049851979df71f3e039d6f1a565d349c9cb1b29c0f8775d9db141b4/ijson-3.4.0.post0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47352563e8c594360bacee2e0753e97025f0861234722d02faace62b1b6d2b2a", size = 138034, upload-time = "2025-10-10T05:28:01.627Z" },
{ url = "https://files.pythonhosted.org/packages/10/ce/ccda891f572876aaf2c43f0b2079e31d5b476c3ae53196187eab1a788eff/ijson-3.4.0.post0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5a48b9486242d1295abe7fd0fbb6308867da5ca3f69b55c77922a93c2b6847aa", size = 132510, upload-time = "2025-10-10T05:28:03.141Z" },
{ url = "https://files.pythonhosted.org/packages/11/b5/ca8e64ab7cf5252f358e467be767630f085b5bbcd3c04333a3a5f36c3dd3/ijson-3.4.0.post0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9c0886234d1fae15cf4581a430bdba03d79251c1ab3b07e30aa31b13ef28d01c", size = 134907, upload-time = "2025-10-10T05:28:04.438Z" },
{ url = "https://files.pythonhosted.org/packages/7d/fe/3b6af0025288e769dbfa30485dae1b3bd3f33f00390f3ee532cbb1c33e9b/ijson-3.4.0.post0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b607a500fca26101be47d2baf7cddb457b819ab60a75ce51ed1092a40da8b2f9", size = 87847, upload-time = "2025-10-10T05:28:07.229Z" },
{ url = "https://files.pythonhosted.org/packages/6e/a5/95ee2ca82f3b1a57892452f6e5087607d56c620beb8ce625475194568698/ijson-3.4.0.post0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4827d9874a6a81625412c59f7ca979a84d01f7f6bfb3c6d4dc4c46d0382b14e0", size = 59815, upload-time = "2025-10-10T05:28:08.448Z" },
{ url = "https://files.pythonhosted.org/packages/51/8d/5a704ab3c17c55c21c86423458db8610626ca99cc9086a74dfeb7ee9054c/ijson-3.4.0.post0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d4d4afec780881edb2a0d2dd40b1cdbe246e630022d5192f266172a0307986a7", size = 59648, upload-time = "2025-10-10T05:28:09.307Z" },
{ url = "https://files.pythonhosted.org/packages/25/56/ca5d6ca145d007f30b44e747f3c163bc08710ce004af0deaad4a2301339b/ijson-3.4.0.post0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:432fb60ffb952926f9438e0539011e2dfcd108f8426ee826ccc6173308c3ff2c", size = 138279, upload-time = "2025-10-10T05:28:10.489Z" },
{ url = "https://files.pythonhosted.org/packages/c3/d3/22e3cc806fcdda7ad4c8482ed74db7a017d4a1d49b4300c7bc07052fb561/ijson-3.4.0.post0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:54a0e3e05d9a0c95ecba73d9579f146cf6d5c5874116c849dba2d39a5f30380e", size = 149110, upload-time = "2025-10-10T05:28:12.263Z" },
{ url = "https://files.pythonhosted.org/packages/3e/04/efb30f413648b9267f5a33920ac124d7ebef3bc4063af8f6ffc8ca11ddcb/ijson-3.4.0.post0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05807edc0bcbd222dc6ea32a2b897f0c81dc7f12c8580148bc82f6d7f5e7ec7b", size = 149026, upload-time = "2025-10-10T05:28:13.557Z" },
{ url = "https://files.pythonhosted.org/packages/2d/cf/481165f7046ade32488719300a3994a437020bc41cfbb54334356348f513/ijson-3.4.0.post0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a5269af16f715855d9864937f9dd5c348ca1ac49cee6a2c7a1b7091c159e874f", size = 150012, upload-time = "2025-10-10T05:28:14.859Z" },
{ url = "https://files.pythonhosted.org/packages/0f/24/642e3289917ecf860386e26dfde775f9962d26ab7f6c2e364ed3ca3c25d8/ijson-3.4.0.post0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b200df83c901f5bfa416d069ac71077aa1608f854a4c50df1b84ced560e9c9ec", size = 142193, upload-time = "2025-10-10T05:28:16.131Z" },
{ url = "https://files.pythonhosted.org/packages/0f/f5/fd2f038abe95e553e1c3ee207cda19db9196eb416e63c7c89699a8cf0db7/ijson-3.4.0.post0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6458bd8e679cdff459a0a5e555b107c3bbacb1f382da3fe0f40e392871eb518d", size = 150904, upload-time = "2025-10-10T05:28:17.401Z" },
{ url = "https://files.pythonhosted.org/packages/1b/20/aaec6977f9d538bbadd760c7fa0f6a0937742abdcc920ec6478a8576e55f/ijson-3.4.0.post0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:114ed248166ac06377e87a245a158d6b98019d2bdd3bb93995718e0bd996154f", size = 87863, upload-time = "2025-10-10T05:28:20.786Z" },
{ url = "https://files.pythonhosted.org/packages/5b/29/06bf56a866e2fe21453a1ad8f3a5d7bca3c723f73d96329656dfee969783/ijson-3.4.0.post0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ffb21203736b08fe27cb30df6a4f802fafb9ef7646c5ff7ef79569b63ea76c57", size = 59806, upload-time = "2025-10-10T05:28:21.596Z" },
{ url = "https://files.pythonhosted.org/packages/ba/ae/e1d0fda91ba7a444b75f0d60cb845fdb1f55d3111351529dcbf4b1c276fe/ijson-3.4.0.post0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:07f20ecd748602ac7f18c617637e53bd73ded7f3b22260bba3abe401a7fc284e", size = 59643, upload-time = "2025-10-10T05:28:22.45Z" },
{ url = "https://files.pythonhosted.org/packages/4d/24/5a24533be2726396cc1724dc237bada09b19715b5bfb0e7b9400db0901ad/ijson-3.4.0.post0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:27aa193d47ffc6bc4e45453896ad98fb089a367e8283b973f1fe5c0198b60b4e", size = 138082, upload-time = "2025-10-10T05:28:23.319Z" },
{ url = "https://files.pythonhosted.org/packages/05/60/026c3efcec23c329657e878cbc0a9a25b42e7eb3971e8c2377cb3284e2b7/ijson-3.4.0.post0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ccddb2894eb7af162ba43b9475ac5825d15d568832f82eb8783036e5d2aebd42", size = 149145, upload-time = "2025-10-10T05:28:24.279Z" },
{ url = "https://files.pythonhosted.org/packages/ed/c2/036499909b7a1bc0bcd85305e4348ad171aeb9df57581287533bdb3497e9/ijson-3.4.0.post0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:61ab0b8c5bf707201dc67e02c116f4b6545c4afd7feb2264b989d242d9c4348a", size = 149046, upload-time = "2025-10-10T05:28:25.186Z" },
{ url = "https://files.pythonhosted.org/packages/ba/75/e7736073ad96867c129f9e799e3e65086badd89dbf3911f76d9b3bf8a115/ijson-3.4.0.post0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:254cfb8c124af68327a0e7a49b50bbdacafd87c4690a3d62c96eb01020a685ef", size = 150356, upload-time = "2025-10-10T05:28:26.135Z" },
{ url = "https://files.pythonhosted.org/packages/9d/1b/1c1575d2cda136985561fcf774fe6c54412cd0fa08005342015af0403193/ijson-3.4.0.post0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:04ac9ca54db20f82aeda6379b5f4f6112fdb150d09ebce04affeab98a17b4ed3", size = 142322, upload-time = "2025-10-10T05:28:27.125Z" },
{ url = "https://files.pythonhosted.org/packages/28/4d/aba9871feb624df8494435d1a9ddc7b6a4f782c6044bfc0d770a4b59f145/ijson-3.4.0.post0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a603d7474bf35e7b3a8e49c8dabfc4751841931301adff3f3318171c4e407f32", size = 151386, upload-time = "2025-10-10T05:28:28.274Z" },
{ url = "https://files.pythonhosted.org/packages/c7/89/4344e176f2c5f5ef3251c9bfa4ddd5b4cf3f9601fd6ec3f677a3ba0b9c71/ijson-3.4.0.post0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:45a0b1c833ed2620eaf8da958f06ac8351c59e5e470e078400d23814670ed708", size = 92342, upload-time = "2025-10-10T05:28:31.389Z" },
{ url = "https://files.pythonhosted.org/packages/d4/b1/85012c586a6645f9fb8bfa3ef62ed2f303c8d73fc7c2f705111582925980/ijson-3.4.0.post0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7809ec8c8f40228edaaa089f33e811dff4c5b8509702652870d3f286c9682e27", size = 62028, upload-time = "2025-10-10T05:28:32.849Z" },
{ url = "https://files.pythonhosted.org/packages/65/ea/7b7e2815c101d78b33e74d64ddb70cccc377afccd5dda76e566ed3fcb56f/ijson-3.4.0.post0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cf4a34c2cfe852aee75c89c05b0a4531c49dc0be27eeed221afd6fbf9c3e149c", size = 61773, upload-time = "2025-10-10T05:28:34.016Z" },
{ url = "https://files.pythonhosted.org/packages/59/7d/2175e599cb77a64f528629bad3ce95dfdf2aa6171d313c1fc00bbfaf0d22/ijson-3.4.0.post0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a39d5d36067604b26b78de70b8951c90e9272450642661fe531a8f7a6936a7fa", size = 198562, upload-time = "2025-10-10T05:28:34.878Z" },
{ url = "https://files.pythonhosted.org/packages/13/97/82247c501c92405bb2fc44ab5efb497335bcb9cf0f5d3a0b04a800737bd8/ijson-3.4.0.post0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83fc738d81c9ea686b452996110b8a6678296c481e0546857db24785bff8da92", size = 216212, upload-time = "2025-10-10T05:28:36.208Z" },
{ url = "https://files.pythonhosted.org/packages/95/ca/b956f507bb02e05ce109fd11ab6a2c054f8b686cc5affe41afe50630984d/ijson-3.4.0.post0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b2a81aee91633868f5b40280e2523f7c5392e920a5082f47c5e991e516b483f6", size = 206618, upload-time = "2025-10-10T05:28:37.243Z" },
{ url = "https://files.pythonhosted.org/packages/3e/12/e827840ab81d86a9882e499097934df53294f05155f1acfcb9a211ac1142/ijson-3.4.0.post0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:56169e298c5a2e7196aaa55da78ddc2415876a74fe6304f81b1eb0d3273346f7", size = 210689, upload-time = "2025-10-10T05:28:38.252Z" },
{ url = "https://files.pythonhosted.org/packages/1b/3b/59238d9422c31a4aefa22ebeb8e599e706158a0ab03669ef623be77a499a/ijson-3.4.0.post0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:eeb9540f0b1a575cbb5968166706946458f98c16e7accc6f2fe71efa29864241", size = 199927, upload-time = "2025-10-10T05:28:39.233Z" },
{ url = "https://files.pythonhosted.org/packages/b6/0f/ec01c36c128c37edb8a5ae8f3de3256009f886338d459210dfe121ee4ba9/ijson-3.4.0.post0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ba3478ff0bb49d7ba88783f491a99b6e3fa929c930ab062d2bb7837e6a38fe88", size = 204455, upload-time = "2025-10-10T05:28:40.644Z" },
{ url = "https://files.pythonhosted.org/packages/af/0b/a4ce8524fd850302bbf5d9f38d07c0fa981fdbe44951d2fcd036935b67dd/ijson-3.4.0.post0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da6a21b88cbf5ecbc53371283988d22c9643aa71ae2873bbeaefd2dea3b6160b", size = 88361, upload-time = "2025-10-10T05:28:43.73Z" },
{ url = "https://files.pythonhosted.org/packages/be/90/a5e5f33e46f28174a9c8142d12dcb3d26ce358d9a2230b9b15f5c987b3a5/ijson-3.4.0.post0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cf24a48a1c3ca9d44a04feb59ccefeb9aa52bb49b9cb70ad30518c25cce74bb7", size = 59960, upload-time = "2025-10-10T05:28:44.585Z" },
{ url = "https://files.pythonhosted.org/packages/83/e2/551dd7037dda759aa0ce53f0d3d7be03b03c6b05c0b0a5d5ab7a47e6b4b1/ijson-3.4.0.post0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:d14427d366f95f21adcb97d0ed1f6d30f6fdc04d0aa1e4de839152c50c2b8d65", size = 59957, upload-time = "2025-10-10T05:28:45.748Z" },
{ url = "https://files.pythonhosted.org/packages/ac/b9/3006384f85cc26cf83dbbd542d362cc336f1e1ddd491e32147cfa46ea8ae/ijson-3.4.0.post0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:339d49f6c5d24051c85d9226be96d2d56e633cb8b7d09dd8099de8d8b51a97e2", size = 139967, upload-time = "2025-10-10T05:28:47.229Z" },
{ url = "https://files.pythonhosted.org/packages/77/3b/b5234add8115cbfe8635b6c152fb527327f45e4c0f0bf2e93844b36b5217/ijson-3.4.0.post0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7206afcb396aaef66c2b066997b4e9d9042c4b7d777f4d994e9cec6d322c2fe6", size = 149196, upload-time = "2025-10-10T05:28:48.226Z" },
{ url = "https://files.pythonhosted.org/packages/a2/d2/c4ae543e37d7a9fba09740c221976a63705dbad23a9cda9022fc9fa0f3de/ijson-3.4.0.post0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c8dd327da225887194fe8b93f2b3c9c256353e14a6b9eefc940ed17fde38f5b8", size = 148516, upload-time = "2025-10-10T05:28:49.237Z" },
{ url = "https://files.pythonhosted.org/packages/0d/a1/914b5fb1c26af2474cd04841626e0e95576499a4ca940661fb105ee12dd2/ijson-3.4.0.post0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4810546e66128af51fd4a0c9a640e84e8508e9c15c4f247d8a3e3253b20e1465", size = 149770, upload-time = "2025-10-10T05:28:50.501Z" },
{ url = "https://files.pythonhosted.org/packages/7a/c1/51c3584102d0d85d4aa10cc88dbbe431ecb9fe98160a9e2fad62a4456aed/ijson-3.4.0.post0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:103a0838061297d063bca81d724b0958b616f372bd893bbc278320152252c652", size = 143688, upload-time = "2025-10-10T05:28:51.823Z" },
{ url = "https://files.pythonhosted.org/packages/47/3d/a54f13d766332620bded8ee76bcdd274509ecc53cf99573450f95b3ad910/ijson-3.4.0.post0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:40007c977e230e04118b27322f25a72ae342a3d61464b2057fcd9b21eeb7427a", size = 150688, upload-time = "2025-10-10T05:28:52.757Z" },
{ url = "https://files.pythonhosted.org/packages/69/1c/8a199fded709e762aced89bb7086973c837e432dd714bbad78a6ac789c23/ijson-3.4.0.post0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:226447e40ca9340a39ed07d68ea02ee14b52cb4fe649425b256c1f0073531c83", size = 92345, upload-time = "2025-10-10T05:28:55.657Z" },
{ url = "https://files.pythonhosted.org/packages/be/60/04e97f6a403203bd2eb8849570bdce5719d696b5fb96aa2a62566fe7a1d9/ijson-3.4.0.post0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2c88f0669d45d4b1aa017c9b68d378e7cd15d188dfb6f0209adc78b7f45590a7", size = 62029, upload-time = "2025-10-10T05:28:56.561Z" },
{ url = "https://files.pythonhosted.org/packages/2a/97/e88295f9456ba939d90d4603af28fcabda3b443ef55e709e9381df3daa58/ijson-3.4.0.post0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:56b3089dc28c12492d92cc4896d2be585a89ecae34e25d08c1df88f21815cb50", size = 61776, upload-time = "2025-10-10T05:28:57.401Z" },
{ url = "https://files.pythonhosted.org/packages/1b/9f/0e9c236e720c2de887ab0d7cad8a15d2aa55fb449f792437fc99899957a9/ijson-3.4.0.post0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c117321cfa7b749cc1213f9b4c80dc958f0a206df98ec038ae4bcbbdb8463a15", size = 199808, upload-time = "2025-10-10T05:28:58.62Z" },
{ url = "https://files.pythonhosted.org/packages/0e/70/c21de30e7013e074924cd82057acfc5760e7b2cc41180f80770621b0ad36/ijson-3.4.0.post0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8311f48db6a33116db5c81682f08b6e2405501a4b4e460193ae69fec3cd1f87a", size = 217152, upload-time = "2025-10-10T05:28:59.656Z" },
{ url = "https://files.pythonhosted.org/packages/64/78/63a0bcc0707037df4e22bb836451279d850592258c859685a402c27f5d6d/ijson-3.4.0.post0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:91c61a3e63e04da648737e6b4abd537df1b46fb8cdf3219b072e790bb3c1a46b", size = 207663, upload-time = "2025-10-10T05:29:00.73Z" },
{ url = "https://files.pythonhosted.org/packages/7d/85/834e9838d69893cb7567e1210be044444213c78f7414aaf1cd241df16078/ijson-3.4.0.post0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1709171023ce82651b2f132575c2e6282e47f64ad67bd3260da476418d0e7895", size = 211157, upload-time = "2025-10-10T05:29:01.87Z" },
{ url = "https://files.pythonhosted.org/packages/2e/9b/9fda503799ebc30397710552e5dedc1d98d9ea6a694e5717415892623a94/ijson-3.4.0.post0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:5f0a72b1e3c0f78551670c12b2fdc1bf05f2796254d9c2055ba319bec2216020", size = 200231, upload-time = "2025-10-10T05:29:02.883Z" },
{ url = "https://files.pythonhosted.org/packages/15/f3/6419d1d5795a16591233d3aa3747b084e82c0c1d7184bdad9be638174560/ijson-3.4.0.post0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b982a3597b0439ce9c8f4cfc929d86c6ed43907908be1e8463a34dc35fe5b258", size = 204825, upload-time = "2025-10-10T05:29:04.242Z" },
{ url = "https://files.pythonhosted.org/packages/43/66/27cfcea16e85b95e33814eae2052dab187206b8820cdd90aa39d32ffb441/ijson-3.4.0.post0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:add9242f886eae844a7410b84aee2bbb8bdc83c624f227cb1fdb2d0476a96cb1", size = 57029, upload-time = "2025-10-10T05:29:19.733Z" },
{ url = "https://files.pythonhosted.org/packages/b8/1b/df3f1561c6629241fb2f8bd7ea1da14e3c2dd16fe9d7cbc97120870ed09c/ijson-3.4.0.post0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:69718ed41710dfcaa7564b0af42abc05875d4f7aaa24627c808867ef32634bc7", size = 56523, upload-time = "2025-10-10T05:29:20.641Z" },
{ url = "https://files.pythonhosted.org/packages/39/0a/6c6a3221ddecf62b696fde0e864415237e05b9a36ab6685a606b8fb3b5a2/ijson-3.4.0.post0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:636b6eca96c6c43c04629c6b37fad0181662eaacf9877c71c698485637f752f9", size = 70546, upload-time = "2025-10-10T05:29:21.526Z" },
{ url = "https://files.pythonhosted.org/packages/42/cb/edf69755e86a3a9f8b418efd60239cb308af46c7c8e12f869423f51c9851/ijson-3.4.0.post0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb5e73028f6e63d27b3d286069fe350ed80a4ccc493b022b590fea4bb086710d", size = 70532, upload-time = "2025-10-10T05:29:22.718Z" },
{ url = "https://files.pythonhosted.org/packages/96/7e/c8730ea39b8712622cd5a1bdff676098208400e37bb92052ba52f93e2aa1/ijson-3.4.0.post0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:461acf4320219459dabe5ed90a45cb86c9ba8cc6d6db9dad0d9427d42f57794c", size = 67927, upload-time = "2025-10-10T05:29:23.596Z" },
]
[[package]] [[package]]
name = "imagehash" name = "imagehash"
version = "4.3.2" version = "4.3.2"
@@ -3267,7 +3191,6 @@ dependencies = [
{ name = "flower", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "flower", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "gotenberg-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "gotenberg-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "httpx-oauth", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "httpx-oauth", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "ijson", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "imap-tools", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "imap-tools", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "jinja2", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "jinja2", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "langdetect", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "langdetect", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -3291,7 +3214,6 @@ dependencies = [
{ name = "rapidfuzz", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "rapidfuzz", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "redis", extra = ["hiredis"], marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "redis", extra = ["hiredis"], marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "regex", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "regex", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "rich", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "scikit-learn", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "scikit-learn", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "sentence-transformers", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "sentence-transformers", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "setproctitle", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "setproctitle", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -3418,8 +3340,6 @@ requires-dist = [
{ name = "gotenberg-client", specifier = "~=0.13.1" }, { name = "gotenberg-client", specifier = "~=0.13.1" },
{ name = "granian", extras = ["uvloop"], marker = "extra == 'webserver'", specifier = "~=2.6.0" }, { name = "granian", extras = ["uvloop"], marker = "extra == 'webserver'", specifier = "~=2.6.0" },
{ name = "httpx-oauth", specifier = "~=0.16" }, { name = "httpx-oauth", specifier = "~=0.16" },
{ name = "ijson" },
{ name = "ijson", specifier = "~=3.3" },
{ name = "imap-tools", specifier = "~=1.11.0" }, { name = "imap-tools", specifier = "~=1.11.0" },
{ name = "jinja2", specifier = "~=3.1.5" }, { name = "jinja2", specifier = "~=3.1.5" },
{ name = "langdetect", specifier = "~=1.0.9" }, { name = "langdetect", specifier = "~=1.0.9" },
@@ -3449,7 +3369,6 @@ requires-dist = [
{ name = "rapidfuzz", specifier = "~=3.14.0" }, { name = "rapidfuzz", specifier = "~=3.14.0" },
{ name = "redis", extras = ["hiredis"], specifier = "~=5.2.1" }, { name = "redis", extras = ["hiredis"], specifier = "~=5.2.1" },
{ name = "regex", specifier = ">=2025.9.18" }, { name = "regex", specifier = ">=2025.9.18" },
{ name = "rich", specifier = "~=14.1.0" },
{ name = "scikit-learn", specifier = "~=1.7.0" }, { name = "scikit-learn", specifier = "~=1.7.0" },
{ name = "sentence-transformers", specifier = ">=4.1" }, { name = "sentence-transformers", specifier = ">=4.1" },
{ name = "setproctitle", specifier = "~=1.3.4" }, { name = "setproctitle", specifier = "~=1.3.4" },
@@ -4743,15 +4662,15 @@ wheels = [
[[package]] [[package]]
name = "rich" name = "rich"
version = "14.1.0" version = "14.3.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "markdown-it-py", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "markdown-it-py", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z" } sdist = { url = "https://files.pythonhosted.org/packages/a1/84/4831f881aa6ff3c976f6d6809b58cdfa350593ffc0dc3c58f5f6586780fb/rich-14.3.1.tar.gz", hash = "sha256:b8c5f568a3a749f9290ec6bddedf835cec33696bfc1e48bcfecb276c7386e4b8", size = 230125, upload-time = "2026-01-24T21:40:44.847Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368, upload-time = "2025-07-25T07:32:56.73Z" }, { url = "https://files.pythonhosted.org/packages/87/2a/a1810c8627b9ec8c57ec5ec325d306701ae7be50235e8fd81266e002a3cc/rich-14.3.1-py3-none-any.whl", hash = "sha256:da750b1aebbff0b372557426fb3f35ba56de8ef954b3190315eb64076d6fb54e", size = 309952, upload-time = "2026-01-24T21:40:42.969Z" },
] ]
[[package]] [[package]]