Mirror of https://github.com/paperless-ngx/paperless-ngx.git, synced 2026-02-11 23:59:31 -06:00

Compare commits: feature-zx...feature-do (65 commits)
| SHA1 |
|---|
| fe6b3a1a41 |
| 65bf55f610 |
| 8391469b1c |
| a4f448e930 |
| c2b4787c45 |
| 865c79a9cc |
| 19171b1641 |
| 64e95d9903 |
| 6092ea8ee8 |
| 9cd71de89d |
| 06b5c22858 |
| b1f2606022 |
| 5a0a8a58b3 |
| 1a47f3801f |
| 23390d0890 |
| 8b663393c2 |
| 640025f2a9 |
| e0a1688be8 |
| ddbf9982a5 |
| d36a64d3fe |
| 4e70f304fe |
| 8eb931f6f6 |
| 1d0e80c784 |
| 8b722a3db5 |
| 9d3e62ff16 |
| d81748b39d |
| 775e32bf3b |
| daa4586eeb |
| 8014932419 |
| 7fa400f486 |
| 43480bb611 |
| 99199efb5f |
| bfb65a1eb8 |
| b676397b80 |
| 5dd2e1040d |
| f7413506f3 |
| 40d5f8f756 |
| a5c211cc0f |
| 667e4b81eb |
| 3a5a32771e |
| 79001c280d |
| 6ecd66da86 |
| 41d8854f56 |
| 57395ff99c |
| 90e3ed142f |
| 9ca80af42f |
| 224a873de2 |
| 719582938e |
| 9b0af67033 |
| 7f2789e323 |
| b436530e4f |
| 0ab94ab130 |
| ce5f5140f9 |
| d8cb07b4a6 |
| 1e48f9f9a9 |
| dc20db39e7 |
| 065f501272 |
| 339a4db893 |
| 0cc5f12cbf |
| e099998b2f |
| 521628c1c3 |
| 80ed84f538 |
| 2557c03463 |
| 9ed75561e7 |
| 02a7500696 |
@@ -64,6 +64,8 @@ ARG RUNTIME_PACKAGES="\
   libmagic1 \
   media-types \
   zlib1g \
+  # Barcode splitter
+  libzbar0 \
   poppler-utils \
   htop \
   sudo"
.github/dependabot.yml (vendored): 1 line changed

@@ -69,6 +69,7 @@ updates:
       patterns:
         - "ocrmypdf"
         - "pdf2image"
+        - "pyzbar"
        - "zxing-cpp"
        - "tika-client"
        - "gotenberg-client"
.github/workflows/ci-backend.yml (vendored): 2 lines changed

@@ -55,7 +55,7 @@ jobs:
        run: |
          sudo apt-get update -qq
          sudo apt-get install -qq --no-install-recommends \
-           unpaper tesseract-ocr imagemagick ghostscript poppler-utils
+           unpaper tesseract-ocr imagemagick ghostscript libzbar0 poppler-utils
      - name: Configure ImageMagick
        run: |
          sudo cp docker/rootfs/etc/ImageMagick-6/paperless-policy.xml /etc/ImageMagick-6/policy.xml
.github/workflows/ci-docs.yml (vendored): 4 lines changed

@@ -26,8 +26,8 @@ permissions:
   pages: write
   id-token: write
 env:
-  DEFAULT_UV_VERSION: "0.10.x"
-  DEFAULT_PYTHON_VERSION: "3.12"
+  DEFAULT_UV_VERSION: "0.9.x"
+  DEFAULT_PYTHON_VERSION: "3.11"
 jobs:
   build:
     name: Build Documentation
.github/workflows/ci-release.yml (vendored): 4 lines changed

@@ -8,8 +8,8 @@ concurrency:
   group: release-${{ github.ref }}
   cancel-in-progress: false
 env:
-  DEFAULT_UV_VERSION: "0.10.x"
-  DEFAULT_PYTHON_VERSION: "3.12"
+  DEFAULT_UV_VERSION: "0.9.x"
+  DEFAULT_PYTHON_VERSION: "3.11"
 jobs:
   wait-for-docker:
     name: Wait for Docker Build
@@ -20,6 +20,7 @@ src/documents/admin.py:0: error: Skipping analyzing "auditlog.models": module is
 src/documents/admin.py:0: error: Skipping analyzing "treenode.admin": module is installed, but missing library stubs or py.typed marker [import-untyped]
 src/documents/barcodes.py:0: error: "Image" has no attribute "filename" [attr-defined]
 src/documents/barcodes.py:0: error: Cannot find implementation or library stub for module named "zxingcpp" [import-not-found]
+src/documents/barcodes.py:0: error: Skipping analyzing "pyzbar": module is installed, but missing library stubs or py.typed marker [import-untyped]
 src/documents/bulk_download.py:0: error: Return type "None" of "add_document" incompatible with return type "Never" in supertype "BulkArchiveStrategy" [override]
 src/documents/bulk_download.py:0: error: Return type "None" of "add_document" incompatible with return type "Never" in supertype "BulkArchiveStrategy" [override]
 src/documents/bulk_download.py:0: error: Return type "None" of "add_document" incompatible with return type "Never" in supertype "BulkArchiveStrategy" [override]
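The two barcodes.py entries above illustrate the two import error classes mypy distinguishes: `import-untyped` (package installed, but no stubs or `py.typed` marker) and `import-not-found` (module not resolvable at all). For reference, a minimal hedged sketch of the inline suppressions corresponding to these codes — the project tracks them in this baseline file rather than ignoring them inline:

```python
# Sketch: inline, error-code-scoped suppressions for the two classes above.
# mypy flags pyzbar as import-untyped (installed, but ships no type info)
# and zxingcpp as import-not-found (no stubs and no analyzable source).
from pyzbar import pyzbar  # type: ignore[import-untyped]
import zxingcpp  # type: ignore[import-not-found]
```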
@@ -98,7 +99,6 @@ src/documents/conditionals.py:0: error: Function is missing a type annotation fo
 src/documents/conditionals.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
 src/documents/conditionals.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
 src/documents/consumer.py:0: error: "ConsumerPluginMixin" has no attribute "input_doc" [attr-defined]
 src/documents/consumer.py:0: error: "ConsumerPluginMixin" has no attribute "log" [attr-defined]
 src/documents/consumer.py:0: error: "ConsumerPluginMixin" has no attribute "metadata" [attr-defined]
 src/documents/consumer.py:0: error: "ConsumerPluginMixin" has no attribute "metadata" [attr-defined]
 src/documents/consumer.py:0: error: "ConsumerPluginMixin" has no attribute "metadata" [attr-defined]
@@ -345,18 +345,11 @@ src/documents/migrations/0001_initial.py:0: error: Skipping analyzing "multisele
 src/documents/migrations/0008_sharelinkbundle.py:0: error: Function is missing a type annotation [no-untyped-def]
 src/documents/migrations/0008_sharelinkbundle.py:0: error: Function is missing a type annotation [no-untyped-def]
 src/documents/models.py:0: error: Argument 1 to "Path" has incompatible type "Path | None"; expected "str | PathLike[str]" [arg-type]
 src/documents/models.py:0: error: Could not resolve manager type for "documents.models.Document.deleted_objects" [django-manager-missing]
 src/documents/models.py:0: error: Could not resolve manager type for "documents.models.Document.global_objects" [django-manager-missing]
 src/documents/models.py:0: error: Could not resolve manager type for "documents.models.Document.objects" [django-manager-missing]
 src/documents/models.py:0: error: Couldn't resolve related manager 'custom_fields' for relation 'documents.models.CustomFieldInstance.document'. [django-manager-missing]
 src/documents/models.py:0: error: Couldn't resolve related manager 'documents' for relation 'documents.models.Document.correspondent'. [django-manager-missing]
 src/documents/models.py:0: error: Couldn't resolve related manager 'documents' for relation 'documents.models.Document.document_type'. [django-manager-missing]
 src/documents/models.py:0: error: Couldn't resolve related manager 'documents' for relation 'documents.models.Document.storage_path'. [django-manager-missing]
 src/documents/models.py:0: error: Couldn't resolve related manager 'fields' for relation 'documents.models.CustomFieldInstance.field'. [django-manager-missing]
 src/documents/models.py:0: error: Couldn't resolve related manager 'notes' for relation 'documents.models.Note.document'. [django-manager-missing]
 src/documents/models.py:0: error: Couldn't resolve related manager 'runs' for relation 'documents.models.WorkflowRun.workflow'. [django-manager-missing]
 src/documents/models.py:0: error: Couldn't resolve related manager 'share_links' for relation 'documents.models.ShareLink.document'. [django-manager-missing]
 src/documents/models.py:0: error: Couldn't resolve related manager 'workflow_runs' for relation 'documents.models.WorkflowRun.document'. [django-manager-missing]
 src/documents/models.py:0: error: Function is missing a return type annotation [no-untyped-def]
 src/documents/models.py:0: error: Function is missing a return type annotation [no-untyped-def]
 src/documents/models.py:0: error: Function is missing a return type annotation [no-untyped-def]
@@ -983,10 +976,6 @@ src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annot
 src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annotation [no-untyped-def]
 src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annotation [no-untyped-def]
 src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annotation [no-untyped-def]
 src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annotation [no-untyped-def]
 src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
 src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
 src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
 src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
 src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
 src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
@@ -1012,7 +1001,6 @@ src/documents/tests/test_bulk_edit.py:0: error: Item "dict[Any, Any]" of "Group
 src/documents/tests/test_bulk_edit.py:0: error: Item "dict[Any, Any]" of "Group | dict[Any, Any]" has no attribute "count" [union-attr]
 src/documents/tests/test_bulk_edit.py:0: error: Too few arguments for "count" of "list" [call-arg]
 src/documents/tests/test_bulk_edit.py:0: error: Too few arguments for "count" of "list" [call-arg]
 src/documents/tests/test_bulk_edit.py:0: error: Unsupported operand types for - ("None" and "int") [operator]
 src/documents/tests/test_caching.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
 src/documents/tests/test_classifier.py:0: error: "None" has no attribute "classes_" [attr-defined]
 src/documents/tests/test_classifier.py:0: error: "None" has no attribute "classes_" [attr-defined]
@@ -154,6 +154,8 @@ ARG RUNTIME_PACKAGES="\
   libmagic1 \
   media-types \
   zlib1g \
+  # Barcode splitter
+  libzbar0 \
   poppler-utils"

 # Install basic runtime packages.
@@ -774,6 +774,7 @@ At this time, the library utilized for detection of barcodes supports the follow
 - QR Code
+- SQ Code

 You may check for updates on the [zbar library homepage](https://github.com/mchehab/zbar).
 For usage in Paperless, the type of barcode does not matter, only the contents of it.

 For how to enable barcode usage, see [the configuration](configuration.md#barcodes).
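Because only the decoded contents matter, the detection step reduces to extracting text from whatever symbols the library finds. A minimal sketch with pyzbar — the helper name and direct PIL usage are illustrative assumptions, not the project's actual code:

```python
from PIL import Image
from pyzbar import pyzbar


def decode_barcode_values(path: str) -> list[str]:
    """Return the decoded text of every barcode found in an image.

    Illustrative only: Paperless acts on the decoded value (a separator
    token or an ASN), regardless of which barcode type carried it.
    """
    image = Image.open(path)
    # pyzbar yields one result per detected symbol: the raw bytes in
    # `data` and the symbology name (QRCODE, CODE128, ...) in `type`.
    return [symbol.data.decode("utf-8") for symbol in pyzbar.decode(image)]
```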
@@ -1222,6 +1222,14 @@ using Python's `re.match()`, which anchors at the start of the filename.

     The default ignores are `[.stfolder, .stversions, .localized, @eaDir, .Spotlight-V100, .Trashes, __MACOSX]` and cannot be overridden.

+#### [`PAPERLESS_CONSUMER_BARCODE_SCANNER=<string>`](#PAPERLESS_CONSUMER_BARCODE_SCANNER) {#PAPERLESS_CONSUMER_BARCODE_SCANNER}
+
+: Sets the barcode scanner used for barcode functionality.
+
+    Currently, "PYZBAR" (the default) or "ZXING" might be selected.
+    If you have problems that your Barcodes/QR-Codes are not detected
+    (especially with bad scan quality and/or small codes), try the other one.
+
 #### [`PAPERLESS_PRE_CONSUME_SCRIPT=<filename>`](#PAPERLESS_PRE_CONSUME_SCRIPT) {#PAPERLESS_PRE_CONSUME_SCRIPT}

 : After some initial validation, Paperless can trigger an arbitrary
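A hedged sketch of how a two-backend switch like this can be dispatched at runtime; reading the environment variable directly and the `read_barcodes` helper below are illustrative assumptions, not the actual paperless-ngx implementation:

```python
import os

from PIL import Image


def read_barcodes(image: Image.Image) -> list[str]:
    """Decode barcodes with the backend named by PAPERLESS_CONSUMER_BARCODE_SCANNER.

    Sketch only: the real application resolves settings through its
    configuration layer rather than reading the environment here.
    """
    scanner = os.getenv("PAPERLESS_CONSUMER_BARCODE_SCANNER", "PYZBAR").upper()
    if scanner == "ZXING":
        import zxingcpp  # Python bindings of zxing-cpp

        # Each result carries the decoded text and the detected format.
        return [result.text for result in zxingcpp.read_barcodes(image)]
    from pyzbar import pyzbar

    return [symbol.data.decode("utf-8") for symbol in pyzbar.decode(image)]
```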
@@ -23,28 +23,3 @@ separating the directory ignore from the file ignore.
 Document and thumbnail encryption is no longer supported. This was previously deprecated in [paperless-ng 0.9.3](https://github.com/paperless-ngx/paperless-ngx/blob/dev/docs/changelog.md#paperless-ng-093)

 Users must decrypt their document using the `decrypt_documents` command before upgrading.
-
-## Barcode Scanner Changes
-
-Support for [pyzbar](https://github.com/NaturalHistoryMuseum/pyzbar) has been removed. The underlying libzbar library has
-seen no updates in 16 years and is largely unmaintained, and the pyzbar Python wrapper last saw a release in March 2022. In
-practice, pyzbar struggled with barcode detection reliability, particularly on skewed, low-contrast, or partially
-obscured barcodes. [zxing-cpp](https://github.com/zxing-cpp/zxing-cpp) is actively maintained, significantly more
-reliable at finding barcodes, and now ships pre-built wheels for both x86_64 and arm64, removing the need to build the library.
-
-The `CONSUMER_BARCODE_SCANNER` setting has been removed. zxing-cpp is now the only backend.
-
-### Summary
-
-| Old Setting                | New Setting | Notes                             |
-| -------------------------- | ----------- | --------------------------------- |
-| `CONSUMER_BARCODE_SCANNER` | _Removed_   | zxing-cpp is now the only backend |
-
-### Action Required
-
-- If you were already using `CONSUMER_BARCODE_SCANNER=ZXING`, simply remove the setting.
-- If you had `CONSUMER_BARCODE_SCANNER=PYZBAR` or were using the default, no functional changes are needed beyond
-  removing the setting. zxing-cpp supports all the same barcode formats and you should see improved detection
-  reliability.
-- The `libzbar0` / `libzbar-dev` system packages are no longer required and can be removed from any custom Docker
-  images or host installations.
@@ -207,12 +207,13 @@ are released, dependency support is confirmed, etc.
 - `libpq-dev` for PostgreSQL
 - `libmagic-dev` for mime type detection
 - `mariadb-client` for MariaDB compile time
+- `libzbar0` for barcode detection
 - `poppler-utils` for barcode detection

 Use this list for your preferred package management:

 ```
-python3 python3-pip python3-dev imagemagick fonts-liberation gnupg libpq-dev default-libmysqlclient-dev pkg-config libmagic-dev poppler-utils
+python3 python3-pip python3-dev imagemagick fonts-liberation gnupg libpq-dev default-libmysqlclient-dev pkg-config libmagic-dev libzbar0 poppler-utils
 ```

 These dependencies are required for OCRmyPDF, which is used for text
@@ -68,6 +68,7 @@ dependencies = [
   "python-gnupg~=0.5.4",
   "python-ipware~=3.0.0",
   "python-magic~=0.4.27",
+  "pyzbar~=0.1.9",
   "rapidfuzz~=3.14.0",
   "redis[hiredis]~=5.2.1",
   "regex>=2025.9.18",
@@ -80,7 +81,7 @@ dependencies = [
   "watchfiles>=1.1.1",
   "whitenoise~=6.11",
   "whoosh-reloaded>=2.7.5",
-  "zxing-cpp~=3.0.0",
+  "zxing-cpp~=2.3.0",
 ]

 optional-dependencies.mariadb = [
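For reference, `~=2.3.0` is a compatible-release specifier, equivalent to `>=2.3.0, ==2.3.*`, so this pin admits 2.3.x patch releases while excluding 2.4 and the 3.x line being rolled back here. A quick check with the `packaging` library:

```python
from packaging.specifiers import SpecifierSet

spec = SpecifierSet("~=2.3.0")  # compatible release: >=2.3.0, ==2.3.*
print("2.3.5" in spec)  # True  -- patch releases stay in range
print("2.4.0" in spec)  # False -- minor bumps are excluded
print("3.0.0" in spec)  # False -- major versions are excluded
```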
@@ -171,6 +172,10 @@ psycopg-c = [
   { url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-trixie-3.3.0/psycopg_c-3.3.0-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
   { url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-trixie-3.3.0/psycopg_c-3.3.0-cp312-cp312-linux_aarch64.whl", marker = "sys_platform == 'linux' and platform_machine == 'aarch64' and python_version == '3.12'" },
 ]
+zxing-cpp = [
+  { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
+  { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_aarch64.whl", marker = "sys_platform == 'linux' and platform_machine == 'aarch64' and python_version == '3.12'" },
+]

 torch = [
   { index = "pytorch-cpu" },
@@ -65,6 +65,13 @@ describe('PngxPdfViewerComponent', () => {
     const pageSpy = jest.fn()
     component.pageChange.subscribe(pageSpy)

+    // In real usage the viewer may have multiple pages; our pdfjs mock defaults
+    // to a single page, so explicitly simulate a multi-page document here.
+    const pdf = (component as any).pdf as { numPages: number }
+    pdf.numPages = 3
+    const viewer = (component as any).pdfViewer as PDFViewer
+    viewer.setDocument(pdf)
+
     component.zoomScale = PdfZoomScale.PageFit
     component.zoom = PdfZoomLevel.Two
     component.rotation = 90
@@ -81,7 +88,6 @@ describe('PngxPdfViewerComponent', () => {
       page: new SimpleChange(undefined, 2, false),
     })

-    const viewer = (component as any).pdfViewer as PDFViewer
     expect(viewer.pagesRotation).toBe(90)
     expect(viewer.currentPageNumber).toBe(2)
     expect(pageSpy).toHaveBeenCalledWith(2)
@@ -196,6 +202,8 @@ describe('PngxPdfViewerComponent', () => {
     const scaleSpy = jest.spyOn(component as any, 'applyViewerState')
     const resizeSpy = jest.spyOn(component as any, 'setupResizeObserver')

+    // Angular sets the input value before calling ngOnChanges; mirror that here.
+    component.src = 'test.pdf'
     component.ngOnChanges({
       src: new SimpleChange(undefined, 'test.pdf', true),
       zoomScale: new SimpleChange(
@@ -81,7 +81,7 @@ export class PngxPdfViewerComponent
     this.dispatchFindIfReady()
     this.rendered.emit()
   }
-  private readonly onPagesInit = () => this.applyScale()
+  private readonly onPagesInit = () => this.applyViewerState()
   private readonly onPageChanging = (evt: { pageNumber: number }) => {
     // Avoid [(page)] two-way binding re-triggers navigation
     this.lastViewerPage = evt.pageNumber
@@ -90,8 +90,10 @@ export class PngxPdfViewerComponent

   ngOnChanges(changes: SimpleChanges): void {
     if (changes['src']) {
       this.hasLoaded = false
+      this.resetViewerState()
       if (this.src) {
         this.loadDocument()
       }
+      return
     }
@@ -139,6 +141,21 @@ export class PngxPdfViewerComponent
     this.pdfViewer = undefined
   }

+  private resetViewerState(): void {
+    this.hasLoaded = false
+    this.hasRenderedPage = false
+    this.lastFindQuery = ''
+    this.lastViewerPage = undefined
+    this.loadingTask?.destroy()
+    this.loadingTask = undefined
+    this.pdf = undefined
+    this.linkService.setDocument(null)
+    if (this.pdfViewer) {
+      this.pdfViewer.setDocument(null)
+      this.pdfViewer.currentPageNumber = 1
+    }
+  }
+
   private async loadDocument(): Promise<void> {
     if (this.hasLoaded) {
       return
@@ -222,7 +239,11 @@ export class PngxPdfViewerComponent
       hasPages &&
       this.page !== this.lastViewerPage
     ) {
-      this.pdfViewer.currentPageNumber = this.page
+      const nextPage = Math.min(
+        Math.max(Math.trunc(this.page), 1),
+        this.pdfViewer.pagesCount
+      )
+      this.pdfViewer.currentPageNumber = nextPage
     }
     if (this.page === this.lastViewerPage) {
       this.lastViewerPage = undefined
@@ -1,7 +1,7 @@
 <pngx-page-header [(title)]="title" [id]="documentId">
   @if (archiveContentRenderType === ContentRenderType.PDF && !useNativePdfViewer) {
     @if (previewNumPages) {
-      <div class="input-group input-group-sm d-none d-md-flex">
+      <div class="input-group input-group-sm ms-2 d-none d-md-flex">
         <div class="input-group-text" i18n>Page</div>
         <input class="form-control flex-grow-0 w-auto" type="number" min="1" [max]="previewNumPages" [(ngModel)]="previewCurrentPage" />
         <div class="input-group-text" i18n>of {{previewNumPages}}</div>
@@ -24,6 +24,88 @@
       <i-bs width="1.2em" height="1.2em" name="trash"></i-bs><span class="d-none d-lg-inline ps-1" i18n>Delete</span>
     </button>

+    @if (document?.versions?.length > 0) {
+      <div class="btn-group" ngbDropdown autoClose="outside">
+        <button class="btn btn-sm btn-outline-secondary dropdown-toggle" ngbDropdownToggle>
+          <i-bs name="file-earmark-diff"></i-bs>
+          <span class="d-none d-lg-inline ps-1" i18n>Versions</span>
+        </button>
+        <div class="dropdown-menu shadow" ngbDropdownMenu>
+          <div class="px-3 py-2">
+            @if (versionUploadState === UploadState.Idle) {
+              <div class="input-group input-group-sm mb-2">
+                <span class="input-group-text" i18n>Label</span>
+                <input class="form-control" type="text" [(ngModel)]="newVersionLabel" i18n-placeholder placeholder="Optional" [disabled]="!userIsOwner || !userCanEdit" />
+              </div>
+              <input #versionFileInput type="file" class="visually-hidden" (change)="onVersionFileSelected($event)" />
+              <button class="btn btn-sm btn-outline-secondary w-100" (click)="versionFileInput.click()" [disabled]="!userIsOwner || !userCanEdit">
+                <i-bs name="file-earmark-plus"></i-bs><span class="ps-1" i18n>Add new version</span>
+              </button>
+            } @else {
+              @switch (versionUploadState) {
+                @case (UploadState.Uploading) {
+                  <div class="small text-muted mt-1 d-flex align-items-center">
+                    <output class="spinner-border spinner-border-sm me-2" aria-hidden="true"></output>
+                    <span i18n>Uploading version...</span>
+                  </div>
+                }
+                @case (UploadState.Processing) {
+                  <div class="small text-muted mt-1 d-flex align-items-center">
+                    <output class="spinner-border spinner-border-sm me-2" aria-hidden="true"></output>
+                    <span i18n>Processing version...</span>
+                  </div>
+                }
+                @case (UploadState.Failed) {
+                  <div class="small text-danger mt-1 d-flex align-items-center justify-content-between">
+                    <span i18n>Version upload failed.</span>
+                    <button type="button" class="btn btn-link btn-sm p-0 ms-2" (click)="clearVersionUploadStatus()" i18n>Dismiss</button>
+                  </div>
+                  @if (versionUploadError) {
+                    <div class="small text-muted mt-1">{{ versionUploadError }}</div>
+                  }
+                }
+              }
+            }
+          </div>
+          <div class="dropdown-divider"></div>
+          @for (version of document.versions; track version.id) {
+            <div class="dropdown-item">
+              <div class="d-flex align-items-center w-100 version-item">
+                <span class="badge bg-light text-lowercase text-muted">
+                  {{ version.checksum | slice:0:8 }}
+                </span>
+                <button type="button" class="btn btn-link link-underline link-underline-opacity-0 flex-grow-1 ms-3 small text-start" (click)="selectVersion(version.id)">
+                  <div class="small">
+                    @if (version.version_label) {
+                      {{ version.version_label }}
+                    } @else {
+                      <span i18n>ID</span> #{{version.id}}
+                    }
+                  </div>
+                  <div class="version-subtitle text-muted">
+                    {{ version.added | customDate:'short' }}
+                  </div>
+                </button>
+                @if (selectedVersionId === version.id) { <span class="ms-2">✓</span> }
+                @if (!version.is_root) {
+                  <pngx-confirm-button
+                    buttonClasses="btn-link btn-sm text-danger ms-2"
+                    iconName="trash"
+                    confirmMessage="Delete this version?"
+                    i18n-confirmMessage
+                    [disabled]="!userIsOwner || !userCanEdit"
+                    (confirm)="deleteVersion(version.id)"
+                  >
+                    <span class="visually-hidden" i18n>Delete version</span>
+                  </pngx-confirm-button>
+                }
+              </div>
+            </div>
+          }
+        </div>
+      </div>
+    }
+
     <div class="btn-group">
       <button (click)="download()" class="btn btn-sm btn-outline-primary" [disabled]="downloading">
         @if (downloading) {
@@ -457,7 +539,7 @@
   @if (!useNativePdfViewer) {
     <div class="preview-sticky pdf-viewer-container">
       <pngx-pdf-viewer
-        [src]="{ url: previewUrl, password: password }"
+        [src]="pdfSource"
         [renderMode]="PdfRenderMode.All"
         [(page)]="previewCurrentPage"
         [zoomScale]="previewZoomScale"
@@ -30,7 +30,7 @@ import {
 } from '@ng-bootstrap/ng-bootstrap'
 import { NgxBootstrapIconsModule, allIcons } from 'ngx-bootstrap-icons'
 import { DeviceDetectorService } from 'ngx-device-detector'
-import { of, throwError } from 'rxjs'
+import { Subject, of, throwError } from 'rxjs'
 import { routes } from 'src/app/app-routing.module'
 import { Correspondent } from 'src/app/data/correspondent'
 import { CustomFieldDataType } from 'src/app/data/custom-field'
@@ -65,6 +65,10 @@ import { TagService } from 'src/app/services/rest/tag.service'
 import { UserService } from 'src/app/services/rest/user.service'
 import { SettingsService } from 'src/app/services/settings.service'
 import { ToastService } from 'src/app/services/toast.service'
+import {
+  UploadState,
+  WebsocketStatusService,
+} from 'src/app/services/websocket-status.service'
 import { environment } from 'src/environments/environment'
 import { ConfirmDialogComponent } from '../common/confirm-dialog/confirm-dialog.component'
 import { PasswordRemovalConfirmDialogComponent } from '../common/confirm-dialog/password-removal-confirm-dialog/password-removal-confirm-dialog.component'
@@ -127,6 +131,24 @@ const customFields = [
   },
 ]

+function createFileInput(file?: File) {
+  const input = document.createElement('input')
+  input.type = 'file'
+  const files = file
+    ? ({
+        0: file,
+        length: 1,
+        item: () => file,
+      } as unknown as FileList)
+    : ({
+        length: 0,
+        item: () => null,
+      } as unknown as FileList)
+  Object.defineProperty(input, 'files', { value: files })
+  input.value = ''
+  return input
+}
+
 describe('DocumentDetailComponent', () => {
   let component: DocumentDetailComponent
   let fixture: ComponentFixture<DocumentDetailComponent>
@@ -142,6 +164,7 @@ describe('DocumentDetailComponent', () => {
   let deviceDetectorService: DeviceDetectorService
   let httpTestingController: HttpTestingController
   let componentRouterService: ComponentRouterService
+  let websocketStatusService: WebsocketStatusService

   let currentUserCan = true
   let currentUserHasObjectPermissions = true
@@ -291,6 +314,7 @@ describe('DocumentDetailComponent', () => {
     fixture = TestBed.createComponent(DocumentDetailComponent)
     httpTestingController = TestBed.inject(HttpTestingController)
     componentRouterService = TestBed.inject(ComponentRouterService)
+    websocketStatusService = TestBed.inject(WebsocketStatusService)
     component = fixture.componentInstance
   })
@@ -354,6 +378,88 @@ describe('DocumentDetailComponent', () => {
     expect(component.document).toEqual(doc)
   })

+  it('should redirect to root when opening a version document id', () => {
+    const navigateSpy = jest.spyOn(router, 'navigate')
+    jest
+      .spyOn(activatedRoute, 'paramMap', 'get')
+      .mockReturnValue(of(convertToParamMap({ id: 10, section: 'details' })))
+    jest
+      .spyOn(documentService, 'get')
+      .mockReturnValueOnce(throwError(() => ({ status: 404 }) as any))
+    const getRootSpy = jest
+      .spyOn(documentService, 'getRootId')
+      .mockReturnValue(of({ root_id: 3 }))
+    jest.spyOn(openDocumentsService, 'getOpenDocument').mockReturnValue(null)
+    jest
+      .spyOn(openDocumentsService, 'openDocument')
+      .mockReturnValueOnce(of(true))
+    jest.spyOn(customFieldsService, 'listAll').mockReturnValue(
+      of({
+        count: customFields.length,
+        all: customFields.map((f) => f.id),
+        results: customFields,
+      })
+    )
+
+    fixture.detectChanges()
+    httpTestingController.expectOne(component.previewUrl).flush('preview')
+
+    expect(getRootSpy).toHaveBeenCalledWith(10)
+    expect(navigateSpy).toHaveBeenCalledWith(['documents', 3, 'details'], {
+      replaceUrl: true,
+    })
+  })
+
+  it('should not render a delete button for the root/original version', () => {
+    const docWithVersions = {
+      ...doc,
+      versions: [
+        {
+          id: doc.id,
+          added: new Date('2024-01-01T00:00:00Z'),
+          version_label: 'Original',
+          checksum: 'aaaa',
+          is_root: true,
+        },
+        {
+          id: 10,
+          added: new Date('2024-01-02T00:00:00Z'),
+          version_label: 'Edited',
+          checksum: 'bbbb',
+          is_root: false,
+        },
+      ],
+    } as Document
+
+    jest
+      .spyOn(activatedRoute, 'paramMap', 'get')
+      .mockReturnValue(of(convertToParamMap({ id: 3, section: 'details' })))
+    jest.spyOn(documentService, 'get').mockReturnValueOnce(of(docWithVersions))
+    jest
+      .spyOn(documentService, 'getMetadata')
+      .mockReturnValue(of({ has_archive_version: true } as any))
+    jest.spyOn(openDocumentsService, 'getOpenDocument').mockReturnValue(null)
+    jest
+      .spyOn(openDocumentsService, 'openDocument')
+      .mockReturnValueOnce(of(true))
+    jest.spyOn(customFieldsService, 'listAll').mockReturnValue(
+      of({
+        count: customFields.length,
+        all: customFields.map((f) => f.id),
+        results: customFields,
+      })
+    )
+
+    fixture.detectChanges()
+    httpTestingController.expectOne(component.previewUrl).flush('preview')
+    fixture.detectChanges()
+
+    const deleteButtons = fixture.debugElement.queryAll(
+      By.css('pngx-confirm-button')
+    )
+    expect(deleteButtons.length).toEqual(1)
+  })
+
   it('should fall back to details tab when duplicates tab is active but no duplicates', () => {
     initNormally()
     component.activeNavID = component.DocumentDetailNavIDs.Duplicates
@@ -1441,6 +1547,185 @@ describe('DocumentDetailComponent', () => {
     expect(closeSpy).toHaveBeenCalled()
   })

+  it('selectVersion should update preview and handle preview failures', () => {
+    const previewSpy = jest.spyOn(documentService, 'getPreviewUrl')
+    initNormally()
+    httpTestingController.expectOne(component.previewUrl).flush('preview')
+
+    previewSpy.mockReturnValueOnce('preview-version')
+    jest.spyOn(documentService, 'getThumbUrl').mockReturnValue('thumb-version')
+
+    component.selectVersion(10)
+    httpTestingController.expectOne('preview-version').flush('version text')
+
+    expect(component.previewUrl).toBe('preview-version')
+    expect(component.thumbUrl).toBe('thumb-version')
+    expect(component.previewText).toBe('version text')
+    const pdfSource = component.pdfSource as { url: string; password?: string }
+    expect(pdfSource.url).toBe('preview-version')
+    expect(pdfSource.password).toBeUndefined()
+
+    previewSpy.mockReturnValueOnce('preview-error')
+    component.selectVersion(11)
+    httpTestingController
+      .expectOne('preview-error')
+      .error(new ErrorEvent('fail'))
+
+    expect(component.previewText).toContain('An error occurred loading content')
+  })
+
+  it('deleteVersion should update versions, fall back, and surface errors', () => {
+    initNormally()
+    httpTestingController.expectOne(component.previewUrl).flush('preview')
+
+    component.document.versions = [
+      {
+        id: 3,
+        added: new Date(),
+        version_label: 'Original',
+        checksum: 'aaaa',
+        is_root: true,
+      },
+      {
+        id: 10,
+        added: new Date(),
+        version_label: 'Edited',
+        checksum: 'bbbb',
+        is_root: false,
+      },
+    ]
+    component.selectedVersionId = 10
+
+    const openDoc = { ...doc, versions: [] } as Document
+    jest.spyOn(openDocumentsService, 'getOpenDocument').mockReturnValue(openDoc)
+    const saveSpy = jest.spyOn(openDocumentsService, 'save')
+    const deleteSpy = jest.spyOn(documentService, 'deleteVersion')
+    const versionsSpy = jest.spyOn(documentService, 'getVersions')
+    const selectSpy = jest
+      .spyOn(component, 'selectVersion')
+      .mockImplementation(() => {})
+    const errorSpy = jest.spyOn(toastService, 'showError')
+
+    deleteSpy.mockReturnValueOnce(of({ result: 'ok', current_version_id: 99 }))
+    versionsSpy.mockReturnValueOnce(
+      of({ id: doc.id, versions: [{ id: 99, is_root: false }] } as Document)
+    )
+    component.deleteVersion(10)
+
+    expect(component.document.versions).toEqual([{ id: 99, is_root: false }])
+    expect(openDoc.versions).toEqual([{ id: 99, is_root: false }])
+    expect(saveSpy).toHaveBeenCalled()
+    expect(selectSpy).toHaveBeenCalledWith(99)
+
+    component.selectedVersionId = 3
+    deleteSpy.mockReturnValueOnce(of({ result: 'ok' }))
+    versionsSpy.mockReturnValueOnce(
+      of({
+        id: doc.id,
+        versions: [
+          { id: 7, is_root: false },
+          { id: 9, is_root: false },
+        ],
+      } as Document)
+    )
+    component.deleteVersion(3)
+    expect(selectSpy).toHaveBeenCalledWith(7)
+
+    deleteSpy.mockReturnValueOnce(throwError(() => new Error('nope')))
+    component.deleteVersion(10)
+    expect(errorSpy).toHaveBeenCalled()
+  })
+
+  it('onVersionFileSelected should cover upload flows and reset status', () => {
+    initNormally()
+    httpTestingController.expectOne(component.previewUrl).flush('preview')
+
+    const uploadSpy = jest.spyOn(documentService, 'uploadVersion')
+    const versionsSpy = jest.spyOn(documentService, 'getVersions')
+    const infoSpy = jest.spyOn(toastService, 'showInfo')
+    const finishedSpy = jest.spyOn(
+      websocketStatusService,
+      'onDocumentConsumptionFinished'
+    )
+    const failedSpy = jest.spyOn(
+      websocketStatusService,
+      'onDocumentConsumptionFailed'
+    )
+    const selectSpy = jest
+      .spyOn(component, 'selectVersion')
+      .mockImplementation(() => {})
+    const openDoc = { ...doc, versions: [] } as Document
+    jest.spyOn(openDocumentsService, 'getOpenDocument').mockReturnValue(openDoc)
+    const saveSpy = jest.spyOn(openDocumentsService, 'save')
+
+    component.onVersionFileSelected({ target: createFileInput() } as any)
+    expect(uploadSpy).not.toHaveBeenCalled()
+
+    const fileMissing = new File(['data'], 'version.pdf', {
+      type: 'application/pdf',
+    })
+    component.newVersionLabel = ' label '
+    uploadSpy.mockReturnValueOnce(of({}))
+    component.onVersionFileSelected({
+      target: createFileInput(fileMissing),
+    } as any)
+    expect(uploadSpy).toHaveBeenCalledWith(
+      component.documentId,
+      fileMissing,
+      'label'
+    )
+    expect(component.newVersionLabel).toBe('')
+    expect(component.versionUploadState).toBe(UploadState.Failed)
+    expect(component.versionUploadError).toBe('Missing task ID.')
+    expect(infoSpy).toHaveBeenCalled()
+
+    const finishedFail$ = new Subject<any>()
+    const failedFail$ = new Subject<any>()
+    finishedSpy.mockReturnValueOnce(finishedFail$ as any)
+    failedSpy.mockReturnValueOnce(failedFail$ as any)
+    uploadSpy.mockReturnValueOnce(of('task-1'))
+    component.onVersionFileSelected({
+      target: createFileInput(
+        new File(['data'], 'version.pdf', { type: 'application/pdf' })
+      ),
+    } as any)
+    expect(component.versionUploadState).toBe(UploadState.Processing)
+    failedFail$.next({ taskId: 'task-1', message: 'nope' })
+    expect(component.versionUploadState).toBe(UploadState.Failed)
+    expect(component.versionUploadError).toBe('nope')
+    expect(versionsSpy).not.toHaveBeenCalled()
+
+    const finishedOk$ = new Subject<any>()
+    const failedOk$ = new Subject<any>()
+    finishedSpy.mockReturnValueOnce(finishedOk$ as any)
+    failedSpy.mockReturnValueOnce(failedOk$ as any)
+    uploadSpy.mockReturnValueOnce(of({ task_id: 'task-2' }))
+    const versions = [
+      { id: 7, is_root: false },
+      { id: 12, is_root: false },
+    ] as any
+    versionsSpy.mockReturnValueOnce(of({ id: doc.id, versions } as Document))
+    component.onVersionFileSelected({
+      target: createFileInput(
+        new File(['data'], 'version.pdf', { type: 'application/pdf' })
+      ),
+    } as any)
+    finishedOk$.next({ taskId: 'task-2' })
+
+    expect(component.document.versions).toEqual(versions)
+    expect(openDoc.versions).toEqual(versions)
+    expect(saveSpy).toHaveBeenCalled()
+    expect(selectSpy).toHaveBeenCalledWith(12)
+    expect(component.versionUploadState).toBe(UploadState.Idle)
+    expect(component.versionUploadError).toBeNull()
+
+    component.versionUploadState = UploadState.Failed
+    component.versionUploadError = 'boom'
+    component.clearVersionUploadStatus()
+    expect(component.versionUploadState).toBe(UploadState.Idle)
+    expect(component.versionUploadError).toBeNull()
+  })
+
   function initNormally() {
     jest
       .spyOn(activatedRoute, 'paramMap', 'get')
@@ -1,4 +1,4 @@
-import { AsyncPipe, NgTemplateOutlet } from '@angular/common'
+import { AsyncPipe, NgTemplateOutlet, SlicePipe } from '@angular/common'
 import { HttpClient, HttpResponse } from '@angular/common/http'
 import { Component, inject, OnDestroy, OnInit, ViewChild } from '@angular/core'
 import {
@@ -20,7 +20,7 @@ import {
 import { dirtyCheck, DirtyComponent } from '@ngneat/dirty-check-forms'
 import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
 import { DeviceDetectorService } from 'ngx-device-detector'
-import { BehaviorSubject, Observable, of, Subject, timer } from 'rxjs'
+import { BehaviorSubject, merge, Observable, of, Subject, timer } from 'rxjs'
 import {
   catchError,
   debounceTime,
@@ -29,6 +29,7 @@ import {
   first,
   map,
   switchMap,
+  take,
   takeUntil,
   tap,
 } from 'rxjs/operators'
@@ -80,10 +81,15 @@ import { TagService } from 'src/app/services/rest/tag.service'
 import { UserService } from 'src/app/services/rest/user.service'
 import { SettingsService } from 'src/app/services/settings.service'
 import { ToastService } from 'src/app/services/toast.service'
+import {
+  UploadState,
+  WebsocketStatusService,
+} from 'src/app/services/websocket-status.service'
 import { getFilenameFromContentDisposition } from 'src/app/utils/http'
 import { ISODateAdapter } from 'src/app/utils/ngb-iso-date-adapter'
 import * as UTIF from 'utif'
 import { DocumentDetailFieldID } from '../admin/settings/settings.component'
+import { ConfirmButtonComponent } from '../common/confirm-button/confirm-button.component'
 import { ConfirmDialogComponent } from '../common/confirm-dialog/confirm-dialog.component'
 import { PasswordRemovalConfirmDialogComponent } from '../common/confirm-dialog/password-removal-confirm-dialog/password-removal-confirm-dialog.component'
 import { CustomFieldsDropdownComponent } from '../common/custom-fields-dropdown/custom-fields-dropdown.component'
@@ -110,15 +116,16 @@ import { PDFEditorComponent } from '../common/pdf-editor/pdf-editor.component'
 import { PngxPdfViewerComponent } from '../common/pdf-viewer/pdf-viewer.component'
 import {
   PdfRenderMode,
+  PdfSource,
   PdfZoomLevel,
   PdfZoomScale,
   PngxPdfDocumentProxy,
 } from '../common/pdf-viewer/pdf-viewer.types'
 import { ShareLinksDialogComponent } from '../common/share-links-dialog/share-links-dialog.component'
 import { SuggestionsDropdownComponent } from '../common/suggestions-dropdown/suggestions-dropdown.component'
-import { DocumentHistoryComponent } from '../document-history/document-history.component'
 import { DocumentNotesComponent } from '../document-notes/document-notes.component'
 import { ComponentWithPermissions } from '../with-permissions/with-permissions.component'
+import { DocumentHistoryComponent } from './document-history/document-history.component'
 import { MetadataCollapseComponent } from './metadata-collapse/metadata-collapse.component'

 enum DocumentDetailNavIDs {
@@ -176,6 +183,8 @@ enum ContentRenderType {
     TextAreaComponent,
     RouterModule,
     PngxPdfViewerComponent,
+    ConfirmButtonComponent,
+    SlicePipe,
   ],
 })
 export class DocumentDetailComponent
@@ -183,6 +192,8 @@ export class DocumentDetailComponent
   implements OnInit, OnDestroy, DirtyComponent
 {
   PdfRenderMode = PdfRenderMode
+  UploadState = UploadState
+
   documentsService = inject(DocumentService)
   private route = inject(ActivatedRoute)
   private tagService = inject(TagService)
@@ -204,6 +215,7 @@ export class DocumentDetailComponent
   private componentRouterService = inject(ComponentRouterService)
   private deviceDetectorService = inject(DeviceDetectorService)
   private savedViewService = inject(SavedViewService)
+  private readonly websocketStatusService = inject(WebsocketStatusService)

   @ViewChild('inputTitle')
   titleInput: TextComponent
@@ -233,6 +245,13 @@ export class DocumentDetailComponent
   tiffURL: string
   tiffError: string

+  // Versioning
+  selectedVersionId: number
+  newVersionLabel: string = ''
+  pdfSource: PdfSource
+  versionUploadState: UploadState = UploadState.Idle
+  versionUploadError: string | null = null
+
   correspondents: Correspondent[]
   documentTypes: DocumentType[]
   storagePaths: StoragePath[]
@@ -278,6 +297,7 @@ export class DocumentDetailComponent
   public readonly DocumentDetailFieldID = DocumentDetailFieldID

   @ViewChild('nav') nav: NgbNav
+  @ViewChild('versionFileInput') versionFileInput
   @ViewChild('pdfPreview') set pdfPreview(element) {
     // this gets called when component added or removed from DOM
     if (
@@ -345,6 +365,17 @@ export class DocumentDetailComponent
     return ContentRenderType.Other
   }

+  private updatePdfSource() {
+    if (!this.previewUrl) {
+      this.pdfSource = undefined
+      return
+    }
+    this.pdfSource = {
+      url: this.previewUrl,
+      password: this.password || undefined,
+    }
+  }
+
   get isRTL() {
     if (!this.metadata || !this.metadata.lang) return false
     else {
@@ -420,7 +451,12 @@ export class DocumentDetailComponent
   }

   private loadDocument(documentId: number): void {
-    this.previewUrl = this.documentsService.getPreviewUrl(documentId)
+    let redirectedToRoot = false
+    this.selectedVersionId = documentId
+    this.previewUrl = this.documentsService.getPreviewUrl(
+      this.selectedVersionId
+    )
+    this.updatePdfSource()
     this.http
       .get(this.previewUrl, { responseType: 'text' })
       .pipe(
@@ -435,11 +471,28 @@ export class DocumentDetailComponent
             err.message ?? err.toString()
           }`),
       })
-    this.thumbUrl = this.documentsService.getThumbUrl(documentId)
+    this.thumbUrl = this.documentsService.getThumbUrl(this.selectedVersionId)
     this.documentsService
       .get(documentId)
       .pipe(
-        catchError(() => {
+        catchError((error) => {
+          if (error?.status === 404) {
+            return this.documentsService.getRootId(documentId).pipe(
+              map((result) => {
+                const rootId = result?.root_id
+                if (rootId && rootId !== documentId) {
+                  const section =
+                    this.route.snapshot.paramMap.get('section') || 'details'
+                  redirectedToRoot = true
+                  this.router.navigate(['documents', rootId, section], {
+                    replaceUrl: true,
+                  })
+                }
+                return null
+              }),
+              catchError(() => of(null))
+            )
+          }
+          // 404 is handled in the subscribe below
           return of(null)
         }),
@@ -450,6 +503,9 @@ export class DocumentDetailComponent
       .subscribe({
         next: (doc) => {
           if (!doc) {
+            if (redirectedToRoot) {
+              return
+            }
             this.router.navigate(['404'], { replaceUrl: true })
             return
           }
@@ -666,6 +722,11 @@ export class DocumentDetailComponent

   updateComponent(doc: Document) {
     this.document = doc
+    // Default selected version is the newest version
+    const versions = doc.versions ?? []
+    this.selectedVersionId = versions.length
+      ? Math.max(...versions.map((version) => version.id))
+      : doc.id
     this.requiresPassword = false
     this.updateFormForCustomFields()
     if (this.archiveContentRenderType === ContentRenderType.TIFF) {
@@ -724,6 +785,73 @@ export class DocumentDetailComponent
     }
   }

+  // Update file preview and download target to a specific version (by document id)
+  selectVersion(versionId: number) {
+    this.selectedVersionId = versionId
+    this.previewUrl = this.documentsService.getPreviewUrl(
+      this.documentId,
+      false,
+      this.selectedVersionId
+    )
+    this.updatePdfSource()
+    this.thumbUrl = this.documentsService.getThumbUrl(this.selectedVersionId)
+    // For text previews, refresh content
+    this.http
+      .get(this.previewUrl, { responseType: 'text' })
+      .pipe(
+        first(),
+        takeUntil(this.unsubscribeNotifier),
+        takeUntil(this.docChangeNotifier)
+      )
+      .subscribe({
+        next: (res) => (this.previewText = res.toString()),
+        error: (err) =>
+          (this.previewText = $localize`An error occurred loading content: ${
+            err.message ?? err.toString()
+          }`),
+      })
+  }
+
+  deleteVersion(versionId: number) {
+    const wasSelected = this.selectedVersionId === versionId
+    this.documentsService
+      .deleteVersion(this.documentId, versionId)
+      .pipe(
+        switchMap((result) =>
+          this.documentsService
+            .getVersions(this.documentId)
+            .pipe(map((doc) => ({ doc, result })))
+        ),
+        first(),
+        takeUntil(this.unsubscribeNotifier)
+      )
+      .subscribe({
+        next: ({ doc, result }) => {
+          if (doc?.versions) {
+            this.document.versions = doc.versions
+            const openDoc = this.openDocumentService.getOpenDocument(
+              this.documentId
+            )
+            if (openDoc) {
+              openDoc.versions = doc.versions
+              this.openDocumentService.save()
+            }
+          }
+
+          if (wasSelected) {
+            const fallbackId =
+              result?.current_version_id ??
+              doc?.versions?.[0]?.id ??
+              this.documentId
+            this.selectVersion(fallbackId)
+          }
+        },
+        error: (error) => {
+          this.toastService.showError($localize`Error deleting version`, error)
+        },
+      })
+  }
+
   get customFieldFormFields(): FormArray {
     return this.documentForm.get('custom_fields') as FormArray
   }
@@ -1140,10 +1268,102 @@ export class DocumentDetailComponent
     })
   }

+  onVersionFileSelected(event: Event) {
+    const input = event.target as HTMLInputElement
+    if (!input?.files || input.files.length === 0) return
+    const file = input.files[0]
+    // Reset input to allow re-selection of the same file later
+    input.value = ''
+    const label = this.newVersionLabel?.trim()
+    this.versionUploadState = UploadState.Uploading
+    this.versionUploadError = null
+    this.documentsService
+      .uploadVersion(this.documentId, file, label)
+      .pipe(
+        first(),
+        tap(() => {
+          this.toastService.showInfo(
+            $localize`Uploading new version. Processing will happen in the background.`
+          )
+          this.newVersionLabel = ''
+          this.versionUploadState = UploadState.Processing
+        }),
+        map((taskId) =>
+          typeof taskId === 'string'
+            ? taskId
+            : (taskId as { task_id?: string })?.task_id
+        ),
+        switchMap((taskId) => {
+          if (!taskId) {
+            this.versionUploadState = UploadState.Failed
+            this.versionUploadError = $localize`Missing task ID.`
+            return of(null)
+          }
+          return merge(
+            this.websocketStatusService.onDocumentConsumptionFinished().pipe(
+              filter((status) => status.taskId === taskId),
+              map(() => ({ state: 'success' as const }))
+            ),
+            this.websocketStatusService.onDocumentConsumptionFailed().pipe(
+              filter((status) => status.taskId === taskId),
+              map((status) => ({
+                state: 'failed' as const,
+                message: status.message,
+              }))
+            )
+          ).pipe(take(1))
+        }),
+        switchMap((result) => {
+          if (result?.state !== 'success') {
+            if (result?.state === 'failed') {
+              this.versionUploadState = UploadState.Failed
+              this.versionUploadError =
+                result.message || $localize`Upload failed.`
+            }
+            return of(null)
+          }
+          return this.documentsService.getVersions(this.documentId)
+        }),
+        takeUntil(this.unsubscribeNotifier),
+        takeUntil(this.docChangeNotifier)
+      )
+      .subscribe({
+        next: (doc) => {
+          if (doc?.versions) {
+            this.document.versions = doc.versions
+            const openDoc = this.openDocumentService.getOpenDocument(
+              this.documentId
+            )
+            if (openDoc) {
+              openDoc.versions = doc.versions
+              this.openDocumentService.save()
+            }
+            this.selectVersion(
+              Math.max(...doc.versions.map((version) => version.id))
+            )
+            this.clearVersionUploadStatus()
+          }
+        },
+        error: (error) => {
+          this.versionUploadState = UploadState.Failed
+          this.versionUploadError = error?.message || $localize`Upload failed.`
+          this.toastService.showError(
+            $localize`Error uploading new version`,
+            error
+          )
+        },
+      })
+  }
+
+  clearVersionUploadStatus() {
+    this.versionUploadState = UploadState.Idle
+    this.versionUploadError = null
+  }
+
   download(original: boolean = false) {
     this.downloading = true
     const downloadUrl = this.documentsService.getDownloadUrl(
       this.documentId,
       this.selectedVersionId || this.documentId,
       original
     )
     this.http
@@ -1230,6 +1450,7 @@ export class DocumentDetailComponent
   onPasswordKeyUp(event: KeyboardEvent) {
     if ('Enter' == event.key) {
       this.password = (event.target as HTMLInputElement).value
+      this.updatePdfSource()
     }
   }
@@ -1,6 +1,6 @@
 @if (loading) {
   <div class="d-flex">
-    <div class="spinner-border spinner-border-sm fw-normal" role="status"></div>
+    <output class="spinner-border spinner-border-sm fw-normal" role="status"></output>
   </div>
 } @else {
   <ul class="list-group">
@@ -83,8 +83,22 @@ describe('DocumentHistoryComponent', () => {
         expect(result).toBe(correspondentName)
       })
     expect(getCachedSpy).toHaveBeenCalledWith(parseInt(correspondentId))
-    // no correspondent found
-    getCachedSpy.mockReturnValue(of(null))
+  })
+
+  it('getPrettyName should memoize results to avoid resubscribe loops', () => {
+    const correspondentId = '1'
+    const getCachedSpy = jest
+      .spyOn(correspondentService, 'getCached')
+      .mockReturnValue(of({ name: 'John Doe' }))
+    const a = component.getPrettyName(DataType.Correspondent, correspondentId)
+    const b = component.getPrettyName(DataType.Correspondent, correspondentId)
+    expect(a).toBe(b)
+    expect(getCachedSpy).toHaveBeenCalledTimes(1)
+  })
+
+  it('getPrettyName should fall back to the correspondent id when missing', () => {
+    const correspondentId = '1'
+    jest.spyOn(correspondentService, 'getCached').mockReturnValue(of(null))
     component
       .getPrettyName(DataType.Correspondent, correspondentId)
       .subscribe((result) => {
@@ -104,8 +118,11 @@ describe('DocumentHistoryComponent', () => {
         expect(result).toBe(documentTypeName)
       })
     expect(getCachedSpy).toHaveBeenCalledWith(parseInt(documentTypeId))
-    // no document type found
-    getCachedSpy.mockReturnValue(of(null))
+  })
+
+  it('getPrettyName should fall back to the document type id when missing', () => {
+    const documentTypeId = '1'
+    jest.spyOn(documentTypeService, 'getCached').mockReturnValue(of(null))
     component
       .getPrettyName(DataType.DocumentType, documentTypeId)
       .subscribe((result) => {
@@ -125,8 +142,11 @@ describe('DocumentHistoryComponent', () => {
         expect(result).toBe(storagePath)
       })
     expect(getCachedSpy).toHaveBeenCalledWith(parseInt(storagePathId))
-    // no storage path found
-    getCachedSpy.mockReturnValue(of(null))
+  })
+
+  it('getPrettyName should fall back to the storage path id when missing', () => {
+    const storagePathId = '1'
+    jest.spyOn(storagePathService, 'getCached').mockReturnValue(of(null))
     component
       .getPrettyName(DataType.StoragePath, storagePathId)
       .subscribe((result) => {
@@ -144,8 +164,11 @@ describe('DocumentHistoryComponent', () => {
       expect(result).toBe(ownerUsername)
     })
     expect(getCachedSpy).toHaveBeenCalledWith(parseInt(ownerId))
-    // no user found
-    getCachedSpy.mockReturnValue(of(null))
+  })
+
+  it('getPrettyName should fall back to the owner id when missing', () => {
+    const ownerId = '1'
+    jest.spyOn(userService, 'getCached').mockReturnValue(of(null))
     component.getPrettyName('owner', ownerId).subscribe((result) => {
       expect(result).toBe(ownerId)
     })
@@ -0,0 +1,114 @@
import { AsyncPipe, KeyValuePipe, TitleCasePipe } from '@angular/common'
import { Component, Input, OnInit, inject } from '@angular/core'
import { NgbTooltipModule } from '@ng-bootstrap/ng-bootstrap'
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
import { Observable, first, map, of, shareReplay } from 'rxjs'
import { AuditLogAction, AuditLogEntry } from 'src/app/data/auditlog-entry'
import { DataType } from 'src/app/data/datatype'
import { CustomDatePipe } from 'src/app/pipes/custom-date.pipe'
import { CorrespondentService } from 'src/app/services/rest/correspondent.service'
import { DocumentTypeService } from 'src/app/services/rest/document-type.service'
import { DocumentService } from 'src/app/services/rest/document.service'
import { StoragePathService } from 'src/app/services/rest/storage-path.service'
import { UserService } from 'src/app/services/rest/user.service'

@Component({
  selector: 'pngx-document-history',
  templateUrl: './document-history.component.html',
  styleUrl: './document-history.component.scss',
  imports: [
    CustomDatePipe,
    NgbTooltipModule,
    AsyncPipe,
    KeyValuePipe,
    TitleCasePipe,
    NgxBootstrapIconsModule,
  ],
})
export class DocumentHistoryComponent implements OnInit {
  private documentService = inject(DocumentService)
  private correspondentService = inject(CorrespondentService)
  private storagePathService = inject(StoragePathService)
  private documentTypeService = inject(DocumentTypeService)
  private userService = inject(UserService)

  public AuditLogAction = AuditLogAction

  private _documentId: number
  @Input()
  set documentId(id: number) {
    if (this._documentId !== id) {
      this._documentId = id
      this.prettyNameCache.clear()
      this.loadHistory()
    }
  }

  public loading: boolean = true
  public entries: AuditLogEntry[] = []

  private readonly prettyNameCache = new Map<string, Observable<string>>()

  ngOnInit(): void {
    this.loadHistory()
  }

  private loadHistory(): void {
    if (this._documentId) {
      this.loading = true
      this.documentService.getHistory(this._documentId).subscribe((entries) => {
        this.entries = entries
        this.loading = false
      })
    }
  }

  getPrettyName(type: DataType | string, id: string): Observable<string> {
    const cacheKey = `${type}:${id}`
    const cached = this.prettyNameCache.get(cacheKey)
    if (cached) {
      return cached
    }

    const idInt = parseInt(id, 10)
    const fallback$ = of(id)

    let result$: Observable<string>
    if (!Number.isFinite(idInt)) {
      result$ = fallback$
    } else {
      switch (type) {
        case DataType.Correspondent:
          result$ = this.correspondentService.getCached(idInt).pipe(
            first(),
            map((correspondent) => correspondent?.name ?? id)
          )
          break
        case DataType.DocumentType:
          result$ = this.documentTypeService.getCached(idInt).pipe(
            first(),
            map((documentType) => documentType?.name ?? id)
          )
          break
        case DataType.StoragePath:
          result$ = this.storagePathService.getCached(idInt).pipe(
            first(),
            map((storagePath) => storagePath?.path ?? id)
          )
          break
        case 'owner':
          result$ = this.userService.getCached(idInt).pipe(
            first(),
            map((user) => user?.username ?? id)
          )
          break
        default:
          result$ = fallback$
      }
    }
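
    // Cache the shared observable so repeated calls from the template reuse a
    // single request instead of re-fetching on every change-detection cycle.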
    const shared$ = result$.pipe(shareReplay({ bufferSize: 1, refCount: true }))
    this.prettyNameCache.set(cacheKey, shared$)
    return shared$
  }
}
@@ -1,85 +0,0 @@
import { AsyncPipe, KeyValuePipe, TitleCasePipe } from '@angular/common'
import { Component, Input, OnInit, inject } from '@angular/core'
import { NgbTooltipModule } from '@ng-bootstrap/ng-bootstrap'
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
import { Observable, first, map, of } from 'rxjs'
import { AuditLogAction, AuditLogEntry } from 'src/app/data/auditlog-entry'
import { DataType } from 'src/app/data/datatype'
import { CustomDatePipe } from 'src/app/pipes/custom-date.pipe'
import { CorrespondentService } from 'src/app/services/rest/correspondent.service'
import { DocumentTypeService } from 'src/app/services/rest/document-type.service'
import { DocumentService } from 'src/app/services/rest/document.service'
import { StoragePathService } from 'src/app/services/rest/storage-path.service'
import { UserService } from 'src/app/services/rest/user.service'

@Component({
  selector: 'pngx-document-history',
  templateUrl: './document-history.component.html',
  styleUrl: './document-history.component.scss',
  imports: [
    CustomDatePipe,
    NgbTooltipModule,
    AsyncPipe,
    KeyValuePipe,
    TitleCasePipe,
    NgxBootstrapIconsModule,
  ],
})
export class DocumentHistoryComponent implements OnInit {
  private documentService = inject(DocumentService)
  private correspondentService = inject(CorrespondentService)
  private storagePathService = inject(StoragePathService)
  private documentTypeService = inject(DocumentTypeService)
  private userService = inject(UserService)

  public AuditLogAction = AuditLogAction

  private _documentId: number
  @Input()
  set documentId(id: number) {
    this._documentId = id
    this.ngOnInit()
  }

  public loading: boolean = true
  public entries: AuditLogEntry[] = []

  ngOnInit(): void {
    if (this._documentId) {
      this.loading = true
      this.documentService
        .getHistory(this._documentId)
        .subscribe((auditLogEntries) => {
          this.entries = auditLogEntries
          this.loading = false
        })
    }
  }

  getPrettyName(type: DataType | string, id: string): Observable<string> {
    switch (type) {
      case DataType.Correspondent:
        return this.correspondentService.getCached(parseInt(id, 10)).pipe(
          first(),
          map((correspondent) => correspondent?.name ?? id)
        )
      case DataType.DocumentType:
        return this.documentTypeService.getCached(parseInt(id, 10)).pipe(
          first(),
          map((documentType) => documentType?.name ?? id)
        )
      case DataType.StoragePath:
        return this.storagePathService.getCached(parseInt(id, 10)).pipe(
          first(),
          map((storagePath) => storagePath?.path ?? id)
        )
      case 'owner':
        return this.userService.getCached(parseInt(id, 10)).pipe(
          first(),
          map((user) => user?.username ?? id)
        )
      default:
        return of(id)
    }
  }
}
@@ -161,6 +161,18 @@ export interface Document extends ObjectWithPermissions {

  duplicate_documents?: Document[]

  // Versioning
  root_document?: number
  versions?: DocumentVersionInfo[]

  // Frontend only
  __changedFields?: string[]
}

export interface DocumentVersionInfo {
  id: number
  added?: Date
  version_label?: string
  checksum?: string
  is_root: boolean
}

@@ -233,6 +233,13 @@ describe(`DocumentService`, () => {
    )
  })

  it('should return the correct preview URL for a specific version', () => {
    const url = service.getPreviewUrl(documents[0].id, false, 123)
    expect(url).toEqual(
      `${environment.apiBaseUrl}${endpoint}/${documents[0].id}/preview/?version=123`
    )
  })

  it('should return the correct thumb URL for a single document', () => {
    let url = service.getThumbUrl(documents[0].id)
    expect(url).toEqual(
@@ -289,6 +296,43 @@ describe(`DocumentService`, () => {
      `${environment.apiBaseUrl}${endpoint}/${documents[0].id}/history/`
    )
  })

  it('should call appropriate api endpoint for getting root document id', () => {
    subscription = service.getRootId(documents[0].id).subscribe()
    const req = httpTestingController.expectOne(
      `${environment.apiBaseUrl}${endpoint}/${documents[0].id}/root/`
    )
    expect(req.request.method).toEqual('GET')
    req.flush({ root_id: documents[0].id })
  })

  it('should call appropriate api endpoint for deleting a document version', () => {
    subscription = service.deleteVersion(documents[0].id, 10).subscribe()
    const req = httpTestingController.expectOne(
      `${environment.apiBaseUrl}${endpoint}/${documents[0].id}/versions/10/`
    )
    expect(req.request.method).toEqual('DELETE')
    req.flush({ result: 'OK', current_version_id: documents[0].id })
  })

  it('should call appropriate api endpoint for uploading a new version', () => {
    const file = new File(['hello'], 'test.pdf', { type: 'application/pdf' })

    subscription = service
      .uploadVersion(documents[0].id, file, 'Label')
      .subscribe()
    const req = httpTestingController.expectOne(
      `${environment.apiBaseUrl}${endpoint}/${documents[0].id}/update_version/`
    )
    expect(req.request.method).toEqual('POST')
    expect(req.request.body).toBeInstanceOf(FormData)

    const body = req.request.body as FormData
    expect(body.get('version_label')).toEqual('Label')
    expect(body.get('document')).toBeInstanceOf(File)

    req.flush('task-id')
  })
})

it('should construct sort fields respecting permissions', () => {

@@ -163,12 +163,19 @@ export class DocumentService extends AbstractPaperlessService<Document> {
    })
  }

  getPreviewUrl(id: number, original: boolean = false): string {
  getPreviewUrl(
    id: number,
    original: boolean = false,
    versionID: number = null
  ): string {
    let url = new URL(this.getResourceUrl(id, 'preview'))
    if (this._searchQuery) url.hash = `#search="${this.searchQuery}"`
    if (original) {
      url.searchParams.append('original', 'true')
    }
    if (versionID) {
      url.searchParams.append('version', versionID.toString())
    }
    return url.toString()
  }

@@ -184,6 +191,38 @@ export class DocumentService extends AbstractPaperlessService<Document> {
    return url
  }

  uploadVersion(documentId: number, file: File, versionLabel?: string) {
    const formData = new FormData()
    formData.append('document', file, file.name)
    if (versionLabel) {
      formData.append('version_label', versionLabel)
    }
    return this.http.post<string>(
      this.getResourceUrl(documentId, 'update_version'),
      formData
    )
  }

  getVersions(documentId: number): Observable<Document> {
    return this.http.get<Document>(this.getResourceUrl(documentId), {
      params: {
        fields: 'id,versions',
      },
    })
  }

  getRootId(documentId: number) {
    return this.http.get<{ root_id: number }>(
      this.getResourceUrl(documentId, 'root')
    )
  }

  deleteVersion(rootDocumentId: number, versionId: number) {
    return this.http.delete<{ result: string; current_version_id: number }>(
      this.getResourceUrl(rootDocumentId, `versions/${versionId}`)
    )
  }

  getNextAsn(): Observable<number> {
    return this.http.get<number>(this.getResourceUrl(null, 'next_asn'))
  }

@@ -89,6 +89,13 @@ export class FileStatus {
  }
}

export enum UploadState {
  Idle = 'idle',
  Uploading = 'uploading',
  Processing = 'processing',
  Failed = 'failed',
}

@Injectable({
  providedIn: 'root',
})

@@ -79,9 +79,11 @@ import {
  eye,
  fileEarmark,
  fileEarmarkCheck,
  fileEarmarkDiff,
  fileEarmarkFill,
  fileEarmarkLock,
  fileEarmarkMinus,
  fileEarmarkPlus,
  fileEarmarkRichtext,
  fileText,
  files,
@@ -297,9 +299,11 @@ const icons = {
  eye,
  fileEarmark,
  fileEarmarkCheck,
  fileEarmarkDiff,
  fileEarmarkFill,
  fileEarmarkLock,
  fileEarmarkMinus,
  fileEarmarkPlus,
  fileEarmarkRichtext,
  files,
  fileText,

@@ -28,6 +28,8 @@ from documents.utils import maybe_override_pixel_limit
from paperless.config import BarcodeConfig

if TYPE_CHECKING:
    from collections.abc import Callable

    from PIL import Image

logger = logging.getLogger("paperless.barcodes")
@@ -260,6 +262,26 @@ class BarcodePlugin(ConsumeTaskPlugin):

        return barcodes

    @staticmethod
    def read_barcodes_pyzbar(image: Image.Image) -> list[str]:
        barcodes = []

        from pyzbar import pyzbar
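        # Local import: pyzbar (and the native libzbar it wraps) is only
        # needed when this reader is selected, so it is not imported globally.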

        # Decode the barcode image
        detected_barcodes = pyzbar.decode(image)

        # Traverse through all the detected barcodes in image
        for barcode in detected_barcodes:
            if barcode.data:
                decoded_barcode = barcode.data.decode("utf-8")
                barcodes.append(decoded_barcode)
                logger.debug(
                    f"Barcode of type {barcode.type} found: {decoded_barcode}",
                )

        return barcodes

    def detect(self) -> None:
        """
        Scan all pages of the PDF as images, updating barcodes and the pages
@@ -272,6 +294,14 @@ class BarcodePlugin(ConsumeTaskPlugin):
        # No op if not a TIFF
        self.convert_from_tiff_to_pdf()

        # Choose the library for reading
        if settings.CONSUMER_BARCODE_SCANNER == "PYZBAR":
            reader: Callable[[Image.Image], list[str]] = self.read_barcodes_pyzbar
            logger.debug("Scanning for barcodes using PYZBAR")
        else:
            reader = self.read_barcodes_zxing
            logger.debug("Scanning for barcodes using ZXING")

        try:
            # Read number of pages from pdf
            with Pdf.open(self.pdf_file) as pdf:
@@ -319,7 +349,7 @@ class BarcodePlugin(ConsumeTaskPlugin):
                )

                # Detect barcodes
                for barcode_value in self.read_barcodes_zxing(page):
                for barcode_value in reader(page):
                    self.barcodes.append(
                        Barcode(current_page_number, barcode_value, self.settings),
                    )

@@ -1,6 +1,5 @@
from __future__ import annotations

import hashlib
import logging
import tempfile
from pathlib import Path
@@ -73,6 +72,48 @@ def restore_archive_serial_numbers(backup: dict[int, int | None]) -> None:
    logger.info(f"Restored archive serial numbers for documents {list(backup.keys())}")


def _get_root_ids_by_doc_id(doc_ids: list[int]) -> dict[int, int]:
    """
    Resolve each provided document id to its root document id.

    - If the id is already a root document: root id is itself.
    - If the id is a version document: root id is its `root_document_id`.
    """
    qs = Document.objects.filter(id__in=doc_ids).only("id", "root_document_id")
    return {doc.id: doc.root_document_id or doc.id for doc in qs}


def _get_root_and_current_docs_by_root_id(
    root_ids: set[int],
) -> tuple[dict[int, Document], dict[int, Document]]:
    """
    Returns:
    - root_docs: root_id -> root Document
    - current_docs: root_id -> newest version Document (or root if none)
    """
    root_docs = {
        doc.id: doc
        for doc in Document.objects.filter(id__in=root_ids).select_related(
            "owner",
        )
    }
    latest_versions_by_root_id: dict[int, Document] = {}
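    # Versions come back newest-first ("-id"), so setdefault keeps the first
    # row seen per root, i.e. its newest version.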
    for version_doc in Document.objects.filter(root_document_id__in=root_ids).order_by(
        "root_document_id",
        "-id",
    ):
        root_id = version_doc.root_document_id
        if root_id is None:
            continue
        latest_versions_by_root_id.setdefault(root_id, version_doc)

    current_docs: dict[int, Document] = {
        root_id: latest_versions_by_root_id.get(root_id, root_docs[root_id])
        for root_id in root_docs
    }
    return root_docs, current_docs


def set_correspondent(
    doc_ids: list[int],
    correspondent: Correspondent,
@@ -309,16 +350,28 @@ def modify_custom_fields(
@shared_task
def delete(doc_ids: list[int]) -> Literal["OK"]:
    try:
        Document.objects.filter(id__in=doc_ids).delete()
        root_ids = (
            Document.objects.filter(id__in=doc_ids, root_document__isnull=True)
            .values_list("id", flat=True)
            .distinct()
        )
        version_ids = (
            Document.objects.filter(root_document_id__in=root_ids)
            .values_list("id", flat=True)
            .distinct()
        )
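        # Deleting a root also removes its version rows, so collect every
        # affected id up front for index cleanup and status notifications.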
        delete_ids = list({*doc_ids, *version_ids})

        Document.objects.filter(id__in=delete_ids).delete()

        from documents import index

        with index.open_index_writer() as writer:
            for id in doc_ids:
            for id in delete_ids:
                index.remove_document_by_id(writer, id)

        status_mgr = DocumentsStatusManager()
        status_mgr.send_documents_deleted(doc_ids)
        status_mgr.send_documents_deleted(delete_ids)
    except Exception as e:
        if "Data too long for column" in str(e):
            logger.warning(
@@ -363,43 +416,60 @@ def set_permissions(
    return "OK"


def rotate(doc_ids: list[int], degrees: int) -> Literal["OK"]:
def rotate(
    doc_ids: list[int],
    degrees: int,
    *,
    user: User | None = None,
) -> Literal["OK"]:
    logger.info(
        f"Attempting to rotate {len(doc_ids)} documents by {degrees} degrees.",
    )
    qs = Document.objects.filter(id__in=doc_ids)
    affected_docs: list[int] = []
    doc_to_root_id = _get_root_ids_by_doc_id(doc_ids)
    root_ids = set(doc_to_root_id.values())
    root_docs_by_id, current_docs_by_root_id = _get_root_and_current_docs_by_root_id(
        root_ids,
    )
    import pikepdf

    rotate_tasks = []
    for doc in qs:
        if doc.mime_type != "application/pdf":
    for root_id in root_ids:
        root_doc = root_docs_by_id[root_id]
        source_doc = current_docs_by_root_id[root_id]
        if source_doc.mime_type != "application/pdf":
            logger.warning(
                f"Document {doc.id} is not a PDF, skipping rotation.",
                f"Document {root_doc.id} is not a PDF, skipping rotation.",
            )
            continue
        try:
            with pikepdf.open(doc.source_path, allow_overwriting_input=True) as pdf:
            # Write rotated output to a temp file and create a new version via consume pipeline
            filepath: Path = (
                Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
                / f"{root_doc.id}_rotated.pdf"
            )
            with pikepdf.open(source_doc.source_path) as pdf:
                for page in pdf.pages:
                    page.rotate(degrees, relative=True)
                pdf.save()
                doc.checksum = hashlib.md5(doc.source_path.read_bytes()).hexdigest()
                doc.save()
                rotate_tasks.append(
                    update_document_content_maybe_archive_file.s(
                        document_id=doc.id,
                pdf.remove_unreferenced_resources()
                pdf.save(filepath)

            # Preserve metadata/permissions via overrides; mark as new version
            overrides = DocumentMetadataOverrides().from_document(root_doc)
            if user is not None:
                overrides.actor_id = user.id

            consume_file.delay(
                ConsumableDocument(
                    source=DocumentSource.ConsumeFolder,
                    original_file=filepath,
                    root_document_id=root_doc.id,
                ),
                overrides,
            )
            logger.info(
                f"Rotated document {doc.id} by {degrees} degrees",
                f"Queued new rotated version for document {root_doc.id} by {degrees} degrees",
            )
            affected_docs.append(doc.id)
        except Exception as e:
            logger.exception(f"Error rotating document {doc.id}: {e}")

    if len(affected_docs) > 0:
        bulk_update_task = bulk_update_documents.si(document_ids=affected_docs)
        chord(header=rotate_tasks, body=bulk_update_task).delay()
            logger.exception(f"Error rotating document {root_doc.id}: {e}")

    return "OK"

@@ -584,30 +654,62 @@ def split(
    return "OK"


def delete_pages(doc_ids: list[int], pages: list[int]) -> Literal["OK"]:
def delete_pages(
    doc_ids: list[int],
    pages: list[int],
    *,
    user: User | None = None,
) -> Literal["OK"]:
    logger.info(
        f"Attempting to delete pages {pages} from {len(doc_ids)} documents",
    )
    doc = Document.objects.get(id=doc_ids[0])
    doc = Document.objects.select_related("root_document").get(id=doc_ids[0])
    root_doc: Document
    if doc.root_document_id is None or doc.root_document is None:
        root_doc = doc
    else:
        root_doc = doc.root_document

    source_doc = (
        Document.objects.filter(Q(id=root_doc.id) | Q(root_document=root_doc))
        .order_by("-id")
        .first()
    )
    if source_doc is None:
        source_doc = root_doc
    pages = sorted(pages)  # sort pages to avoid index issues
    import pikepdf

    try:
        with pikepdf.open(doc.source_path, allow_overwriting_input=True) as pdf:
        # Produce edited PDF to a temp file and create a new version
        filepath: Path = (
            Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
            / f"{root_doc.id}_pages_deleted.pdf"
        )
        with pikepdf.open(source_doc.source_path) as pdf:
            offset = 1  # pages are 1-indexed
            for page_num in pages:
                pdf.pages.remove(pdf.pages[page_num - offset])
                offset += 1  # remove() changes the index of the pages
            pdf.remove_unreferenced_resources()
            pdf.save()
            doc.checksum = hashlib.md5(doc.source_path.read_bytes()).hexdigest()
            if doc.page_count is not None:
                doc.page_count = doc.page_count - len(pages)
            doc.save()
        update_document_content_maybe_archive_file.delay(document_id=doc.id)
        logger.info(f"Deleted pages {pages} from document {doc.id}")
            pdf.save(filepath)

        overrides = DocumentMetadataOverrides().from_document(root_doc)
        if user is not None:
            overrides.actor_id = user.id
        consume_file.delay(
            ConsumableDocument(
                source=DocumentSource.ConsumeFolder,
                original_file=filepath,
                root_document_id=root_doc.id,
            ),
            overrides,
        )
        logger.info(
            f"Queued new version for document {root_doc.id} after deleting pages {pages}",
        )
    except Exception as e:
        logger.exception(f"Error deleting pages from document {doc.id}: {e}")
        logger.exception(f"Error deleting pages from document {root_doc.id}: {e}")

    return "OK"

@@ -632,13 +734,26 @@ def edit_pdf(
    logger.info(
        f"Editing PDF of document {doc_ids[0]} with {len(operations)} operations",
    )
    doc = Document.objects.get(id=doc_ids[0])
    doc = Document.objects.select_related("root_document").get(id=doc_ids[0])
    root_doc: Document
    if doc.root_document_id is None or doc.root_document is None:
        root_doc = doc
    else:
        root_doc = doc.root_document

    source_doc = (
        Document.objects.filter(Q(id=root_doc.id) | Q(root_document=root_doc))
        .order_by("-id")
        .first()
    )
    if source_doc is None:
        source_doc = root_doc
    import pikepdf

    pdf_docs: list[pikepdf.Pdf] = []

    try:
        with pikepdf.open(doc.source_path) as src:
        with pikepdf.open(source_doc.source_path) as src:
            # prepare output documents
            max_idx = max(op.get("doc", 0) for op in operations)
            pdf_docs = [pikepdf.new() for _ in range(max_idx + 1)]
@@ -657,42 +772,56 @@ def edit_pdf(
                    dst.pages[-1].rotate(op["rotate"], relative=True)

        if update_document:
            temp_path = doc.source_path.with_suffix(".tmp.pdf")
            # Create a new version from the edited PDF rather than replacing in-place
            pdf = pdf_docs[0]
            pdf.remove_unreferenced_resources()
            # save the edited PDF to a temporary file in case of errors
            pdf.save(temp_path)
            # replace the original document with the edited one
            temp_path.replace(doc.source_path)
            doc.checksum = hashlib.md5(doc.source_path.read_bytes()).hexdigest()
            doc.page_count = len(pdf.pages)
            doc.save()
            update_document_content_maybe_archive_file.delay(document_id=doc.id)
        else:
            consume_tasks = []
            filepath: Path = (
                Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
                / f"{root_doc.id}_edited.pdf"
            )
            pdf.save(filepath)
            overrides = (
                DocumentMetadataOverrides().from_document(doc)
                DocumentMetadataOverrides().from_document(root_doc)
                if include_metadata
                else DocumentMetadataOverrides()
            )
            if user is not None:
                overrides.owner_id = user.id
                overrides.actor_id = user.id
            consume_file.delay(
                ConsumableDocument(
                    source=DocumentSource.ConsumeFolder,
                    original_file=filepath,
                    root_document_id=root_doc.id,
                ),
                overrides,
            )
        else:
            consume_tasks = []
            overrides = (
                DocumentMetadataOverrides().from_document(root_doc)
                if include_metadata
                else DocumentMetadataOverrides()
            )
            if user is not None:
                overrides.owner_id = user.id
                overrides.actor_id = user.id
            if not delete_original:
                overrides.skip_asn_if_exists = True
            if delete_original and len(pdf_docs) == 1:
                overrides.asn = doc.archive_serial_number
                overrides.asn = root_doc.archive_serial_number
            for idx, pdf in enumerate(pdf_docs, start=1):
                filepath: Path = (
                version_filepath: Path = (
                    Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
                    / f"{doc.id}_edit_{idx}.pdf"
                    / f"{root_doc.id}_edit_{idx}.pdf"
                )
                pdf.remove_unreferenced_resources()
                pdf.save(filepath)
                pdf.save(version_filepath)
                consume_tasks.append(
                    consume_file.s(
                        ConsumableDocument(
                            source=DocumentSource.ConsumeFolder,
                            original_file=filepath,
                            original_file=version_filepath,
                        ),
                        overrides,
                    ),
@@ -714,7 +843,7 @@ def edit_pdf(
            group(consume_tasks).delay()

    except Exception as e:
        logger.exception(f"Error editing document {doc.id}: {e}")
        logger.exception(f"Error editing document {root_doc.id}: {e}")
        raise ValueError(
            f"An error occurred while editing the document: {e}",
        ) from e
@@ -737,38 +866,61 @@ def remove_password(
    import pikepdf

    for doc_id in doc_ids:
        doc = Document.objects.get(id=doc_id)
        doc = Document.objects.select_related("root_document").get(id=doc_id)
        root_doc: Document
        if doc.root_document_id is None or doc.root_document is None:
            root_doc = doc
        else:
            root_doc = doc.root_document

        source_doc = (
            Document.objects.filter(Q(id=root_doc.id) | Q(root_document=root_doc))
            .order_by("-id")
            .first()
        )
        if source_doc is None:
            source_doc = root_doc
        try:
            logger.info(
                f"Attempting password removal from document {doc_ids[0]}",
            )
            with pikepdf.open(doc.source_path, password=password) as pdf:
                temp_path = doc.source_path.with_suffix(".tmp.pdf")
            with pikepdf.open(source_doc.source_path, password=password) as pdf:
                filepath: Path = (
                    Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
                    / f"{root_doc.id}_unprotected.pdf"
                )
                pdf.remove_unreferenced_resources()
                pdf.save(temp_path)
                pdf.save(filepath)

            if update_document:
                # replace the original document with the unprotected one
                temp_path.replace(doc.source_path)
                doc.checksum = hashlib.md5(doc.source_path.read_bytes()).hexdigest()
                doc.page_count = len(pdf.pages)
                doc.save()
                update_document_content_maybe_archive_file.delay(document_id=doc.id)
            else:
                consume_tasks = []
                # Create a new version rather than modifying the root/original in place.
                overrides = (
                    DocumentMetadataOverrides().from_document(doc)
                    DocumentMetadataOverrides().from_document(root_doc)
                    if include_metadata
                    else DocumentMetadataOverrides()
                )
                if user is not None:
                    overrides.owner_id = user.id

                filepath: Path = (
                    Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
                    / f"{doc.id}_unprotected.pdf"
                    overrides.actor_id = user.id
                consume_file.delay(
                    ConsumableDocument(
                        source=DocumentSource.ConsumeFolder,
                        original_file=filepath,
                        root_document_id=root_doc.id,
                    ),
                    overrides,
                )
                temp_path.replace(filepath)
            else:
                consume_tasks = []
                overrides = (
                    DocumentMetadataOverrides().from_document(root_doc)
                    if include_metadata
                    else DocumentMetadataOverrides()
                )
                if user is not None:
                    overrides.owner_id = user.id
                    overrides.actor_id = user.id

                consume_tasks.append(
                    consume_file.s(
                        ConsumableDocument(
@@ -780,12 +932,17 @@ def remove_password(
                )

        if delete_original:
            chord(header=consume_tasks, body=delete.si([doc.id])).delay()
            chord(
                header=consume_tasks,
                body=delete.si([doc.id]),
            ).delay()
        else:
            group(consume_tasks).delay()

    except Exception as e:
        logger.exception(f"Error removing password from document {doc.id}: {e}")
        logger.exception(
            f"Error removing password from document {root_doc.id}: {e}",
        )
        raise ValueError(
            f"An error occurred while removing the password: {e}",
        ) from e

@@ -1,5 +1,6 @@
from datetime import datetime
from datetime import timezone
from typing import Any

from django.conf import settings
from django.core.cache import cache
@@ -14,6 +15,59 @@ from documents.classifier import DocumentClassifier
from documents.models import Document


def _resolve_effective_doc(pk: int, request) -> Document | None:
    """
    Resolve which Document row should be considered for caching keys:
    - If a version is requested, use that version
    - If pk is a root doc, use its newest child version if present, else the root.
    - Else, pk is a version, use that version.
    Returns None if resolution fails (treat as no-cache).
    """
    try:
        request_doc = Document.objects.only("id", "root_document_id").get(pk=pk)
    except Document.DoesNotExist:
        return None

    root_doc = (
        request_doc
        if request_doc.root_document_id is None
        else Document.objects.only("id").get(id=request_doc.root_document_id)
    )

    version_param = (
        request.query_params.get("version")
        if hasattr(request, "query_params")
        else None
    )
    if version_param:
        try:
            version_id = int(version_param)
            candidate = Document.objects.only("id", "root_document_id").get(
                id=version_id,
            )
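            # The requested version must belong to this document's family
            # (be the root itself or one of its versions); otherwise no caching.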
            if (
                candidate.id != root_doc.id
                and candidate.root_document_id != root_doc.id
            ):
                return None
            return candidate
        except Exception:
            return None

    # Default behavior: if pk is a root doc, prefer its newest child version
    if request_doc.root_document_id is None:
        latest = (
            Document.objects.filter(root_document=root_doc)
            .only("id")
            .order_by("id")
            .last()
        )
        return latest or root_doc

    # pk is already a version
    return request_doc


def suggestions_etag(request, pk: int) -> str | None:
    """
    Returns an optional string for the ETag, allowing browser caching of
@@ -71,12 +125,10 @@ def metadata_etag(request, pk: int) -> str | None:
    Metadata is extracted from the original file, so use its checksum as the
    ETag
    """
    try:
        doc = Document.objects.only("checksum").get(pk=pk)
    doc = _resolve_effective_doc(pk, request)
    if doc is None:
        return None
    return doc.checksum
    except Document.DoesNotExist:  # pragma: no cover
        return None
    return None


def metadata_last_modified(request, pk: int) -> datetime | None:
@@ -85,28 +137,25 @@ def metadata_last_modified(request, pk: int) -> datetime | None:
    not the modification of the original file, but of the database object, but might as well
    error on the side of more cautious
    """
    try:
        doc = Document.objects.only("modified").get(pk=pk)
    doc = _resolve_effective_doc(pk, request)
    if doc is None:
        return None
    return doc.modified
    except Document.DoesNotExist:  # pragma: no cover
        return None
    return None


def preview_etag(request, pk: int) -> str | None:
    """
    ETag for the document preview, using the original or archive checksum, depending on the request
    """
    try:
        doc = Document.objects.only("checksum", "archive_checksum").get(pk=pk)
    doc = _resolve_effective_doc(pk, request)
    if doc is None:
        return None
    use_original = (
        "original" in request.query_params
        hasattr(request, "query_params")
        and "original" in request.query_params
        and request.query_params["original"] == "true"
    )
    return doc.checksum if use_original else doc.archive_checksum
    except Document.DoesNotExist:  # pragma: no cover
        return None
    return None


def preview_last_modified(request, pk: int) -> datetime | None:
@@ -114,24 +163,25 @@ def preview_last_modified(request, pk: int) -> datetime | None:
    Uses the documents modified time to set the Last-Modified header. Not strictly
    speaking correct, but close enough and quick
    """
    try:
        doc = Document.objects.only("modified").get(pk=pk)
    doc = _resolve_effective_doc(pk, request)
    if doc is None:
        return None
    return doc.modified
    except Document.DoesNotExist:  # pragma: no cover
        return None
    return None


def thumbnail_last_modified(request, pk: int) -> datetime | None:
def thumbnail_last_modified(request: Any, pk: int) -> datetime | None:
    """
    Returns the filesystem last modified either from cache or from filesystem.
    Cache should be (slightly?) faster than filesystem
    """
    try:
        doc = Document.objects.only("pk").get(pk=pk)
    doc = _resolve_effective_doc(pk, request)
    if doc is None:
        return None
    if not doc.thumbnail_path.exists():
        return None
    doc_key = get_thumbnail_modified_key(pk)
    # Use the effective document id for cache key
    doc_key = get_thumbnail_modified_key(doc.id)

    cache_hit = cache.get(doc_key)
    if cache_hit is not None:

@@ -102,6 +102,12 @@ class ConsumerStatusShortMessage(str, Enum):


class ConsumerPluginMixin:
    if TYPE_CHECKING:
        from logging import Logger
        from logging import LoggerAdapter

        log: "LoggerAdapter"  # type: ignore[type-arg]

    def __init__(
        self,
        input_doc: ConsumableDocument,
@@ -116,6 +122,14 @@ class ConsumerPluginMixin:

        self.filename = self.metadata.filename or self.input_doc.original_file.name

        if input_doc.root_document_id:
            self.log.debug(
                f"Document root document id: {input_doc.root_document_id}",
            )
            root_document = Document.objects.get(pk=input_doc.root_document_id)
            version_index = Document.objects.filter(root_document=root_document).count()
            self.filename += f"_v{version_index}"

    def _send_progress(
        self,
        current_progress: int,
@@ -477,6 +491,80 @@ class ConsumerPlugin(
        try:
            with transaction.atomic():
                # store the document.
                if self.input_doc.root_document_id:
                    # If this is a new version of an existing document, we need
                    # to make sure we're not creating a new document, but updating
                    # the existing one.
                    root_doc = Document.objects.get(
                        pk=self.input_doc.root_document_id,
                    )
                    original_document = Document.objects.get(
                        pk=self.input_doc.root_document_id,
                    )
                    self.log.debug("Saving record for updated version to database")
                    setattr(original_document, "pk", None)
                    original_document.root_document = root_doc
                    file_for_checksum = (
                        self.unmodified_original
                        if self.unmodified_original is not None
                        else self.working_copy
                    )
                    original_document.checksum = hashlib.md5(
                        file_for_checksum.read_bytes(),
                    ).hexdigest()
                    original_document.content = ""
                    original_document.page_count = page_count
                    original_document.mime_type = mime_type
                    original_document.original_filename = self.filename
                    original_document.storage_path = root_doc.storage_path
                    # Clear unique file path fields so they can be generated uniquely later
                    original_document.filename = None
                    original_document.archive_filename = None
                    original_document.archive_checksum = None
                    if self.metadata.version_label is not None:
                        original_document.version_label = self.metadata.version_label
                    original_document.added = timezone.now()
                    original_document.modified = timezone.now()
                    actor = None

                    # Save the new version, potentially creating an audit log entry for the version addition if enabled.
                    if (
                        settings.AUDIT_LOG_ENABLED
                        and self.metadata.actor_id is not None
                    ):
                        actor = User.objects.filter(pk=self.metadata.actor_id).first()
                        if actor is not None:
                            from auditlog.context import (  # type: ignore[import-untyped]
                                set_actor,
                            )

                            with set_actor(actor):
                                original_document.save()
                        else:
                            original_document.save()
                    else:
                        original_document.save()

                    # Create a log entry for the version addition, if enabled
                    if settings.AUDIT_LOG_ENABLED:
                        from auditlog.models import (  # type: ignore[import-untyped]
                            LogEntry,
                        )

                        LogEntry.objects.log_create(
                            instance=root_doc,
                            changes={
                                "Version Added": ["None", original_document.id],
                            },
                            action=LogEntry.Action.UPDATE,
                            actor=actor,
                            additional_data={
                                "reason": "Version added",
                                "version_id": original_document.id,
                            },
                        )
                    document = original_document
                else:
                    document = self._store(
                        text=text,
                        date=date,
@@ -700,6 +788,9 @@ class ConsumerPlugin(
        if self.metadata.asn is not None:
            document.archive_serial_number = self.metadata.asn

        if self.metadata.version_label is not None:
            document.version_label = self.metadata.version_label

        if self.metadata.owner_id:
            document.owner = User.objects.get(
                pk=self.metadata.owner_id,

@@ -31,6 +31,8 @@ class DocumentMetadataOverrides:
    change_groups: list[int] | None = None
    custom_fields: dict | None = None
    skip_asn_if_exists: bool = False
    version_label: str | None = None
    actor_id: int | None = None

    def update(self, other: "DocumentMetadataOverrides") -> "DocumentMetadataOverrides":
        """
@@ -50,8 +52,12 @@ class DocumentMetadataOverrides:
        self.storage_path_id = other.storage_path_id
        if other.owner_id is not None:
            self.owner_id = other.owner_id
        if other.actor_id is not None:
            self.actor_id = other.actor_id
        if other.skip_asn_if_exists:
            self.skip_asn_if_exists = True
        if other.version_label is not None:
            self.version_label = other.version_label

        # merge
        if self.tag_ids is None:
@@ -160,6 +166,7 @@ class ConsumableDocument:

    source: DocumentSource
    original_file: Path
    root_document_id: int | None = None
    original_path: Path | None = None
    mailrule_id: int | None = None
    mime_type: str = dataclasses.field(init=False, default=None)

src/documents/migrations/0012_document_root_document.py (new file, 37 lines)
@@ -0,0 +1,37 @@
# Generated by Django 5.1.6 on 2025-02-26 17:08

import django.db.models.deletion
from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0011_optimize_integer_field_sizes"),
    ]

    operations = [
        migrations.AddField(
            model_name="document",
            name="root_document",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="versions",
                to="documents.document",
                verbose_name="root document for this version",
            ),
        ),
        migrations.AddField(
            model_name="document",
            name="version_label",
            field=models.CharField(
                blank=True,
                help_text="Optional short label for a document version.",
                max_length=64,
                null=True,
                verbose_name="version label",
            ),
        ),
    ]
@@ -155,7 +155,7 @@ class StoragePath(MatchingModel):
        verbose_name_plural = _("storage paths")


class Document(SoftDeleteModel, ModelWithOwner):
class Document(SoftDeleteModel, ModelWithOwner):  # type: ignore[django-manager-missing]
    correspondent = models.ForeignKey(
        Correspondent,
        blank=True,
@@ -308,6 +308,23 @@ class Document(SoftDeleteModel, ModelWithOwner):
        ),
    )

    root_document = models.ForeignKey(
        "self",
        blank=True,
        null=True,
        related_name="versions",
        on_delete=models.CASCADE,
        verbose_name=_("root document for this version"),
    )

    version_label = models.CharField(
        _("version label"),
        max_length=64,
        blank=True,
        null=True,
        help_text=_("Optional short label for a document version."),
    )

    class Meta:
        ordering = ("-created",)
        verbose_name = _("document")
@@ -419,6 +436,19 @@ class Document(SoftDeleteModel, ModelWithOwner):
        tags_to_add = self.tags.model.objects.filter(id__in=tag_ids)
        self.tags.add(*tags_to_add)

    def delete(
        self,
        *args,
        **kwargs,
    ):
        # If deleting a root document, move all its versions to trash as well.
        if self.root_document_id is None:
            Document.objects.filter(root_document=self).delete()
        return super().delete(
            *args,
            **kwargs,
        )


class SavedView(ModelWithOwner):
    class DisplayMode(models.TextChoices):
@@ -1712,5 +1742,5 @@ class WorkflowRun(SoftDeleteModel):
        verbose_name = _("workflow run")
        verbose_name_plural = _("workflow runs")

    def __str__(self):
    def __str__(self) -> str:
        return f"WorkflowRun of {self.workflow} at {self.run_at} on {self.document}"

@@ -7,7 +7,9 @@ from datetime import datetime
from datetime import timedelta
from decimal import Decimal
from typing import TYPE_CHECKING
from typing import Any
from typing import Literal
from typing import TypedDict

import magic
from celery import states
@@ -89,6 +91,8 @@ if TYPE_CHECKING:
    from collections.abc import Iterable

    from django.db.models.query import QuerySet
    from rest_framework.relations import ManyRelatedField
    from rest_framework.relations import RelatedField


logger = logging.getLogger("paperless.serializers")
@@ -1046,6 +1050,7 @@ def _get_viewable_duplicates(
    duplicates = Document.global_objects.filter(
        Q(checksum__in=checksums) | Q(archive_checksum__in=checksums),
    ).exclude(pk=document.pk)
    duplicates = duplicates.filter(root_document__isnull=True)
    duplicates = duplicates.order_by("-created")
    allowed = get_objects_for_user_owner_aware(
        user,
@@ -1062,6 +1067,22 @@ class DuplicateDocumentSummarySerializer(serializers.Serializer):
    deleted_at = serializers.DateTimeField(allow_null=True)


class DocumentVersionInfoSerializer(serializers.Serializer):
    id = serializers.IntegerField()
    added = serializers.DateTimeField()
    version_label = serializers.CharField(required=False, allow_null=True)
    checksum = serializers.CharField(required=False, allow_null=True)
    is_root = serializers.BooleanField()


class _DocumentVersionInfo(TypedDict):
    id: int
    added: datetime
    version_label: str | None
    checksum: str | None
    is_root: bool


@extend_schema_serializer(
    deprecate_fields=["created_date"],
)
@@ -1082,6 +1103,10 @@ class DocumentSerializer(
    duplicate_documents = SerializerMethodField()

    notes = NotesSerializer(many=True, required=False, read_only=True)
    root_document: RelatedField[Document, Document, Any] | ManyRelatedField = (
        serializers.PrimaryKeyRelatedField(read_only=True)
    )
    versions = SerializerMethodField()

    custom_fields = CustomFieldInstanceSerializer(
        many=True,
@@ -1115,6 +1140,32 @@ class DocumentSerializer(
        duplicates = _get_viewable_duplicates(obj, user)
        return list(duplicates.values("id", "title", "deleted_at"))

    @extend_schema_field(DocumentVersionInfoSerializer(many=True))
    def get_versions(self, obj):
        root_doc = obj if obj.root_document_id is None else obj.root_document
        if root_doc is None:
            return []
        versions_qs = Document.objects.filter(root_document=root_doc).only(
            "id",
            "added",
            "checksum",
            "version_label",
        )
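        # The root document itself counts as a version entry alongside its children.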
        versions = [*versions_qs, root_doc]

        def build_info(doc: Document) -> _DocumentVersionInfo:
            return {
                "id": doc.id,
                "added": doc.added,
                "version_label": doc.version_label,
                "checksum": doc.checksum,
                "is_root": doc.id == root_doc.id,
            }

        info = [build_info(doc) for doc in versions]
        info.sort(key=lambda item: item["id"], reverse=True)
        return info

    def get_original_file_name(self, obj) -> str | None:
        return obj.original_filename

@@ -1303,6 +1354,8 @@ class DocumentSerializer(
            "remove_inbox_tags",
            "page_count",
            "mime_type",
            "root_document",
            "versions",
        )
        list_serializer_class = OwnedObjectListSerializer

@@ -1997,6 +2050,22 @@ class PostDocumentSerializer(serializers.Serializer):
        return created.date()


class DocumentVersionSerializer(serializers.Serializer):
    document = serializers.FileField(
        label="Document",
        write_only=True,
    )
    version_label = serializers.CharField(
        label="Version label",
        required=False,
        allow_blank=True,
        allow_null=True,
        max_length=64,
    )

    validate_document = PostDocumentSerializer().validate_document


class BulkDownloadSerializer(DocumentListSerializer):
    content = serializers.ChoiceField(
        choices=["archive", "originals", "both"],
@@ -2196,7 +2265,7 @@ class TasksViewSerializer(OwnedObjectSerializer):
        return list(duplicates.values("id", "title", "deleted_at"))


class RunTaskViewSerializer(serializers.Serializer):
class RunTaskViewSerializer(serializers.Serializer[dict[str, Any]]):
    task_name = serializers.ChoiceField(
        choices=PaperlessTask.TaskName.choices,
        label="Task Name",
@@ -2204,7 +2273,7 @@ class RunTaskViewSerializer(serializers.Serializer):
    )


class AcknowledgeTasksViewSerializer(serializers.Serializer):
class AcknowledgeTasksViewSerializer(serializers.Serializer[dict[str, Any]]):
    tasks = serializers.ListField(
        required=True,
        label="Tasks",
@@ -2951,7 +3020,7 @@ class TrashSerializer(SerializerWithPerms):
        write_only=True,
    )

    def validate_documents(self, documents):
    def validate_documents(self, documents: list[int]) -> list[int]:
        count = Document.deleted_objects.filter(id__in=documents).count()
        if not count == len(documents):
            raise serializers.ValidationError(

@@ -156,7 +156,10 @@ def consume_file(
    if overrides is None:
        overrides = DocumentMetadataOverrides()

    plugins: list[type[ConsumeTaskPlugin]] = [
    plugins: list[type[ConsumeTaskPlugin]] = (
        [ConsumerPreflightPlugin, ConsumerPlugin]
        if input_doc.root_document_id is not None
        else [
            ConsumerPreflightPlugin,
            AsnCheckPlugin,
            CollatePlugin,
@@ -165,6 +168,7 @@ def consume_file(
            WorkflowTriggerPlugin,
            ConsumerPlugin,
        ]
    )

    with (
        ProgressManager(

src/documents/tests/test_api_document_versions.py (new file, 257 lines)
@@ -0,0 +1,257 @@
from __future__ import annotations

from unittest import mock

from auditlog.models import LogEntry  # type: ignore[import-untyped]
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.core.files.uploadedfile import SimpleUploadedFile
from rest_framework import status
from rest_framework.test import APITestCase

from documents.data_models import DocumentSource
from documents.models import Document
from documents.tests.utils import DirectoriesMixin


class TestDocumentVersioningApi(DirectoriesMixin, APITestCase):
    def setUp(self) -> None:
        super().setUp()

        self.user = User.objects.create_superuser(username="temp_admin")
        self.client.force_authenticate(user=self.user)

    def _make_pdf_upload(self, name: str = "version.pdf") -> SimpleUploadedFile:
        return SimpleUploadedFile(
            name,
            b"%PDF-1.4\n1 0 obj\n<<>>\nendobj\n%%EOF",
            content_type="application/pdf",
        )

    def test_root_endpoint_returns_root_for_version_and_root(self) -> None:
        root = Document.objects.create(
            title="root",
            checksum="root",
            mime_type="application/pdf",
        )
        version = Document.objects.create(
            title="v1",
            checksum="v1",
            mime_type="application/pdf",
            root_document=root,
        )

        resp_root = self.client.get(f"/api/documents/{root.id}/root/")
        self.assertEqual(resp_root.status_code, status.HTTP_200_OK)
        self.assertEqual(resp_root.data["root_id"], root.id)

        resp_version = self.client.get(f"/api/documents/{version.id}/root/")
        self.assertEqual(resp_version.status_code, status.HTTP_200_OK)
        self.assertEqual(resp_version.data["root_id"], root.id)

    def test_delete_version_disallows_deleting_root(self) -> None:
        root = Document.objects.create(
            title="root",
            checksum="root",
            mime_type="application/pdf",
        )

        with mock.patch("documents.index.remove_document_from_index"):
            resp = self.client.delete(f"/api/documents/{root.id}/versions/{root.id}/")

        self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertTrue(Document.objects.filter(id=root.id).exists())

    def test_delete_version_deletes_version_and_returns_current_version(self) -> None:
        root = Document.objects.create(
            title="root",
            checksum="root",
            mime_type="application/pdf",
        )
        v1 = Document.objects.create(
            title="v1",
            checksum="v1",
            mime_type="application/pdf",
            root_document=root,
        )
        v2 = Document.objects.create(
            title="v2",
            checksum="v2",
            mime_type="application/pdf",
            root_document=root,
        )

        with mock.patch("documents.index.remove_document_from_index"):
            resp = self.client.delete(f"/api/documents/{root.id}/versions/{v2.id}/")

        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertFalse(Document.objects.filter(id=v2.id).exists())
        self.assertEqual(resp.data["current_version_id"], v1.id)

        with mock.patch("documents.index.remove_document_from_index"):
            resp = self.client.delete(f"/api/documents/{root.id}/versions/{v1.id}/")

        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertFalse(Document.objects.filter(id=v1.id).exists())
        self.assertEqual(resp.data["current_version_id"], root.id)

    def test_delete_version_writes_audit_log_entry(self) -> None:
        root = Document.objects.create(
            title="root",
            checksum="root",
            mime_type="application/pdf",
        )
        version = Document.objects.create(
            title="v1",
            checksum="v1",
            mime_type="application/pdf",
            root_document=root,
        )
        version_id = version.id

        with mock.patch("documents.index.remove_document_from_index"):
            resp = self.client.delete(
                f"/api/documents/{root.id}/versions/{version_id}/",
            )

        self.assertEqual(resp.status_code, status.HTTP_200_OK)

        # Audit log entry is created against the root document.
        entry = (
            LogEntry.objects.filter(
                content_type=ContentType.objects.get_for_model(Document),
                object_id=root.id,
            )
            .order_by("-timestamp")
            .first()
        )
        self.assertIsNotNone(entry)
        assert entry is not None
        self.assertIsNotNone(entry.actor)
        assert entry.actor is not None
        self.assertEqual(entry.actor.id, self.user.id)
        self.assertEqual(entry.action, LogEntry.Action.UPDATE)
        self.assertEqual(
            entry.changes,
            {"Version Deleted": ["None", version_id]},
        )
        additional_data = entry.additional_data or {}
        self.assertEqual(additional_data.get("version_id"), version_id)

    def test_delete_version_returns_404_when_version_not_related(self) -> None:
        root = Document.objects.create(
            title="root",
            checksum="root",
            mime_type="application/pdf",
        )
        other_root = Document.objects.create(
            title="other",
            checksum="other",
            mime_type="application/pdf",
        )
        other_version = Document.objects.create(
            title="other-v1",
            checksum="other-v1",
            mime_type="application/pdf",
            root_document=other_root,
        )

        with mock.patch("documents.index.remove_document_from_index"):
            resp = self.client.delete(
                f"/api/documents/{root.id}/versions/{other_version.id}/",
            )

        self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)

    def test_delete_version_accepts_version_id_as_root_parameter(self) -> None:
        root = Document.objects.create(
            title="root",
            checksum="root",
            mime_type="application/pdf",
        )
        version = Document.objects.create(
            title="v1",
            checksum="v1",
            mime_type="application/pdf",
            root_document=root,
        )

        with mock.patch("documents.index.remove_document_from_index"):
            resp = self.client.delete(
                f"/api/documents/{version.id}/versions/{version.id}/",
            )

        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertFalse(Document.objects.filter(id=version.id).exists())
        self.assertEqual(resp.data["current_version_id"], root.id)

    def test_update_version_enqueues_consume_with_overrides(self) -> None:
        root = Document.objects.create(
            title="root",
            checksum="root",
            mime_type="application/pdf",
        )
        upload = self._make_pdf_upload()

        async_task = mock.Mock()
        async_task.id = "task-123"

        with mock.patch("documents.views.consume_file") as consume_mock:
            consume_mock.delay.return_value = async_task
            resp = self.client.post(
                f"/api/documents/{root.id}/update_version/",
                {"document": upload, "version_label": " New Version "},
                format="multipart",
            )

        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(resp.data, "task-123")
        consume_mock.delay.assert_called_once()
        input_doc, overrides = consume_mock.delay.call_args[0]
        self.assertEqual(input_doc.root_document_id, root.id)
        self.assertEqual(input_doc.source, DocumentSource.ApiUpload)
        self.assertEqual(overrides.version_label, "New Version")
        self.assertEqual(overrides.actor_id, self.user.id)

    def test_update_version_returns_403_without_permission(self) -> None:
        owner = User.objects.create_user(username="owner")
        other = User.objects.create_user(username="other")
        root = Document.objects.create(
            title="root",
            checksum="root",
            mime_type="application/pdf",
            owner=owner,
        )
        self.client.force_authenticate(user=other)

        resp = self.client.post(
            f"/api/documents/{root.id}/update_version/",
            {"document": self._make_pdf_upload()},
            format="multipart",
        )

        self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)

    def test_update_version_returns_404_for_missing_document(self) -> None:
        resp = self.client.post(
            "/api/documents/9999/update_version/",
            {"document": self._make_pdf_upload()},
            format="multipart",
        )

        self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)

    def test_update_version_requires_document(self) -> None:
        root = Document.objects.create(
            title="root",
            checksum="root",
            mime_type="application/pdf",
        )

        resp = self.client.post(
            f"/api/documents/{root.id}/update_version/",
            {"version_label": "label"},
            format="multipart",
        )

        self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
@@ -554,6 +554,36 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
|
||||
self.assertIsNone(response.data[1]["actor"])
|
||||
self.assertEqual(response.data[1]["action"], "create")
|
||||
|
||||
def test_document_history_logs_version_deletion(self) -> None:
|
||||
root_doc = Document.objects.create(
|
||||
title="Root",
|
||||
checksum="123",
|
||||
mime_type="application/pdf",
|
||||
owner=self.user,
|
||||
)
|
||||
version_doc = Document.objects.create(
|
||||
title="Version",
|
||||
checksum="456",
|
||||
mime_type="application/pdf",
|
||||
root_document=root_doc,
|
||||
owner=self.user,
|
||||
)
|
||||
|
||||
response = self.client.delete(
|
||||
f"/api/documents/{root_doc.pk}/versions/{version_doc.pk}/",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
|
||||
response = self.client.get(f"/api/documents/{root_doc.pk}/history/")
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(len(response.data), 2)
|
||||
self.assertEqual(response.data[0]["actor"]["id"], self.user.id)
|
||||
self.assertEqual(response.data[0]["action"], "update")
|
||||
self.assertEqual(
|
||||
response.data[0]["changes"],
|
||||
{"Version Deleted": ["None", version_doc.pk]},
|
||||
)
|
||||
|
||||
@override_settings(AUDIT_LOG_ENABLED=False)
|
||||
def test_document_history_action_disabled(self) -> None:
|
||||
"""
|
||||
|
||||
@@ -4,6 +4,7 @@ from contextlib import contextmanager
from pathlib import Path
from unittest import mock

import pytest
from django.conf import settings
from django.test import TestCase
from django.test import override_settings
@@ -24,6 +25,13 @@ from documents.tests.utils import FileSystemAssertsMixin
from documents.tests.utils import SampleDirMixin
from paperless.models import ApplicationConfiguration

try:
    import zxingcpp  # noqa: F401

    HAS_ZXING_LIB = True
except ImportError:
    HAS_ZXING_LIB = False


class GetReaderPluginMixin:
    @contextmanager
@@ -40,6 +48,7 @@ class GetReaderPluginMixin:
        reader.cleanup()


@override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR")
class TestBarcode(
    DirectoriesMixin,
    FileSystemAssertsMixin,
@@ -597,6 +606,7 @@ class TestBarcode(
        self.assertDictEqual(separator_page_numbers, {0: False})


@override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR")
class TestBarcodeNewConsume(
    DirectoriesMixin,
    FileSystemAssertsMixin,
@@ -774,23 +784,25 @@ class TestAsnBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes

        self.assertEqual(document.archive_serial_number, 123)

    @override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR")
    def test_scan_file_for_qrcode_without_upscale(self) -> None:
        """
        GIVEN:
            - A printed and scanned PDF document with a rather small QR code
        WHEN:
            - ASN barcode detection is run with default settings
            - pyzbar is used for detection, as zxing would behave differently, and detect the QR code
        THEN:
            - ASN 123 is detected
            - ASN is not detected
        """

        test_file = self.BARCODE_SAMPLE_DIR / "barcode-qr-asn-000123-upscale-dpi.pdf"

        with self.get_reader(test_file) as reader:
            reader.detect()
            self.assertEqual(len(reader.barcodes), 1)
            self.assertEqual(reader.asn, 123)
            self.assertEqual(len(reader.barcodes), 0)

    @override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR")
    @override_settings(CONSUMER_BARCODE_DPI=600)
    @override_settings(CONSUMER_BARCODE_UPSCALE=1.5)
    def test_scan_file_for_qrcode_with_upscale(self) -> None:
@@ -798,7 +810,10 @@ class TestAsnBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
        GIVEN:
            - A printed and scanned PDF document with a rather small QR code
        WHEN:
            - ASN barcode detection is run with 600dpi and an upscale factor of 1.5
            - ASN barcode detection is run with 600dpi and an upscale factor of 1.5 and pyzbar
            - pyzbar is used for detection, as zxing would behave differently.
              Upscaling is a workaround for detection problems with pyzbar,
              when you cannot switch to zxing (aarch64 build problems of zxing)
        THEN:
            - ASN 123 is detected
        """
@@ -811,6 +826,24 @@ class TestAsnBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, Tes
            self.assertEqual(reader.asn, 123)


@pytest.mark.skipif(
    not HAS_ZXING_LIB,
    reason="No zxingcpp",
)
@override_settings(CONSUMER_BARCODE_SCANNER="ZXING")
class TestBarcodeZxing(TestBarcode):
    pass


@pytest.mark.skipif(
    not HAS_ZXING_LIB,
    reason="No zxingcpp",
)
@override_settings(CONSUMER_BARCODE_SCANNER="ZXING")
class TestAsnBarcodesZxing(TestAsnBarcode):
    pass


class TestTagBarcode(DirectoriesMixin, SampleDirMixin, GetReaderPluginMixin, TestCase):
    @contextmanager
    def get_reader(self, filepath: Path) -> BarcodePlugin:
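The two scanner backends exercised by these tests have similar but not identical Python APIs. A minimal decoding sketch for both, assuming a PIL image of one rendered page (pdf2image is one way to produce it; the names come from the upstream pyzbar and zxing-cpp bindings):

# Minimal sketch: decode one rendered page image with either backend.
from PIL import Image
from pyzbar import pyzbar
import zxingcpp

img = Image.open("page.png")  # a rendered PDF page, e.g. via pdf2image

# pyzbar returns Decoded tuples; the payload arrives as raw bytes.
for barcode in pyzbar.decode(img):
    print(barcode.type, barcode.data.decode("utf-8"))

# zxing-cpp returns Result objects; the payload is already a str.
for result in zxingcpp.read_barcodes(img):
    print(result.format, result.text)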
@@ -1,4 +1,3 @@
import hashlib
import shutil
from datetime import date
from pathlib import Path
@@ -922,15 +921,8 @@ class TestPDFActions(DirectoriesMixin, TestCase):

        mock_consume_file.assert_not_called()

    @mock.patch("documents.tasks.bulk_update_documents.si")
    @mock.patch("documents.tasks.update_document_content_maybe_archive_file.s")
    @mock.patch("celery.chord.delay")
    def test_rotate(
        self,
        mock_chord,
        mock_update_document,
        mock_update_documents,
    ) -> None:
    @mock.patch("documents.tasks.consume_file.delay")
    def test_rotate(self, mock_consume_delay):
        """
        GIVEN:
            - Existing documents
@@ -941,19 +933,22 @@ class TestPDFActions(DirectoriesMixin, TestCase):
        """
        doc_ids = [self.doc1.id, self.doc2.id]
        result = bulk_edit.rotate(doc_ids, 90)
        self.assertEqual(mock_update_document.call_count, 2)
        mock_update_documents.assert_called_once()
        mock_chord.assert_called_once()
        self.assertEqual(mock_consume_delay.call_count, 2)
        for call, expected_id in zip(
            mock_consume_delay.call_args_list,
            doc_ids,
        ):
            consumable, overrides = call.args
            self.assertEqual(consumable.root_document_id, expected_id)
            self.assertIsNotNone(overrides)
        self.assertEqual(result, "OK")

    @mock.patch("documents.tasks.bulk_update_documents.si")
    @mock.patch("documents.tasks.update_document_content_maybe_archive_file.s")
    @mock.patch("documents.tasks.consume_file.delay")
    @mock.patch("pikepdf.Pdf.save")
    def test_rotate_with_error(
        self,
        mock_pdf_save,
        mock_update_archive_file,
        mock_update_documents,
        mock_consume_delay,
    ):
        """
        GIVEN:
@@ -972,16 +967,12 @@ class TestPDFActions(DirectoriesMixin, TestCase):
            error_str = cm.output[0]
            expected_str = "Error rotating document"
            self.assertIn(expected_str, error_str)
        mock_update_archive_file.assert_not_called()
        mock_consume_delay.assert_not_called()

    @mock.patch("documents.tasks.bulk_update_documents.si")
    @mock.patch("documents.tasks.update_document_content_maybe_archive_file.s")
    @mock.patch("celery.chord.delay")
    @mock.patch("documents.tasks.consume_file.delay")
    def test_rotate_non_pdf(
        self,
        mock_chord,
        mock_update_document,
        mock_update_documents,
        mock_consume_delay,
    ):
        """
        GIVEN:
@@ -993,17 +984,18 @@ class TestPDFActions(DirectoriesMixin, TestCase):
        """
        with self.assertLogs("paperless.bulk_edit", level="INFO") as cm:
            result = bulk_edit.rotate([self.doc2.id, self.img_doc.id], 90)
            output_str = cm.output[1]
            expected_str = "Document 4 is not a PDF, skipping rotation"
            self.assertIn(expected_str, output_str)
        self.assertEqual(mock_update_document.call_count, 1)
        mock_update_documents.assert_called_once()
        mock_chord.assert_called_once()
        expected_str = f"Document {self.img_doc.id} is not a PDF, skipping rotation"
        self.assertTrue(any(expected_str in line for line in cm.output))
        self.assertEqual(mock_consume_delay.call_count, 1)
        consumable, overrides = mock_consume_delay.call_args[0]
        self.assertEqual(consumable.root_document_id, self.doc2.id)
        self.assertIsNotNone(overrides)
        self.assertEqual(result, "OK")

    @mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
    @mock.patch("documents.tasks.consume_file.delay")
    @mock.patch("pikepdf.Pdf.save")
    def test_delete_pages(self, mock_pdf_save, mock_update_archive_file) -> None:
    @mock.patch("documents.data_models.magic.from_file", return_value="application/pdf")
    def test_delete_pages(self, mock_magic, mock_pdf_save, mock_consume_delay):
        """
        GIVEN:
            - Existing documents
@@ -1011,28 +1003,22 @@ class TestPDFActions(DirectoriesMixin, TestCase):
            - Delete pages action is called with 1 document and 2 pages
        THEN:
            - Save should be called once
            - Archive file should be updated once
            - The document's page_count should be reduced by the number of deleted pages
            - A new version should be enqueued via consume_file
        """
        doc_ids = [self.doc2.id]
        initial_page_count = self.doc2.page_count
        pages = [1, 3]
        result = bulk_edit.delete_pages(doc_ids, pages)
        mock_pdf_save.assert_called_once()
        mock_update_archive_file.assert_called_once()
        mock_consume_delay.assert_called_once()
        consumable, overrides = mock_consume_delay.call_args[0]
        self.assertEqual(consumable.root_document_id, self.doc2.id)
        self.assertTrue(str(consumable.original_file).endswith("_pages_deleted.pdf"))
        self.assertIsNotNone(overrides)
        self.assertEqual(result, "OK")

        expected_page_count = initial_page_count - len(pages)
        self.doc2.refresh_from_db()
        self.assertEqual(self.doc2.page_count, expected_page_count)

    @mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
    @mock.patch("documents.tasks.consume_file.delay")
    @mock.patch("pikepdf.Pdf.save")
    def test_delete_pages_with_error(
        self,
        mock_pdf_save,
        mock_update_archive_file,
    ) -> None:
    def test_delete_pages_with_error(self, mock_pdf_save, mock_consume_delay):
        """
        GIVEN:
            - Existing documents
@@ -1041,7 +1027,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
            - PikePDF raises an error
        THEN:
            - Save should be called once
            - Archive file should not be updated
            - No new version should be enqueued
        """
        mock_pdf_save.side_effect = Exception("Error saving PDF")
        doc_ids = [self.doc2.id]
@@ -1052,7 +1038,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
            error_str = cm.output[0]
            expected_str = "Error deleting pages from document"
            self.assertIn(expected_str, error_str)
        mock_update_archive_file.assert_not_called()
        mock_consume_delay.assert_not_called()

    @mock.patch("documents.bulk_edit.group")
    @mock.patch("documents.tasks.consume_file.s")
@@ -1151,24 +1137,18 @@ class TestPDFActions(DirectoriesMixin, TestCase):
        self.doc2.refresh_from_db()
        self.assertEqual(self.doc2.archive_serial_number, 333)

    @mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
    def test_edit_pdf_with_update_document(
        self,
        mock_update_document: mock.Mock,
    ) -> None:
    @mock.patch("documents.tasks.consume_file.delay")
    def test_edit_pdf_with_update_document(self, mock_consume_delay):
        """
        GIVEN:
            - A single existing PDF document
        WHEN:
            - edit_pdf is called with update_document=True and a single output
        THEN:
            - The original document is updated in-place
            - The update_document_content_maybe_archive_file task is triggered
            - A version update is enqueued targeting the existing document
        """
        doc_ids = [self.doc2.id]
        operations = [{"page": 1}, {"page": 2}]
        original_checksum = self.doc2.checksum
        original_page_count = self.doc2.page_count

        result = bulk_edit.edit_pdf(
            doc_ids,
@@ -1178,10 +1158,11 @@ class TestPDFActions(DirectoriesMixin, TestCase):
        )

        self.assertEqual(result, "OK")
        self.doc2.refresh_from_db()
        self.assertNotEqual(self.doc2.checksum, original_checksum)
        self.assertNotEqual(self.doc2.page_count, original_page_count)
        mock_update_document.assert_called_once_with(document_id=self.doc2.id)
        mock_consume_delay.assert_called_once()
        consumable, overrides = mock_consume_delay.call_args[0]
        self.assertEqual(consumable.root_document_id, self.doc2.id)
        self.assertTrue(str(consumable.original_file).endswith("_edited.pdf"))
        self.assertIsNotNone(overrides)

    @mock.patch("documents.bulk_edit.group")
    @mock.patch("documents.tasks.consume_file.s")
@@ -1258,10 +1239,20 @@ class TestPDFActions(DirectoriesMixin, TestCase):
        mock_consume_file.assert_not_called()

    @mock.patch("documents.bulk_edit.update_document_content_maybe_archive_file.delay")
    @mock.patch("documents.tasks.consume_file.delay")
    @mock.patch("documents.bulk_edit.tempfile.mkdtemp")
    @mock.patch("pikepdf.open")
    def test_remove_password_update_document(self, mock_open, mock_update_document):
    def test_remove_password_update_document(
        self,
        mock_open,
        mock_mkdtemp,
        mock_consume_delay,
        mock_update_document,
    ):
        doc = self.doc1
        original_checksum = doc.checksum
        temp_dir = self.dirs.scratch_dir / "remove-password-update"
        temp_dir.mkdir(parents=True, exist_ok=True)
        mock_mkdtemp.return_value = str(temp_dir)

        fake_pdf = mock.MagicMock()
        fake_pdf.pages = [mock.Mock(), mock.Mock(), mock.Mock()]
@@ -1281,12 +1272,17 @@ class TestPDFActions(DirectoriesMixin, TestCase):
        self.assertEqual(result, "OK")
        mock_open.assert_called_once_with(doc.source_path, password="secret")
        fake_pdf.remove_unreferenced_resources.assert_called_once()
        doc.refresh_from_db()
        self.assertNotEqual(doc.checksum, original_checksum)
        expected_checksum = hashlib.md5(doc.source_path.read_bytes()).hexdigest()
        self.assertEqual(doc.checksum, expected_checksum)
        self.assertEqual(doc.page_count, len(fake_pdf.pages))
        mock_update_document.assert_called_once_with(document_id=doc.id)
        mock_update_document.assert_not_called()
        mock_consume_delay.assert_called_once()
        consumable, overrides = mock_consume_delay.call_args[0]
        expected_path = temp_dir / f"{doc.id}_unprotected.pdf"
        self.assertTrue(expected_path.exists())
        self.assertEqual(
            Path(consumable.original_file).resolve(),
            expected_path.resolve(),
        )
        self.assertEqual(consumable.root_document_id, doc.id)
        self.assertIsNotNone(overrides)

    @mock.patch("documents.bulk_edit.chord")
    @mock.patch("documents.bulk_edit.group")
@@ -1295,12 +1291,12 @@ class TestPDFActions(DirectoriesMixin, TestCase):
    @mock.patch("pikepdf.open")
    def test_remove_password_creates_consumable_document(
        self,
        mock_open,
        mock_mkdtemp,
        mock_consume_file,
        mock_group,
        mock_chord,
    ):
        mock_open: mock.Mock,
        mock_mkdtemp: mock.Mock,
        mock_consume_file: mock.Mock,
        mock_group: mock.Mock,
        mock_chord: mock.Mock,
    ) -> None:
        doc = self.doc2
        temp_dir = self.dirs.scratch_dir / "remove-password"
        temp_dir.mkdir(parents=True, exist_ok=True)
@@ -1309,8 +1305,8 @@ class TestPDFActions(DirectoriesMixin, TestCase):
        fake_pdf = mock.MagicMock()
        fake_pdf.pages = [mock.Mock(), mock.Mock()]

        def save_side_effect(target_path):
            Path(target_path).write_bytes(b"password removed")
        def save_side_effect(target_path: Path) -> None:
            target_path.write_bytes(b"password removed")

        fake_pdf.save.side_effect = save_side_effect
        mock_open.return_value.__enter__.return_value = fake_pdf
@@ -1352,13 +1348,13 @@ class TestPDFActions(DirectoriesMixin, TestCase):
    @mock.patch("pikepdf.open")
    def test_remove_password_deletes_original(
        self,
        mock_open,
        mock_mkdtemp,
        mock_consume_file,
        mock_group,
        mock_chord,
        mock_delete,
    ):
        mock_open: mock.Mock,
        mock_mkdtemp: mock.Mock,
        mock_consume_file: mock.Mock,
        mock_group: mock.Mock,
        mock_chord: mock.Mock,
        mock_delete: mock.Mock,
    ) -> None:
        doc = self.doc2
        temp_dir = self.dirs.scratch_dir / "remove-password-delete"
        temp_dir.mkdir(parents=True, exist_ok=True)
@@ -1367,8 +1363,8 @@ class TestPDFActions(DirectoriesMixin, TestCase):
        fake_pdf = mock.MagicMock()
        fake_pdf.pages = [mock.Mock(), mock.Mock()]

        def save_side_effect(target_path):
            Path(target_path).write_bytes(b"password removed")
        def save_side_effect(target_path: Path) -> None:
            target_path.write_bytes(b"password removed")

        fake_pdf.save.side_effect = save_side_effect
        mock_open.return_value.__enter__.return_value = fake_pdf
@@ -1391,7 +1387,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
        mock_delete.si.assert_called_once_with([doc.id])

    @mock.patch("pikepdf.open")
    def test_remove_password_open_failure(self, mock_open):
    def test_remove_password_open_failure(self, mock_open: mock.Mock) -> None:
        mock_open.side_effect = RuntimeError("wrong password")

        with self.assertLogs("paperless.bulk_edit", level="ERROR") as cm:
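The common thread in these updated TestPDFActions expectations: rotate, delete_pages, edit_pdf and remove_password no longer rewrite the stored file and checksum in place; they enqueue the edited PDF as a new version of the root document. A hedged, test-style sketch of that contract (assumes a Django test database containing a PDF document with id 1):

# Sketch of the contract these tests pin down, with the consume task patched.
from unittest import mock

from documents import bulk_edit

with mock.patch("documents.tasks.consume_file.delay") as consume_delay:
    assert bulk_edit.rotate([1], 90) == "OK"
    consumable, overrides = consume_delay.call_args[0]
    # The rotated file is re-consumed as a version of the same document.
    assert consumable.root_document_id == 1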
@@ -78,6 +78,28 @@ class TestDocument(TestCase):
        empty_trash([document.pk])
        self.assertEqual(mock_unlink.call_count, 2)

    def test_delete_root_deletes_versions(self) -> None:
        root = Document.objects.create(
            correspondent=Correspondent.objects.create(name="Test0"),
            title="Head",
            content="content",
            checksum="checksum",
            mime_type="application/pdf",
        )
        Document.objects.create(
            root_document=root,
            correspondent=root.correspondent,
            title="Version",
            content="content",
            checksum="checksum2",
            mime_type="application/pdf",
        )

        root.delete()

        self.assertEqual(Document.objects.count(), 0)
        self.assertEqual(Document.deleted_objects.count(), 2)

    def test_file_name(self) -> None:
        doc = Document(
            mime_type="application/pdf",
@@ -10,6 +10,7 @@ from collections import deque
from datetime import datetime
from pathlib import Path
from time import mktime
from typing import Any
from typing import Literal
from unicodedata import normalize
from urllib.parse import quote
@@ -37,8 +38,10 @@ from django.db.models import Sum
from django.db.models import When
from django.db.models.functions import Lower
from django.db.models.manager import Manager
from django.db.models.query import QuerySet
from django.http import FileResponse
from django.http import Http404
from django.http import HttpRequest
from django.http import HttpResponse
from django.http import HttpResponseBadRequest
from django.http import HttpResponseForbidden
@@ -83,6 +86,7 @@ from rest_framework.mixins import ListModelMixin
from rest_framework.mixins import RetrieveModelMixin
from rest_framework.mixins import UpdateModelMixin
from rest_framework.permissions import IsAuthenticated
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet
from rest_framework.viewsets import ModelViewSet
@@ -168,6 +172,7 @@ from documents.serialisers import CustomFieldSerializer
from documents.serialisers import DocumentListSerializer
from documents.serialisers import DocumentSerializer
from documents.serialisers import DocumentTypeSerializer
from documents.serialisers import DocumentVersionSerializer
from documents.serialisers import EmailSerializer
from documents.serialisers import NotesSerializer
from documents.serialisers import PostDocumentSerializer
@@ -747,7 +752,7 @@ class DocumentViewSet(
    GenericViewSet,
):
    model = Document
    queryset = Document.objects.annotate(num_notes=Count("notes"))
    queryset = Document.objects.all()
    serializer_class = DocumentSerializer
    pagination_class = StandardPagination
    permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
@@ -777,7 +782,8 @@ class DocumentViewSet(

    def get_queryset(self):
        return (
            Document.objects.distinct()
            Document.objects.filter(root_document__isnull=True)
            .distinct()
            .order_by("-created")
            .annotate(num_notes=Count("notes"))
            .select_related("correspondent", "storage_path", "document_type", "owner")
@@ -803,6 +809,37 @@ class DocumentViewSet(
        )
        return super().get_serializer(*args, **kwargs)

    @extend_schema(
        operation_id="documents_root",
        responses=inline_serializer(
            name="DocumentRootResponse",
            fields={
                "root_id": serializers.IntegerField(),
            },
        ),
    )
    @action(methods=["get"], detail=True, url_path="root")
    def root(self, request, pk=None):
        try:
            doc = Document.global_objects.select_related(
                "owner",
                "root_document",
            ).get(pk=pk)
        except Document.DoesNotExist:
            raise Http404

        root_doc = doc if doc.root_document_id is None else doc.root_document
        if root_doc is None:
            raise Http404
        if request.user is not None and not has_perms_owner_aware(
            request.user,
            "view_document",
            root_doc,
        ):
            return HttpResponseForbidden("Insufficient permissions")

        return Response({"root_id": root_doc.id})
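For orientation, a hedged client-side sketch of the new root endpoint (URL derived from the detail=True, url_path="root" action above; host and token are placeholders):

# Hypothetical call: resolve any document or version id to its root id.
import requests

resp = requests.get(
    "http://localhost:8000/api/documents/123/root/",  # 123 may be a version id
    headers={"Authorization": "Token <placeholder>"},
)
resp.raise_for_status()
print(resp.json()["root_id"])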
    def update(self, request, *args, **kwargs):
        response = super().update(request, *args, **kwargs)
        from documents import index
@@ -839,18 +876,58 @@ class DocumentViewSet(
            and request.query_params["original"] == "true"
        )

    def _resolve_file_doc(self, root_doc: Document, request):
        version_param = request.query_params.get("version")
        if version_param:
            try:
                version_id = int(version_param)
            except (TypeError, ValueError):
                raise NotFound("Invalid version parameter")
            try:
                candidate = Document.global_objects.select_related("owner").get(
                    id=version_id,
                )
            except Document.DoesNotExist:
                raise Http404
            if (
                candidate.id != root_doc.id
                and candidate.root_document_id != root_doc.id
            ):
                raise Http404
            return candidate
        latest = Document.objects.filter(root_document=root_doc).order_by("id").last()
        return latest or root_doc

    def file_response(self, pk, request, disposition):
        doc = Document.global_objects.select_related("owner").get(id=pk)
        request_doc = Document.global_objects.select_related("owner").get(id=pk)
        root_doc = (
            request_doc
            if request_doc.root_document_id is None
            else Document.global_objects.select_related("owner").get(
                id=request_doc.root_document_id,
            )
        )
        if request.user is not None and not has_perms_owner_aware(
            request.user,
            "view_document",
            doc,
            root_doc,
        ):
            return HttpResponseForbidden("Insufficient permissions")
        # If a version is explicitly requested, use it. Otherwise:
        # - if pk is a root document: serve newest version
        # - if pk is a version: serve that version
        if "version" in request.query_params:
            file_doc = self._resolve_file_doc(root_doc, request)
        else:
            file_doc = (
                self._resolve_file_doc(root_doc, request)
                if request_doc.root_document_id is None
                else request_doc
            )
        return serve_file(
            doc=doc,
            doc=file_doc,
            use_archive=not self.original_requested(request)
            and doc.has_archive_version,
            and file_doc.has_archive_version,
            disposition=disposition,
        )
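A hedged sketch of the resulting download semantics (the download action path is assumed; ids, host and token are placeholders):

# Hypothetical calls illustrating _resolve_file_doc().
import requests

base = "http://localhost:8000/api/documents"
auth = {"Authorization": "Token <placeholder>"}

# Root document without ?version=  ->  the newest version is served.
latest = requests.get(f"{base}/123/download/", headers=auth)

# Explicit ?version=  ->  that version is served; 404 if id 456 is not in
# document 123's version chain.
pinned = requests.get(f"{base}/123/download/?version=456", headers=auth)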
@@ -885,16 +962,33 @@ class DocumentViewSet(
    )
    def metadata(self, request, pk=None):
        try:
            doc = Document.objects.select_related("owner").get(pk=pk)
            request_doc = Document.objects.select_related("owner").get(pk=pk)
            root_doc = (
                request_doc
                if request_doc.root_document_id is None
                else Document.objects.select_related("owner").get(
                    id=request_doc.root_document_id,
                )
            )
            if request.user is not None and not has_perms_owner_aware(
                request.user,
                "view_document",
                doc,
                root_doc,
            ):
                return HttpResponseForbidden("Insufficient permissions")
        except Document.DoesNotExist:
            raise Http404

        # Choose the effective document (newest version by default, or explicit via ?version=)
        if "version" in request.query_params:
            doc = self._resolve_file_doc(root_doc, request)
        else:
            doc = (
                self._resolve_file_doc(root_doc, request)
                if request_doc.root_document_id is None
                else request_doc
            )

        document_cached_metadata = get_metadata_cache(doc.pk)

        archive_metadata = None
@@ -1063,8 +1157,36 @@ class DocumentViewSet(
    )
    def preview(self, request, pk=None):
        try:
            response = self.file_response(pk, request, "inline")
            return response
            request_doc = Document.objects.select_related("owner").get(id=pk)
            root_doc = (
                request_doc
                if request_doc.root_document_id is None
                else Document.objects.select_related("owner").get(
                    id=request_doc.root_document_id,
                )
            )
            if request.user is not None and not has_perms_owner_aware(
                request.user,
                "view_document",
                root_doc,
            ):
                return HttpResponseForbidden("Insufficient permissions")

            if "version" in request.query_params:
                file_doc = self._resolve_file_doc(root_doc, request)
            else:
                file_doc = (
                    self._resolve_file_doc(root_doc, request)
                    if request_doc.root_document_id is None
                    else request_doc
                )

            return serve_file(
                doc=file_doc,
                use_archive=not self.original_requested(request)
                and file_doc.has_archive_version,
                disposition="inline",
            )
        except (FileNotFoundError, Document.DoesNotExist):
            raise Http404

@@ -1073,15 +1195,29 @@ class DocumentViewSet(
    @method_decorator(last_modified(thumbnail_last_modified))
    def thumb(self, request, pk=None):
        try:
            doc = Document.objects.select_related("owner").get(id=pk)
            request_doc = Document.objects.select_related("owner").get(id=pk)
            root_doc = (
                request_doc
                if request_doc.root_document_id is None
                else Document.objects.select_related("owner").get(
                    id=request_doc.root_document_id,
                )
            )
            if request.user is not None and not has_perms_owner_aware(
                request.user,
                "view_document",
                doc,
                root_doc,
            ):
                return HttpResponseForbidden("Insufficient permissions")

            handle = doc.thumbnail_file
            if "version" in request.query_params:
                file_doc = self._resolve_file_doc(root_doc, request)
            else:
                file_doc = (
                    self._resolve_file_doc(root_doc, request)
                    if request_doc.root_document_id is None
                    else request_doc
                )
            handle = file_doc.thumbnail_file

            return HttpResponse(handle, content_type="image/webp")
        except (FileNotFoundError, Document.DoesNotExist):
@@ -1373,6 +1509,159 @@ class DocumentViewSet(
            "Error emailing documents, check logs for more detail.",
        )

    @extend_schema(
        operation_id="documents_update_version",
        request=DocumentVersionSerializer,
        responses={
            200: OpenApiTypes.STR,
        },
    )
    @action(methods=["post"], detail=True, parser_classes=[parsers.MultiPartParser])
    def update_version(self, request, pk=None):
        serializer = DocumentVersionSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        try:
            doc = Document.objects.select_related("owner").get(pk=pk)
            if request.user is not None and not has_perms_owner_aware(
                request.user,
                "change_document",
                doc,
            ):
                return HttpResponseForbidden("Insufficient permissions")
        except Document.DoesNotExist:
            raise Http404

        try:
            doc_name, doc_data = serializer.validated_data.get("document")
            version_label = serializer.validated_data.get("version_label")

            t = int(mktime(datetime.now().timetuple()))

            settings.SCRATCH_DIR.mkdir(parents=True, exist_ok=True)

            temp_file_path = Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR)) / Path(
                pathvalidate.sanitize_filename(doc_name),
            )

            temp_file_path.write_bytes(doc_data)

            os.utime(temp_file_path, times=(t, t))

            input_doc = ConsumableDocument(
                source=DocumentSource.ApiUpload,
                original_file=temp_file_path,
                root_document_id=doc.pk,
            )

            overrides = DocumentMetadataOverrides()
            if version_label:
                overrides.version_label = version_label.strip()
            if request.user is not None:
                overrides.actor_id = request.user.id

            async_task = consume_file.delay(
                input_doc,
                overrides,
            )
            logger.debug(
                f"Updated document {doc.id} with new version",
            )
            return Response(async_task.id)
        except Exception as e:
            logger.warning(f"An error occurred updating document: {e!s}")
            return HttpResponseServerError(
                "Error updating document, check logs for more detail.",
            )
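A hedged client-side sketch of the upload flow above (the response body is the Celery task id, a bare string, as the API tests assert; host and token are placeholders):

# Hypothetical call: push a file as the next version of document 123.
import requests

with open("scan-v2.pdf", "rb") as f:
    resp = requests.post(
        "http://localhost:8000/api/documents/123/update_version/",
        headers={"Authorization": "Token <placeholder>"},
        files={"document": f},
        data={"version_label": "Signed copy"},
    )
resp.raise_for_status()
task_id = resp.json()  # e.g. "task-123"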
    @extend_schema(
        operation_id="documents_delete_version",
        parameters=[
            OpenApiParameter(
                name="version_id",
                type=OpenApiTypes.INT,
                location=OpenApiParameter.PATH,
            ),
        ],
        responses=inline_serializer(
            name="DeleteDocumentVersionResult",
            fields={
                "result": serializers.CharField(),
                "current_version_id": serializers.IntegerField(),
            },
        ),
    )
    @action(
        methods=["delete"],
        detail=True,
        url_path=r"versions/(?P<version_id>\d+)",
    )
    def delete_version(self, request, pk=None, version_id=None):
        try:
            root_doc = Document.objects.select_related("owner").get(pk=pk)
            if root_doc.root_document_id is not None:
                root_doc = Document.objects.select_related("owner").get(
                    pk=root_doc.root_document_id,
                )
        except Document.DoesNotExist:
            raise Http404

        if request.user is not None and not has_perms_owner_aware(
            request.user,
            "delete_document",
            root_doc,
        ):
            return HttpResponseForbidden("Insufficient permissions")

        try:
            version_doc = Document.objects.select_related("owner").get(
                pk=version_id,
            )
        except Document.DoesNotExist:
            raise Http404

        if version_doc.id == root_doc.id:
            return HttpResponseBadRequest(
                "Cannot delete the root/original version. Delete the document instead.",
            )

        if version_doc.root_document_id != root_doc.id:
            raise Http404

        from documents import index

        index.remove_document_from_index(version_doc)
        version_doc_id = version_doc.id
        version_doc.delete()
        if settings.AUDIT_LOG_ENABLED:
            actor = (
                request.user if request.user and request.user.is_authenticated else None
            )
            LogEntry.objects.log_create(
                instance=root_doc,
                changes={
                    "Version Deleted": ["None", version_doc_id],
                },
                action=LogEntry.Action.UPDATE,
                actor=actor,
                additional_data={
                    "reason": "Version deleted",
                    "version_id": version_doc_id,
                },
            )

        current = (
            Document.objects.filter(Q(id=root_doc.id) | Q(root_document=root_doc))
            .order_by("-id")
            .first()
        )
        return Response(
            {
                "result": "OK",
                "current_version_id": current.id if current else root_doc.id,
            },
        )
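And the matching hedged sketch for removing a version (a version id is also accepted in the pk position, as the tests verify; the root itself cannot be deleted this way):

# Hypothetical call: delete version 456 from document 123's version chain.
import requests

resp = requests.delete(
    "http://localhost:8000/api/documents/123/versions/456/",
    headers={"Authorization": "Token <placeholder>"},
)
resp.raise_for_status()
print(resp.json())  # {"result": "OK", "current_version_id": <newest remaining id>}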
class ChatStreamingSerializer(serializers.Serializer):
    q = serializers.CharField(required=True)
@@ -1639,7 +1928,7 @@ class SavedViewViewSet(ModelViewSet, PassUserMixin):
        .prefetch_related("filter_rules")
    )

    def perform_create(self, serializer) -> None:
    def perform_create(self, serializer: serializers.BaseSerializer[Any]) -> None:
        serializer.save(owner=self.request.user)


@@ -1672,13 +1961,13 @@ class BulkEditView(PassUserMixin):
        "modify_custom_fields": "custom_fields",
        "set_permissions": None,
        "delete": "deleted_at",
        "rotate": "checksum",
        "delete_pages": "checksum",
        "rotate": None,
        "delete_pages": None,
        "split": None,
        "merge": None,
        "edit_pdf": "checksum",
        "edit_pdf": None,
        "reprocess": "checksum",
        "remove_password": "checksum",
        "remove_password": None,
    }

    permission_classes = (IsAuthenticated,)
@@ -1696,6 +1985,8 @@ class BulkEditView(PassUserMixin):
        if method in [
            bulk_edit.split,
            bulk_edit.merge,
            bulk_edit.rotate,
            bulk_edit.delete_pages,
            bulk_edit.edit_pdf,
            bulk_edit.remove_password,
        ]:
@@ -3181,7 +3472,7 @@ class CustomFieldViewSet(ModelViewSet):

    queryset = CustomField.objects.all().order_by("-created")

    def get_queryset(self):
    def get_queryset(self) -> QuerySet[CustomField]:
        filter = (
            Q(fields__document__deleted_at__isnull=True)
            if self.request.user is None or self.request.user.is_superuser
@@ -3494,11 +3785,16 @@ class TrashView(ListModelMixin, PassUserMixin):

    queryset = Document.deleted_objects.all()

    def get(self, request, format=None):
    def get(self, request: Request, format: str | None = None) -> Response:
        self.serializer_class = DocumentSerializer
        return self.list(request, format)

    def post(self, request, *args, **kwargs):
    def post(
        self,
        request: Request,
        *args: Any,
        **kwargs: Any,
    ) -> Response | HttpResponse:
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)

@@ -3522,7 +3818,7 @@ class TrashView(ListModelMixin, PassUserMixin):
        return Response({"result": "OK", "doc_ids": doc_ids})


def serve_logo(request, filename=None):
def serve_logo(request: HttpRequest, filename: str | None = None) -> FileResponse:
    """
    Serves the configured logo file with Content-Disposition: attachment.
    Prevents inline execution of SVGs. See GHSA-6p53-hqqw-8j62
@@ -167,6 +167,17 @@ def settings_values_check(app_configs, **kwargs):
        )
    return msgs

def _barcode_scanner_validate():
    """
    Validates the barcode scanner type
    """
    msgs = []
    if settings.CONSUMER_BARCODE_SCANNER not in ["PYZBAR", "ZXING"]:
        msgs.append(
            Error(f'Invalid Barcode Scanner "{settings.CONSUMER_BARCODE_SCANNER}"'),
        )
    return msgs

def _email_certificate_validate():
    msgs = []
    # Existence checks
@@ -184,6 +195,7 @@ def settings_values_check(app_configs, **kwargs):
    return (
        _ocrmypdf_settings_check()
        + _timezone_validate()
        + _barcode_scanner_validate()
        + _email_certificate_validate()
    )
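For context, a hedged sketch of how the new check surfaces (the module path paperless.checks is assumed; the behavior mirrors the tests further down):

# Hypothetical demonstration: an unrecognized scanner value yields one Error.
from django.test import override_settings

from paperless.checks import settings_values_check  # assumed module path

with override_settings(CONSUMER_BARCODE_SCANNER="NOPE"):
    msgs = settings_values_check(None)
    assert any('Invalid Barcode Scanner "NOPE"' in m.msg for m in msgs)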
@@ -1106,6 +1106,11 @@ CONSUMER_BARCODE_STRING: Final[str] = os.getenv(
    "PATCHT",
)

CONSUMER_BARCODE_SCANNER: Final[str] = os.getenv(
    "PAPERLESS_CONSUMER_BARCODE_SCANNER",
    "PYZBAR",
).upper()

CONSUMER_ENABLE_ASN_BARCODE: Final[bool] = __get_boolean(
    "PAPERLESS_CONSUMER_ENABLE_ASN_BARCODE",
)
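The setting is driven entirely by the environment; a minimal sketch of the normalization applied above (values other than PYZBAR or ZXING are then rejected by the system check):

# Minimal sketch: the value is upper-cased, so "zxing" and "ZXING" are
# equivalent; with the variable unset, the default is "PYZBAR".
import os

os.environ["PAPERLESS_CONSUMER_BARCODE_SCANNER"] = "zxing"
scanner = os.getenv("PAPERLESS_CONSUMER_BARCODE_SCANNER", "PYZBAR").upper()
assert scanner == "ZXING"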
@@ -187,6 +187,31 @@ class TestTimezoneSettingsChecks(DirectoriesMixin, TestCase):
        self.assertIn('Timezone "TheMoon\\MyCrater"', msg.msg)


class TestBarcodeSettingsChecks(DirectoriesMixin, TestCase):
    @override_settings(CONSUMER_BARCODE_SCANNER="Invalid")
    def test_barcode_scanner_invalid(self) -> None:
        msgs = settings_values_check(None)
        self.assertEqual(len(msgs), 1)

        msg = msgs[0]

        self.assertIn('Invalid Barcode Scanner "Invalid"', msg.msg)

    @override_settings(CONSUMER_BARCODE_SCANNER="")
    def test_barcode_scanner_empty(self) -> None:
        msgs = settings_values_check(None)
        self.assertEqual(len(msgs), 1)

        msg = msgs[0]

        self.assertIn('Invalid Barcode Scanner ""', msg.msg)

    @override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR")
    def test_barcode_scanner_valid(self) -> None:
        msgs = settings_values_check(None)
        self.assertEqual(len(msgs), 0)


class TestEmailCertSettingsChecks(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
    @override_settings(EMAIL_CERTIFICATE_FILE=Path("/tmp/not_actually_here.pem"))
    def test_not_valid_file(self) -> None:
uv.lock (generated file, 90 changed lines)
@@ -3073,6 +3073,7 @@ dependencies = [
    { name = "python-gnupg", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
    { name = "python-ipware", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
    { name = "python-magic", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
    { name = "pyzbar", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
    { name = "rapidfuzz", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
    { name = "redis", extra = ["hiredis"], marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
    { name = "regex", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -3086,7 +3087,9 @@ dependencies = [
    { name = "watchfiles", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
    { name = "whitenoise", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
    { name = "whoosh-reloaded", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
    { name = "zxing-cpp", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
    { name = "zxing-cpp", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version != '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version != '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or sys_platform == 'darwin'" },
    { name = "zxing-cpp", version = "2.3.0", source = { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_aarch64.whl" }, marker = "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'" },
    { name = "zxing-cpp", version = "2.3.0", source = { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_x86_64.whl" }, marker = "python_full_version == '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'linux'" },
]

[package.optional-dependencies]
@@ -3225,6 +3228,7 @@ requires-dist = [
    { name = "python-gnupg", specifier = "~=0.5.4" },
    { name = "python-ipware", specifier = "~=3.0.0" },
    { name = "python-magic", specifier = "~=0.4.27" },
    { name = "pyzbar", specifier = "~=0.1.9" },
    { name = "rapidfuzz", specifier = "~=3.14.0" },
    { name = "redis", extras = ["hiredis"], specifier = "~=5.2.1" },
    { name = "regex", specifier = ">=2025.9.18" },
@@ -3237,7 +3241,9 @@ requires-dist = [
    { name = "watchfiles", specifier = ">=1.1.1" },
    { name = "whitenoise", specifier = "~=6.11" },
    { name = "whoosh-reloaded", specifier = ">=2.7.5" },
    { name = "zxing-cpp", specifier = "~=3.0.0" },
    { name = "zxing-cpp", marker = "(python_full_version != '3.12.*' and platform_machine == 'aarch64') or (python_full_version != '3.12.*' and platform_machine == 'x86_64') or (platform_machine != 'aarch64' and platform_machine != 'x86_64') or sys_platform != 'linux'", specifier = "~=2.3.0" },
    { name = "zxing-cpp", marker = "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_aarch64.whl" },
    { name = "zxing-cpp", marker = "python_full_version == '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'linux'", url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_x86_64.whl" },
]
provides-extras = ["mariadb", "postgres", "webserver"]

@@ -4277,6 +4283,14 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" },
]

[[package]]
name = "pyzbar"
version = "0.1.9"
source = { registry = "https://pypi.org/simple" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/6d/24/81ebe6a1c00760471a3028a23cbe0b94e5fa2926e5ba47adc895920887bc/pyzbar-0.1.9-py2.py3-none-any.whl", hash = "sha256:4559628b8192feb25766d954b36a3753baaf5c97c03135aec7e4a026036b475d", size = 32560, upload-time = "2022-03-15T14:53:40.637Z" },
]

[[package]]
name = "qrcode"
version = "8.2"
@@ -5223,10 +5237,6 @@ wheels = [
    { url = "https://download.pytorch.org/whl/cpu/torch-2.10.0-1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:0826ac8e409551e12b2360ac18b4161a838cbd111933e694752f351191331d09" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.10.0-1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:7fbbf409143a4fe0812a40c0b46a436030a7e1d14fe8c5234dfbe44df47f617e" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.10.0-1-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:b39cafff7229699f9d6e172cac74d85fd71b568268e439e08d9c540e54732a3e" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.10.0-2-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:7417ef370d7c3969dd509dae8d5c7daeb945af335ab76dd38358ba30a91251c1" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.10.0-2-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:90821a3194b8806d9fa9fdaa9308c1bc73df0c26808274b14129a97c99f35794" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.10.0-2-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:358bd7125cbec6e692d60618a5eec7f55a51b29e3652a849fd42af021d818023" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.10.0-2-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:470de4176007c2700735e003a830828a88d27129032a3add07291da07e2a94e8" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.10.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:2d16abfce6c92584ceeb00c3b2665d5798424dd9ed235ea69b72e045cd53ae97" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.10.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:4584ab167995c0479f6821e3dceaf199c8166c811d3adbba5d8eedbbfa6764fd" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.10.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:45a1c5057629444aeb1c452c18298fa7f30f2f7aeadd4dc41f9d340980294407" },
@@ -6231,28 +6241,50 @@ wheels = [

[[package]]
name = "zxing-cpp"
version = "3.0.0"
version = "2.3.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f1/c6/ac2a12cdc2b1c296804fc6a65bf112b607825ca7f47742a5aca541134711/zxing_cpp-3.0.0.tar.gz", hash = "sha256:703353304de24d947bd68044fac4e062953a7b64029de6941ba8ffeb4476b60d", size = 1197544, upload-time = "2026-02-10T12:50:11.252Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/ac/84/689a748f08635ff1543265905532cbe6dfaa299350cfd6591e4456da3014/zxing_cpp-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:63bcc80e7a6c741f1948381bb1b9c36082400624a217e3306aebb1e2bec21f6f", size = 910995, upload-time = "2026-02-10T12:49:22.189Z" },
    { url = "https://files.pythonhosted.org/packages/28/3d/f3c23181697a2407e2079dc122ba8c266b46842e3ffc810d510716a95759/zxing_cpp-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0b30e2f4b081a85fe5f09ba34cb17486d607625f2ddeb0c80d5212d2872e5530", size = 865029, upload-time = "2026-02-10T12:49:24.719Z" },
    { url = "https://files.pythonhosted.org/packages/1e/48/1e56b02edfda18d557abea7cf5790a7a0aade06191f7c2bbce4a4efab0fd/zxing_cpp-3.0.0-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dd640c33a06da8b15e36a8e0c3c8236531fea13a95d7eaa8deb91ccb5d76c4e7", size = 993311, upload-time = "2026-02-10T12:49:26.487Z" },
    { url = "https://files.pythonhosted.org/packages/db/47/78fe46ee99e4f6b67467a96ca61e75e907d2e469f63bbd92127b91008c02/zxing_cpp-3.0.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:630adc04f3a7916054a91c71d7dd55568e798289be5f16186a17ea05555eb60f", size = 1070707, upload-time = "2026-02-10T12:49:27.746Z" },
    { url = "https://files.pythonhosted.org/packages/e6/9c/25ddd83cd109a97a0382fe807a8b0904b3eefcf42d22df6aa6ae6a5e2b86/zxing_cpp-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c171e9b37f596293d1134e74c3285a8b7cf06ef72e2ad39c4a7d54b1aa939782", size = 912816, upload-time = "2026-02-10T12:49:33.174Z" },
    { url = "https://files.pythonhosted.org/packages/32/cc/e2e0d68e60fb132c31c728e24dc529cbb5579bfa1365c64b62290aefe317/zxing_cpp-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e712d958155408c8e902ea91d8feb3f4edfa41fd207ef85ca9e59f3f0c7060ad", size = 866684, upload-time = "2026-02-10T12:49:34.913Z" },
    { url = "https://files.pythonhosted.org/packages/96/f9/538488cacaea1e3e989cf87c389d075e2139ee50fab786de7e59b64f9411/zxing_cpp-3.0.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e4f62174643de2012bde470bf2048d8a29b5d93bb23bbdc6c075e7e92dbd5794", size = 994390, upload-time = "2026-02-10T12:49:36.294Z" },
    { url = "https://files.pythonhosted.org/packages/51/c1/3eab6fa0b1c6e83a23ce94727e1551ca49a6edabe4691adaa8d03ff742a2/zxing_cpp-3.0.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:156b363a0aae0b2472c58628346b5223ebb72935f0fa5def3d7ab4a7211c3e88", size = 1071503, upload-time = "2026-02-10T12:49:38.575Z" },
    { url = "https://files.pythonhosted.org/packages/7b/7f/32b4cc8545da72061d360aca9d96c51738d48e2f3a8eebe06a47f4103dd6/zxing_cpp-3.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8b76fac77c94545c5a6e2e6184a121c09409fff29f9c7557e350c16b78025d74", size = 914798, upload-time = "2026-02-10T12:49:43.556Z" },
    { url = "https://files.pythonhosted.org/packages/df/21/5ba18d19383fe5f044fefa79640f4234665bc77057cf3d584e5eb979685f/zxing_cpp-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bf58043c543d3440f1cbef6bfa9e5ad7139c39c90955d1f294f4778f0cd1ec0", size = 867437, upload-time = "2026-02-10T12:49:45.424Z" },
    { url = "https://files.pythonhosted.org/packages/8a/2a/94d98c5b728e1dfeec3a343f2581bf7f372ca448cefff50076cab0c6e0c4/zxing_cpp-3.0.0-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:548cc0e767f24193038031c76f60f2de0965ab5b05106dff6095bcae89607748", size = 995650, upload-time = "2026-02-10T12:49:47.222Z" },
    { url = "https://files.pythonhosted.org/packages/39/0f/03f09d048b7dde279a5bed8839ffbb21f7e8995747afa17970791c0356ff/zxing_cpp-3.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cfdf7a393541f4cd7c7c9329ec5d56b49a5cfc91bf24cdc53ec301d41c2afd68", size = 1074289, upload-time = "2026-02-10T12:49:48.804Z" },
    { url = "https://files.pythonhosted.org/packages/a0/c4/c4f276e43c4df74896b7cac2a3e5deabaf743e8256ee6736380d64f7295b/zxing_cpp-3.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:26ee52319b545a0db5adc19c682d5bd7efa210456daff0293f5cc78311c52d90", size = 914828, upload-time = "2026-02-10T12:49:53.306Z" },
    { url = "https://files.pythonhosted.org/packages/52/7e/971bb37b9091b02fd12f7c13745335a77a8e9e907abc3e0530ff9c4e6b32/zxing_cpp-3.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c4d44e63c0cb06df1d7ab636018b3e7139d5b010c22a5dcb18f3badfa29e1e1c", size = 867410, upload-time = "2026-02-10T12:49:55.061Z" },
    { url = "https://files.pythonhosted.org/packages/8e/df/cbf7e3ad2ca5f80f71df39c99fb7061f39fb390a9cab031dab2be361c8be/zxing_cpp-3.0.0-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d9e9f7404b9b33abf863ccb243f6b0e99c4818028894dfdd8fb41e142fcdad65", size = 996406, upload-time = "2026-02-10T12:49:56.42Z" },
    { url = "https://files.pythonhosted.org/packages/a3/ac/ae87a5ed87a7623e18a986e4394c3e12a5fa0f4fa55dae3be7f5ca6ef392/zxing_cpp-3.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0a96c8eaf1adff4c5aaf99c74d2b5ce3d542d44c21f964ac3f70eaaefcdc141e", size = 1074221, upload-time = "2026-02-10T12:49:57.971Z" },
    { url = "https://files.pythonhosted.org/packages/7a/06/8ecd68d8a9e9bb7166808480a1c09ab059c9974b5c54a40640d4e4e1d814/zxing_cpp-3.0.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:af13fcbbe24ca4285bda83309f50954107ddf7d12686c332a838f4eaf88ff619", size = 915701, upload-time = "2026-02-10T12:50:01.942Z" },
    { url = "https://files.pythonhosted.org/packages/f5/38/76f89b42fff2fae62595b3adc88b72e6eb1460acb9c43a8ed4c2455297df/zxing_cpp-3.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1b74a6b3608d035818d6d4fa9545875acae92635028b8927e3922175cb4fe19b", size = 868123, upload-time = "2026-02-10T12:50:03.222Z" },
    { url = "https://files.pythonhosted.org/packages/0a/3b/b76d979f74f09a7d764fe4c22583ba8322ef0f347e3193eceb1461b84913/zxing_cpp-3.0.0-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:27901910b14e2d6a6f8eba585249d02ac23660de1a6fef3dc3a283bb017c41d0", size = 997309, upload-time = "2026-02-10T12:50:04.835Z" },
    { url = "https://files.pythonhosted.org/packages/f8/e4/dd9ce2a725c83c15b1bc45b3d4e6be30f9528bcb9a4749002e1c4c8dca51/zxing_cpp-3.0.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:489fc0ab4af893e1b10b58b70c34db80fbbaf6e5c27c216e8f3f2367cf18a45d", size = 1074223, upload-time = "2026-02-10T12:50:06.622Z" },
resolution-markers = [
    "python_full_version >= '3.12' and sys_platform == 'darwin'",
    "python_full_version == '3.11.*' and sys_platform == 'darwin'",
    "python_full_version < '3.11' and sys_platform == 'darwin'",
    "(python_full_version >= '3.12' and platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or (python_full_version >= '3.13' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.13' and platform_machine == 'x86_64' and sys_platform == 'linux')",
    "python_full_version == '3.11.*' and sys_platform == 'linux'",
    "python_full_version < '3.11' and sys_platform == 'linux'",
]
sdist = { url = "https://files.pythonhosted.org/packages/d9/f2/b781bf6119abe665069777e3c0f154752cf924fe8a55fca027243abbc555/zxing_cpp-2.3.0.tar.gz", hash = "sha256:3babedb67a4c15c9de2c2b4c42d70af83a6c85780c1b2d9803ac64c6ae69f14e", size = 1172666, upload-time = "2025-01-01T21:54:05.856Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/31/93/3e830a3dd44a9f7d11219883bc6f131ca68da2a5ad48690d9645e19c3b55/zxing_cpp-2.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4e1ffcdd8e44a344cbf32bb0435e1fbe67241337c0a0f22452c2b8f7c16dc75e", size = 1694502, upload-time = "2025-01-01T21:53:06.339Z" },
    { url = "https://files.pythonhosted.org/packages/d7/4c/6bf1551c9b0097e13bcc54b82828e66719c021afd3ef05fd3d7650e0e768/zxing_cpp-2.3.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfde95506d3fec439705dbc8771ace025d049dce324861ddbf74be3ab0fabd36", size = 991445, upload-time = "2025-01-01T21:53:08.204Z" },
    { url = "https://files.pythonhosted.org/packages/64/6c/1bf6e40fadcb73958f672385c5186b062485c818cecc32b36ddf5666da1e/zxing_cpp-2.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd3f175f7b57cfbdea56afdb5335eaebaadeebc06e20a087d9aa3f99637c4aa5", size = 982960, upload-time = "2025-01-01T21:53:10.136Z" },
    { url = "https://files.pythonhosted.org/packages/ab/60/d420be9446b25a65064a665603bd24295e143e2bafde500bfc952a07fbee/zxing_cpp-2.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6ef0548f4247480da988ce1dad4d9c5b8d7cb2871538894fb9615c9ac0bb8656", size = 1697594, upload-time = "2025-01-01T21:53:17.292Z" },
    { url = "https://files.pythonhosted.org/packages/3e/34/ea057223cc34e63b1ff27b2794bcddfa58a1a64af7314882291255b56980/zxing_cpp-2.3.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfc1095dc3303ed24be2622916e199a071bae19b19d432a0ce7ca993f95879ec", size = 991930, upload-time = "2025-01-01T21:53:18.808Z" },
    { url = "https://files.pythonhosted.org/packages/2e/d3/75a6d6485e704527c5e18f825f6bd6b5e5129f56c3526f28142911b48410/zxing_cpp-2.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64e5a4ff5168142d8b33ca648978c8ec4125c50b33aa1521e0c5344c6ffacef7", size = 983751, upload-time = "2025-01-01T21:53:21.757Z" },
    { url = "https://files.pythonhosted.org/packages/94/d2/e4552dc7d341ccf6242410a13bf95cbd37d7bf194a482d400729b5934b87/zxing_cpp-2.3.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2f457c0aa53c1de263e34cac9917ef647bfb9adcc9e3d4f42a8a1fc02558e1a6", size = 1698659, upload-time = "2025-01-01T21:53:36.692Z" },
    { url = "https://files.pythonhosted.org/packages/0e/6c/00252c1b3545c13d68922b67cb7c555f739b3a1755cc2a694fd8705ecae2/zxing_cpp-2.3.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:899955e0091fa0e159b9eb429e43d0a23e2be4a5347c9629c858844f02024b4b", size = 992014, upload-time = "2025-01-01T21:53:39.621Z" },
    { url = "https://files.pythonhosted.org/packages/95/30/3143bf75944d65c9432349a79b97f9414965a44875ec9eeb5745592b4ecd/zxing_cpp-2.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dec2805c0e9dec0d7707c97ca5196f98d2730d2dfcea80442807123b9f8ec850", size = 984542, upload-time = "2025-01-01T21:53:41.01Z" },
    { url = "https://files.pythonhosted.org/packages/3d/46/ef7c69bea44a7c64d4a740679dd18c59616d610fb468c057d8bfbda5f063/zxing_cpp-2.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:3da0fbf0d93ef85663def561e8f7880447970710ea6b1768dfc05550a9ee3e00", size = 1698948, upload-time = "2025-01-01T21:53:46.768Z" },
    { url = "https://files.pythonhosted.org/packages/49/2e/8ed22a7b3743a8aa6a588366e34c44056d118ea7614b6bdbc44817ab4a7f/zxing_cpp-2.3.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0b36f3be2e6d928bea9bd529f173ef41092061f0f46d27f591c87486f9a7366", size = 992070, upload-time = "2025-01-01T21:53:48.258Z" },
    { url = "https://files.pythonhosted.org/packages/ce/5e/5784ad14f8514e4321f3a828dccc00ebcf70202f6ef967174d26bcb65568/zxing_cpp-2.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ba641ca5a0f19b97d7bc6a0212e61dab267a2b1a52a84946d02bdcd859ec318", size = 984869, upload-time = "2025-01-01T21:53:51.256Z" },
]

[[package]]
name = "zxing-cpp"
version = "2.3.0"
source = { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_aarch64.whl" }
resolution-markers = [
    "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'",
]
wheels = [
    { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_aarch64.whl", hash = "sha256:cfe600ed871ac540733fea3dac15c345b1ef61b703dd73ab0b618d29a491e611" },
]

[[package]]
name = "zxing-cpp"
version = "2.3.0"
source = { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_x86_64.whl" }
resolution-markers = [
    "python_full_version == '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'linux'",
]
wheels = [
    { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_x86_64.whl", hash = "sha256:15c6b1b6975a2a7d3dc679a05f6aed435753e39a105f37bed11098d00e0b5e79" },
]