Compare commits

...

16 Commits

Author  SHA1  Message  Date
shamoon  9a29195792  Mas testing  2025-12-31 17:06:24 -08:00
shamoon  bdd00498a1  skip_asn_if_exists  2025-12-31 12:09:56 -08:00
shamoon  92deebddd4  "Handoff" ASN when merging or editing PDFs  2025-12-31 11:50:27 -08:00
shamoon  c7efcee3d6  First, release ASNs before document replacement (and restore if needed)  2025-12-31 10:42:07 -08:00
GitHub Actions  72fd05501b  Auto translate strings  2025-12-29 14:50:09 +00:00
shamoon  a3c19b1e2d  Fix: validate cf integer values within PostgreSQL range (#11666)  2025-12-29 06:48:31 -08:00
shamoon  2e6458dbcc  Fix environment variable reference in workflow  2025-12-28 20:50:04 -08:00
shamoon  8471507115  Fix ref injection in translate-strings workflow  2025-12-28 20:47:44 -08:00
shamoon  99724a25a2  Fix: support ordering by storage path name (#11661)  2025-12-28 16:05:21 -08:00
shamoon  504c824cfe  Fix: propagate metadata override created value (#11659)  2025-12-27 19:42:45 -08:00
shamoon  01c7a345cb  Merge branch 'main' into dev  2025-12-26 11:03:55 -08:00
GitHub Actions  890c2d6757  Auto translate strings  2025-12-24 05:28:28 +00:00
shamoon  00cf026524  Feature: Indonesian translation (#11641)  2025-12-23 21:26:53 -08:00
shamoon  7604a0b583  Fix: prevent ASN collisions for merge operations (#11634)  2025-12-19 20:05:34 -08:00
shamoon  4e789acf2d  Chore: mark test_error_skip_rule as flaky  2025-12-18 10:15:04 -08:00
shamoon  d9459d04ea  Chore: refactor preview URL variable naming and safeUrl usage  2025-12-18 09:59:14 -08:00
21 changed files with 466 additions and 79 deletions

View File

@@ -12,9 +12,11 @@ jobs:
    steps:
      - name: Checkout code
        uses: actions/checkout@v6
+        env:
+          GH_REF: ${{ github.ref }} # sonar rule:githubactions:S7630 - avoid injection
        with:
          token: ${{ secrets.PNGX_BOT_PAT }}
-          ref: ${{ github.head_ref }}
+          ref: ${{ env.GH_REF }}
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v6

View File

@@ -31,6 +31,7 @@
"fi-FI": "src/locale/messages.fi_FI.xlf",
"fr-FR": "src/locale/messages.fr_FR.xlf",
"hu-HU": "src/locale/messages.hu_HU.xlf",
+"id-ID": "src/locale/messages.id_ID.xlf",
"it-IT": "src/locale/messages.it_IT.xlf",
"ja-JP": "src/locale/messages.ja_JP.xlf",
"lb-LU": "src/locale/messages.lb_LU.xlf",

View File

@@ -10028,179 +10028,186 @@
<context context-type="linenumber">135</context> <context context-type="linenumber">135</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="8312065814232621608" datatype="html">
<source>Indonesian</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">141</context>
</context-group>
</trans-unit>
<trans-unit id="2935232983274991580" datatype="html"> <trans-unit id="2935232983274991580" datatype="html">
<source>Italian</source> <source>Italian</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">141</context> <context context-type="linenumber">147</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="6924606686202701860" datatype="html"> <trans-unit id="6924606686202701860" datatype="html">
<source>Japanese</source> <source>Japanese</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">147</context> <context context-type="linenumber">153</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="6145439649200570157" datatype="html"> <trans-unit id="6145439649200570157" datatype="html">
<source>Korean</source> <source>Korean</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">153</context> <context context-type="linenumber">159</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="1334425850005897370" datatype="html"> <trans-unit id="1334425850005897370" datatype="html">
<source>Luxembourgish</source> <source>Luxembourgish</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">159</context> <context context-type="linenumber">165</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="3071065188816255493" datatype="html"> <trans-unit id="3071065188816255493" datatype="html">
<source>Dutch</source> <source>Dutch</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">165</context> <context context-type="linenumber">171</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="8069284467804715623" datatype="html"> <trans-unit id="8069284467804715623" datatype="html">
<source>Norwegian</source> <source>Norwegian</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">171</context> <context context-type="linenumber">177</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="4977087909184008115" datatype="html"> <trans-unit id="4977087909184008115" datatype="html">
<source>Persian</source> <source>Persian</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">177</context> <context context-type="linenumber">183</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="792060551707690640" datatype="html"> <trans-unit id="792060551707690640" datatype="html">
<source>Polish</source> <source>Polish</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">183</context> <context context-type="linenumber">189</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="9184513005098760425" datatype="html"> <trans-unit id="9184513005098760425" datatype="html">
<source>Portuguese (Brazil)</source> <source>Portuguese (Brazil)</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">189</context> <context context-type="linenumber">195</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="153799456510623899" datatype="html"> <trans-unit id="153799456510623899" datatype="html">
<source>Portuguese</source> <source>Portuguese</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">195</context> <context context-type="linenumber">201</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="8118856427047826368" datatype="html"> <trans-unit id="8118856427047826368" datatype="html">
<source>Romanian</source> <source>Romanian</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">201</context> <context context-type="linenumber">207</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="7137419789978325708" datatype="html"> <trans-unit id="7137419789978325708" datatype="html">
<source>Russian</source> <source>Russian</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">207</context> <context context-type="linenumber">213</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="9102963095355753902" datatype="html"> <trans-unit id="9102963095355753902" datatype="html">
<source>Slovak</source> <source>Slovak</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">213</context> <context context-type="linenumber">219</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="4287008301409320881" datatype="html"> <trans-unit id="4287008301409320881" datatype="html">
<source>Slovenian</source> <source>Slovenian</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">219</context> <context context-type="linenumber">225</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="8608389829607915090" datatype="html"> <trans-unit id="8608389829607915090" datatype="html">
<source>Serbian</source> <source>Serbian</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">225</context> <context context-type="linenumber">231</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="499386805970351976" datatype="html"> <trans-unit id="499386805970351976" datatype="html">
<source>Swedish</source> <source>Swedish</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">231</context> <context context-type="linenumber">237</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="5682359291233237791" datatype="html"> <trans-unit id="5682359291233237791" datatype="html">
<source>Turkish</source> <source>Turkish</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">237</context> <context context-type="linenumber">243</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="3578644052206125685" datatype="html"> <trans-unit id="3578644052206125685" datatype="html">
<source>Ukrainian</source> <source>Ukrainian</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">243</context> <context context-type="linenumber">249</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="3611216939636790848" datatype="html"> <trans-unit id="3611216939636790848" datatype="html">
<source>Vietnamese</source> <source>Vietnamese</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">249</context> <context context-type="linenumber">255</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="4689443708886954687" datatype="html"> <trans-unit id="4689443708886954687" datatype="html">
<source>Chinese Simplified</source> <source>Chinese Simplified</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">255</context> <context context-type="linenumber">261</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="8082606363137705994" datatype="html"> <trans-unit id="8082606363137705994" datatype="html">
<source>Chinese Traditional</source> <source>Chinese Traditional</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">261</context> <context context-type="linenumber">267</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="4912706592792948707" datatype="html"> <trans-unit id="4912706592792948707" datatype="html">
<source>ISO 8601</source> <source>ISO 8601</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">269</context> <context context-type="linenumber">275</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="313643372755303297" datatype="html"> <trans-unit id="313643372755303297" datatype="html">
<source>Successfully completed one-time migratration of settings to the database!</source> <source>Successfully completed one-time migratration of settings to the database!</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">603</context> <context context-type="linenumber">609</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="5558341108007064934" datatype="html"> <trans-unit id="5558341108007064934" datatype="html">
<source>Unable to migrate settings to the database, please try saving manually.</source> <source>Unable to migrate settings to the database, please try saving manually.</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">604</context> <context context-type="linenumber">610</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="1168781785897678748" datatype="html"> <trans-unit id="1168781785897678748" datatype="html">
<source>You can restart the tour from the settings page.</source> <source>You can restart the tour from the settings page.</source>
<context-group purpose="location"> <context-group purpose="location">
<context context-type="sourcefile">src/app/services/settings.service.ts</context> <context context-type="sourcefile">src/app/services/settings.service.ts</context>
<context context-type="linenumber">677</context> <context context-type="linenumber">683</context>
</context-group> </context-group>
</trans-unit> </trans-unit>
<trans-unit id="3852289441366561594" datatype="html"> <trans-unit id="3852289441366561594" datatype="html">

View File

@@ -28,6 +28,7 @@ import localeFa from '@angular/common/locales/fa'
import localeFi from '@angular/common/locales/fi'
import localeFr from '@angular/common/locales/fr'
import localeHu from '@angular/common/locales/hu'
+import localeId from '@angular/common/locales/id'
import localeIt from '@angular/common/locales/it'
import localeJa from '@angular/common/locales/ja'
import localeKo from '@angular/common/locales/ko'
@@ -63,6 +64,7 @@ registerLocaleData(localeFa)
registerLocaleData(localeFi)
registerLocaleData(localeFr)
registerLocaleData(localeHu)
+registerLocaleData(localeId)
registerLocaleData(localeIt)
registerLocaleData(localeJa)
registerLocaleData(localeKo)

View File

@@ -14,7 +14,7 @@
@if (previewText) {
  <div class="bg-light p-3 overflow-auto whitespace-preserve" width="100%">{{previewText}}</div>
} @else {
-  <object [data]="previewURL | safeUrl" width="100%" class="bg-light" [class.p-2]="!isPdf"></object>
+  <object [data]="previewUrl | safeUrl" width="100%" class="bg-light" [class.p-2]="!isPdf"></object>
}
} @else {
@if (requiresPassword) {
@@ -24,7 +24,7 @@
}
@if (!requiresPassword) {
  <pdf-viewer
-    [src]="previewURL"
+    [src]="previewUrl"
    [original-size]="false"
    [show-borders]="false"
    [show-all]="true"

View File

@@ -71,7 +71,7 @@ export class PreviewPopupComponent implements OnDestroy {
    return (this.isPdf && this.useNativePdfViewer) || !this.isPdf
  }

-  get previewURL() {
+  get previewUrl() {
    return this.documentService.getPreviewUrl(this.document.id)
  }
@@ -93,7 +93,7 @@ export class PreviewPopupComponent implements OnDestroy {
  init() {
    if (this.document.mime_type?.includes('text')) {
      this.http
-        .get(this.previewURL, { responseType: 'text' })
+        .get(this.previewUrl, { responseType: 'text' })
        .pipe(first(), takeUntil(this.unsubscribeNotifier))
        .subscribe({
          next: (res) => {
@@ -126,10 +126,6 @@ export class PreviewPopupComponent implements OnDestroy {
    }
  }

-  get previewUrl() {
-    return this.documentService.getPreviewUrl(this.document.id)
-  }
-
  mouseEnterPreview() {
    this.mouseOnPreview = true
    if (!this.popover.isOpen()) {

View File

@@ -379,7 +379,7 @@
<ng-template #previewContent>
  <div class="thumb-preview position-absolute pe-none text-center" [class.fade]="previewLoaded">
    @if (showThumbnailOverlay) {
-      <img [src]="thumbUrl | safeUrl" class="mx-auto" [attr.width]="previewZoomScale === 'page-fit' ? 'auto' : '100%'" [attr.height]="previewZoomScale === 'page-fit' ? '100%' : 'auto'" alt="Document loading..." i18n-alt />
+      <img [src]="thumbUrl" class="mx-auto" [attr.width]="previewZoomScale === 'page-fit' ? 'auto' : '100%'" [attr.height]="previewZoomScale === 'page-fit' ? '100%' : 'auto'" alt="Document loading..." i18n-alt />
    }
    <div class="position-absolute top-0 start-0 m-2 p-2 d-flex align-items-center justify-content-center">
      <div>
@@ -414,7 +414,7 @@
}
@case (ContentRenderType.Image) {
  <div class="preview-sticky">
-    <img [src]="previewUrl | safeUrl" width="100%" height="100%" alt="{{title}}" />
+    <img [src]="previewUrl" width="100%" height="100%" alt="{{title}}" />
  </div>
}
@case (ContentRenderType.TIFF) {

View File

@@ -136,6 +136,12 @@ const LANGUAGE_OPTIONS = [
    englishName: 'Hungarian',
    dateInputFormat: 'yyyy.mm.dd',
  },
+  {
+    code: 'id-id',
+    name: $localize`Indonesian`,
+    englishName: 'Indonesian',
+    dateInputFormat: 'dd-mm-yyyy',
+  },
  {
    code: 'it-it',
    name: $localize`Italian`,

View File

@@ -171,6 +171,7 @@ import localeFa from '@angular/common/locales/fa'
import localeFi from '@angular/common/locales/fi'
import localeFr from '@angular/common/locales/fr'
import localeHu from '@angular/common/locales/hu'
+import localeId from '@angular/common/locales/id'
import localeIt from '@angular/common/locales/it'
import localeJa from '@angular/common/locales/ja'
import localeKo from '@angular/common/locales/ko'
@@ -209,6 +210,7 @@ registerLocaleData(localeFa)
registerLocaleData(localeFi)
registerLocaleData(localeFr)
registerLocaleData(localeHu)
+registerLocaleData(localeId)
registerLocaleData(localeIt)
registerLocaleData(localeJa)
registerLocaleData(localeKo)

View File

@@ -16,6 +16,7 @@ from pikepdf import Pdf
from documents.converters import convert_from_tiff_to_pdf
from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
+from documents.models import Document
from documents.models import Tag
from documents.plugins.base import ConsumeTaskPlugin
from documents.plugins.base import StopConsumeTaskError
@@ -115,6 +116,24 @@ class BarcodePlugin(ConsumeTaskPlugin):
        self._tiff_conversion_done = False
        self.barcodes: list[Barcode] = []

+    def _apply_detected_asn(self, detected_asn: int) -> None:
+        """
+        Apply a detected ASN to metadata if allowed.
+        """
+        if (
+            self.metadata.skip_asn_if_exists
+            and Document.global_objects.filter(
+                archive_serial_number=detected_asn,
+            ).exists()
+        ):
+            logger.info(
+                f"Found ASN in barcode {detected_asn} but skipping because it already exists.",
+            )
+            return
+        logger.info(f"Found ASN in barcode: {detected_asn}")
+        self.metadata.asn = detected_asn
+
    def run(self) -> None:
        # Some operations may use PIL, override pixel setting if needed
        maybe_override_pixel_limit()
@@ -187,8 +206,7 @@
        # Update/overwrite an ASN if possible
        # After splitting, as otherwise each split document gets the same ASN
        if self.settings.barcode_enable_asn and (located_asn := self.asn) is not None:
-            logger.info(f"Found ASN in barcode: {located_asn}")
-            self.metadata.asn = located_asn
+            self._apply_detected_asn(located_asn)

    def cleanup(self) -> None:
        self.temp_dir.cleanup()
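The new skip_asn_if_exists flag only changes behaviour when a detected ASN is already taken by another document. A minimal sketch of that guard, using only names from the diff above (the standalone helper and its argument names are illustrative, not part of the change):

    from documents.data_models import DocumentMetadataOverrides
    from documents.models import Document

    def apply_detected_asn(metadata: DocumentMetadataOverrides, detected_asn: int) -> None:
        # When the flag is set and another document already holds this ASN,
        # leave metadata.asn untouched; otherwise hand the value to the consumer.
        already_taken = Document.global_objects.filter(
            archive_serial_number=detected_asn,
        ).exists()
        if metadata.skip_asn_if_exists and already_taken:
            return
        metadata.asn = detected_asn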

View File

@@ -7,7 +7,6 @@ from pathlib import Path
from typing import TYPE_CHECKING
from typing import Literal

-from celery import chain
from celery import chord
from celery import group
from celery import shared_task
@@ -38,6 +37,42 @@ if TYPE_CHECKING:
logger: logging.Logger = logging.getLogger("paperless.bulk_edit")


+@shared_task(bind=True)
+def restore_archive_serial_numbers_task(
+    self,
+    backup: dict[int, int],
+    *args,
+    **kwargs,
+) -> None:
+    restore_archive_serial_numbers(backup)
+
+
+def release_archive_serial_numbers(doc_ids: list[int]) -> dict[int, int]:
+    """
+    Clears ASNs on documents that are about to be replaced so new documents
+    can be assigned ASNs without uniqueness collisions. Returns a backup map
+    of doc_id -> previous ASN for potential restoration.
+    """
+    qs = Document.objects.filter(
+        id__in=doc_ids,
+        archive_serial_number__isnull=False,
+    ).only("pk", "archive_serial_number")
+    backup = dict(qs.values_list("pk", "archive_serial_number"))
+    qs.update(archive_serial_number=None)
+    logger.info(f"Released archive serial numbers for documents {list(backup.keys())}")
+    return backup
+
+
+def restore_archive_serial_numbers(backup: dict[int, int]) -> None:
+    """
+    Restores ASNs using the provided backup map, intended for
+    rollback when replacement consumption fails.
+    """
+    for doc_id, asn in backup.items():
+        Document.objects.filter(pk=doc_id).update(archive_serial_number=asn)
+    logger.info(f"Restored archive serial numbers for documents {list(backup.keys())}")
+
+
def set_correspondent(
    doc_ids: list[int],
    correspondent: Correspondent,
@@ -386,6 +421,7 @@ def merge(
    merged_pdf = pikepdf.new()
    version: str = merged_pdf.pdf_version

+    handoff_asn: int | None = None
    # use doc_ids to preserve order
    for doc_id in doc_ids:
        doc = qs.get(id=doc_id)
@@ -401,6 +437,8 @@ def merge(
            version = max(version, pdf.pdf_version)
            merged_pdf.pages.extend(pdf.pages)
            affected_docs.append(doc.id)
+            if handoff_asn is None and doc.archive_serial_number is not None:
+                handoff_asn = doc.archive_serial_number
        except Exception as e:
            logger.exception(
                f"Error merging document {doc.id}, it will not be included in the merge: {e}",
@@ -426,6 +464,8 @@ def merge(
                DocumentMetadataOverrides.from_document(metadata_document)
            )
            overrides.title = metadata_document.title + " (merged)"
+            if metadata_document.archive_serial_number is not None:
+                handoff_asn = metadata_document.archive_serial_number
        else:
            overrides = DocumentMetadataOverrides()
    else:
@@ -433,6 +473,11 @@ def merge(
    if user is not None:
        overrides.owner_id = user.id

+    if not delete_originals:
+        overrides.skip_asn_if_exists = True
+    if delete_originals and handoff_asn is not None:
+        overrides.asn = handoff_asn
+
    logger.info("Adding merged document to the task queue.")
@@ -445,12 +490,20 @@ def merge(
    )

    if delete_originals:
+        backup = release_archive_serial_numbers(affected_docs)
        logger.info(
            "Queueing removal of original documents after consumption of merged document",
        )
-        chain(consume_task, delete.si(affected_docs)).delay()
+        try:
+            consume_task.apply_async(
+                link=[delete.si(affected_docs)],
+                link_error=[restore_archive_serial_numbers_task.s(backup)],
+            )
+        except Exception:
+            restore_archive_serial_numbers(backup)
+            raise
    else:
        consume_task.delay()

    return "OK"
@@ -492,6 +545,8 @@ def split(
        overrides.title = f"{doc.title} (split {idx + 1})"
        if user is not None:
            overrides.owner_id = user.id
+        if not delete_originals:
+            overrides.skip_asn_if_exists = True

        logger.info(
            f"Adding split document with pages {split_doc} to the task queue.",
        )
@@ -506,10 +561,20 @@ def split(
        )

    if delete_originals:
+        backup = release_archive_serial_numbers([doc.id])
        logger.info(
            "Queueing removal of original document after consumption of the split documents",
        )
-        chord(header=consume_tasks, body=delete.si([doc.id])).delay()
+        try:
+            chord(
+                header=consume_tasks,
+                body=delete.si([doc.id]),
+            ).apply_async(
+                link_error=[restore_archive_serial_numbers_task.s(backup)],
+            )
+        except Exception:
+            restore_archive_serial_numbers(backup)
+            raise
    else:
        group(consume_tasks).delay()
@@ -612,7 +677,10 @@ def edit_pdf(
    )
    if user is not None:
        overrides.owner_id = user.id
+    if not delete_original:
+        overrides.skip_asn_if_exists = True
+    if delete_original and len(pdf_docs) == 1:
+        overrides.asn = doc.archive_serial_number

    for idx, pdf in enumerate(pdf_docs, start=1):
        filepath: Path = (
            Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
@@ -631,7 +699,17 @@ def edit_pdf(
        )

    if delete_original:
-        chord(header=consume_tasks, body=delete.si([doc.id])).delay()
+        backup = release_archive_serial_numbers([doc.id])
+        try:
+            chord(
+                header=consume_tasks,
+                body=delete.si([doc.id]),
+            ).apply_async(
+                link_error=[restore_archive_serial_numbers_task.s(backup)],
+            )
+        except Exception:
+            restore_archive_serial_numbers(backup)
+            raise
    else:
        group(consume_tasks).delay()
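The merge, split and edit_pdf branches above all follow the same release-then-restore pattern. A condensed sketch of that flow, built only from the helpers introduced in this diff (the wrapper function itself is illustrative):

    def replace_documents_safely(doc_ids, consume_signature, delete_signature):
        # Free the ASNs so the replacement document can claim one of them.
        backup = release_archive_serial_numbers(doc_ids)  # {doc_id: old_asn}
        try:
            consume_signature.apply_async(
                link=[delete_signature],  # delete originals only after successful consumption
                link_error=[restore_archive_serial_numbers_task.s(backup)],  # async rollback
            )
        except Exception:
            # Queuing itself failed, so put the old ASNs back synchronously.
            restore_archive_serial_numbers(backup)
            raise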

View File

@@ -813,7 +813,7 @@ class ConsumerPreflightPlugin(
        Check that if override_asn is given, it is unique and within a valid range
        """
        if self.metadata.asn is None:
-            # check not necessary in case no ASN gets set
+            # if ASN is None
            return
        # Validate the range is above zero and less than uint32_t max
        # otherwise, Whoosh can't handle it in the index

View File

@@ -22,7 +22,7 @@ class DocumentMetadataOverrides:
    document_type_id: int | None = None
    tag_ids: list[int] | None = None
    storage_path_id: int | None = None
-    created: datetime.datetime | None = None
+    created: datetime.date | None = None
    asn: int | None = None
    owner_id: int | None = None
    view_users: list[int] | None = None
@@ -30,6 +30,7 @@
    change_users: list[int] | None = None
    change_groups: list[int] | None = None
    custom_fields: dict | None = None
+    skip_asn_if_exists: bool = False

    def update(self, other: "DocumentMetadataOverrides") -> "DocumentMetadataOverrides":
        """
@@ -49,6 +50,8 @@
            self.storage_path_id = other.storage_path_id
        if other.owner_id is not None:
            self.owner_id = other.owner_id
+        if other.skip_asn_if_exists:
+            self.skip_asn_if_exists = True

        # merge
        if self.tag_ids is None:
@@ -100,6 +103,7 @@
        overrides.storage_path_id = doc.storage_path.id if doc.storage_path else None
        overrides.owner_id = doc.owner.id if doc.owner else None
        overrides.tag_ids = list(doc.tags.values_list("id", flat=True))
+        overrides.created = doc.created
        overrides.view_users = list(
            get_users_with_perms(
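A short sketch of how the two new behaviours combine; doc stands for any existing Document instance and the assertions are illustrative:

    from documents.data_models import DocumentMetadataOverrides

    base = DocumentMetadataOverrides()
    incoming = DocumentMetadataOverrides(skip_asn_if_exists=True)
    base.update(incoming)
    assert base.skip_asn_if_exists is True  # the flag survives a merge of overrides

    overrides = DocumentMetadataOverrides.from_document(doc)
    assert overrides.created == doc.created  # created is now copied from the source document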

View File

@@ -18,6 +18,8 @@ from django.core.exceptions import ValidationError
from django.core.validators import DecimalValidator
from django.core.validators import EmailValidator
from django.core.validators import MaxLengthValidator
+from django.core.validators import MaxValueValidator
+from django.core.validators import MinValueValidator
from django.core.validators import RegexValidator
from django.core.validators import integer_validator
from django.db.models import Count
@@ -875,6 +877,13 @@ class CustomFieldInstanceSerializer(serializers.ModelSerializer):
            uri_validator(data["value"])
        elif field.data_type == CustomField.FieldDataType.INT:
            integer_validator(data["value"])
+            try:
+                value_int = int(data["value"])
+            except (TypeError, ValueError):
+                raise serializers.ValidationError("Enter a valid integer.")
+            # Keep values within the PostgreSQL integer range
+            MinValueValidator(-2147483648)(value_int)
+            MaxValueValidator(2147483647)(value_int)
        elif (
            field.data_type == CustomField.FieldDataType.MONETARY
            and data["value"] != ""
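Roughly, the added check keeps integer custom-field values inside PostgreSQL's 4-byte integer column range. A standalone equivalent (the function name is assumed, not part of the change):

    from django.core.validators import MaxValueValidator, MinValueValidator

    def validate_pg_integer(raw_value):
        value = int(raw_value)                   # TypeError/ValueError for non-numeric input
        MinValueValidator(-2147483648)(value)    # -2**31
        MaxValueValidator(2147483647)(value)     # 2**31 - 1
        return value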

View File

@@ -1664,6 +1664,44 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.consume_file_mock.assert_not_called() self.consume_file_mock.assert_not_called()
def test_patch_document_integer_custom_field_out_of_range(self):
"""
GIVEN:
- An integer custom field
- A document
WHEN:
- Patching the document with an integer value exceeding PostgreSQL's range
THEN:
- HTTP 400 is returned (validation catches the overflow)
- No custom field instance is created
"""
cf_int = CustomField.objects.create(
name="intfield",
data_type=CustomField.FieldDataType.INT,
)
doc = Document.objects.create(
title="Doc",
checksum="123",
mime_type="application/pdf",
)
response = self.client.patch(
f"/api/documents/{doc.pk}/",
{
"custom_fields": [
{
"field": cf_int.pk,
"value": 2**31, # overflow for PostgreSQL integer fields
},
],
},
format="json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn("custom_fields", response.data)
self.assertEqual(CustomFieldInstance.objects.count(), 0)
def test_upload_with_webui_source(self): def test_upload_with_webui_source(self):
""" """
GIVEN: A document with a source file GIVEN: A document with a source file

View File

@@ -581,7 +581,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
- Consume file should be called - Consume file should be called
""" """
doc_ids = [self.doc1.id, self.doc2.id, self.doc3.id] doc_ids = [self.doc1.id, self.doc2.id, self.doc3.id]
metadata_document_id = self.doc1.id metadata_document_id = self.doc2.id
user = User.objects.create(username="test_user") user = User.objects.create(username="test_user")
result = bulk_edit.merge( result = bulk_edit.merge(
@@ -602,20 +602,21 @@ class TestPDFActions(DirectoriesMixin, TestCase):
expected_filename, expected_filename,
) )
self.assertEqual(consume_file_args[1].title, None) self.assertEqual(consume_file_args[1].title, None)
# No metadata_document_id, delete_originals False, so ASN should be None
self.assertIsNone(consume_file_args[1].asn)
# With metadata_document_id overrides # With metadata_document_id overrides
result = bulk_edit.merge(doc_ids, metadata_document_id=metadata_document_id) result = bulk_edit.merge(doc_ids, metadata_document_id=metadata_document_id)
consume_file_args, _ = mock_consume_file.call_args consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(consume_file_args[1].title, "A (merged)") self.assertEqual(consume_file_args[1].title, "B (merged)")
self.assertEqual(consume_file_args[1].created, self.doc2.created)
self.assertEqual(result, "OK") self.assertEqual(result, "OK")
@mock.patch("documents.bulk_edit.delete.si") @mock.patch("documents.bulk_edit.delete.si")
@mock.patch("documents.tasks.consume_file.s") @mock.patch("documents.tasks.consume_file.s")
@mock.patch("documents.bulk_edit.chain")
def test_merge_and_delete_originals( def test_merge_and_delete_originals(
self, self,
mock_chain,
mock_consume_file, mock_consume_file,
mock_delete_documents, mock_delete_documents,
): ):
@@ -629,6 +630,12 @@ class TestPDFActions(DirectoriesMixin, TestCase):
- Document deletion task should be called - Document deletion task should be called
""" """
doc_ids = [self.doc1.id, self.doc2.id, self.doc3.id] doc_ids = [self.doc1.id, self.doc2.id, self.doc3.id]
self.doc1.archive_serial_number = 101
self.doc2.archive_serial_number = 102
self.doc3.archive_serial_number = 103
self.doc1.save()
self.doc2.save()
self.doc3.save()
result = bulk_edit.merge(doc_ids, delete_originals=True) result = bulk_edit.merge(doc_ids, delete_originals=True)
self.assertEqual(result, "OK") self.assertEqual(result, "OK")
@@ -639,7 +646,8 @@ class TestPDFActions(DirectoriesMixin, TestCase):
mock_consume_file.assert_called() mock_consume_file.assert_called()
mock_delete_documents.assert_called() mock_delete_documents.assert_called()
mock_chain.assert_called_once() consume_sig = mock_consume_file.return_value
consume_sig.apply_async.assert_called_once()
consume_file_args, _ = mock_consume_file.call_args consume_file_args, _ = mock_consume_file.call_args
self.assertEqual( self.assertEqual(
@@ -647,6 +655,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
expected_filename, expected_filename,
) )
self.assertEqual(consume_file_args[1].title, None) self.assertEqual(consume_file_args[1].title, None)
self.assertEqual(consume_file_args[1].asn, 101)
delete_documents_args, _ = mock_delete_documents.call_args delete_documents_args, _ = mock_delete_documents.call_args
self.assertEqual( self.assertEqual(
@@ -654,6 +663,92 @@ class TestPDFActions(DirectoriesMixin, TestCase):
doc_ids, doc_ids,
) )
self.doc1.refresh_from_db()
self.doc2.refresh_from_db()
self.doc3.refresh_from_db()
self.assertIsNone(self.doc1.archive_serial_number)
self.assertIsNone(self.doc2.archive_serial_number)
self.assertIsNone(self.doc3.archive_serial_number)
@mock.patch("documents.bulk_edit.delete.si")
@mock.patch("documents.tasks.consume_file.s")
def test_merge_and_delete_originals_restore_on_failure(
self,
mock_consume_file,
mock_delete_documents,
):
"""
GIVEN:
- Existing documents
WHEN:
- Merge action with deleting documents is called with 1 document
- Error occurs when queuing consume file task
THEN:
- Archive serial numbers are restored
"""
doc_ids = [self.doc1.id]
self.doc1.archive_serial_number = 111
self.doc1.save()
sig = mock.Mock()
sig.apply_async.side_effect = Exception("boom")
mock_consume_file.return_value = sig
with self.assertRaises(Exception):
bulk_edit.merge(doc_ids, delete_originals=True)
self.doc1.refresh_from_db()
self.assertEqual(self.doc1.archive_serial_number, 111)
@mock.patch("documents.bulk_edit.delete.si")
@mock.patch("documents.tasks.consume_file.s")
def test_merge_and_delete_originals_metadata_handoff(
self,
mock_consume_file,
mock_delete_documents,
):
"""
GIVEN:
- Existing documents with ASNs
WHEN:
- Merge with delete_originals=True and metadata_document_id set
THEN:
- Handoff ASN uses metadata document ASN
"""
doc_ids = [self.doc1.id, self.doc2.id]
self.doc1.archive_serial_number = 101
self.doc2.archive_serial_number = 202
self.doc1.save()
self.doc2.save()
result = bulk_edit.merge(
doc_ids,
metadata_document_id=self.doc2.id,
delete_originals=True,
)
self.assertEqual(result, "OK")
consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(consume_file_args[1].asn, 202)
def test_restore_archive_serial_numbers_task(self):
"""
GIVEN:
- Existing document with no archive serial number
WHEN:
- Restore archive serial number task is called with backup data
THEN:
- Document archive serial number is restored
"""
self.doc1.archive_serial_number = 444
self.doc1.save()
Document.objects.filter(pk=self.doc1.id).update(archive_serial_number=None)
backup = {self.doc1.id: 444}
bulk_edit.restore_archive_serial_numbers_task(backup)
self.doc1.refresh_from_db()
self.assertEqual(self.doc1.archive_serial_number, 444)
@mock.patch("documents.tasks.consume_file.s") @mock.patch("documents.tasks.consume_file.s")
def test_merge_with_archive_fallback(self, mock_consume_file): def test_merge_with_archive_fallback(self, mock_consume_file):
""" """
@@ -722,6 +817,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
self.assertEqual(mock_consume_file.call_count, 2) self.assertEqual(mock_consume_file.call_count, 2)
consume_file_args, _ = mock_consume_file.call_args consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(consume_file_args[1].title, "B (split 2)") self.assertEqual(consume_file_args[1].title, "B (split 2)")
self.assertIsNone(consume_file_args[1].asn)
self.assertEqual(result, "OK") self.assertEqual(result, "OK")
@@ -746,6 +842,8 @@ class TestPDFActions(DirectoriesMixin, TestCase):
""" """
doc_ids = [self.doc2.id] doc_ids = [self.doc2.id]
pages = [[1, 2], [3]] pages = [[1, 2], [3]]
self.doc2.archive_serial_number = 200
self.doc2.save()
result = bulk_edit.split(doc_ids, pages, delete_originals=True) result = bulk_edit.split(doc_ids, pages, delete_originals=True)
self.assertEqual(result, "OK") self.assertEqual(result, "OK")
@@ -763,6 +861,42 @@ class TestPDFActions(DirectoriesMixin, TestCase):
doc_ids, doc_ids,
) )
self.doc2.refresh_from_db()
self.assertIsNone(self.doc2.archive_serial_number)
@mock.patch("documents.bulk_edit.delete.si")
@mock.patch("documents.tasks.consume_file.s")
@mock.patch("documents.bulk_edit.chord")
def test_split_restore_on_failure(
self,
mock_chord,
mock_consume_file,
mock_delete_documents,
):
"""
GIVEN:
- Existing documents
WHEN:
- Split action with deleting documents is called with 1 document and 2 page groups
- Error occurs when queuing chord task
THEN:
- Archive serial numbers are restored
"""
doc_ids = [self.doc2.id]
pages = [[1, 2]]
self.doc2.archive_serial_number = 222
self.doc2.save()
sig = mock.Mock()
sig.apply_async.side_effect = Exception("boom")
mock_chord.return_value = sig
result = bulk_edit.split(doc_ids, pages, delete_originals=True)
self.assertEqual(result, "OK")
self.doc2.refresh_from_db()
self.assertEqual(self.doc2.archive_serial_number, 222)
@mock.patch("documents.tasks.consume_file.delay") @mock.patch("documents.tasks.consume_file.delay")
@mock.patch("pikepdf.Pdf.save") @mock.patch("pikepdf.Pdf.save")
def test_split_with_errors(self, mock_save_pdf, mock_consume_file): def test_split_with_errors(self, mock_save_pdf, mock_consume_file):
@@ -963,10 +1097,49 @@ class TestPDFActions(DirectoriesMixin, TestCase):
mock_chord.return_value.delay.return_value = None mock_chord.return_value.delay.return_value = None
doc_ids = [self.doc2.id] doc_ids = [self.doc2.id]
operations = [{"page": 1}, {"page": 2}] operations = [{"page": 1}, {"page": 2}]
self.doc2.archive_serial_number = 250
self.doc2.save()
result = bulk_edit.edit_pdf(doc_ids, operations, delete_original=True) result = bulk_edit.edit_pdf(doc_ids, operations, delete_original=True)
self.assertEqual(result, "OK") self.assertEqual(result, "OK")
mock_chord.assert_called_once() mock_chord.assert_called_once()
consume_file_args, _ = mock_consume_file.call_args
self.assertEqual(consume_file_args[1].asn, 250)
self.doc2.refresh_from_db()
self.assertIsNone(self.doc2.archive_serial_number)
@mock.patch("documents.bulk_edit.delete.si")
@mock.patch("documents.tasks.consume_file.s")
@mock.patch("documents.bulk_edit.chord")
def test_edit_pdf_restore_on_failure(
self,
mock_chord,
mock_consume_file,
mock_delete_documents,
):
"""
GIVEN:
- Existing document
WHEN:
- edit_pdf is called with delete_original=True
- Error occurs when queuing chord task
THEN:
- Archive serial numbers are restored
"""
doc_ids = [self.doc2.id]
operations = [{"page": 1}]
self.doc2.archive_serial_number = 333
self.doc2.save()
sig = mock.Mock()
sig.apply_async.side_effect = Exception("boom")
mock_chord.return_value = sig
with self.assertRaises(Exception):
bulk_edit.edit_pdf(doc_ids, operations, delete_original=True)
self.doc2.refresh_from_db()
self.assertEqual(self.doc2.archive_serial_number, 333)
@mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay") @mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
def test_edit_pdf_with_update_document(self, mock_update_document): def test_edit_pdf_with_update_document(self, mock_update_document):

View File

@@ -14,6 +14,7 @@ from django.test import override_settings
from django.utils import timezone from django.utils import timezone
from guardian.core import ObjectPermissionChecker from guardian.core import ObjectPermissionChecker
from documents.barcodes import BarcodePlugin
from documents.consumer import ConsumerError from documents.consumer import ConsumerError
from documents.data_models import DocumentMetadataOverrides from documents.data_models import DocumentMetadataOverrides
from documents.data_models import DocumentSource from documents.data_models import DocumentSource
@@ -1232,3 +1233,46 @@ class PostConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):
r"sample\.pdf: Error while executing post-consume script: Command '\[.*\]' returned non-zero exit status \d+\.", r"sample\.pdf: Error while executing post-consume script: Command '\[.*\]' returned non-zero exit status \d+\.",
): ):
consumer.run_post_consume_script(doc) consumer.run_post_consume_script(doc)
class TestMetadataOverrides(TestCase):
def test_update_skip_asn_if_exists(self):
base = DocumentMetadataOverrides()
incoming = DocumentMetadataOverrides(skip_asn_if_exists=True)
base.update(incoming)
self.assertTrue(base.skip_asn_if_exists)
class TestBarcodeApplyDetectedASN(TestCase):
"""
GIVEN:
- Existing Documents with ASN 123
WHEN:
- A BarcodePlugin which detected an ASN
THEN:
- If skip_asn_if_exists is set, and ASN exists, do not set ASN
- If skip_asn_if_exists is set, and ASN does not exist, set ASN
"""
def test_apply_detected_asn_skips_existing_when_flag_set(self):
doc = Document.objects.create(
checksum="X1",
title="D1",
archive_serial_number=123,
)
metadata = DocumentMetadataOverrides(skip_asn_if_exists=True)
plugin = BarcodePlugin(
input_doc=mock.Mock(),
metadata=metadata,
status_mgr=mock.Mock(),
base_tmp_dir=tempfile.gettempdir(),
task_id="test-task",
)
plugin._apply_detected_asn(123)
self.assertIsNone(plugin.metadata.asn)
doc.hard_delete()
plugin._apply_detected_asn(123)
self.assertEqual(plugin.metadata.asn, 123)

View File

@@ -708,6 +708,7 @@ class DocumentViewSet(
        "title",
        "correspondent__name",
        "document_type__name",
+        "storage_path__name",
        "created",
        "modified",
        "added",

View File

@@ -2,7 +2,7 @@ msgid ""
msgstr "" msgstr ""
"Project-Id-Version: paperless-ngx\n" "Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n" "Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2025-12-12 17:41+0000\n" "POT-Creation-Date: 2025-12-29 14:49+0000\n"
"PO-Revision-Date: 2022-02-17 04:17\n" "PO-Revision-Date: 2022-02-17 04:17\n"
"Last-Translator: \n" "Last-Translator: \n"
"Language-Team: English\n" "Language-Team: English\n"
@@ -1219,35 +1219,35 @@ msgstr ""
msgid "workflow runs" msgid "workflow runs"
msgstr "" msgstr ""
#: documents/serialisers.py:640 #: documents/serialisers.py:642
msgid "Invalid color." msgid "Invalid color."
msgstr "" msgstr ""
#: documents/serialisers.py:1826 #: documents/serialisers.py:1835
#, python-format #, python-format
msgid "File type %(type)s not supported" msgid "File type %(type)s not supported"
msgstr "" msgstr ""
#: documents/serialisers.py:1870 #: documents/serialisers.py:1879
#, python-format #, python-format
msgid "Custom field id must be an integer: %(id)s" msgid "Custom field id must be an integer: %(id)s"
msgstr "" msgstr ""
#: documents/serialisers.py:1877 #: documents/serialisers.py:1886
#, python-format #, python-format
msgid "Custom field with id %(id)s does not exist" msgid "Custom field with id %(id)s does not exist"
msgstr "" msgstr ""
#: documents/serialisers.py:1894 documents/serialisers.py:1904 #: documents/serialisers.py:1903 documents/serialisers.py:1913
msgid "" msgid ""
"Custom fields must be a list of integers or an object mapping ids to values." "Custom fields must be a list of integers or an object mapping ids to values."
msgstr "" msgstr ""
#: documents/serialisers.py:1899 #: documents/serialisers.py:1908
msgid "Some custom fields don't exist or were specified twice." msgid "Some custom fields don't exist or were specified twice."
msgstr "" msgstr ""
#: documents/serialisers.py:2014 #: documents/serialisers.py:2023
msgid "Invalid variable detected." msgid "Invalid variable detected."
msgstr "" msgstr ""
@@ -1767,82 +1767,86 @@ msgid "Hungarian"
msgstr "" msgstr ""
#: paperless/settings.py:789 #: paperless/settings.py:789
msgid "Italian" msgid "Indonesian"
msgstr "" msgstr ""
#: paperless/settings.py:790 #: paperless/settings.py:790
msgid "Japanese" msgid "Italian"
msgstr "" msgstr ""
#: paperless/settings.py:791 #: paperless/settings.py:791
msgid "Korean" msgid "Japanese"
msgstr "" msgstr ""
#: paperless/settings.py:792 #: paperless/settings.py:792
msgid "Luxembourgish" msgid "Korean"
msgstr "" msgstr ""
#: paperless/settings.py:793 #: paperless/settings.py:793
msgid "Norwegian" msgid "Luxembourgish"
msgstr "" msgstr ""
#: paperless/settings.py:794 #: paperless/settings.py:794
msgid "Dutch" msgid "Norwegian"
msgstr "" msgstr ""
#: paperless/settings.py:795 #: paperless/settings.py:795
msgid "Polish" msgid "Dutch"
msgstr "" msgstr ""
#: paperless/settings.py:796 #: paperless/settings.py:796
msgid "Portuguese (Brazil)" msgid "Polish"
msgstr "" msgstr ""
#: paperless/settings.py:797 #: paperless/settings.py:797
msgid "Portuguese" msgid "Portuguese (Brazil)"
msgstr "" msgstr ""
#: paperless/settings.py:798 #: paperless/settings.py:798
msgid "Romanian" msgid "Portuguese"
msgstr "" msgstr ""
#: paperless/settings.py:799 #: paperless/settings.py:799
msgid "Russian" msgid "Romanian"
msgstr "" msgstr ""
#: paperless/settings.py:800 #: paperless/settings.py:800
msgid "Slovak" msgid "Russian"
msgstr "" msgstr ""
#: paperless/settings.py:801 #: paperless/settings.py:801
msgid "Slovenian" msgid "Slovak"
msgstr "" msgstr ""
#: paperless/settings.py:802 #: paperless/settings.py:802
msgid "Serbian" msgid "Slovenian"
msgstr "" msgstr ""
#: paperless/settings.py:803 #: paperless/settings.py:803
msgid "Swedish" msgid "Serbian"
msgstr "" msgstr ""
#: paperless/settings.py:804 #: paperless/settings.py:804
msgid "Turkish" msgid "Swedish"
msgstr "" msgstr ""
#: paperless/settings.py:805 #: paperless/settings.py:805
msgid "Ukrainian" msgid "Turkish"
msgstr "" msgstr ""
#: paperless/settings.py:806 #: paperless/settings.py:806
msgid "Vietnamese" msgid "Ukrainian"
msgstr "" msgstr ""
#: paperless/settings.py:807 #: paperless/settings.py:807
msgid "Chinese Simplified" msgid "Vietnamese"
msgstr "" msgstr ""
#: paperless/settings.py:808 #: paperless/settings.py:808
msgid "Chinese Simplified"
msgstr ""
#: paperless/settings.py:809
msgid "Chinese Traditional" msgid "Chinese Traditional"
msgstr "" msgstr ""

View File

@@ -786,6 +786,7 @@ LANGUAGES = [
    ("fi-fi", _("Finnish")),
    ("fr-fr", _("French")),
    ("hu-hu", _("Hungarian")),
+    ("id-id", _("Indonesian")),
    ("it-it", _("Italian")),
    ("ja-jp", _("Japanese")),
    ("ko-kr", _("Korean")),

View File

@@ -1108,6 +1108,7 @@ class TestMail(
        self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 2)
        self.assertEqual(len(self.mailMocker.bogus_mailbox.messages_spam), 1)

+    @pytest.mark.flaky(reruns=4)
    def test_error_skip_rule(self):
        account = MailAccount.objects.create(
            name="test2",