Compare commits

..

4 Commits

Author          SHA1        Message                                                                                       Date
Trenton Holmes  c08b025423  Set a stricter set of checks for pytest, and try changing how we distribute tests to workers  2026-01-24 19:53:00 -08:00
Trenton Holmes  55d06fda03  Handle the new pytest subTest integration                                                     2026-01-24 19:45:30 -08:00
Trenton Holmes  c5fb80dc35  Remove randomly for the moment                                                                2026-01-24 19:35:16 -08:00
Trenton Holmes  cb5a0fd7d9  Upgrades pytest and adds pytest-randomly for better surfacing of timing and ordering errors  2026-01-24 15:06:36 -08:00
43 changed files with 859 additions and 695 deletions

View File

@@ -4,7 +4,8 @@
 set -eu
-for command in document_archiver \
+for command in decrypt_documents \
+	document_archiver \
 	document_exporter \
 	document_importer \
 	mail_fetcher \

View File

@@ -0,0 +1,14 @@
+#!/command/with-contenv /usr/bin/bash
+# shellcheck shell=bash
+
+set -e
+
+cd "${PAPERLESS_SRC_DIR}"
+
+if [[ $(id -u) == 0 ]]; then
+	s6-setuidgid paperless python3 manage.py decrypt_documents "$@"
+elif [[ $(id -un) == "paperless" ]]; then
+	python3 manage.py decrypt_documents "$@"
+else
+	echo "Unknown user."
+fi

View File

@@ -580,6 +580,36 @@ document.
 documents, such as encrypted PDF documents. The archiver will skip over
 these documents each time it sees them.
 
+### Managing encryption {#encryption}
+
+!!! warning
+
+    Encryption was removed in [paperless-ng 0.9](changelog.md#paperless-ng-090)
+    because it did not really provide any additional security: the passphrase
+    was stored in a configuration file on the same system as the documents.
+    Furthermore, the entire text content of the documents is stored in plain
+    text in the database, even if your documents are encrypted. Filenames are
+    not encrypted either. Finally, the web server provides transparent access
+    to your encrypted documents.
+
+    Consider running paperless on an encrypted filesystem instead, which
+    will then at least provide security against physical hardware theft.
+
+#### Enabling encryption
+
+Enabling encryption is no longer supported.
+
+#### Disabling encryption
+
+Basic usage to disable encryption of your document store
+(note: if `PAPERLESS_PASSPHRASE` isn't set already, you need to specify
+it here):
+
+```
+decrypt_documents [--passphrase SECR3TP4SSPHRA$E]
+```
+
 ### Detecting duplicates {#fuzzy_duplicate}
 
 Paperless already catches and prevents upload of exactly matching documents,

View File

@@ -1146,9 +1146,8 @@ via the consumption directory, you can disable the consumer to save resources.
 #### [`PAPERLESS_CONSUMER_DELETE_DUPLICATES=<bool>`](#PAPERLESS_CONSUMER_DELETE_DUPLICATES) {#PAPERLESS_CONSUMER_DELETE_DUPLICATES}
 
-: As of version 3.0 Paperless-ngx allows duplicate documents to be consumed by default, _except_ when
-  this setting is enabled. When enabled, Paperless will check if a document with the same hash already
-  exists in the system and delete the duplicate file from the consumption directory without consuming it.
+: When the consumer detects a duplicate document, it will not touch
+  the original document. This default behavior can be changed here.
 
   Defaults to false.

View File

@@ -17,9 +17,3 @@ separating the directory ignore from the file ignore.
 | `CONSUMER_POLLING_RETRY_COUNT` | _Removed_ | Automatic with stability tracking |
 | `CONSUMER_IGNORE_PATTERNS` | [`CONSUMER_IGNORE_PATTERNS`](configuration.md#PAPERLESS_CONSUMER_IGNORE_PATTERNS) | **Now regex, not fnmatch**; user patterns are added to (not replacing) default ones |
 | _New_ | [`CONSUMER_IGNORE_DIRS`](configuration.md#PAPERLESS_CONSUMER_IGNORE_DIRS) | Additional directories to ignore; user entries are added to (not replacing) defaults |
-
-## Encryption Support
-
-Document and thumbnail encryption is no longer supported. This was previously deprecated in [paperless-ng 0.9.3](https://github.com/paperless-ngx/paperless-ngx/blob/dev/docs/changelog.md#paperless-ng-093)
-Users must decrypt their document using the `decrypt_documents` command before upgrading.

View File

@@ -114,15 +114,16 @@ testing = [
"daphne", "daphne",
"factory-boy~=3.3.1", "factory-boy~=3.3.1",
"imagehash", "imagehash",
"pytest~=8.4.1", "pytest~=9.0.0",
"pytest-cov~=7.0.0", "pytest-cov~=7.0.0",
"pytest-django~=4.11.1", "pytest-django~=4.11.1",
"pytest-env", "pytest-env~=1.2.0",
"pytest-httpx", "pytest-httpx",
"pytest-mock", "pytest-mock~=3.15.1",
"pytest-rerunfailures", #"pytest-randomly~=4.0.1",
"pytest-rerunfailures~=16.1",
"pytest-sugar", "pytest-sugar",
"pytest-xdist", "pytest-xdist~=3.8.0",
] ]
lint = [ lint = [
@@ -260,11 +261,15 @@ write-changes = true
ignore-words-list = "criterias,afterall,valeu,ureue,equest,ure,assertIn,Oktober" ignore-words-list = "criterias,afterall,valeu,ureue,equest,ure,assertIn,Oktober"
skip = "src-ui/src/locale/*,src-ui/pnpm-lock.yaml,src-ui/e2e/*,src/paperless_mail/tests/samples/*,src/documents/tests/samples/*,*.po,*.json" skip = "src-ui/src/locale/*,src-ui/pnpm-lock.yaml,src-ui/e2e/*,src/paperless_mail/tests/samples/*,src/documents/tests/samples/*,*.po,*.json"
[tool.pytest.ini_options] [tool.pytest]
minversion = "8.0" minversion = "9.0"
pythonpath = [ pythonpath = [ "src" ]
"src",
] strict_config = true
strict_markers = true
strict_parametrization_ids = true
strict_xfail = true
testpaths = [ testpaths = [
"src/documents/tests/", "src/documents/tests/",
"src/paperless/tests/", "src/paperless/tests/",
@@ -275,6 +280,7 @@ testpaths = [
"src/paperless_remote/tests/", "src/paperless_remote/tests/",
"src/paperless_ai/tests", "src/paperless_ai/tests",
] ]
addopts = [ addopts = [
"--pythonwarnings=all", "--pythonwarnings=all",
"--cov", "--cov",
@@ -282,11 +288,14 @@ addopts = [
"--cov-report=xml", "--cov-report=xml",
"--numprocesses=auto", "--numprocesses=auto",
"--maxprocesses=16", "--maxprocesses=16",
"--quiet", "--dist=loadscope",
"--durations=50", "--durations=50",
"--durations-min=0.5",
"--junitxml=junit.xml", "--junitxml=junit.xml",
"-o junit_family=legacy", "-o",
"junit_family=legacy",
] ]
norecursedirs = [ "src/locale/", ".venv/", "src-ui/" ] norecursedirs = [ "src/locale/", ".venv/", "src-ui/" ]
DJANGO_SETTINGS_MODULE = "paperless.settings" DJANGO_SETTINGS_MODULE = "paperless.settings"
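The pytest section above tightens both collection and distribution: `--dist=loadscope` groups tests by module and class per xdist worker, and the new `strict_*` options promote configuration and marker typos to hard errors. A minimal sketch of what strict marker checking means in practice, assuming the pytest 9 options behave like the long-standing `--strict-markers` flag (the `slow` marker here is hypothetical, not one registered in this repo):

```python
# Under strict marker checking, a marker not declared in the project's
# pytest config fails collection instead of emitting a warning.
import pytest


@pytest.mark.slow  # hypothetical marker; must be registered in the config
def test_addition():
    assert 1 + 1 == 2
```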

View File

@@ -97,12 +97,6 @@
 <br/><em>(<ng-container i18n>click for full output</ng-container>)</em>
 }
 </ng-template>
-@if (task.duplicate_documents?.length > 0) {
-  <div class="small text-warning-emphasis d-flex align-items-center gap-1">
-    <i-bs class="lh-1" width="1em" height="1em" name="exclamation-triangle"></i-bs>
-    <span i18n>Duplicate(s) detected</span>
-  </div>
-}
 </td>
 }
 <td class="d-lg-none">

View File

@@ -370,37 +370,6 @@
 </ng-template>
 </li>
 }
-@if (document?.duplicate_documents?.length) {
-  <li [ngbNavItem]="DocumentDetailNavIDs.Duplicates">
-    <a class="text-nowrap" ngbNavLink i18n>
-      Duplicates
-      <span class="badge text-bg-secondary ms-1">{{ document.duplicate_documents.length }}</span>
-    </a>
-    <ng-template ngbNavContent>
-      <div class="d-flex flex-column gap-2">
-        <div class="fst-italic" i18n>Duplicate documents detected:</div>
-        <div class="list-group">
-          @for (duplicate of document.duplicate_documents; track duplicate.id) {
-            <a
-              class="list-group-item list-group-item-action d-flex justify-content-between align-items-center"
-              [routerLink]="['/documents', duplicate.id, 'details']"
-              [class.disabled]="duplicate.deleted_at"
-            >
-              <span class="d-flex align-items-center gap-2">
-                <span>{{ duplicate.title || ('#' + duplicate.id) }}</span>
-                @if (duplicate.deleted_at) {
-                  <span class="badge text-bg-secondary" i18n>In trash</span>
-                }
-              </span>
-              <span class="text-secondary">#{{ duplicate.id }}</span>
-            </a>
-          }
-        </div>
-      </div>
-    </ng-template>
-  </li>
-}
 </ul>
 <div [ngbNavOutlet]="nav" class="mt-3"></div>

View File

@@ -301,16 +301,16 @@ describe('DocumentDetailComponent', () => {
       .spyOn(openDocumentsService, 'openDocument')
       .mockReturnValueOnce(of(true))
     fixture.detectChanges()
-    expect(component.activeNavID).toEqual(component.DocumentDetailNavIDs.Notes)
+    expect(component.activeNavID).toEqual(5) // DocumentDetailNavIDs.Notes
   })
 
   it('should change url on tab switch', () => {
     initNormally()
     const navigateSpy = jest.spyOn(router, 'navigate')
-    component.nav.select(component.DocumentDetailNavIDs.Notes)
+    component.nav.select(5)
     component.nav.navChange.next({
       activeId: 1,
-      nextId: component.DocumentDetailNavIDs.Notes,
+      nextId: 5,
       preventDefault: () => {},
     })
     fixture.detectChanges()
@@ -352,18 +352,6 @@ describe('DocumentDetailComponent', () => {
     expect(component.document).toEqual(doc)
   })
 
-  it('should fall back to details tab when duplicates tab is active but no duplicates', () => {
-    initNormally()
-    component.activeNavID = component.DocumentDetailNavIDs.Duplicates
-    const noDupDoc = { ...doc, duplicate_documents: [] }
-    component.updateComponent(noDupDoc)
-    expect(component.activeNavID).toEqual(
-      component.DocumentDetailNavIDs.Details
-    )
-  })
-
   it('should load already-opened document via param', () => {
     initNormally()
     jest.spyOn(documentService, 'get').mockReturnValueOnce(of(doc))
@@ -379,38 +367,6 @@ describe('DocumentDetailComponent', () => {
     expect(component.document).toEqual(doc)
   })
 
-  it('should update cached open document duplicates when reloading an open doc', () => {
-    const openDoc = { ...doc, duplicate_documents: [{ id: 1, title: 'Old' }] }
-    const updatedDuplicates = [
-      { id: 2, title: 'Newer duplicate', deleted_at: null },
-    ]
-    jest
-      .spyOn(activatedRoute, 'paramMap', 'get')
-      .mockReturnValue(of(convertToParamMap({ id: 3, section: 'details' })))
-    jest.spyOn(documentService, 'get').mockReturnValue(
-      of({
-        ...doc,
-        modified: new Date('2024-01-02T00:00:00Z'),
-        duplicate_documents: updatedDuplicates,
-      })
-    )
-    jest.spyOn(openDocumentsService, 'getOpenDocument').mockReturnValue(openDoc)
-    const saveSpy = jest.spyOn(openDocumentsService, 'save')
-    jest.spyOn(openDocumentsService, 'openDocument').mockReturnValue(of(true))
-    jest.spyOn(customFieldsService, 'listAll').mockReturnValue(
-      of({
-        count: customFields.length,
-        all: customFields.map((f) => f.id),
-        results: customFields,
-      })
-    )
-    fixture.detectChanges()
-    expect(openDoc.duplicate_documents).toEqual(updatedDuplicates)
-    expect(saveSpy).toHaveBeenCalled()
-  })
-
   it('should disable form if user cannot edit', () => {
     currentUserHasObjectPermissions = false
     initNormally()

View File

@@ -8,7 +8,7 @@ import {
   FormsModule,
   ReactiveFormsModule,
 } from '@angular/forms'
-import { ActivatedRoute, Router, RouterModule } from '@angular/router'
+import { ActivatedRoute, Router } from '@angular/router'
 import {
   NgbDateStruct,
   NgbDropdownModule,
@@ -124,7 +124,6 @@ enum DocumentDetailNavIDs {
   Notes = 5,
   Permissions = 6,
   History = 7,
-  Duplicates = 8,
 }
 
 enum ContentRenderType {
@@ -182,7 +181,6 @@ export enum ZoomSetting {
     NgxBootstrapIconsModule,
     PdfViewerModule,
     TextAreaComponent,
-    RouterModule,
   ],
 })
 export class DocumentDetailComponent
@@ -456,11 +454,6 @@ export class DocumentDetailComponent
     const openDocument = this.openDocumentService.getOpenDocument(
       this.documentId
     )
-    // update duplicate documents if present
-    if (openDocument && doc?.duplicate_documents) {
-      openDocument.duplicate_documents = doc.duplicate_documents
-      this.openDocumentService.save()
-    }
     const useDoc = openDocument || doc
     if (openDocument) {
       if (
@@ -711,13 +704,6 @@ export class DocumentDetailComponent
     }
     this.title = this.documentTitlePipe.transform(doc.title)
     this.prepareForm(doc)
-    if (
-      this.activeNavID === DocumentDetailNavIDs.Duplicates &&
-      !doc?.duplicate_documents?.length
-    ) {
-      this.activeNavID = DocumentDetailNavIDs.Details
-    }
   }
 
   get customFieldFormFields(): FormArray {

View File

@@ -159,8 +159,6 @@ export interface Document extends ObjectWithPermissions {
     page_count?: number
 
-    duplicate_documents?: Document[]
-
     // Frontend only
     __changedFields?: string[]
 }

View File

@@ -1,4 +1,3 @@
-import { Document } from './document'
 import { ObjectWithId } from './object-with-id'
 
 export enum PaperlessTaskType {
@@ -43,7 +42,5 @@ export interface PaperlessTask extends ObjectWithId {
   related_document?: number
 
-  duplicate_documents?: Document[]
-
   owner?: number
 }

View File

@@ -1,4 +1,5 @@
 # this is here so that django finds the checks.
+from documents.checks import changed_password_check
 from documents.checks import parser_check
 
-__all__ = ["parser_check"]
+__all__ = ["changed_password_check", "parser_check"]

View File

@@ -60,6 +60,7 @@ class DocumentAdmin(GuardedModelAdmin):
"added", "added",
"modified", "modified",
"mime_type", "mime_type",
"storage_type",
"filename", "filename",
"checksum", "checksum",
"archive_filename", "archive_filename",

View File

@@ -1,12 +1,60 @@
+import textwrap
+
 from django.conf import settings
 from django.core.checks import Error
 from django.core.checks import Warning
 from django.core.checks import register
+from django.core.exceptions import FieldError
+from django.db.utils import OperationalError
+from django.db.utils import ProgrammingError
 
 from documents.signals import document_consumer_declaration
 from documents.templating.utils import convert_format_str_to_template_format
 
 
+@register()
+def changed_password_check(app_configs, **kwargs):
+    from documents.models import Document
+    from paperless.db import GnuPG
+
+    try:
+        encrypted_doc = (
+            Document.objects.filter(
+                storage_type=Document.STORAGE_TYPE_GPG,
+            )
+            .only("pk", "storage_type")
+            .first()
+        )
+    except (OperationalError, ProgrammingError, FieldError):
+        return []  # No documents table yet
+
+    if encrypted_doc:
+        if not settings.PASSPHRASE:
+            return [
+                Error(
+                    "The database contains encrypted documents but no password is set.",
+                ),
+            ]
+
+        if not GnuPG.decrypted(encrypted_doc.source_file):
+            return [
+                Error(
+                    textwrap.dedent(
+                        """
+                        The current password doesn't match the password of the
+                        existing documents.
+
+                        If you intend to change your password, you must first export
+                        all of the old documents, start fresh with the new password
+                        and then re-import them."
+                        """,
+                    ),
+                ),
+            ]
+
+    return []
+
+
 @register()
 def parser_check(app_configs, **kwargs):
     parsers = []
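Django runs registered checks at startup and on `manage.py check`; the restored check can also be called directly, which is how the tests later in this diff exercise it. A minimal sketch (assumes configured Django settings):

```python
# Sketch: invoking the restored system check directly. An empty list means
# either no encrypted documents exist or the configured passphrase matches.
from documents.checks import changed_password_check

messages = changed_password_check(None)
assert messages == []
```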

View File

@@ -128,7 +128,7 @@ def thumbnail_last_modified(request, pk: int) -> datetime | None:
Cache should be (slightly?) faster than filesystem Cache should be (slightly?) faster than filesystem
""" """
try: try:
doc = Document.objects.only("pk").get(pk=pk) doc = Document.objects.only("storage_type").get(pk=pk)
if not doc.thumbnail_path.exists(): if not doc.thumbnail_path.exists():
return None return None
doc_key = get_thumbnail_modified_key(pk) doc_key = get_thumbnail_modified_key(pk)

View File

@@ -497,6 +497,7 @@ class ConsumerPlugin(
         create_source_path_directory(document.source_path)
         self._write(
+            document.storage_type,
             self.unmodified_original
             if self.unmodified_original is not None
             else self.working_copy,
@@ -504,6 +505,7 @@
         )
 
         self._write(
+            document.storage_type,
             thumbnail,
             document.thumbnail_path,
         )
@@ -515,6 +517,7 @@
             )
             create_source_path_directory(document.archive_path)
             self._write(
+                document.storage_type,
                 archive_path,
                 document.archive_path,
             )
@@ -634,6 +637,8 @@
             )
             self.log.debug(f"Creation date from st_mtime: {create_date}")
 
+        storage_type = Document.STORAGE_TYPE_UNENCRYPTED
+
         if self.metadata.filename:
             title = Path(self.metadata.filename).stem
         else:
@@ -660,6 +665,7 @@
             checksum=hashlib.md5(file_for_checksum.read_bytes()).hexdigest(),
             created=create_date,
             modified=create_date,
+            storage_type=storage_type,
             page_count=page_count,
             original_filename=self.filename,
         )
@@ -730,7 +736,7 @@
             }
             CustomFieldInstance.objects.create(**args)  # adds to document
 
-    def _write(self, source, target):
+    def _write(self, storage_type, source, target):
         with (
             Path(source).open("rb") as read_file,
             Path(target).open("wb") as write_file,
@@ -779,44 +785,18 @@
             Q(checksum=checksum) | Q(archive_checksum=checksum),
         )
         if existing_doc.exists():
-            existing_doc = existing_doc.order_by("-created")
-            duplicates_in_trash = existing_doc.filter(deleted_at__isnull=False)
-            log_msg = (
-                f"Consuming duplicate {self.filename}: "
-                f"{existing_doc.count()} existing document(s) share the same content."
-            )
-            if duplicates_in_trash.exists():
-                log_msg += " Note: at least one existing document is in the trash."
-            self.log.warning(log_msg)
+            msg = ConsumerStatusShortMessage.DOCUMENT_ALREADY_EXISTS
+            log_msg = f"Not consuming {self.filename}: It is a duplicate of {existing_doc.get().title} (#{existing_doc.get().pk})."
+            if existing_doc.first().deleted_at is not None:
+                msg = ConsumerStatusShortMessage.DOCUMENT_ALREADY_EXISTS_IN_TRASH
+                log_msg += " Note: existing document is in the trash."
             if settings.CONSUMER_DELETE_DUPLICATES:
-                duplicate = existing_doc.first()
-                duplicate_label = (
-                    duplicate.title
-                    or duplicate.original_filename
-                    or (Path(duplicate.filename).name if duplicate.filename else None)
-                    or str(duplicate.pk)
-                )
                 Path(self.input_doc.original_file).unlink()
-                failure_msg = (
-                    f"Not consuming {self.filename}: "
-                    f"It is a duplicate of {duplicate_label} (#{duplicate.pk})"
-                )
-                status_msg = ConsumerStatusShortMessage.DOCUMENT_ALREADY_EXISTS
-                if duplicates_in_trash.exists():
-                    status_msg = (
-                        ConsumerStatusShortMessage.DOCUMENT_ALREADY_EXISTS_IN_TRASH
-                    )
-                    failure_msg += " Note: existing document is in the trash."
             self._fail(
-                status_msg,
-                failure_msg,
+                msg,
+                log_msg,
             )
 
     def pre_check_directories(self):
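The preflight duplicate test reduces to hashing the incoming file and comparing against the stored `checksum`/`archive_checksum` values. A self-contained sketch of that comparison (illustrative names, not the plugin's actual attributes):

```python
# Illustrative: the core of the duplicate check is an MD5 comparison
# between the incoming file's digest and already-known checksums.
import hashlib
from pathlib import Path


def is_duplicate(incoming: Path, known_checksums: set[str]) -> bool:
    digest = hashlib.md5(incoming.read_bytes()).hexdigest()
    return digest in known_checksums
```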

View File

@@ -126,6 +126,7 @@ def generate_filename(
     doc: Document,
     *,
     counter=0,
+    append_gpg=True,
     archive_filename=False,
 ) -> Path:
     base_path: Path | None = None
@@ -169,4 +170,8 @@
     final_filename = f"{doc.pk:07}{counter_str}{filetype_str}"
 
     full_path = Path(final_filename)
 
+    # Add GPG extension if needed
+    if append_gpg and doc.storage_type == doc.STORAGE_TYPE_GPG:
+        full_path = full_path.with_suffix(full_path.suffix + ".gpg")
+
     return full_path
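The restored suffix handling in isolation: `with_suffix` would normally replace the extension, so the existing suffix is re-included to append `.gpg` instead of overwriting `.pdf`.

```python
# Runnable on its own; mirrors the hunk above.
from pathlib import Path

full_path = Path("0000001.pdf")
full_path = full_path.with_suffix(full_path.suffix + ".gpg")
assert full_path == Path("0000001.pdf.gpg")
```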

View File

@@ -0,0 +1,93 @@
+from pathlib import Path
+
+from django.conf import settings
+from django.core.management.base import BaseCommand
+from django.core.management.base import CommandError
+
+from documents.models import Document
+from paperless.db import GnuPG
+
+
+class Command(BaseCommand):
+    help = (
+        "This is how you migrate your stored documents from an encrypted "
+        "state to an unencrypted one (or vice-versa)"
+    )
+
+    def add_arguments(self, parser) -> None:
+        parser.add_argument(
+            "--passphrase",
+            help=(
+                "If PAPERLESS_PASSPHRASE isn't set already, you need to specify it here"
+            ),
+        )
+
+    def handle(self, *args, **options) -> None:
+        try:
+            self.stdout.write(
+                self.style.WARNING(
+                    "\n\n"
+                    "WARNING: This script is going to work directly on your "
+                    "document originals, so\n"
+                    "WARNING: you probably shouldn't run "
+                    "this unless you've got a recent backup\n"
+                    "WARNING: handy. It "
+                    "*should* work without a hitch, but be safe and backup your\n"
+                    "WARNING: stuff first.\n\n"
+                    "Hit Ctrl+C to exit now, or Enter to "
+                    "continue.\n\n",
+                ),
+            )
+            _ = input()
+        except KeyboardInterrupt:
+            return
+
+        passphrase = options["passphrase"] or settings.PASSPHRASE
+        if not passphrase:
+            raise CommandError(
+                "Passphrase not defined. Please set it with --passphrase or "
+                "by declaring it in your environment or your config.",
+            )
+
+        self.__gpg_to_unencrypted(passphrase)
+
+    def __gpg_to_unencrypted(self, passphrase: str) -> None:
+        encrypted_files = Document.objects.filter(
+            storage_type=Document.STORAGE_TYPE_GPG,
+        )
+
+        for document in encrypted_files:
+            self.stdout.write(f"Decrypting {document}")
+
+            old_paths = [document.source_path, document.thumbnail_path]
+
+            with document.source_file as file_handle:
+                raw_document = GnuPG.decrypted(file_handle, passphrase)
+            with document.thumbnail_file as file_handle:
+                raw_thumb = GnuPG.decrypted(file_handle, passphrase)
+
+            document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
+
+            ext: str = Path(document.filename).suffix
+
+            if not ext == ".gpg":
+                raise CommandError(
+                    f"Abort: encrypted file {document.source_path} does not "
+                    f"end with .gpg",
+                )
+
+            document.filename = Path(document.filename).stem
+
+            with document.source_path.open("wb") as f:
+                f.write(raw_document)
+
+            with document.thumbnail_path.open("wb") as f:
+                f.write(raw_thumb)
+
+            Document.objects.filter(id=document.id).update(
+                storage_type=document.storage_type,
+                filename=document.filename,
+            )
+
+            for path in old_paths:
+                path.unlink()
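Besides the CLI entry point, the command can be driven through Django's `call_command`, which is how the test later in this diff runs it. A hedged usage sketch (the passphrase value is a placeholder):

```python
# Illustrative usage; call_command is Django's standard API for invoking
# management commands programmatically.
from django.core.management import call_command

call_command("decrypt_documents")  # falls back to settings.PASSPHRASE
call_command("decrypt_documents", passphrase="SECR3TP4SSPHRA$E")
```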

View File

@@ -3,6 +3,7 @@ import json
 import os
 import shutil
 import tempfile
+import time
 from pathlib import Path
 from typing import TYPE_CHECKING
@@ -55,6 +56,7 @@ from documents.settings import EXPORTER_FILE_NAME
 from documents.settings import EXPORTER_THUMBNAIL_NAME
 from documents.utils import copy_file_with_basic_stats
 from paperless import version
+from paperless.db import GnuPG
 from paperless.models import ApplicationConfiguration
 from paperless_mail.models import MailAccount
 from paperless_mail.models import MailRule
@@ -314,17 +316,20 @@
             total=len(document_manifest),
             disable=self.no_progress_bar,
         ):
+            # 3.1. store files unencrypted
+            document_dict["fields"]["storage_type"] = Document.STORAGE_TYPE_UNENCRYPTED
+
             document = document_map[document_dict["pk"]]
 
-            # 3.1. generate a unique filename
+            # 3.2. generate a unique filename
             base_name = self.generate_base_name(document)
 
-            # 3.2. write filenames into manifest
+            # 3.3. write filenames into manifest
             original_target, thumbnail_target, archive_target = (
                 self.generate_document_targets(document, base_name, document_dict)
             )
 
-            # 3.3. write files to target folder
+            # 3.4. write files to target folder
             if not self.data_only:
                 self.copy_document_files(
                     document,
@@ -418,6 +423,7 @@
             base_name = generate_filename(
                 document,
                 counter=filename_counter,
+                append_gpg=False,
             )
         else:
             base_name = document.get_public_filename(counter=filename_counter)
@@ -476,6 +482,28 @@
         If the document is encrypted, the files are decrypted before copying them to the target location.
         """
+        if document.storage_type == Document.STORAGE_TYPE_GPG:
+            t = int(time.mktime(document.created.timetuple()))
+            original_target.parent.mkdir(parents=True, exist_ok=True)
+            with document.source_file as out_file:
+                original_target.write_bytes(GnuPG.decrypted(out_file))
+                os.utime(original_target, times=(t, t))
+
+            if thumbnail_target:
+                thumbnail_target.parent.mkdir(parents=True, exist_ok=True)
+                with document.thumbnail_file as out_file:
+                    thumbnail_target.write_bytes(GnuPG.decrypted(out_file))
+                    os.utime(thumbnail_target, times=(t, t))
+
+            if archive_target:
+                archive_target.parent.mkdir(parents=True, exist_ok=True)
+                if TYPE_CHECKING:
+                    assert isinstance(document.archive_path, Path)
+                with document.archive_path as out_file:
+                    archive_target.write_bytes(GnuPG.decrypted(out_file))
+                    os.utime(archive_target, times=(t, t))
+        else:
-        self.check_and_copy(
-            document.source_path,
-            document.checksum,
+            self.check_and_copy(
+                document.source_path,
+                document.checksum,
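The decrypt-on-export branch writes plaintext bytes and then back-dates the copy to the document's creation time. The write-then-backdate pattern on its own (illustrative helper, not the exporter's API):

```python
# Sketch: os.utime sets both atime and mtime, so the exported file keeps
# the document's original creation timestamp rather than "now".
import os
from pathlib import Path


def write_backdated(target: Path, data: bytes, created_ts: int) -> None:
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_bytes(data)
    os.utime(target, times=(created_ts, created_ts))
```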

View File

@@ -383,6 +383,8 @@ class Command(CryptMixin, BaseCommand):
         else:
             archive_path = None
 
+        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
+
         with FileLock(settings.MEDIA_LOCK):
             if Path(document.source_path).is_file():
                 raise FileExistsError(document.source_path)

View File

@@ -1,16 +0,0 @@
-# Generated by Django 5.2.9 on 2026-01-24 23:05
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
-    dependencies = [
-        ("documents", "0003_workflowaction_order"),
-    ]
-
-    operations = [
-        migrations.RemoveField(
-            model_name="document",
-            name="storage_type",
-        ),
-    ]

View File

@@ -1,23 +0,0 @@
-# Generated by Django 5.2.7 on 2026-01-14 17:45
-
-from django.db import migrations
-from django.db import models
-
-
-class Migration(migrations.Migration):
-    dependencies = [
-        ("documents", "0004_remove_document_storage_type"),
-    ]
-
-    operations = [
-        migrations.AlterField(
-            model_name="document",
-            name="checksum",
-            field=models.CharField(
-                editable=False,
-                max_length=32,
-                verbose_name="checksum",
-                help_text="The checksum of the original document.",
-            ),
-        ),
-    ]

View File

@@ -154,6 +154,13 @@ class StoragePath(MatchingModel):
 class Document(SoftDeleteModel, ModelWithOwner):
+    STORAGE_TYPE_UNENCRYPTED = "unencrypted"
+    STORAGE_TYPE_GPG = "gpg"
+    STORAGE_TYPES = (
+        (STORAGE_TYPE_UNENCRYPTED, _("Unencrypted")),
+        (STORAGE_TYPE_GPG, _("Encrypted with GNU Privacy Guard")),
+    )
+
     correspondent = models.ForeignKey(
         Correspondent,
         blank=True,
@@ -205,6 +212,7 @@ class Document(SoftDeleteModel, ModelWithOwner):
         _("checksum"),
         max_length=32,
         editable=False,
+        unique=True,
         help_text=_("The checksum of the original document."),
     )
@@ -242,6 +250,14 @@ class Document(SoftDeleteModel, ModelWithOwner):
         db_index=True,
     )
 
+    storage_type = models.CharField(
+        _("storage type"),
+        max_length=11,
+        choices=STORAGE_TYPES,
+        default=STORAGE_TYPE_UNENCRYPTED,
+        editable=False,
+    )
+
     added = models.DateTimeField(
         _("added"),
         default=timezone.now,
@@ -337,7 +353,12 @@ class Document(SoftDeleteModel, ModelWithOwner):
     @property
     def source_path(self) -> Path:
-        fname = str(self.filename) if self.filename else f"{self.pk:07}{self.file_type}"
+        if self.filename:
+            fname = str(self.filename)
+        else:
+            fname = f"{self.pk:07}{self.file_type}"
+
+        if self.storage_type == self.STORAGE_TYPE_GPG:
+            fname += ".gpg"  # pragma: no cover
+
         return (settings.ORIGINALS_DIR / Path(fname)).resolve()
@@ -386,6 +407,8 @@ class Document(SoftDeleteModel, ModelWithOwner):
     @property
     def thumbnail_path(self) -> Path:
         webp_file_name = f"{self.pk:07}.webp"
+        if self.storage_type == self.STORAGE_TYPE_GPG:
+            webp_file_name += ".gpg"
+
         webp_file_path = settings.THUMBNAIL_DIR / Path(webp_file_name)
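The restored thumbnail naming rule as a pure-path sketch (no Django setup required; the function name is illustrative):

```python
from pathlib import Path


def thumbnail_name(pk: int, *, encrypted: bool) -> Path:
    # 7-digit zero-padded id, .webp, plus .gpg when stored encrypted
    name = f"{pk:07}.webp"
    if encrypted:
        name += ".gpg"
    return Path(name)


assert thumbnail_name(4, encrypted=True) == Path("0000004.webp.gpg")
```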

View File

@@ -148,29 +148,13 @@ def get_document_count_filter_for_user(user):
     )
 
-def get_objects_for_user_owner_aware(
-    user,
-    perms,
-    Model,
-    *,
-    include_deleted=False,
-) -> QuerySet:
-    """
-    Returns objects the user owns, are unowned, or has explicit perms.
-
-    When include_deleted is True, soft-deleted items are also included.
-    """
-    manager = (
-        Model.global_objects
-        if include_deleted and hasattr(Model, "global_objects")
-        else Model.objects
-    )
-    objects_owned = manager.filter(owner=user)
-    objects_unowned = manager.filter(owner__isnull=True)
+def get_objects_for_user_owner_aware(user, perms, Model) -> QuerySet:
+    objects_owned = Model.objects.filter(owner=user)
+    objects_unowned = Model.objects.filter(owner__isnull=True)
     objects_with_perms = get_objects_for_user(
         user=user,
         perms=perms,
-        klass=manager.all(),
+        klass=Model,
         accept_global_perms=False,
     )
     return objects_owned | objects_unowned | objects_with_perms

View File

@@ -23,7 +23,6 @@ from django.core.validators import MinValueValidator
 from django.core.validators import RegexValidator
 from django.core.validators import integer_validator
 from django.db.models import Count
-from django.db.models import Q
 from django.db.models.functions import Lower
 from django.utils.crypto import get_random_string
 from django.utils.dateparse import parse_datetime
@@ -73,7 +72,6 @@ from documents.models import WorkflowTrigger
 from documents.parsers import is_mime_type_supported
 from documents.permissions import get_document_count_filter_for_user
 from documents.permissions import get_groups_with_only_permission
-from documents.permissions import get_objects_for_user_owner_aware
 from documents.permissions import set_permissions_for_object
 from documents.regex import validate_regex_pattern
 from documents.templating.filepath import validate_filepath_template_and_render
@@ -1016,29 +1014,6 @@ class NotesSerializer(serializers.ModelSerializer):
         return ret
 
-def _get_viewable_duplicates(document: Document, user: User | None):
-    checksums = {document.checksum}
-    if document.archive_checksum:
-        checksums.add(document.archive_checksum)
-    duplicates = Document.global_objects.filter(
-        Q(checksum__in=checksums) | Q(archive_checksum__in=checksums),
-    ).exclude(pk=document.pk)
-    duplicates = duplicates.order_by("-created")
-    allowed = get_objects_for_user_owner_aware(
-        user,
-        "documents.view_document",
-        Document,
-        include_deleted=True,
-    )
-    return duplicates.filter(id__in=allowed.values_list("id", flat=True))
-
-
-class DuplicateDocumentSummarySerializer(serializers.Serializer):
-    id = serializers.IntegerField()
-    title = serializers.CharField()
-    deleted_at = serializers.DateTimeField(allow_null=True)
-
-
 @extend_schema_serializer(
     deprecate_fields=["created_date"],
 )
@@ -1056,7 +1031,6 @@ class DocumentSerializer(
     archived_file_name = SerializerMethodField()
     created_date = serializers.DateField(required=False)
     page_count = SerializerMethodField()
-    duplicate_documents = SerializerMethodField()
 
     notes = NotesSerializer(many=True, required=False, read_only=True)
@@ -1082,16 +1056,6 @@ class DocumentSerializer(
     def get_page_count(self, obj) -> int | None:
         return obj.page_count
 
-    @extend_schema_field(DuplicateDocumentSummarySerializer(many=True))
-    def get_duplicate_documents(self, obj):
-        view = self.context.get("view")
-        if view and getattr(view, "action", None) != "retrieve":
-            return []
-        request = self.context.get("request")
-        user = request.user if request else None
-        duplicates = _get_viewable_duplicates(obj, user)
-        return list(duplicates.values("id", "title", "deleted_at"))
-
     def get_original_file_name(self, obj) -> str | None:
         return obj.original_filename
@@ -1269,7 +1233,6 @@ class DocumentSerializer(
             "archive_serial_number",
             "original_file_name",
             "archived_file_name",
-            "duplicate_documents",
             "owner",
             "permissions",
             "user_can_change",
@@ -2131,12 +2094,10 @@ class TasksViewSerializer(OwnedObjectSerializer):
             "result",
             "acknowledged",
             "related_document",
-            "duplicate_documents",
             "owner",
         )
 
     related_document = serializers.SerializerMethodField()
-    duplicate_documents = serializers.SerializerMethodField()
     created_doc_re = re.compile(r"New document id (\d+) created")
     duplicate_doc_re = re.compile(r"It is a duplicate of .* \(#(\d+)\)")
@@ -2161,17 +2122,6 @@ class TasksViewSerializer(OwnedObjectSerializer):
         return result
 
-    @extend_schema_field(DuplicateDocumentSummarySerializer(many=True))
-    def get_duplicate_documents(self, obj):
-        related_document = self.get_related_document(obj)
-        request = self.context.get("request")
-        user = request.user if request else None
-        document = Document.global_objects.filter(pk=related_document).first()
-        if not related_document or not user or not document:
-            return []
-        duplicates = _get_viewable_duplicates(document, user)
-        return list(duplicates.values("id", "title", "deleted_at"))
-
 class RunTaskViewSerializer(serializers.Serializer):
     task_name = serializers.ChoiceField(
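With the summary serializer gone, the task API again infers the related document purely from the failure message. The regex kept above, exercised against the message format the consumer now emits (sample text taken from the tests below):

```python
import re

duplicate_doc_re = re.compile(r"It is a duplicate of .* \(#(\d+)\)")
match = duplicate_doc_re.search(
    "Not consuming task_one.pdf: It is a duplicate of task_one_existing.pdf (#1234).",
)
assert match is not None and match.group(1) == "1234"
```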

View File

@@ -108,6 +108,7 @@ def create_dummy_document():
         page_count=5,
         created=timezone.now(),
         modified=timezone.now(),
+        storage_type=Document.STORAGE_TYPE_UNENCRYPTED,
         added=timezone.now(),
         filename="/dummy/filename.pdf",
         archive_filename="/dummy/archive_filename.pdf",

Binary file not shown (image, 2.6 KiB before this change).

View File

@@ -7,7 +7,6 @@ from django.contrib.auth.models import User
 from rest_framework import status
 from rest_framework.test import APITestCase
 
-from documents.models import Document
 from documents.models import PaperlessTask
 from documents.tests.utils import DirectoriesMixin
 from documents.views import TasksViewSet
@@ -259,7 +258,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
             task_id=str(uuid.uuid4()),
             task_file_name="task_one.pdf",
             status=celery.states.FAILURE,
-            result="test.pdf: Unexpected error during ingestion.",
+            result="test.pdf: Not consuming test.pdf: It is a duplicate.",
         )
 
         response = self.client.get(self.ENDPOINT)
@@ -271,7 +270,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
         self.assertEqual(
             returned_data["result"],
-            "test.pdf: Unexpected error during ingestion.",
+            "test.pdf: Not consuming test.pdf: It is a duplicate.",
         )
 
     def test_task_name_webui(self):
@@ -326,34 +325,20 @@ class TestTasks(DirectoriesMixin, APITestCase):
         self.assertEqual(returned_data["task_file_name"], "anothertest.pdf")
 
-    def test_task_result_duplicate_warning_includes_count(self):
+    def test_task_result_failed_duplicate_includes_related_doc(self):
         """
         GIVEN:
-            - A celery task succeeds, but a duplicate exists
+            - A celery task failed with a duplicate error
         WHEN:
             - API call is made to get tasks
         THEN:
-            - The returned data includes duplicate warning metadata
+            - The returned data includes a related document link
         """
-        checksum = "duplicate-checksum"
-        Document.objects.create(
-            title="Existing",
-            content="",
-            mime_type="application/pdf",
-            checksum=checksum,
-        )
-        created_doc = Document.objects.create(
-            title="Created",
-            content="",
-            mime_type="application/pdf",
-            checksum=checksum,
-            archive_checksum="another-checksum",
-        )
         PaperlessTask.objects.create(
             task_id=str(uuid.uuid4()),
             task_file_name="task_one.pdf",
-            status=celery.states.SUCCESS,
-            result=f"Success. New document id {created_doc.pk} created",
+            status=celery.states.FAILURE,
+            result="Not consuming task_one.pdf: It is a duplicate of task_one_existing.pdf (#1234).",
        )
 
         response = self.client.get(self.ENDPOINT)
@@ -363,7 +348,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
         returned_data = response.data[0]
 
-        self.assertEqual(returned_data["related_document"], str(created_doc.pk))
+        self.assertEqual(returned_data["related_document"], "1234")
 
     def test_run_train_classifier_task(self):
         """

View File

@@ -1,3 +1,4 @@
+import textwrap
 from unittest import mock
 
 from django.core.checks import Error
@@ -5,11 +6,60 @@ from django.core.checks import Warning
 from django.test import TestCase
 from django.test import override_settings
 
+from documents.checks import changed_password_check
 from documents.checks import filename_format_check
 from documents.checks import parser_check
+from documents.models import Document
+from documents.tests.factories import DocumentFactory
 
 
 class TestDocumentChecks(TestCase):
+    def test_changed_password_check_empty_db(self):
+        self.assertListEqual(changed_password_check(None), [])
+
+    def test_changed_password_check_no_encryption(self):
+        DocumentFactory.create(storage_type=Document.STORAGE_TYPE_UNENCRYPTED)
+        self.assertListEqual(changed_password_check(None), [])
+
+    def test_encrypted_missing_passphrase(self):
+        DocumentFactory.create(storage_type=Document.STORAGE_TYPE_GPG)
+        msgs = changed_password_check(None)
+        self.assertEqual(len(msgs), 1)
+        msg_text = msgs[0].msg
+        self.assertEqual(
+            msg_text,
+            "The database contains encrypted documents but no password is set.",
+        )
+
+    @override_settings(
+        PASSPHRASE="test",
+    )
+    @mock.patch("paperless.db.GnuPG.decrypted")
+    @mock.patch("documents.models.Document.source_file")
+    def test_encrypted_decrypt_fails(self, mock_decrypted, mock_source_file):
+        mock_decrypted.return_value = None
+        mock_source_file.return_value = b""
+
+        DocumentFactory.create(storage_type=Document.STORAGE_TYPE_GPG)
+
+        msgs = changed_password_check(None)
+
+        self.assertEqual(len(msgs), 1)
+        msg_text = msgs[0].msg
+        self.assertEqual(
+            msg_text,
+            textwrap.dedent(
+                """
+                The current password doesn't match the password of the
+                existing documents.
+
+                If you intend to change your password, you must first export
+                all of the old documents, start fresh with the new password
+                and then re-import them."
+                """,
+            ),
+        )
+
     def test_parser_check(self):
         self.assertEqual(parser_check(None), [])

View File

@@ -485,21 +485,21 @@ class TestConsumer(
         with self.get_consumer(self.get_test_file()) as consumer:
             consumer.run()
 
+        with self.assertRaisesMessage(ConsumerError, "It is a duplicate"):
             with self.get_consumer(self.get_test_file()) as consumer:
                 consumer.run()
 
-        self.assertEqual(Document.objects.count(), 2)
-        self._assert_first_last_send_progress()
+        self._assert_first_last_send_progress(last_status="FAILED")
 
     def testDuplicates2(self):
         with self.get_consumer(self.get_test_file()) as consumer:
             consumer.run()
 
+        with self.assertRaisesMessage(ConsumerError, "It is a duplicate"):
             with self.get_consumer(self.get_test_archive_file()) as consumer:
                 consumer.run()
 
-        self.assertEqual(Document.objects.count(), 2)
-        self._assert_first_last_send_progress()
+        self._assert_first_last_send_progress(last_status="FAILED")
 
     def testDuplicates3(self):
         with self.get_consumer(self.get_test_archive_file()) as consumer:
@@ -513,11 +513,10 @@ class TestConsumer(
         Document.objects.all().delete()
 
+        with self.assertRaisesMessage(ConsumerError, "document is in the trash"):
             with self.get_consumer(self.get_test_file()) as consumer:
                 consumer.run()
 
-        self.assertEqual(Document.objects.count(), 1)
-
     def testAsnExists(self):
         with self.get_consumer(
             self.get_test_file(),
@@ -719,45 +718,12 @@ class TestConsumer(
         dst = self.get_test_file()
         self.assertIsFile(dst)
 
-        expected_message = (
-            f"{dst.name}: Not consuming {dst.name}: "
-            f"It is a duplicate of {document.title} (#{document.pk})"
-        )
-        with self.assertRaisesMessage(ConsumerError, expected_message):
+        with self.assertRaises(ConsumerError):
             with self.get_consumer(dst) as consumer:
                 consumer.run()
 
         self.assertIsNotFile(dst)
-        self.assertEqual(Document.objects.count(), 1)
-        self._assert_first_last_send_progress(last_status=ProgressStatusOptions.FAILED)
-
-    @override_settings(CONSUMER_DELETE_DUPLICATES=True)
-    def test_delete_duplicate_in_trash(self):
-        dst = self.get_test_file()
-        with self.get_consumer(dst) as consumer:
-            consumer.run()
-
-        # Move the existing document to trash
-        document = Document.objects.first()
-        document.delete()
-
-        dst = self.get_test_file()
-        self.assertIsFile(dst)
-
-        expected_message = (
-            f"{dst.name}: Not consuming {dst.name}: "
-            f"It is a duplicate of {document.title} (#{document.pk})"
-            f" Note: existing document is in the trash."
-        )
-        with self.assertRaisesMessage(ConsumerError, expected_message):
-            with self.get_consumer(dst) as consumer:
-                consumer.run()
-
-        self.assertIsNotFile(dst)
-        self.assertEqual(Document.global_objects.count(), 1)
-        self.assertEqual(Document.objects.count(), 0)
+        self._assert_first_last_send_progress(last_status="FAILED")
 
     @override_settings(CONSUMER_DELETE_DUPLICATES=False)
     def test_no_delete_duplicate(self):
@@ -777,12 +743,15 @@ class TestConsumer(
         dst = self.get_test_file()
         self.assertIsFile(dst)
 
+        with self.assertRaisesRegex(
+            ConsumerError,
+            r"sample\.pdf: Not consuming sample\.pdf: It is a duplicate of sample \(#\d+\)",
+        ):
             with self.get_consumer(dst) as consumer:
                 consumer.run()
 
-        self.assertIsNotFile(dst)
-        self.assertEqual(Document.objects.count(), 2)
-        self._assert_first_last_send_progress()
+        self.assertIsFile(dst)
+        self._assert_first_last_send_progress(last_status="FAILED")
 
     @override_settings(FILENAME_FORMAT="{title}")
     @mock.patch("documents.parsers.document_consumer_declaration.send")

View File

@@ -224,17 +224,18 @@ class TestDoubleSided(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         THEN:
             - The collated file gets put into foo/bar
         """
+        # TODO: parameterize this instead
         for path in [
             Path("foo") / "bar" / "double-sided",
             Path("double-sided") / "foo" / "bar",
         ]:
-            with self.subTest(path=path):
+            with self.subTest(path=str(path)):
                 # Ensure we get fresh directories for each run
                 self.tearDown()
                 self.setUp()
                 self.create_staging_file()
-                self.consume_file("double-sided-odd.pdf", path / "foo.pdf")
+                self.consume_file("double-sided-odd.pdf", Path(path) / "foo.pdf")
                 self.assertIsFile(
                     self.dirs.consumption_dir / "foo" / "bar" / "foo-collated.pdf",
                 )
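Newer pytest reports each `unittest` subtest individually, so the hunk passes `str(path)` to keep the generated test ids readable. A self-contained illustration (names are hypothetical, not from this repo):

```python
# Each subTest shows up as its own result under the new pytest subTest
# integration; str() keeps the parameter readable in the test id.
import unittest
from pathlib import Path


class TestPaths(unittest.TestCase):
    def test_variants(self):
        for path in [Path("foo") / "bar", Path("bar") / "foo"]:
            with self.subTest(path=str(path)):
                self.assertTrue(str(path))


if __name__ == "__main__":
    unittest.main()
```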

View File

@@ -34,14 +34,22 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     def test_generate_source_filename(self):
         document = Document()
         document.mime_type = "application/pdf"
+        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
 
         self.assertEqual(generate_filename(document), Path(f"{document.pk:07d}.pdf"))
 
+        document.storage_type = Document.STORAGE_TYPE_GPG
+        self.assertEqual(
+            generate_filename(document),
+            Path(f"{document.pk:07d}.pdf.gpg"),
+        )
+
     @override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
     def test_file_renaming(self):
         document = Document()
         document.mime_type = "application/pdf"
+        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
 
         # Test default source_path
@@ -55,6 +63,11 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         # Ensure that filename is properly generated
         self.assertEqual(document.filename, Path("none/none.pdf"))
 
+        # Enable encryption and check again
+        document.storage_type = Document.STORAGE_TYPE_GPG
+        document.filename = generate_filename(document)
+        self.assertEqual(document.filename, Path("none/none.pdf.gpg"))
+
         document.save()
 
         # test that creating dirs for the source_path creates the correct directory
@@ -74,14 +87,14 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             settings.ORIGINALS_DIR / "none",
         )
         self.assertIsFile(
-            settings.ORIGINALS_DIR / "test" / "test.pdf",
+            settings.ORIGINALS_DIR / "test" / "test.pdf.gpg",
         )
 
     @override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
     def test_file_renaming_missing_permissions(self):
         document = Document()
         document.mime_type = "application/pdf"
+        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
 
         # Ensure that filename is properly generated
@@ -115,13 +128,14 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     def test_file_renaming_database_error(self):
         Document.objects.create(
             mime_type="application/pdf",
+            storage_type=Document.STORAGE_TYPE_UNENCRYPTED,
             checksum="AAAAA",
         )
         document = Document()
         document.mime_type = "application/pdf"
         document.checksum = "BBBBB"
+        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
 
         # Ensure that filename is properly generated
@@ -156,7 +170,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     def test_document_delete(self):
         document = Document()
         document.mime_type = "application/pdf"
+        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
 
         # Ensure that filename is properly generated
@@ -182,7 +196,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     def test_document_delete_trash_dir(self):
         document = Document()
         document.mime_type = "application/pdf"
+        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
 
         # Ensure that filename is properly generated
@@ -207,7 +221,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         # Create an identical document and ensure it is trashed under a new name
         document = Document()
         document.mime_type = "application/pdf"
+        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
         document.filename = generate_filename(document)
         document.save()
@@ -221,7 +235,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     def test_document_delete_nofile(self):
         document = Document()
         document.mime_type = "application/pdf"
+        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
 
         document.delete()
@@ -231,7 +245,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     def test_directory_not_empty(self):
         document = Document()
         document.mime_type = "application/pdf"
+        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
 
         # Ensure that filename is properly generated
@@ -348,7 +362,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     def test_nested_directory_cleanup(self):
         document = Document()
         document.mime_type = "application/pdf"
+        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
 
         # Ensure that filename is properly generated
@@ -376,6 +390,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         document = Document()
         document.pk = 1
         document.mime_type = "application/pdf"
+        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
 
         self.assertEqual(generate_filename(document), Path("0000001.pdf"))
@@ -388,6 +403,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         document = Document()
         document.pk = 1
         document.mime_type = "application/pdf"
+        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
 
         self.assertEqual(generate_filename(document), Path("0000001.pdf"))
@@ -413,6 +429,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         document = Document()
         document.pk = 1
         document.mime_type = "application/pdf"
+        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
 
         self.assertEqual(generate_filename(document), Path("0000001.pdf"))
@@ -421,6 +438,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         document = Document()
         document.pk = 1
         document.mime_type = "application/pdf"
+        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
 
         self.assertEqual(generate_filename(document), Path("0000001.pdf"))
@@ -1240,7 +1258,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
             title="doc1",
             mime_type="application/pdf",
         )
+        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
 
         # Ensure that filename is properly generated
@@ -1714,6 +1732,7 @@ class TestPathDateLocalization:
         document = DocumentFactory.create(
             title="My Document",
             mime_type="application/pdf",
+            storage_type=Document.STORAGE_TYPE_UNENCRYPTED,
             created=self.TEST_DATE,  # 2023-10-26 (which is a Thursday)
         )
         with override_settings(FILENAME_FORMAT=filename_format):


@@ -1,5 +1,7 @@
 import filecmp
+import hashlib
 import shutil
+import tempfile
 from io import StringIO
 from pathlib import Path
 from unittest import mock
@@ -94,6 +96,66 @@ class TestArchiver(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertEqual(doc2.archive_filename, "document_01.pdf")
+
+
+class TestDecryptDocuments(FileSystemAssertsMixin, TestCase):
+    @mock.patch("documents.management.commands.decrypt_documents.input")
+    def test_decrypt(self, m):
+        media_dir = tempfile.mkdtemp()
+        originals_dir = Path(media_dir) / "documents" / "originals"
+        thumb_dir = Path(media_dir) / "documents" / "thumbnails"
+        originals_dir.mkdir(parents=True, exist_ok=True)
+        thumb_dir.mkdir(parents=True, exist_ok=True)
+
+        with override_settings(
+            ORIGINALS_DIR=originals_dir,
+            THUMBNAIL_DIR=thumb_dir,
+            PASSPHRASE="test",
+            FILENAME_FORMAT=None,
+        ):
+            doc = Document.objects.create(
+                checksum="82186aaa94f0b98697d704b90fd1c072",
+                title="wow",
+                filename="0000004.pdf.gpg",
+                mime_type="application/pdf",
+                storage_type=Document.STORAGE_TYPE_GPG,
+            )
+            shutil.copy(
+                (
+                    Path(__file__).parent
+                    / "samples"
+                    / "documents"
+                    / "originals"
+                    / "0000004.pdf.gpg"
+                ),
+                originals_dir / "0000004.pdf.gpg",
+            )
+            shutil.copy(
+                (
+                    Path(__file__).parent
+                    / "samples"
+                    / "documents"
+                    / "thumbnails"
+                    / "0000004.webp.gpg"
+                ),
+                thumb_dir / f"{doc.id:07}.webp.gpg",
+            )
+
+            call_command("decrypt_documents")
+
+            doc.refresh_from_db()
+
+            self.assertEqual(doc.storage_type, Document.STORAGE_TYPE_UNENCRYPTED)
+            self.assertEqual(doc.filename, "0000004.pdf")
+            self.assertIsFile(Path(originals_dir) / "0000004.pdf")
+            self.assertIsFile(doc.source_path)
+            self.assertIsFile(Path(thumb_dir) / f"{doc.id:07}.webp")
+            self.assertIsFile(doc.thumbnail_path)
+
+            with doc.source_file as f:
+                checksum: str = hashlib.md5(f.read()).hexdigest()
+                self.assertEqual(checksum, doc.checksum)
+
+
 class TestMakeIndex(TestCase):
     @mock.patch("documents.management.commands.document_index.index_reindex")
     def test_reindex(self, m):
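For orientation, the decrypt flow that the new `TestDecryptDocuments` test pins down can be sketched roughly as below. This is a hedged illustration only — the actual `decrypt_documents` command is not shown in this compare — and it assumes the `Document` fields used in the test plus the `GnuPG` helper added in `src/paperless/db.py` further down; thumbnail handling is omitted.

```
# Hypothetical sketch, not the real management command: decrypt each
# GPG-stored original, drop the ".gpg" suffix, and flip storage_type,
# which is exactly what the test above asserts afterwards.
from pathlib import Path

from documents.models import Document
from paperless.db import GnuPG  # added later in this compare


def decrypt_all_documents() -> None:
    for doc in Document.objects.filter(storage_type=Document.STORAGE_TYPE_GPG):
        with doc.source_file as f:
            raw = GnuPG.decrypted(f)  # falls back to settings.PASSPHRASE

        # "0000004.pdf.gpg" -> "0000004.pdf"
        old_path = Path(doc.source_path)
        old_path.with_suffix("").write_bytes(raw)
        old_path.unlink()

        doc.filename = Path(doc.filename).stem
        doc.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
        doc.save()
```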


@@ -86,8 +86,9 @@ class TestExportImport(
             content="Content",
             checksum="82186aaa94f0b98697d704b90fd1c072",
             title="wow_dec",
-            filename="0000004.pdf",
+            filename="0000004.pdf.gpg",
             mime_type="application/pdf",
+            storage_type=Document.STORAGE_TYPE_GPG,
         )
         self.note = Note.objects.create(
@@ -241,6 +242,11 @@ class TestExportImport(
                 checksum = hashlib.md5(f.read()).hexdigest()
                 self.assertEqual(checksum, element["fields"]["checksum"])
+                self.assertEqual(
+                    element["fields"]["storage_type"],
+                    Document.STORAGE_TYPE_UNENCRYPTED,
+                )
+
             if document_exporter.EXPORTER_ARCHIVE_NAME in element:
                 fname = (
                     self.target / element[document_exporter.EXPORTER_ARCHIVE_NAME]
@@ -430,7 +436,7 @@ class TestExportImport(
         Document.objects.create(
             checksum="AAAAAAAAAAAAAAAAA",
             title="wow",
-            filename="0000010.pdf",
+            filename="0000004.pdf",
             mime_type="application/pdf",
         )
         self.assertRaises(FileNotFoundError, call_command, "document_exporter", target)


@@ -195,6 +195,7 @@ from paperless import version
 from paperless.celery import app as celery_app
 from paperless.config import AIConfig
 from paperless.config import GeneralConfig
+from paperless.db import GnuPG
 from paperless.models import ApplicationConfiguration
 from paperless.serialisers import GroupSerializer
 from paperless.serialisers import UserSerializer
@@ -1070,7 +1071,9 @@ class DocumentViewSet(
             doc,
         ):
             return HttpResponseForbidden("Insufficient permissions")
-        handle = doc.thumbnail_file
+        if doc.storage_type == Document.STORAGE_TYPE_GPG:
+            handle = GnuPG.decrypted(doc.thumbnail_file)
+        else:
+            handle = doc.thumbnail_file
         return HttpResponse(handle, content_type="image/webp")
@@ -2821,6 +2824,9 @@ def serve_file(*, doc: Document, use_archive: bool, disposition: str):
     if mime_type in {"application/csv", "text/csv"} and disposition == "inline":
         mime_type = "text/plain"
+    if doc.storage_type == Document.STORAGE_TYPE_GPG:
+        file_handle = GnuPG.decrypted(file_handle)
+
     response = HttpResponse(file_handle, content_type=mime_type)
     # Firefox is not able to handle unicode characters in filename field
     # RFC 5987 addresses this issue
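Both view hunks above apply the same pattern: decrypt on the fly if, and only if, the document is stored GPG-encrypted. Condensed into a hypothetical helper (the diff inlines this logic at each call site):

```
from documents.models import Document
from paperless.db import GnuPG


def maybe_decrypt(doc: Document, file_handle):
    # GPG-stored documents are decrypted transparently before serving;
    # everything else passes through untouched.
    if doc.storage_type == Document.STORAGE_TYPE_GPG:
        return GnuPG.decrypted(file_handle)
    return file_handle
```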

File diff suppressed because it is too large

src/paperless/db.py (new file, 17 additions)

@@ -0,0 +1,17 @@
+import gnupg
+from django.conf import settings
+
+
+class GnuPG:
+    """
+    A handy singleton to use when handling encrypted files.
+    """
+
+    gpg = gnupg.GPG(gnupghome=settings.GNUPG_HOME)
+
+    @classmethod
+    def decrypted(cls, file_handle, passphrase=None):
+        if not passphrase:
+            passphrase = settings.PASSPHRASE
+
+        return cls.gpg.decrypt_file(file_handle, passphrase=passphrase).data
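A short usage sketch for the helper above — it assumes `GNUPG_HOME` and `PAPERLESS_PASSPHRASE` are configured, and the file paths are placeholders:

```
from paperless.db import GnuPG

# gnupg.GPG.decrypt_file() takes a binary file handle; .data on its result
# holds the decrypted bytes, so GnuPG.decrypted() returns plain bytes.
with open("/path/to/0000004.pdf.gpg", "rb") as encrypted:
    plaintext: bytes = GnuPG.decrypted(encrypted)

with open("/path/to/0000004.pdf", "wb") as decrypted:
    decrypted.write(plaintext)
```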


@@ -1203,6 +1203,19 @@ EMAIL_PARSE_DEFAULT_LAYOUT = __get_int(
     1, # MailRule.PdfLayout.TEXT_HTML but that can't be imported here
 )
+
+# Pre-2.x versions of Paperless stored your documents locally with GPG
+# encryption, but that is no longer the default. This behaviour is still
+# available, but it must be explicitly enabled by setting
+# `PAPERLESS_PASSPHRASE` in your environment or config file. The default is to
+# store these files unencrypted.
+#
+# Translation:
+# * If you're a new user, you can safely ignore this setting.
+# * If you're upgrading from 1.x, this must be set, OR you can run
+#   `./manage.py change_storage_type gpg unencrypted` to decrypt your files,
+#   after which you can unset this value.
+PASSPHRASE = os.getenv("PAPERLESS_PASSPHRASE")
 # Trigger a script after every successful document consumption?
 PRE_CONSUME_SCRIPT = os.getenv("PAPERLESS_PRE_CONSUME_SCRIPT")
 POST_CONSUME_SCRIPT = os.getenv("PAPERLESS_POST_CONSUME_SCRIPT")
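For reference, how this setting is consumed: an explicit passphrase argument wins, otherwise `GnuPG.decrypted()` falls back to `settings.PASSPHRASE` (i.e. `PAPERLESS_PASSPHRASE`). Invoking the command from Python mirrors the new test; the `--passphrase` option name here is an assumption, not confirmed by this compare:

```
from django.core.management import call_command

# Relies on PAPERLESS_PASSPHRASE from the environment / config file:
call_command("decrypt_documents")

# Or pass one explicitly (option name assumed):
call_command("decrypt_documents", "--passphrase", "my-old-passphrase")
```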

uv.lock (generated, 74 changes)

@@ -3152,15 +3152,15 @@ dev = [
     { name = "mkdocs-material", specifier = "~=9.7.0" },
     { name = "pre-commit", specifier = "~=4.5.1" },
     { name = "pre-commit-uv", specifier = "~=4.2.0" },
-    { name = "pytest", specifier = "~=8.4.1" },
+    { name = "pytest", specifier = "~=9.0.0" },
     { name = "pytest-cov", specifier = "~=7.0.0" },
     { name = "pytest-django", specifier = "~=4.11.1" },
-    { name = "pytest-env" },
+    { name = "pytest-env", specifier = "~=1.2.0" },
     { name = "pytest-httpx" },
-    { name = "pytest-mock" },
-    { name = "pytest-rerunfailures" },
+    { name = "pytest-mock", specifier = "~=3.15.1" },
+    { name = "pytest-rerunfailures", specifier = "~=16.1" },
     { name = "pytest-sugar" },
-    { name = "pytest-xdist" },
+    { name = "pytest-xdist", specifier = "~=3.8.0" },
     { name = "ruff", specifier = "~=0.14.0" },
 ]
 docs = [
@@ -3176,15 +3176,15 @@ testing = [
     { name = "daphne" },
     { name = "factory-boy", specifier = "~=3.3.1" },
     { name = "imagehash" },
-    { name = "pytest", specifier = "~=8.4.1" },
+    { name = "pytest", specifier = "~=9.0.0" },
     { name = "pytest-cov", specifier = "~=7.0.0" },
     { name = "pytest-django", specifier = "~=4.11.1" },
-    { name = "pytest-env" },
+    { name = "pytest-env", specifier = "~=1.2.0" },
     { name = "pytest-httpx" },
-    { name = "pytest-mock" },
-    { name = "pytest-rerunfailures" },
+    { name = "pytest-mock", specifier = "~=3.15.1" },
+    { name = "pytest-rerunfailures", specifier = "~=16.1" },
     { name = "pytest-sugar" },
-    { name = "pytest-xdist" },
+    { name = "pytest-xdist", specifier = "~=3.8.0" },
 ]
 typing = [
     { name = "celery-types" },
@@ -3841,7 +3841,7 @@ wheels = [
 [[package]]
 name = "pytest"
-version = "8.4.2"
+version = "9.0.2"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "exceptiongroup", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux')" },
@@ -3851,9 +3851,9 @@ dependencies = [
     { name = "pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux')" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" },
+    { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
 ]
 [[package]]
@@ -3897,15 +3897,15 @@ wheels = [
 [[package]]
 name = "pytest-httpx"
-version = "0.35.0"
+version = "0.36.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "httpx", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/1f/89/5b12b7b29e3d0af3a4b9c071ee92fa25a9017453731a38f08ba01c280f4c/pytest_httpx-0.35.0.tar.gz", hash = "sha256:d619ad5d2e67734abfbb224c3d9025d64795d4b8711116b1a13f72a251ae511f", size = 54146, upload-time = "2024-11-28T19:16:54.237Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/5574834da9499066fa1a5ea9c336f94dba2eae02298d36dab192fcf95c86/pytest_httpx-0.36.0.tar.gz", hash = "sha256:9edb66a5fd4388ce3c343189bc67e7e1cb50b07c2e3fc83b97d511975e8a831b", size = 56793, upload-time = "2025-12-02T16:34:57.414Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/b0/ed/026d467c1853dd83102411a78126b4842618e86c895f93528b0528c7a620/pytest_httpx-0.35.0-py3-none-any.whl", hash = "sha256:ee11a00ffcea94a5cbff47af2114d34c5b231c326902458deed73f9c459fd744", size = 19442, upload-time = "2024-11-28T19:16:52.787Z" },
+    { url = "https://files.pythonhosted.org/packages/e2/d2/1eb1ea9c84f0d2033eb0b49675afdc71aa4ea801b74615f00f3c33b725e3/pytest_httpx-0.36.0-py3-none-any.whl", hash = "sha256:bd4c120bb80e142df856e825ec9f17981effb84d159f9fa29ed97e2357c3a9c8", size = 20229, upload-time = "2025-12-02T16:34:56.45Z" },
 ]
 [[package]]
@@ -5108,13 +5108,13 @@ dependencies = [
     { name = "typing-extensions", marker = "sys_platform == 'darwin'" },
 ]
 wheels = [
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp310-none-macosx_11_0_arm64.whl" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp311-none-macosx_11_0_arm64.whl" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp312-none-macosx_11_0_arm64.whl" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp313-cp313t-macosx_11_0_arm64.whl" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp313-none-macosx_11_0_arm64.whl" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp314-cp314-macosx_11_0_arm64.whl" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp314-cp314t-macosx_11_0_arm64.whl" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:bf1e68cfb935ae2046374ff02a7aa73dda70351b46342846f557055b3a540bf0" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:a52952a8c90a422c14627ea99b9826b7557203b46b4d0772d3ca5c7699692425" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:287242dd1f830846098b5eca847f817aa5c6015ea57ab4c1287809efea7b77eb" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8924d10d36eac8fe0652a060a03fc2ae52980841850b9a1a2ddb0f27a4f181cd" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:bcee64ae7aa65876ceeae6dcaebe75109485b213528c74939602208a20706e3f" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:defadbeb055cfcf5def58f70937145aecbd7a4bc295238ded1d0e85ae2cf0e1d" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:886f84b181f766f53265ba0a1d503011e60f53fff9d569563ef94f24160e1072" },
 ]
 [[package]]
@@ -5138,20 +5138,20 @@ dependencies = [
     { name = "typing-extensions", marker = "sys_platform == 'linux'" },
 ]
 wheels = [
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp310-cp310-manylinux_2_28_aarch64.whl" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp310-cp310-manylinux_2_28_x86_64.whl" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp311-cp311-manylinux_2_28_aarch64.whl" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp311-cp311-manylinux_2_28_x86_64.whl" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp312-cp312-manylinux_2_28_aarch64.whl" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp312-cp312-manylinux_2_28_x86_64.whl" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313-manylinux_2_28_aarch64.whl" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313-manylinux_2_28_x86_64.whl" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313t-manylinux_2_28_aarch64.whl" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313t-manylinux_2_28_x86_64.whl" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314-manylinux_2_28_aarch64.whl" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314-manylinux_2_28_x86_64.whl" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314t-manylinux_2_28_aarch64.whl" },
-    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314t-manylinux_2_28_x86_64.whl" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:10866c8a48c4aa5ae3f48538dc8a055b99c57d9c6af2bf5dd715374d9d6ddca3" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:7210713b66943fdbfcc237b2e782871b649123ac5d29f548ce8c85be4223ab38" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:0e611cfb16724e62252b67d31073bc5c490cb83e92ecdc1192762535e0e44487" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:3de2adb9b4443dc9210ef1f1b16da3647ace53553166d6360bbbd7edd6f16e4d" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3bf9b442a51a2948e41216a76d7ab00f0694cfcaaa51b6f9bcab57b7f89843e6" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7417d8c565f219d3455654cb431c6d892a3eb40246055e14d645422de13b9ea1" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:3e532e553b37ee859205a9b2d1c7977fd6922f53bbb1b9bfdd5bdc00d1a60ed4" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:39b3dff6d8fba240ae0d1bede4ca11c2531ae3b47329206512d99e17907ff74b" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:01b1884f724977a20c7da2f640f1c7b37f4a2c117a7f4a6c1c0424d14cb86322" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:031a597147fa81b1e6d79ccf1ad3ccc7fafa27941d6cf26ff5caaa384fb20e92" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:65010ab4aacce6c9a1ddfc935f986c003ca8638ded04348fd326c3e74346237c" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:88adf5157db5da1d54b1c9fe4a6c1d20ceef00e75d854e206a87dbf69e3037dc" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:3ac2b8df2c55430e836dcda31940d47f1f5f94b8731057b6f20300ebea394dd9" },
+    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:5b688445f928f13563b7418b17c57e97bf955ab559cf73cd8f2b961f8572dbb3" },
 ]
 [[package]]