Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2026-01-24 22:39:02 -06:00)
Compare commits: dev...chore/pyte (4 commits)

Commits: c08b025423, 55d06fda03, c5fb80dc35, cb5a0fd7d9
@@ -4,7 +4,8 @@

set -eu

for command in document_archiver \
for command in decrypt_documents \
	document_archiver \
	document_exporter \
	document_importer \
	mail_fetcher \
docker/rootfs/usr/local/bin/decrypt_documents (new executable file, 14 lines)

@@ -0,0 +1,14 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash

set -e

cd "${PAPERLESS_SRC_DIR}"

if [[ $(id -u) == 0 ]]; then
	s6-setuidgid paperless python3 manage.py decrypt_documents "$@"
elif [[ $(id -un) == "paperless" ]]; then
	python3 manage.py decrypt_documents "$@"
else
	echo "Unknown user."
fi
@@ -580,6 +580,36 @@ document.
documents, such as encrypted PDF documents. The archiver will skip over
these documents each time it sees them.

### Managing encryption {#encryption}

!!! warning

    Encryption was removed in [paperless-ng 0.9](changelog.md#paperless-ng-090)
    because it did not really provide any additional security, the passphrase
    was stored in a configuration file on the same system as the documents.
    Furthermore, the entire text content of the documents is stored plain in
    the database, even if your documents are encrypted. Filenames are not
    encrypted as well. Finally, the web server provides transparent access to
    your encrypted documents.

    Consider running paperless on an encrypted filesystem instead, which
    will then at least provide security against physical hardware theft.

#### Enabling encryption

Enabling encryption is no longer supported.

#### Disabling encryption

Basic usage to disable encryption of your document store:

(Note: If `PAPERLESS_PASSPHRASE` isn't set already, you need to specify
it here)

```
decrypt_documents [--passphrase SECR3TP4SSPHRA$E]
```

### Detecting duplicates {#fuzzy_duplicate}

Paperless already catches and prevents upload of exactly matching documents,
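As an orientation aid, here is a minimal sketch, not part of the diff, of a pre-flight check that counts how many documents are still stored GPG-encrypted before (and after) running the documented `decrypt_documents` command. It assumes a configured Django environment with `DJANGO_SETTINGS_MODULE=paperless.settings` and the `Document` model shown later in this changeset.

```python
# Sketch only: count documents whose storage_type is still GPG before running
# the decrypt_documents command documented above.
import django

django.setup()  # requires DJANGO_SETTINGS_MODULE to be set

from documents.models import Document

remaining = Document.objects.filter(
    storage_type=Document.STORAGE_TYPE_GPG,
).count()
print(f"{remaining} documents are still stored GPG-encrypted")
```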
@@ -17,9 +17,3 @@ separating the directory ignore from the file ignore.
| `CONSUMER_POLLING_RETRY_COUNT` | _Removed_ | Automatic with stability tracking |
| `CONSUMER_IGNORE_PATTERNS` | [`CONSUMER_IGNORE_PATTERNS`](configuration.md#PAPERLESS_CONSUMER_IGNORE_PATTERNS) | **Now regex, not fnmatch**; user patterns are added to (not replacing) default ones |
| _New_ | [`CONSUMER_IGNORE_DIRS`](configuration.md#PAPERLESS_CONSUMER_IGNORE_DIRS) | Additional directories to ignore; user entries are added to (not replacing) defaults |

## Encryption Support

Document and thumbnail encryption is no longer supported. This was previously deprecated in [paperless-ng 0.9.3](https://github.com/paperless-ngx/paperless-ngx/blob/dev/docs/changelog.md#paperless-ng-093)

Users must decrypt their document using the `decrypt_documents` command before upgrading.
@@ -114,15 +114,16 @@ testing = [
  "daphne",
  "factory-boy~=3.3.1",
  "imagehash",
  "pytest~=8.4.1",
  "pytest~=9.0.0",
  "pytest-cov~=7.0.0",
  "pytest-django~=4.11.1",
  "pytest-env",
  "pytest-env~=1.2.0",
  "pytest-httpx",
  "pytest-mock",
  "pytest-rerunfailures",
  "pytest-mock~=3.15.1",
  #"pytest-randomly~=4.0.1",
  "pytest-rerunfailures~=16.1",
  "pytest-sugar",
  "pytest-xdist",
  "pytest-xdist~=3.8.0",
]

lint = [
@@ -260,11 +261,15 @@ write-changes = true
ignore-words-list = "criterias,afterall,valeu,ureue,equest,ure,assertIn,Oktober"
skip = "src-ui/src/locale/*,src-ui/pnpm-lock.yaml,src-ui/e2e/*,src/paperless_mail/tests/samples/*,src/documents/tests/samples/*,*.po,*.json"

[tool.pytest.ini_options]
minversion = "8.0"
pythonpath = [
  "src",
]
[tool.pytest]
minversion = "9.0"
pythonpath = [ "src" ]

strict_config = true
strict_markers = true
strict_parametrization_ids = true
strict_xfail = true

testpaths = [
  "src/documents/tests/",
  "src/paperless/tests/",
@@ -275,6 +280,7 @@ testpaths = [
  "src/paperless_remote/tests/",
  "src/paperless_ai/tests",
]

addopts = [
  "--pythonwarnings=all",
  "--cov",
@@ -282,11 +288,14 @@ addopts = [
  "--cov-report=xml",
  "--numprocesses=auto",
  "--maxprocesses=16",
  "--quiet",
  "--dist=loadscope",
  "--durations=50",
  "--durations-min=0.5",
  "--junitxml=junit.xml",
  "-o junit_family=legacy",
  "-o",
  "junit_family=legacy",
]

norecursedirs = [ "src/locale/", ".venv/", "src-ui/" ]

DJANGO_SETTINGS_MODULE = "paperless.settings"
@@ -1,4 +1,5 @@
# this is here so that django finds the checks.
from documents.checks import changed_password_check
from documents.checks import parser_check

__all__ = ["parser_check"]
__all__ = ["changed_password_check", "parser_check"]
@@ -60,6 +60,7 @@ class DocumentAdmin(GuardedModelAdmin):
        "added",
        "modified",
        "mime_type",
        "storage_type",
        "filename",
        "checksum",
        "archive_filename",
@@ -1,12 +1,60 @@
import textwrap

from django.conf import settings
from django.core.checks import Error
from django.core.checks import Warning
from django.core.checks import register
from django.core.exceptions import FieldError
from django.db.utils import OperationalError
from django.db.utils import ProgrammingError

from documents.signals import document_consumer_declaration
from documents.templating.utils import convert_format_str_to_template_format


@register()
def changed_password_check(app_configs, **kwargs):
    from documents.models import Document
    from paperless.db import GnuPG

    try:
        encrypted_doc = (
            Document.objects.filter(
                storage_type=Document.STORAGE_TYPE_GPG,
            )
            .only("pk", "storage_type")
            .first()
        )
    except (OperationalError, ProgrammingError, FieldError):
        return []  # No documents table yet

    if encrypted_doc:
        if not settings.PASSPHRASE:
            return [
                Error(
                    "The database contains encrypted documents but no password is set.",
                ),
            ]

        if not GnuPG.decrypted(encrypted_doc.source_file):
            return [
                Error(
                    textwrap.dedent(
                        """
                        The current password doesn't match the password of the
                        existing documents.

                        If you intend to change your password, you must first export
                        all of the old documents, start fresh with the new password
                        and then re-import them."
                        """,
                    ),
                ),
            ]

    return []


@register()
def parser_check(app_configs, **kwargs):
    parsers = []
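`changed_password_check` above is an ordinary Django system check, so its `Error` surfaces through the checks framework (for example on `manage.py check` or at server start). A minimal sketch, not part of the diff, of invoking the checks programmatically; it assumes `DJANGO_SETTINGS_MODULE` points at `paperless.settings`:

```python
# Sketch only: run Django's system checks and print any messages, which is how
# the changed_password_check Error above would surface.
import django
from django.core import checks

django.setup()

for message in checks.run_checks():
    print(message.id, message.msg)
```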
@@ -128,7 +128,7 @@ def thumbnail_last_modified(request, pk: int) -> datetime | None:
    Cache should be (slightly?) faster than filesystem
    """
    try:
        doc = Document.objects.only("pk").get(pk=pk)
        doc = Document.objects.only("storage_type").get(pk=pk)
        if not doc.thumbnail_path.exists():
            return None
        doc_key = get_thumbnail_modified_key(pk)
@@ -497,6 +497,7 @@ class ConsumerPlugin(
        create_source_path_directory(document.source_path)

        self._write(
            document.storage_type,
            self.unmodified_original
            if self.unmodified_original is not None
            else self.working_copy,
@@ -504,6 +505,7 @@ class ConsumerPlugin(
        )

        self._write(
            document.storage_type,
            thumbnail,
            document.thumbnail_path,
        )
@@ -515,6 +517,7 @@ class ConsumerPlugin(
            )
            create_source_path_directory(document.archive_path)
            self._write(
                document.storage_type,
                archive_path,
                document.archive_path,
            )
@@ -634,6 +637,8 @@ class ConsumerPlugin(
        )
        self.log.debug(f"Creation date from st_mtime: {create_date}")

        storage_type = Document.STORAGE_TYPE_UNENCRYPTED

        if self.metadata.filename:
            title = Path(self.metadata.filename).stem
        else:
@@ -660,6 +665,7 @@ class ConsumerPlugin(
            checksum=hashlib.md5(file_for_checksum.read_bytes()).hexdigest(),
            created=create_date,
            modified=create_date,
            storage_type=storage_type,
            page_count=page_count,
            original_filename=self.filename,
        )
@@ -730,7 +736,7 @@ class ConsumerPlugin(
        }
        CustomFieldInstance.objects.create(**args)  # adds to document

    def _write(self, source, target):
    def _write(self, storage_type, source, target):
        with (
            Path(source).open("rb") as read_file,
            Path(target).open("wb") as write_file,
@@ -126,6 +126,7 @@ def generate_filename(
    doc: Document,
    *,
    counter=0,
    append_gpg=True,
    archive_filename=False,
) -> Path:
    base_path: Path | None = None
@@ -169,4 +170,8 @@ def generate_filename(
    final_filename = f"{doc.pk:07}{counter_str}{filetype_str}"
    full_path = Path(final_filename)

    # Add GPG extension if needed
    if append_gpg and doc.storage_type == doc.STORAGE_TYPE_GPG:
        full_path = full_path.with_suffix(full_path.suffix + ".gpg")

    return full_path
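The `with_suffix(full_path.suffix + ".gpg")` idiom above keeps the existing extension and appends `.gpg` after it. A tiny standalone illustration, not project code:

```python
# Illustration of the suffix handling in generate_filename above:
# ".pdf" is replaced by ".pdf.gpg", producing a double extension.
from pathlib import Path

p = Path("0000001.pdf")
print(p.with_suffix(p.suffix + ".gpg"))  # 0000001.pdf.gpg
```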
src/documents/management/commands/decrypt_documents.py (new file, 93 lines)

@@ -0,0 +1,93 @@
from pathlib import Path

from django.conf import settings
from django.core.management.base import BaseCommand
from django.core.management.base import CommandError

from documents.models import Document
from paperless.db import GnuPG


class Command(BaseCommand):
    help = (
        "This is how you migrate your stored documents from an encrypted "
        "state to an unencrypted one (or vice-versa)"
    )

    def add_arguments(self, parser) -> None:
        parser.add_argument(
            "--passphrase",
            help=(
                "If PAPERLESS_PASSPHRASE isn't set already, you need to specify it here"
            ),
        )

    def handle(self, *args, **options) -> None:
        try:
            self.stdout.write(
                self.style.WARNING(
                    "\n\n"
                    "WARNING: This script is going to work directly on your "
                    "document originals, so\n"
                    "WARNING: you probably shouldn't run "
                    "this unless you've got a recent backup\n"
                    "WARNING: handy. It "
                    "*should* work without a hitch, but be safe and backup your\n"
                    "WARNING: stuff first.\n\n"
                    "Hit Ctrl+C to exit now, or Enter to "
                    "continue.\n\n",
                ),
            )
            _ = input()
        except KeyboardInterrupt:
            return

        passphrase = options["passphrase"] or settings.PASSPHRASE
        if not passphrase:
            raise CommandError(
                "Passphrase not defined. Please set it with --passphrase or "
                "by declaring it in your environment or your config.",
            )

        self.__gpg_to_unencrypted(passphrase)

    def __gpg_to_unencrypted(self, passphrase: str) -> None:
        encrypted_files = Document.objects.filter(
            storage_type=Document.STORAGE_TYPE_GPG,
        )

        for document in encrypted_files:
            self.stdout.write(f"Decrypting {document}")

            old_paths = [document.source_path, document.thumbnail_path]

            with document.source_file as file_handle:
                raw_document = GnuPG.decrypted(file_handle, passphrase)
            with document.thumbnail_file as file_handle:
                raw_thumb = GnuPG.decrypted(file_handle, passphrase)

            document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED

            ext: str = Path(document.filename).suffix

            if not ext == ".gpg":
                raise CommandError(
                    f"Abort: encrypted file {document.source_path} does not "
                    f"end with .gpg",
                )

            document.filename = Path(document.filename).stem

            with document.source_path.open("wb") as f:
                f.write(raw_document)

            with document.thumbnail_path.open("wb") as f:
                f.write(raw_thumb)

            Document.objects.filter(id=document.id).update(
                storage_type=document.storage_type,
                filename=document.filename,
            )

            for path in old_paths:
                path.unlink()
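The command can also be driven from Python via Django's `call_command`, a sketch that is not part of the diff. Note that `handle()` waits on `input()` for confirmation, which is why the test added later in this changeset mocks it; the passphrase value below is a placeholder.

```python
# Sketch: programmatic equivalent of `python3 manage.py decrypt_documents --passphrase ...`.
# Assumes a configured Django environment.
from unittest import mock

from django.core.management import call_command

with mock.patch(
    "documents.management.commands.decrypt_documents.input",
    return_value="",
):
    call_command("decrypt_documents", passphrase="SECR3TP4SSPHRA$E")
```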
@@ -3,6 +3,7 @@ import json
import os
import shutil
import tempfile
import time
from pathlib import Path
from typing import TYPE_CHECKING

@@ -55,6 +56,7 @@ from documents.settings import EXPORTER_FILE_NAME
from documents.settings import EXPORTER_THUMBNAIL_NAME
from documents.utils import copy_file_with_basic_stats
from paperless import version
from paperless.db import GnuPG
from paperless.models import ApplicationConfiguration
from paperless_mail.models import MailAccount
from paperless_mail.models import MailRule
@@ -314,17 +316,20 @@ class Command(CryptMixin, BaseCommand):
            total=len(document_manifest),
            disable=self.no_progress_bar,
        ):
            # 3.1. store files unencrypted
            document_dict["fields"]["storage_type"] = Document.STORAGE_TYPE_UNENCRYPTED

            document = document_map[document_dict["pk"]]

            # 3.1. generate a unique filename
            # 3.2. generate a unique filename
            base_name = self.generate_base_name(document)

            # 3.2. write filenames into manifest
            # 3.3. write filenames into manifest
            original_target, thumbnail_target, archive_target = (
                self.generate_document_targets(document, base_name, document_dict)
            )

            # 3.3. write files to target folder
            # 3.4. write files to target folder
            if not self.data_only:
                self.copy_document_files(
                    document,
@@ -418,6 +423,7 @@ class Command(CryptMixin, BaseCommand):
            base_name = generate_filename(
                document,
                counter=filename_counter,
                append_gpg=False,
            )
        else:
            base_name = document.get_public_filename(counter=filename_counter)
@@ -476,24 +482,46 @@ class Command(CryptMixin, BaseCommand):

        If the document is encrypted, the files are decrypted before copying them to the target location.
        """
        self.check_and_copy(
            document.source_path,
            document.checksum,
            original_target,
        )
        if document.storage_type == Document.STORAGE_TYPE_GPG:
            t = int(time.mktime(document.created.timetuple()))

        if thumbnail_target:
            self.check_and_copy(document.thumbnail_path, None, thumbnail_target)
            original_target.parent.mkdir(parents=True, exist_ok=True)
            with document.source_file as out_file:
                original_target.write_bytes(GnuPG.decrypted(out_file))
            os.utime(original_target, times=(t, t))

        if archive_target:
            if TYPE_CHECKING:
                assert isinstance(document.archive_path, Path)
            if thumbnail_target:
                thumbnail_target.parent.mkdir(parents=True, exist_ok=True)
                with document.thumbnail_file as out_file:
                    thumbnail_target.write_bytes(GnuPG.decrypted(out_file))
                os.utime(thumbnail_target, times=(t, t))

            if archive_target:
                archive_target.parent.mkdir(parents=True, exist_ok=True)
                if TYPE_CHECKING:
                    assert isinstance(document.archive_path, Path)
                with document.archive_path as out_file:
                    archive_target.write_bytes(GnuPG.decrypted(out_file))
                os.utime(archive_target, times=(t, t))
        else:
            self.check_and_copy(
                document.archive_path,
                document.archive_checksum,
                archive_target,
                document.source_path,
                document.checksum,
                original_target,
            )

            if thumbnail_target:
                self.check_and_copy(document.thumbnail_path, None, thumbnail_target)

            if archive_target:
                if TYPE_CHECKING:
                    assert isinstance(document.archive_path, Path)
                self.check_and_copy(
                    document.archive_path,
                    document.archive_checksum,
                    archive_target,
                )

    def check_and_write_json(
        self,
        content: list[dict] | dict,
@@ -383,6 +383,8 @@ class Command(CryptMixin, BaseCommand):
        else:
            archive_path = None

        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED

        with FileLock(settings.MEDIA_LOCK):
            if Path(document.source_path).is_file():
                raise FileExistsError(document.source_path)
@@ -1,16 +0,0 @@
# Generated by Django 5.2.9 on 2026-01-24 23:05

from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0003_workflowaction_order"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="document",
            name="storage_type",
        ),
    ]
@@ -154,6 +154,13 @@ class StoragePath(MatchingModel):


class Document(SoftDeleteModel, ModelWithOwner):
    STORAGE_TYPE_UNENCRYPTED = "unencrypted"
    STORAGE_TYPE_GPG = "gpg"
    STORAGE_TYPES = (
        (STORAGE_TYPE_UNENCRYPTED, _("Unencrypted")),
        (STORAGE_TYPE_GPG, _("Encrypted with GNU Privacy Guard")),
    )

    correspondent = models.ForeignKey(
        Correspondent,
        blank=True,
@@ -243,6 +250,14 @@ class Document(SoftDeleteModel, ModelWithOwner):
        db_index=True,
    )

    storage_type = models.CharField(
        _("storage type"),
        max_length=11,
        choices=STORAGE_TYPES,
        default=STORAGE_TYPE_UNENCRYPTED,
        editable=False,
    )

    added = models.DateTimeField(
        _("added"),
        default=timezone.now,
@@ -338,7 +353,12 @@ class Document(SoftDeleteModel, ModelWithOwner):

    @property
    def source_path(self) -> Path:
        fname = str(self.filename) if self.filename else f"{self.pk:07}{self.file_type}"
        if self.filename:
            fname = str(self.filename)
        else:
            fname = f"{self.pk:07}{self.file_type}"
        if self.storage_type == self.STORAGE_TYPE_GPG:
            fname += ".gpg"  # pragma: no cover

        return (settings.ORIGINALS_DIR / Path(fname)).resolve()

@@ -387,6 +407,8 @@ class Document(SoftDeleteModel, ModelWithOwner):
    @property
    def thumbnail_path(self) -> Path:
        webp_file_name = f"{self.pk:07}.webp"
        if self.storage_type == self.STORAGE_TYPE_GPG:
            webp_file_name += ".gpg"

        webp_file_path = settings.THUMBNAIL_DIR / Path(webp_file_name)
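A small illustration, not part of the diff, of how the restored `storage_type` field drives the `.gpg` suffix in `source_path` and `thumbnail_path`; it assumes a configured Django environment, since the properties read the media directories from settings.

```python
# Sketch: the path properties above append ".gpg" only for GPG-stored documents.
from documents.models import Document

doc = Document(pk=4, mime_type="application/pdf")
doc.storage_type = Document.STORAGE_TYPE_GPG
print(doc.source_path)     # <ORIGINALS_DIR>/0000004.pdf.gpg
print(doc.thumbnail_path)  # <THUMBNAIL_DIR>/0000004.webp.gpg
```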
@@ -108,6 +108,7 @@ def create_dummy_document():
        page_count=5,
        created=timezone.now(),
        modified=timezone.now(),
        storage_type=Document.STORAGE_TYPE_UNENCRYPTED,
        added=timezone.now(),
        filename="/dummy/filename.pdf",
        archive_filename="/dummy/archive_filename.pdf",
src/documents/tests/samples/documents/originals/0000004.pdf.gpg (new binary file, not shown)

Binary image sample files changed (one with a previous size of 2.6 KiB); contents not shown
@@ -1,3 +1,4 @@
import textwrap
from unittest import mock

from django.core.checks import Error
@@ -5,11 +6,60 @@ from django.core.checks import Warning
from django.test import TestCase
from django.test import override_settings

from documents.checks import changed_password_check
from documents.checks import filename_format_check
from documents.checks import parser_check
from documents.models import Document
from documents.tests.factories import DocumentFactory


class TestDocumentChecks(TestCase):
    def test_changed_password_check_empty_db(self):
        self.assertListEqual(changed_password_check(None), [])

    def test_changed_password_check_no_encryption(self):
        DocumentFactory.create(storage_type=Document.STORAGE_TYPE_UNENCRYPTED)
        self.assertListEqual(changed_password_check(None), [])

    def test_encrypted_missing_passphrase(self):
        DocumentFactory.create(storage_type=Document.STORAGE_TYPE_GPG)
        msgs = changed_password_check(None)
        self.assertEqual(len(msgs), 1)
        msg_text = msgs[0].msg
        self.assertEqual(
            msg_text,
            "The database contains encrypted documents but no password is set.",
        )

    @override_settings(
        PASSPHRASE="test",
    )
    @mock.patch("paperless.db.GnuPG.decrypted")
    @mock.patch("documents.models.Document.source_file")
    def test_encrypted_decrypt_fails(self, mock_decrypted, mock_source_file):
        mock_decrypted.return_value = None
        mock_source_file.return_value = b""

        DocumentFactory.create(storage_type=Document.STORAGE_TYPE_GPG)

        msgs = changed_password_check(None)

        self.assertEqual(len(msgs), 1)
        msg_text = msgs[0].msg
        self.assertEqual(
            msg_text,
            textwrap.dedent(
                """
                The current password doesn't match the password of the
                existing documents.

                If you intend to change your password, you must first export
                all of the old documents, start fresh with the new password
                and then re-import them."
                """,
            ),
        )

    def test_parser_check(self):
        self.assertEqual(parser_check(None), [])
@@ -224,17 +224,18 @@ class TestDoubleSided(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
        THEN:
            - The collated file gets put into foo/bar
        """
        # TODO: parameterize this instead
        for path in [
            Path("foo") / "bar" / "double-sided",
            Path("double-sided") / "foo" / "bar",
        ]:
            with self.subTest(path=path):
            with self.subTest(path=str(path)):
                # Ensure we get fresh directories for each run
                self.tearDown()
                self.setUp()

                self.create_staging_file()
                self.consume_file("double-sided-odd.pdf", path / "foo.pdf")
                self.consume_file("double-sided-odd.pdf", Path(path) / "foo.pdf")
                self.assertIsFile(
                    self.dirs.consumption_dir / "foo" / "bar" / "foo-collated.pdf",
                )
@@ -34,14 +34,22 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
    def test_generate_source_filename(self):
        document = Document()
        document.mime_type = "application/pdf"
        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
        document.save()

        self.assertEqual(generate_filename(document), Path(f"{document.pk:07d}.pdf"))

        document.storage_type = Document.STORAGE_TYPE_GPG
        self.assertEqual(
            generate_filename(document),
            Path(f"{document.pk:07d}.pdf.gpg"),
        )

    @override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
    def test_file_renaming(self):
        document = Document()
        document.mime_type = "application/pdf"
        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
        document.save()

        # Test default source_path
@@ -55,6 +63,11 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
        # Ensure that filename is properly generated
        self.assertEqual(document.filename, Path("none/none.pdf"))

        # Enable encryption and check again
        document.storage_type = Document.STORAGE_TYPE_GPG
        document.filename = generate_filename(document)
        self.assertEqual(document.filename, Path("none/none.pdf.gpg"))

        document.save()

        # test that creating dirs for the source_path creates the correct directory
@@ -74,14 +87,14 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
            settings.ORIGINALS_DIR / "none",
        )
        self.assertIsFile(
            settings.ORIGINALS_DIR / "test" / "test.pdf",
            settings.ORIGINALS_DIR / "test" / "test.pdf.gpg",
        )

    @override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
    def test_file_renaming_missing_permissions(self):
        document = Document()
        document.mime_type = "application/pdf"

        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
        document.save()

        # Ensure that filename is properly generated
@@ -115,13 +128,14 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
    def test_file_renaming_database_error(self):
        Document.objects.create(
            mime_type="application/pdf",
            storage_type=Document.STORAGE_TYPE_UNENCRYPTED,
            checksum="AAAAA",
        )

        document = Document()
        document.mime_type = "application/pdf"
        document.checksum = "BBBBB"

        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
        document.save()

        # Ensure that filename is properly generated
@@ -156,7 +170,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
    def test_document_delete(self):
        document = Document()
        document.mime_type = "application/pdf"

        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
        document.save()

        # Ensure that filename is properly generated
@@ -182,7 +196,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
    def test_document_delete_trash_dir(self):
        document = Document()
        document.mime_type = "application/pdf"

        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
        document.save()

        # Ensure that filename is properly generated
@@ -207,7 +221,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
        # Create an identical document and ensure it is trashed under a new name
        document = Document()
        document.mime_type = "application/pdf"

        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
        document.save()
        document.filename = generate_filename(document)
        document.save()
@@ -221,7 +235,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
    def test_document_delete_nofile(self):
        document = Document()
        document.mime_type = "application/pdf"

        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
        document.save()

        document.delete()
@@ -231,7 +245,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
    def test_directory_not_empty(self):
        document = Document()
        document.mime_type = "application/pdf"

        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
        document.save()

        # Ensure that filename is properly generated
@@ -348,7 +362,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
    def test_nested_directory_cleanup(self):
        document = Document()
        document.mime_type = "application/pdf"

        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
        document.save()

        # Ensure that filename is properly generated
@@ -376,6 +390,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
        document = Document()
        document.pk = 1
        document.mime_type = "application/pdf"
        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED

        self.assertEqual(generate_filename(document), Path("0000001.pdf"))

@@ -388,6 +403,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
        document = Document()
        document.pk = 1
        document.mime_type = "application/pdf"
        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED

        self.assertEqual(generate_filename(document), Path("0000001.pdf"))

@@ -413,6 +429,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
        document = Document()
        document.pk = 1
        document.mime_type = "application/pdf"
        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED

        self.assertEqual(generate_filename(document), Path("0000001.pdf"))

@@ -421,6 +438,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
        document = Document()
        document.pk = 1
        document.mime_type = "application/pdf"
        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED

        self.assertEqual(generate_filename(document), Path("0000001.pdf"))

@@ -1240,7 +1258,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
            title="doc1",
            mime_type="application/pdf",
        )

        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
        document.save()

        # Ensure that filename is properly generated
@@ -1714,6 +1732,7 @@ class TestPathDateLocalization:
        document = DocumentFactory.create(
            title="My Document",
            mime_type="application/pdf",
            storage_type=Document.STORAGE_TYPE_UNENCRYPTED,
            created=self.TEST_DATE,  # 2023-10-26 (which is a Thursday)
        )
        with override_settings(FILENAME_FORMAT=filename_format):
@@ -1,5 +1,7 @@
import filecmp
import hashlib
import shutil
import tempfile
from io import StringIO
from pathlib import Path
from unittest import mock
@@ -94,6 +96,66 @@ class TestArchiver(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
        self.assertEqual(doc2.archive_filename, "document_01.pdf")


class TestDecryptDocuments(FileSystemAssertsMixin, TestCase):
    @mock.patch("documents.management.commands.decrypt_documents.input")
    def test_decrypt(self, m):
        media_dir = tempfile.mkdtemp()
        originals_dir = Path(media_dir) / "documents" / "originals"
        thumb_dir = Path(media_dir) / "documents" / "thumbnails"
        originals_dir.mkdir(parents=True, exist_ok=True)
        thumb_dir.mkdir(parents=True, exist_ok=True)

        with override_settings(
            ORIGINALS_DIR=originals_dir,
            THUMBNAIL_DIR=thumb_dir,
            PASSPHRASE="test",
            FILENAME_FORMAT=None,
        ):
            doc = Document.objects.create(
                checksum="82186aaa94f0b98697d704b90fd1c072",
                title="wow",
                filename="0000004.pdf.gpg",
                mime_type="application/pdf",
                storage_type=Document.STORAGE_TYPE_GPG,
            )

            shutil.copy(
                (
                    Path(__file__).parent
                    / "samples"
                    / "documents"
                    / "originals"
                    / "0000004.pdf.gpg"
                ),
                originals_dir / "0000004.pdf.gpg",
            )
            shutil.copy(
                (
                    Path(__file__).parent
                    / "samples"
                    / "documents"
                    / "thumbnails"
                    / "0000004.webp.gpg"
                ),
                thumb_dir / f"{doc.id:07}.webp.gpg",
            )

            call_command("decrypt_documents")

            doc.refresh_from_db()

            self.assertEqual(doc.storage_type, Document.STORAGE_TYPE_UNENCRYPTED)
            self.assertEqual(doc.filename, "0000004.pdf")
            self.assertIsFile(Path(originals_dir) / "0000004.pdf")
            self.assertIsFile(doc.source_path)
            self.assertIsFile(Path(thumb_dir) / f"{doc.id:07}.webp")
            self.assertIsFile(doc.thumbnail_path)

            with doc.source_file as f:
                checksum: str = hashlib.md5(f.read()).hexdigest()
            self.assertEqual(checksum, doc.checksum)


class TestMakeIndex(TestCase):
    @mock.patch("documents.management.commands.document_index.index_reindex")
    def test_reindex(self, m):
@@ -86,8 +86,9 @@ class TestExportImport(
            content="Content",
            checksum="82186aaa94f0b98697d704b90fd1c072",
            title="wow_dec",
            filename="0000004.pdf",
            filename="0000004.pdf.gpg",
            mime_type="application/pdf",
            storage_type=Document.STORAGE_TYPE_GPG,
        )

        self.note = Note.objects.create(
@@ -241,6 +242,11 @@ class TestExportImport(
                checksum = hashlib.md5(f.read()).hexdigest()
                self.assertEqual(checksum, element["fields"]["checksum"])

            self.assertEqual(
                element["fields"]["storage_type"],
                Document.STORAGE_TYPE_UNENCRYPTED,
            )

            if document_exporter.EXPORTER_ARCHIVE_NAME in element:
                fname = (
                    self.target / element[document_exporter.EXPORTER_ARCHIVE_NAME]
@@ -430,7 +436,7 @@ class TestExportImport(
        Document.objects.create(
            checksum="AAAAAAAAAAAAAAAAA",
            title="wow",
            filename="0000010.pdf",
            filename="0000004.pdf",
            mime_type="application/pdf",
        )
        self.assertRaises(FileNotFoundError, call_command, "document_exporter", target)
@@ -195,6 +195,7 @@ from paperless import version
from paperless.celery import app as celery_app
from paperless.config import AIConfig
from paperless.config import GeneralConfig
from paperless.db import GnuPG
from paperless.models import ApplicationConfiguration
from paperless.serialisers import GroupSerializer
from paperless.serialisers import UserSerializer
@@ -1070,8 +1071,10 @@ class DocumentViewSet(
                doc,
            ):
                return HttpResponseForbidden("Insufficient permissions")

            handle = doc.thumbnail_file
            if doc.storage_type == Document.STORAGE_TYPE_GPG:
                handle = GnuPG.decrypted(doc.thumbnail_file)
            else:
                handle = doc.thumbnail_file

            return HttpResponse(handle, content_type="image/webp")
        except (FileNotFoundError, Document.DoesNotExist):
@@ -2821,6 +2824,9 @@ def serve_file(*, doc: Document, use_archive: bool, disposition: str):
    if mime_type in {"application/csv", "text/csv"} and disposition == "inline":
        mime_type = "text/plain"

    if doc.storage_type == Document.STORAGE_TYPE_GPG:
        file_handle = GnuPG.decrypted(file_handle)

    response = HttpResponse(file_handle, content_type=mime_type)
    # Firefox is not able to handle unicode characters in filename field
    # RFC 5987 addresses this issue
File diff suppressed because it is too large
src/paperless/db.py (new file, 17 lines)

@@ -0,0 +1,17 @@
import gnupg
from django.conf import settings


class GnuPG:
    """
    A handy singleton to use when handling encrypted files.
    """

    gpg = gnupg.GPG(gnupghome=settings.GNUPG_HOME)

    @classmethod
    def decrypted(cls, file_handle, passphrase=None):
        if not passphrase:
            passphrase = settings.PASSPHRASE

        return cls.gpg.decrypt_file(file_handle, passphrase=passphrase).data
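Usage sketch for the `GnuPG` helper above; this is not part of the diff and the file path is a made-up example. `decrypted()` falls back to `settings.PASSPHRASE` when no passphrase is passed and returns the decrypted bytes (empty on failure).

```python
# Sketch: decrypt one stored original into memory with the helper class above.
from pathlib import Path

from paperless.db import GnuPG

encrypted = Path("/usr/src/paperless/media/documents/originals/0000004.pdf.gpg")
with encrypted.open("rb") as handle:
    plaintext = GnuPG.decrypted(handle, passphrase="test")

if plaintext:
    Path("/tmp/0000004.pdf").write_bytes(plaintext)
```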
@@ -1203,6 +1203,19 @@ EMAIL_PARSE_DEFAULT_LAYOUT = __get_int(
    1,  # MailRule.PdfLayout.TEXT_HTML but that can't be imported here
)

# Pre-2.x versions of Paperless stored your documents locally with GPG
# encryption, but that is no longer the default. This behaviour is still
# available, but it must be explicitly enabled by setting
# `PAPERLESS_PASSPHRASE` in your environment or config file. The default is to
# store these files unencrypted.
#
# Translation:
# * If you're a new user, you can safely ignore this setting.
# * If you're upgrading from 1.x, this must be set, OR you can run
#   `./manage.py change_storage_type gpg unencrypted` to decrypt your files,
#   after which you can unset this value.
PASSPHRASE = os.getenv("PAPERLESS_PASSPHRASE")

# Trigger a script after every successful document consumption?
PRE_CONSUME_SCRIPT = os.getenv("PAPERLESS_PRE_CONSUME_SCRIPT")
POST_CONSUME_SCRIPT = os.getenv("PAPERLESS_POST_CONSUME_SCRIPT")
uv.lock (generated, 74 lines changed)

@@ -3152,15 +3152,15 @@ dev = [
    { name = "mkdocs-material", specifier = "~=9.7.0" },
    { name = "pre-commit", specifier = "~=4.5.1" },
    { name = "pre-commit-uv", specifier = "~=4.2.0" },
    { name = "pytest", specifier = "~=8.4.1" },
    { name = "pytest", specifier = "~=9.0.0" },
    { name = "pytest-cov", specifier = "~=7.0.0" },
    { name = "pytest-django", specifier = "~=4.11.1" },
    { name = "pytest-env" },
    { name = "pytest-env", specifier = "~=1.2.0" },
    { name = "pytest-httpx" },
    { name = "pytest-mock" },
    { name = "pytest-rerunfailures" },
    { name = "pytest-mock", specifier = "~=3.15.1" },
    { name = "pytest-rerunfailures", specifier = "~=16.1" },
    { name = "pytest-sugar" },
    { name = "pytest-xdist" },
    { name = "pytest-xdist", specifier = "~=3.8.0" },
    { name = "ruff", specifier = "~=0.14.0" },
]
docs = [
@@ -3176,15 +3176,15 @@ testing = [
    { name = "daphne" },
    { name = "factory-boy", specifier = "~=3.3.1" },
    { name = "imagehash" },
    { name = "pytest", specifier = "~=8.4.1" },
    { name = "pytest", specifier = "~=9.0.0" },
    { name = "pytest-cov", specifier = "~=7.0.0" },
    { name = "pytest-django", specifier = "~=4.11.1" },
    { name = "pytest-env" },
    { name = "pytest-env", specifier = "~=1.2.0" },
    { name = "pytest-httpx" },
    { name = "pytest-mock" },
    { name = "pytest-rerunfailures" },
    { name = "pytest-mock", specifier = "~=3.15.1" },
    { name = "pytest-rerunfailures", specifier = "~=16.1" },
    { name = "pytest-sugar" },
    { name = "pytest-xdist" },
    { name = "pytest-xdist", specifier = "~=3.8.0" },
]
typing = [
    { name = "celery-types" },
@@ -3841,7 +3841,7 @@ wheels = [

[[package]]
name = "pytest"
version = "8.4.2"
version = "9.0.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "exceptiongroup", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux')" },
@@ -3851,9 +3851,9 @@ dependencies = [
    { name = "pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
    { name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux')" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" }
sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" },
    { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
]

[[package]]
@@ -3897,15 +3897,15 @@ wheels = [

[[package]]
name = "pytest-httpx"
version = "0.35.0"
version = "0.36.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "httpx", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
    { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/1f/89/5b12b7b29e3d0af3a4b9c071ee92fa25a9017453731a38f08ba01c280f4c/pytest_httpx-0.35.0.tar.gz", hash = "sha256:d619ad5d2e67734abfbb224c3d9025d64795d4b8711116b1a13f72a251ae511f", size = 54146, upload-time = "2024-11-28T19:16:54.237Z" }
sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/5574834da9499066fa1a5ea9c336f94dba2eae02298d36dab192fcf95c86/pytest_httpx-0.36.0.tar.gz", hash = "sha256:9edb66a5fd4388ce3c343189bc67e7e1cb50b07c2e3fc83b97d511975e8a831b", size = 56793, upload-time = "2025-12-02T16:34:57.414Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/b0/ed/026d467c1853dd83102411a78126b4842618e86c895f93528b0528c7a620/pytest_httpx-0.35.0-py3-none-any.whl", hash = "sha256:ee11a00ffcea94a5cbff47af2114d34c5b231c326902458deed73f9c459fd744", size = 19442, upload-time = "2024-11-28T19:16:52.787Z" },
    { url = "https://files.pythonhosted.org/packages/e2/d2/1eb1ea9c84f0d2033eb0b49675afdc71aa4ea801b74615f00f3c33b725e3/pytest_httpx-0.36.0-py3-none-any.whl", hash = "sha256:bd4c120bb80e142df856e825ec9f17981effb84d159f9fa29ed97e2357c3a9c8", size = 20229, upload-time = "2025-12-02T16:34:56.45Z" },
]

[[package]]
@@ -5108,13 +5108,13 @@ dependencies = [
    { name = "typing-extensions", marker = "sys_platform == 'darwin'" },
]
wheels = [
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp310-none-macosx_11_0_arm64.whl" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp311-none-macosx_11_0_arm64.whl" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp312-none-macosx_11_0_arm64.whl" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp313-cp313t-macosx_11_0_arm64.whl" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp313-none-macosx_11_0_arm64.whl" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp314-cp314-macosx_11_0_arm64.whl" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp314-cp314t-macosx_11_0_arm64.whl" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:bf1e68cfb935ae2046374ff02a7aa73dda70351b46342846f557055b3a540bf0" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:a52952a8c90a422c14627ea99b9826b7557203b46b4d0772d3ca5c7699692425" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:287242dd1f830846098b5eca847f817aa5c6015ea57ab4c1287809efea7b77eb" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8924d10d36eac8fe0652a060a03fc2ae52980841850b9a1a2ddb0f27a4f181cd" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:bcee64ae7aa65876ceeae6dcaebe75109485b213528c74939602208a20706e3f" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:defadbeb055cfcf5def58f70937145aecbd7a4bc295238ded1d0e85ae2cf0e1d" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:886f84b181f766f53265ba0a1d503011e60f53fff9d569563ef94f24160e1072" },
]

[[package]]
@@ -5138,20 +5138,20 @@ dependencies = [
    { name = "typing-extensions", marker = "sys_platform == 'linux'" },
]
wheels = [
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp310-cp310-manylinux_2_28_aarch64.whl" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp310-cp310-manylinux_2_28_x86_64.whl" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp311-cp311-manylinux_2_28_aarch64.whl" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp311-cp311-manylinux_2_28_x86_64.whl" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp312-cp312-manylinux_2_28_aarch64.whl" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp312-cp312-manylinux_2_28_x86_64.whl" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313-manylinux_2_28_aarch64.whl" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313-manylinux_2_28_x86_64.whl" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313t-manylinux_2_28_aarch64.whl" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313t-manylinux_2_28_x86_64.whl" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314-manylinux_2_28_aarch64.whl" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314-manylinux_2_28_x86_64.whl" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314t-manylinux_2_28_aarch64.whl" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314t-manylinux_2_28_x86_64.whl" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:10866c8a48c4aa5ae3f48538dc8a055b99c57d9c6af2bf5dd715374d9d6ddca3" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:7210713b66943fdbfcc237b2e782871b649123ac5d29f548ce8c85be4223ab38" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:0e611cfb16724e62252b67d31073bc5c490cb83e92ecdc1192762535e0e44487" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:3de2adb9b4443dc9210ef1f1b16da3647ace53553166d6360bbbd7edd6f16e4d" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3bf9b442a51a2948e41216a76d7ab00f0694cfcaaa51b6f9bcab57b7f89843e6" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7417d8c565f219d3455654cb431c6d892a3eb40246055e14d645422de13b9ea1" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:3e532e553b37ee859205a9b2d1c7977fd6922f53bbb1b9bfdd5bdc00d1a60ed4" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:39b3dff6d8fba240ae0d1bede4ca11c2531ae3b47329206512d99e17907ff74b" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:01b1884f724977a20c7da2f640f1c7b37f4a2c117a7f4a6c1c0424d14cb86322" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:031a597147fa81b1e6d79ccf1ad3ccc7fafa27941d6cf26ff5caaa384fb20e92" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:65010ab4aacce6c9a1ddfc935f986c003ca8638ded04348fd326c3e74346237c" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:88adf5157db5da1d54b1c9fe4a6c1d20ceef00e75d854e206a87dbf69e3037dc" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:3ac2b8df2c55430e836dcda31940d47f1f5f94b8731057b6f20300ebea394dd9" },
    { url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:5b688445f928f13563b7418b17c57e97bf955ab559cf73cd8f2b961f8572dbb3" },
]

[[package]]