Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2026-01-24 22:39:02 -06:00)

Compare commits: chore/pyte ... feature-pw (27 commits)
| Author | SHA1 | Date |
|---|---|---|
| | bc406d612a | |
| | b12936c8c6 | |
| | cf89d81b9e | |
| | d0032c18be | |
| | f596294e1f | |
| | 02e590c70c | |
| | 3c53e4bab1 | |
| | 880b3e6d15 | |
| | f7a6f79c8b | |
| | 87dc22fbf6 | |
| | 2332b3f6ad | |
| | 5fbc985b67 | |
| | 7f95160a63 | |
| | 1aaf128bcb | |
| | 10db1e6405 | |
| | 0e2611163b | |
| | b917db44ed | |
| | bca409d932 | |
| | 07d67b3299 | |
| | 5fca9bac50 | |
| | b21df970fd | |
| | 833890d0ca | |
| | eb1708420e | |
| | 3bb74772a9 | |
| | 402c9af81b | |
| | c1de78162b | |
| | f888722a73 | |
@@ -4,8 +4,7 @@
 set -eu
 
-for command in decrypt_documents \
-	document_archiver \
+for command in document_archiver \
 	document_exporter \
 	document_importer \
 	mail_fetcher \
@@ -1,14 +0,0 @@
-#!/command/with-contenv /usr/bin/bash
-# shellcheck shell=bash
-
-set -e
-
-cd "${PAPERLESS_SRC_DIR}"
-
-if [[ $(id -u) == 0 ]]; then
-	s6-setuidgid paperless python3 manage.py decrypt_documents "$@"
-elif [[ $(id -un) == "paperless" ]]; then
-	python3 manage.py decrypt_documents "$@"
-else
-	echo "Unknown user."
-fi
@@ -580,36 +580,6 @@ document.
 documents, such as encrypted PDF documents. The archiver will skip over
 these documents each time it sees them.
 
-### Managing encryption {#encryption}
-
-!!! warning
-
-    Encryption was removed in [paperless-ng 0.9](changelog.md#paperless-ng-090)
-    because it did not really provide any additional security, the passphrase
-    was stored in a configuration file on the same system as the documents.
-    Furthermore, the entire text content of the documents is stored plain in
-    the database, even if your documents are encrypted. Filenames are not
-    encrypted as well. Finally, the web server provides transparent access to
-    your encrypted documents.
-
-    Consider running paperless on an encrypted filesystem instead, which
-    will then at least provide security against physical hardware theft.
-
-#### Enabling encryption
-
-Enabling encryption is no longer supported.
-
-#### Disabling encryption
-
-Basic usage to disable encryption of your document store:
-
-(Note: If `PAPERLESS_PASSPHRASE` isn't set already, you need to specify
-it here)
-
-```
-decrypt_documents [--passphrase SECR3TP4SSPHRA$E]
-```
-
 ### Detecting duplicates {#fuzzy_duplicate}
 
 Paperless already catches and prevents upload of exactly matching documents,
@@ -17,3 +17,9 @@ separating the directory ignore from the file ignore.
 | `CONSUMER_POLLING_RETRY_COUNT` | _Removed_ | Automatic with stability tracking |
 | `CONSUMER_IGNORE_PATTERNS` | [`CONSUMER_IGNORE_PATTERNS`](configuration.md#PAPERLESS_CONSUMER_IGNORE_PATTERNS) | **Now regex, not fnmatch**; user patterns are added to (not replacing) default ones |
 | _New_ | [`CONSUMER_IGNORE_DIRS`](configuration.md#PAPERLESS_CONSUMER_IGNORE_DIRS) | Additional directories to ignore; user entries are added to (not replacing) defaults |
+
+## Encryption Support
+
+Document and thumbnail encryption is no longer supported. This was previously deprecated in [paperless-ng 0.9.3](https://github.com/paperless-ngx/paperless-ngx/blob/dev/docs/changelog.md#paperless-ng-093).
+
+Users must decrypt their documents using the `decrypt_documents` command before upgrading.
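The upgrade note above assumes every document has already been decrypted. A minimal pre-upgrade sanity check, sketched in Python and assuming it is run from `python3 manage.py shell` on the old (pre-upgrade) version, where `Document.storage_type` and the `decrypt_documents` command still exist:

```python
# Hypothetical pre-upgrade check; Document.storage_type only exists before these commits land.
from documents.models import Document

remaining = Document.objects.filter(storage_type=Document.STORAGE_TYPE_GPG).count()
if remaining:
    print(
        f"{remaining} document(s) are still GPG-encrypted; run "
        "`python3 manage.py decrypt_documents` before upgrading."
    )
else:
    print("No encrypted documents found; safe to upgrade.")
```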
@@ -430,6 +430,24 @@
             </div>
           </div>
         }
+        @case (WorkflowActionType.PasswordRemoval) {
+          <div class="row">
+            <div class="col">
+              <p class="small" i18n>
+                One or more passwords separated by commas or new lines. The workflow will try them in order until one succeeds.
+              </p>
+              <pngx-input-textarea
+                i18n-title
+                title="Passwords"
+                formControlName="passwords"
+                rows="4"
+                [error]="error?.actions?.[i]?.passwords"
+                hint="Passwords are stored in plain text. Use with caution."
+                i18n-hint
+              ></pngx-input-textarea>
+            </div>
+          </div>
+        }
       }
     </div>
 </ng-template>
@@ -139,6 +139,10 @@ export const WORKFLOW_ACTION_OPTIONS = [
     id: WorkflowActionType.Webhook,
     name: $localize`Webhook`,
   },
+  {
+    id: WorkflowActionType.PasswordRemoval,
+    name: $localize`Password removal`,
+  },
 ]
 
 export enum TriggerFilterType {
@@ -1133,6 +1137,7 @@ export class WorkflowEditDialogComponent
           headers: new FormControl(action.webhook?.headers),
           include_document: new FormControl(!!action.webhook?.include_document),
         }),
+        passwords: new FormControl(action.passwords),
       }),
       { emitEvent }
     )
@@ -181,6 +181,7 @@ export enum ZoomSetting {
     NgxBootstrapIconsModule,
     PdfViewerModule,
     TextAreaComponent,
+    PasswordRemovalConfirmDialogComponent,
   ],
 })
 export class DocumentDetailComponent
@@ -5,6 +5,7 @@ export enum WorkflowActionType {
   Removal = 2,
   Email = 3,
   Webhook = 4,
+  PasswordRemoval = 5,
 }
 
 export interface WorkflowActionEmail extends ObjectWithId {
@@ -97,4 +98,6 @@ export interface WorkflowAction extends ObjectWithId {
   email?: WorkflowActionEmail
 
   webhook?: WorkflowActionWebhook
+
+  passwords?: string
 }
@@ -1,5 +1,4 @@
 # this is here so that django finds the checks.
-from documents.checks import changed_password_check
 from documents.checks import parser_check
 
-__all__ = ["changed_password_check", "parser_check"]
+__all__ = ["parser_check"]
@@ -60,7 +60,6 @@ class DocumentAdmin(GuardedModelAdmin):
         "added",
         "modified",
         "mime_type",
-        "storage_type",
         "filename",
         "checksum",
         "archive_filename",
@@ -1,60 +1,12 @@
-import textwrap
-
 from django.conf import settings
 from django.core.checks import Error
 from django.core.checks import Warning
 from django.core.checks import register
-from django.core.exceptions import FieldError
-from django.db.utils import OperationalError
-from django.db.utils import ProgrammingError
 
 from documents.signals import document_consumer_declaration
 from documents.templating.utils import convert_format_str_to_template_format
 
 
-@register()
-def changed_password_check(app_configs, **kwargs):
-    from documents.models import Document
-    from paperless.db import GnuPG
-
-    try:
-        encrypted_doc = (
-            Document.objects.filter(
-                storage_type=Document.STORAGE_TYPE_GPG,
-            )
-            .only("pk", "storage_type")
-            .first()
-        )
-    except (OperationalError, ProgrammingError, FieldError):
-        return []  # No documents table yet
-
-    if encrypted_doc:
-        if not settings.PASSPHRASE:
-            return [
-                Error(
-                    "The database contains encrypted documents but no password is set.",
-                ),
-            ]
-
-        if not GnuPG.decrypted(encrypted_doc.source_file):
-            return [
-                Error(
-                    textwrap.dedent(
-                        """
-                        The current password doesn't match the password of the
-                        existing documents.
-
-                        If you intend to change your password, you must first export
-                        all of the old documents, start fresh with the new password
-                        and then re-import them."
-                        """,
-                    ),
-                ),
-            ]
-
-    return []
-
-
 @register()
 def parser_check(app_configs, **kwargs):
     parsers = []
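For context, the removed `changed_password_check` followed Django's standard system-check pattern: a function decorated with `@register()` returns a list of `Error`/`Warning` messages (an empty list means the check passes), and it is re-exported from `documents/__init__.py` so that importing the package registers it at startup. A minimal illustrative sketch; the check body below is hypothetical and not part of the codebase:

```python
from django.conf import settings
from django.core.checks import Warning, register


@register()
def originals_dir_check(app_configs, **kwargs):
    # Illustrative only: anything returned here is reported by `python3 manage.py check`.
    if not settings.ORIGINALS_DIR.exists():
        return [Warning(f"Originals directory {settings.ORIGINALS_DIR} does not exist.")]
    return []
```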
@@ -128,7 +128,7 @@ def thumbnail_last_modified(request, pk: int) -> datetime | None:
     Cache should be (slightly?) faster than filesystem
     """
     try:
-        doc = Document.objects.only("storage_type").get(pk=pk)
+        doc = Document.objects.only("pk").get(pk=pk)
         if not doc.thumbnail_path.exists():
             return None
         doc_key = get_thumbnail_modified_key(pk)
@@ -497,7 +497,6 @@ class ConsumerPlugin(
                 create_source_path_directory(document.source_path)
 
                 self._write(
-                    document.storage_type,
                     self.unmodified_original
                     if self.unmodified_original is not None
                     else self.working_copy,
@@ -505,7 +504,6 @@ class ConsumerPlugin(
                 )
 
                 self._write(
-                    document.storage_type,
                     thumbnail,
                     document.thumbnail_path,
                 )
@@ -517,7 +515,6 @@ class ConsumerPlugin(
                     )
                     create_source_path_directory(document.archive_path)
                     self._write(
-                        document.storage_type,
                         archive_path,
                         document.archive_path,
                     )
@@ -637,8 +634,6 @@ class ConsumerPlugin(
             )
             self.log.debug(f"Creation date from st_mtime: {create_date}")
 
-        storage_type = Document.STORAGE_TYPE_UNENCRYPTED
-
         if self.metadata.filename:
             title = Path(self.metadata.filename).stem
         else:
@@ -665,7 +660,6 @@ class ConsumerPlugin(
             checksum=hashlib.md5(file_for_checksum.read_bytes()).hexdigest(),
             created=create_date,
             modified=create_date,
-            storage_type=storage_type,
             page_count=page_count,
             original_filename=self.filename,
         )
@@ -736,7 +730,7 @@ class ConsumerPlugin(
             }
             CustomFieldInstance.objects.create(**args)  # adds to document
 
-    def _write(self, storage_type, source, target):
+    def _write(self, source, target):
         with (
             Path(source).open("rb") as read_file,
             Path(target).open("wb") as write_file,
@@ -126,7 +126,6 @@ def generate_filename(
     doc: Document,
     *,
     counter=0,
-    append_gpg=True,
     archive_filename=False,
 ) -> Path:
     base_path: Path | None = None
@@ -170,8 +169,4 @@ def generate_filename(
         final_filename = f"{doc.pk:07}{counter_str}{filetype_str}"
         full_path = Path(final_filename)
 
-    # Add GPG extension if needed
-    if append_gpg and doc.storage_type == doc.STORAGE_TYPE_GPG:
-        full_path = full_path.with_suffix(full_path.suffix + ".gpg")
-
     return full_path
@@ -1,93 +0,0 @@
-from pathlib import Path
-
-from django.conf import settings
-from django.core.management.base import BaseCommand
-from django.core.management.base import CommandError
-
-from documents.models import Document
-from paperless.db import GnuPG
-
-
-class Command(BaseCommand):
-    help = (
-        "This is how you migrate your stored documents from an encrypted "
-        "state to an unencrypted one (or vice-versa)"
-    )
-
-    def add_arguments(self, parser) -> None:
-        parser.add_argument(
-            "--passphrase",
-            help=(
-                "If PAPERLESS_PASSPHRASE isn't set already, you need to specify it here"
-            ),
-        )
-
-    def handle(self, *args, **options) -> None:
-        try:
-            self.stdout.write(
-                self.style.WARNING(
-                    "\n\n"
-                    "WARNING: This script is going to work directly on your "
-                    "document originals, so\n"
-                    "WARNING: you probably shouldn't run "
-                    "this unless you've got a recent backup\n"
-                    "WARNING: handy. It "
-                    "*should* work without a hitch, but be safe and backup your\n"
-                    "WARNING: stuff first.\n\n"
-                    "Hit Ctrl+C to exit now, or Enter to "
-                    "continue.\n\n",
-                ),
-            )
-            _ = input()
-        except KeyboardInterrupt:
-            return
-
-        passphrase = options["passphrase"] or settings.PASSPHRASE
-        if not passphrase:
-            raise CommandError(
-                "Passphrase not defined. Please set it with --passphrase or "
-                "by declaring it in your environment or your config.",
-            )
-
-        self.__gpg_to_unencrypted(passphrase)
-
-    def __gpg_to_unencrypted(self, passphrase: str) -> None:
-        encrypted_files = Document.objects.filter(
-            storage_type=Document.STORAGE_TYPE_GPG,
-        )
-
-        for document in encrypted_files:
-            self.stdout.write(f"Decrypting {document}")
-
-            old_paths = [document.source_path, document.thumbnail_path]
-
-            with document.source_file as file_handle:
-                raw_document = GnuPG.decrypted(file_handle, passphrase)
-            with document.thumbnail_file as file_handle:
-                raw_thumb = GnuPG.decrypted(file_handle, passphrase)
-
-            document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
-
-            ext: str = Path(document.filename).suffix
-
-            if not ext == ".gpg":
-                raise CommandError(
-                    f"Abort: encrypted file {document.source_path} does not "
-                    f"end with .gpg",
-                )
-
-            document.filename = Path(document.filename).stem
-
-            with document.source_path.open("wb") as f:
-                f.write(raw_document)
-
-            with document.thumbnail_path.open("wb") as f:
-                f.write(raw_thumb)
-
-            Document.objects.filter(id=document.id).update(
-                storage_type=document.storage_type,
-                filename=document.filename,
-            )
-
-            for path in old_paths:
-                path.unlink()
@@ -3,7 +3,6 @@ import json
 import os
 import shutil
 import tempfile
-import time
 from pathlib import Path
 from typing import TYPE_CHECKING
 
@@ -56,7 +55,6 @@ from documents.settings import EXPORTER_FILE_NAME
 from documents.settings import EXPORTER_THUMBNAIL_NAME
 from documents.utils import copy_file_with_basic_stats
 from paperless import version
-from paperless.db import GnuPG
 from paperless.models import ApplicationConfiguration
 from paperless_mail.models import MailAccount
 from paperless_mail.models import MailRule
@@ -316,20 +314,17 @@ class Command(CryptMixin, BaseCommand):
             total=len(document_manifest),
             disable=self.no_progress_bar,
         ):
-            # 3.1. store files unencrypted
-            document_dict["fields"]["storage_type"] = Document.STORAGE_TYPE_UNENCRYPTED
-
             document = document_map[document_dict["pk"]]
 
-            # 3.2. generate a unique filename
+            # 3.1. generate a unique filename
             base_name = self.generate_base_name(document)
 
-            # 3.3. write filenames into manifest
+            # 3.2. write filenames into manifest
             original_target, thumbnail_target, archive_target = (
                 self.generate_document_targets(document, base_name, document_dict)
             )
 
-            # 3.4. write files to target folder
+            # 3.3. write files to target folder
             if not self.data_only:
                 self.copy_document_files(
                     document,
@@ -423,7 +418,6 @@ class Command(CryptMixin, BaseCommand):
             base_name = generate_filename(
                 document,
                 counter=filename_counter,
-                append_gpg=False,
             )
         else:
             base_name = document.get_public_filename(counter=filename_counter)
@@ -482,46 +476,24 @@ class Command(CryptMixin, BaseCommand):
 
         If the document is encrypted, the files are decrypted before copying them to the target location.
         """
-        if document.storage_type == Document.STORAGE_TYPE_GPG:
-            t = int(time.mktime(document.created.timetuple()))
-
-            original_target.parent.mkdir(parents=True, exist_ok=True)
-            with document.source_file as out_file:
-                original_target.write_bytes(GnuPG.decrypted(out_file))
-                os.utime(original_target, times=(t, t))
-
-            if thumbnail_target:
-                thumbnail_target.parent.mkdir(parents=True, exist_ok=True)
-                with document.thumbnail_file as out_file:
-                    thumbnail_target.write_bytes(GnuPG.decrypted(out_file))
-                    os.utime(thumbnail_target, times=(t, t))
-
-            if archive_target:
-                archive_target.parent.mkdir(parents=True, exist_ok=True)
-                if TYPE_CHECKING:
-                    assert isinstance(document.archive_path, Path)
-                with document.archive_path as out_file:
-                    archive_target.write_bytes(GnuPG.decrypted(out_file))
-                    os.utime(archive_target, times=(t, t))
-        else:
-            self.check_and_copy(
-                document.source_path,
-                document.checksum,
-                original_target,
-            )
-
-            if thumbnail_target:
-                self.check_and_copy(document.thumbnail_path, None, thumbnail_target)
-
-            if archive_target:
-                if TYPE_CHECKING:
-                    assert isinstance(document.archive_path, Path)
-                self.check_and_copy(
-                    document.archive_path,
-                    document.archive_checksum,
-                    archive_target,
-                )
+        self.check_and_copy(
+            document.source_path,
+            document.checksum,
+            original_target,
+        )
+
+        if thumbnail_target:
+            self.check_and_copy(document.thumbnail_path, None, thumbnail_target)
+
+        if archive_target:
+            if TYPE_CHECKING:
+                assert isinstance(document.archive_path, Path)
+            self.check_and_copy(
+                document.archive_path,
+                document.archive_checksum,
+                archive_target,
+            )
 
     def check_and_write_json(
         self,
         content: list[dict] | dict,
@@ -383,8 +383,6 @@ class Command(CryptMixin, BaseCommand):
         else:
             archive_path = None
 
-        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
-
         with FileLock(settings.MEDIA_LOCK):
             if Path(document.source_path).is_file():
                 raise FileExistsError(document.source_path)
@@ -0,0 +1,16 @@
+# Generated by Django 5.2.9 on 2026-01-24 23:05
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("documents", "0003_workflowaction_order"),
+    ]
+
+    operations = [
+        migrations.RemoveField(
+            model_name="document",
+            name="storage_type",
+        ),
+    ]

@@ -0,0 +1,38 @@
+# Generated by Django 5.2.7 on 2025-12-29 03:56
+
+from django.db import migrations
+from django.db import models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("documents", "0004_remove_document_storage_type"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="workflowaction",
+            name="passwords",
+            field=models.TextField(
+                blank=True,
+                help_text="Passwords to try when removing PDF protection. Separate with commas or new lines.",
+                null=True,
+                verbose_name="passwords",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="workflowaction",
+            name="type",
+            field=models.PositiveIntegerField(
+                choices=[
+                    (1, "Assignment"),
+                    (2, "Removal"),
+                    (3, "Email"),
+                    (4, "Webhook"),
+                    (5, "Password removal"),
+                ],
+                default=1,
+                verbose_name="Workflow Action Type",
+            ),
+        ),
+    ]
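The two migrations above apply in order: first the `Document.storage_type` column is dropped, then the `WorkflowAction.passwords` field is added. A brief sketch of previewing and applying them with Django's standard tooling:

```python
from django.core.management import call_command

call_command("showmigrations", "documents")  # list applied and pending documents migrations
call_command("migrate", "documents")         # apply the two new migrations shown above
```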
@@ -154,13 +154,6 @@ class StoragePath(MatchingModel):
 
 
 class Document(SoftDeleteModel, ModelWithOwner):
-    STORAGE_TYPE_UNENCRYPTED = "unencrypted"
-    STORAGE_TYPE_GPG = "gpg"
-    STORAGE_TYPES = (
-        (STORAGE_TYPE_UNENCRYPTED, _("Unencrypted")),
-        (STORAGE_TYPE_GPG, _("Encrypted with GNU Privacy Guard")),
-    )
-
     correspondent = models.ForeignKey(
         Correspondent,
         blank=True,
@@ -250,14 +243,6 @@ class Document(SoftDeleteModel, ModelWithOwner):
         db_index=True,
     )
 
-    storage_type = models.CharField(
-        _("storage type"),
-        max_length=11,
-        choices=STORAGE_TYPES,
-        default=STORAGE_TYPE_UNENCRYPTED,
-        editable=False,
-    )
-
     added = models.DateTimeField(
         _("added"),
         default=timezone.now,
@@ -353,12 +338,7 @@ class Document(SoftDeleteModel, ModelWithOwner):
 
     @property
     def source_path(self) -> Path:
-        if self.filename:
-            fname = str(self.filename)
-        else:
-            fname = f"{self.pk:07}{self.file_type}"
-        if self.storage_type == self.STORAGE_TYPE_GPG:
-            fname += ".gpg"  # pragma: no cover
-
+        fname = str(self.filename) if self.filename else f"{self.pk:07}{self.file_type}"
         return (settings.ORIGINALS_DIR / Path(fname)).resolve()
 
@@ -407,8 +387,6 @@ class Document(SoftDeleteModel, ModelWithOwner):
     @property
     def thumbnail_path(self) -> Path:
         webp_file_name = f"{self.pk:07}.webp"
-        if self.storage_type == self.STORAGE_TYPE_GPG:
-            webp_file_name += ".gpg"
-
         webp_file_path = settings.THUMBNAIL_DIR / Path(webp_file_name)
 
@@ -1288,6 +1266,10 @@ class WorkflowAction(models.Model):
             4,
             _("Webhook"),
         )
+        PASSWORD_REMOVAL = (
+            5,
+            _("Password removal"),
+        )
 
     type = models.PositiveIntegerField(
         _("Workflow Action Type"),
@@ -1517,6 +1499,15 @@ class WorkflowAction(models.Model):
         verbose_name=_("webhook"),
     )
 
+    passwords = models.TextField(
+        _("passwords"),
+        null=True,
+        blank=True,
+        help_text=_(
+            "Passwords to try when removing PDF protection. Separate with commas or new lines.",
+        ),
+    )
+
     class Meta:
         verbose_name = _("workflow action")
         verbose_name_plural = _("workflow actions")
@@ -2453,6 +2453,7 @@ class WorkflowActionSerializer(serializers.ModelSerializer):
             "remove_change_groups",
             "email",
             "webhook",
+            "passwords",
         ]
 
     def validate(self, attrs):
@@ -2509,6 +2510,20 @@ class WorkflowActionSerializer(serializers.ModelSerializer):
                     "Webhook data is required for webhook actions",
                 )
 
+        if (
+            "type" in attrs
+            and attrs["type"] == WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL
+        ):
+            passwords = attrs.get("passwords")
+            if passwords is None or not isinstance(passwords, str):
+                raise serializers.ValidationError(
+                    "Passwords are required for password removal actions",
+                )
+            if not passwords.strip():
+                raise serializers.ValidationError(
+                    "Passwords are required for password removal actions",
+                )
+
         return attrs
 
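Illustrative serializer-level behaviour of the new rule, as a sketch only: the import path and the standalone `is_valid()` calls are assumptions, and the API-level equivalent is covered by the tests further down in this changeset.

```python
from documents.serialisers import WorkflowActionSerializer  # module path assumed

# Type 5 is the new PASSWORD_REMOVAL action; a non-empty passwords string is accepted.
valid = WorkflowActionSerializer(data={"type": 5, "passwords": "first,second"})
assert valid.is_valid()

# Omitting passwords (or sending only whitespace) trips the new validation error.
missing = WorkflowActionSerializer(data={"type": 5})
assert not missing.is_valid()
assert "Passwords are required" in str(missing.errors)
```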
@@ -48,6 +48,7 @@ from documents.permissions import get_objects_for_user_owner_aware
 from documents.templating.utils import convert_format_str_to_template_format
 from documents.workflows.actions import build_workflow_action_context
 from documents.workflows.actions import execute_email_action
+from documents.workflows.actions import execute_password_removal_action
 from documents.workflows.actions import execute_webhook_action
 from documents.workflows.mutations import apply_assignment_to_document
 from documents.workflows.mutations import apply_assignment_to_overrides
@@ -822,6 +823,8 @@ def run_workflows(
                     logging_group,
                     original_file,
                 )
+            elif action.type == WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL:
+                execute_password_removal_action(action, document, logging_group)
 
         if not use_overrides:
             # limit title to 128 characters
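The `execute_password_removal_action` helper itself lives in `documents/workflows/actions.py` and is not included in this diff. A sketch of what it plausibly does, inferred from the tests added later in this changeset (password splitting, ordering, and the `bulk_edit.remove_password` call signature all come from the mock assertions there):

```python
import logging
import re

from documents import bulk_edit
from documents.models import Document

logger = logging.getLogger("paperless.workflows")


def execute_password_removal_action_sketch(action, document, logging_group=None):
    # A ConsumableDocument (during consumption) has no database id yet: defer until
    # a real Document exists, exactly as the consumption test expects.
    if not isinstance(document, Document):
        return
    # Split the stored text on commas and newlines, dropping blank entries.
    candidates = [
        p.strip() for p in re.split(r"[,\n]", action.passwords or "") if p.strip()
    ]
    for password in candidates:
        try:
            bulk_edit.remove_password(
                [document.id],
                password=password,
                update_document=True,
                user=document.owner,
            )
            return  # stop at the first password that works
        except Exception:
            logger.debug(
                "Password removal failed, trying the next password",
                extra={"group": logging_group},
            )
```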
@@ -108,7 +108,6 @@ def create_dummy_document():
         page_count=5,
         created=timezone.now(),
         modified=timezone.now(),
-        storage_type=Document.STORAGE_TYPE_UNENCRYPTED,
         added=timezone.now(),
         filename="/dummy/filename.pdf",
         archive_filename="/dummy/archive_filename.pdf",
BIN src/documents/tests/samples/documents/originals/0000004.pdf (new file; binary not shown)
BIN src/documents/tests/samples/documents/thumbnails/0000004.webp (new file; binary not shown, 2.6 KiB)
@@ -808,3 +808,57 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.action.refresh_from_db()
         self.assertEqual(self.action.assign_title, "Patched Title")
+
+    def test_password_action_passwords_field(self):
+        """
+        GIVEN:
+            - Nothing
+        WHEN:
+            - A workflow password removal action is created with passwords set
+        THEN:
+            - The passwords field is correctly stored and retrieved
+        """
+        passwords = "password1,password2\npassword3"
+        response = self.client.post(
+            "/api/workflow_actions/",
+            {
+                "type": WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL,
+                "passwords": passwords,
+            },
+        )
+        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+        self.assertEqual(response.data["passwords"], passwords)
+
+    def test_password_action_no_passwords_field(self):
+        """
+        GIVEN:
+            - Nothing
+        WHEN:
+            - A workflow password removal action is created with no passwords set
+            - A workflow password removal action is created with passwords set to empty string
+        THEN:
+            - The required validation error is raised
+        """
+        response = self.client.post(
+            "/api/workflow_actions/",
+            {
+                "type": WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL,
+            },
+        )
+        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+        self.assertIn(
+            "Passwords are required",
+            str(response.data["non_field_errors"][0]),
+        )
+        response = self.client.post(
+            "/api/workflow_actions/",
+            {
+                "type": WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL,
+                "passwords": "",
+            },
+        )
+        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+        self.assertIn(
+            "Passwords are required",
+            str(response.data["non_field_errors"][0]),
+        )
@@ -1,4 +1,3 @@
-import textwrap
 from unittest import mock
 
 from django.core.checks import Error
@@ -6,60 +5,11 @@ from django.core.checks import Warning
 from django.test import TestCase
 from django.test import override_settings
 
-from documents.checks import changed_password_check
 from documents.checks import filename_format_check
 from documents.checks import parser_check
-from documents.models import Document
-from documents.tests.factories import DocumentFactory
 
 
 class TestDocumentChecks(TestCase):
-    def test_changed_password_check_empty_db(self):
-        self.assertListEqual(changed_password_check(None), [])
-
-    def test_changed_password_check_no_encryption(self):
-        DocumentFactory.create(storage_type=Document.STORAGE_TYPE_UNENCRYPTED)
-        self.assertListEqual(changed_password_check(None), [])
-
-    def test_encrypted_missing_passphrase(self):
-        DocumentFactory.create(storage_type=Document.STORAGE_TYPE_GPG)
-        msgs = changed_password_check(None)
-        self.assertEqual(len(msgs), 1)
-        msg_text = msgs[0].msg
-        self.assertEqual(
-            msg_text,
-            "The database contains encrypted documents but no password is set.",
-        )
-
-    @override_settings(
-        PASSPHRASE="test",
-    )
-    @mock.patch("paperless.db.GnuPG.decrypted")
-    @mock.patch("documents.models.Document.source_file")
-    def test_encrypted_decrypt_fails(self, mock_decrypted, mock_source_file):
-        mock_decrypted.return_value = None
-        mock_source_file.return_value = b""
-
-        DocumentFactory.create(storage_type=Document.STORAGE_TYPE_GPG)
-
-        msgs = changed_password_check(None)
-
-        self.assertEqual(len(msgs), 1)
-        msg_text = msgs[0].msg
-        self.assertEqual(
-            msg_text,
-            textwrap.dedent(
-                """
-                The current password doesn't match the password of the
-                existing documents.
-
-                If you intend to change your password, you must first export
-                all of the old documents, start fresh with the new password
-                and then re-import them."
-                """,
-            ),
-        )
-
     def test_parser_check(self):
         self.assertEqual(parser_check(None), [])
 
@@ -34,22 +34,14 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     def test_generate_source_filename(self):
         document = Document()
         document.mime_type = "application/pdf"
-        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
 
         self.assertEqual(generate_filename(document), Path(f"{document.pk:07d}.pdf"))
 
-        document.storage_type = Document.STORAGE_TYPE_GPG
-        self.assertEqual(
-            generate_filename(document),
-            Path(f"{document.pk:07d}.pdf.gpg"),
-        )
-
     @override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
     def test_file_renaming(self):
         document = Document()
         document.mime_type = "application/pdf"
-        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
 
         # Test default source_path
@@ -63,11 +55,6 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         # Ensure that filename is properly generated
         self.assertEqual(document.filename, Path("none/none.pdf"))
 
-        # Enable encryption and check again
-        document.storage_type = Document.STORAGE_TYPE_GPG
-        document.filename = generate_filename(document)
-        self.assertEqual(document.filename, Path("none/none.pdf.gpg"))
-
         document.save()
 
         # test that creating dirs for the source_path creates the correct directory
@@ -87,14 +74,14 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             settings.ORIGINALS_DIR / "none",
         )
         self.assertIsFile(
-            settings.ORIGINALS_DIR / "test" / "test.pdf.gpg",
+            settings.ORIGINALS_DIR / "test" / "test.pdf",
         )
 
     @override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
     def test_file_renaming_missing_permissions(self):
         document = Document()
         document.mime_type = "application/pdf"
-        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
 
         # Ensure that filename is properly generated
@@ -128,14 +115,13 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     def test_file_renaming_database_error(self):
         Document.objects.create(
             mime_type="application/pdf",
-            storage_type=Document.STORAGE_TYPE_UNENCRYPTED,
             checksum="AAAAA",
         )
 
         document = Document()
         document.mime_type = "application/pdf"
         document.checksum = "BBBBB"
-        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
 
         # Ensure that filename is properly generated
@@ -170,7 +156,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     def test_document_delete(self):
         document = Document()
         document.mime_type = "application/pdf"
-        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
 
         # Ensure that filename is properly generated
@@ -196,7 +182,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     def test_document_delete_trash_dir(self):
         document = Document()
         document.mime_type = "application/pdf"
-        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
 
         # Ensure that filename is properly generated
@@ -221,7 +207,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         # Create an identical document and ensure it is trashed under a new name
         document = Document()
         document.mime_type = "application/pdf"
-        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
         document.filename = generate_filename(document)
         document.save()
@@ -235,7 +221,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     def test_document_delete_nofile(self):
         document = Document()
         document.mime_type = "application/pdf"
-        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
 
         document.delete()
@@ -245,7 +231,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     def test_directory_not_empty(self):
         document = Document()
         document.mime_type = "application/pdf"
-        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
 
         # Ensure that filename is properly generated
@@ -362,7 +348,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
     def test_nested_directory_cleanup(self):
         document = Document()
         document.mime_type = "application/pdf"
-        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
 
         # Ensure that filename is properly generated
@@ -390,7 +376,6 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         document = Document()
         document.pk = 1
         document.mime_type = "application/pdf"
-        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
 
         self.assertEqual(generate_filename(document), Path("0000001.pdf"))
 
@@ -403,7 +388,6 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         document = Document()
         document.pk = 1
         document.mime_type = "application/pdf"
-        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
 
         self.assertEqual(generate_filename(document), Path("0000001.pdf"))
 
@@ -429,7 +413,6 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         document = Document()
         document.pk = 1
         document.mime_type = "application/pdf"
-        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
 
         self.assertEqual(generate_filename(document), Path("0000001.pdf"))
 
@@ -438,7 +421,6 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         document = Document()
         document.pk = 1
         document.mime_type = "application/pdf"
-        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
 
        self.assertEqual(generate_filename(document), Path("0000001.pdf"))
 
@@ -1258,7 +1240,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
             title="doc1",
             mime_type="application/pdf",
         )
-        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
 
         # Ensure that filename is properly generated
@@ -1732,7 +1714,6 @@ class TestPathDateLocalization:
         document = DocumentFactory.create(
             title="My Document",
             mime_type="application/pdf",
-            storage_type=Document.STORAGE_TYPE_UNENCRYPTED,
             created=self.TEST_DATE,  # 2023-10-26 (which is a Thursday)
         )
         with override_settings(FILENAME_FORMAT=filename_format):
@@ -1,7 +1,5 @@
 import filecmp
-import hashlib
 import shutil
-import tempfile
 from io import StringIO
 from pathlib import Path
 from unittest import mock
@@ -96,66 +94,6 @@ class TestArchiver(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         self.assertEqual(doc2.archive_filename, "document_01.pdf")
 
 
-class TestDecryptDocuments(FileSystemAssertsMixin, TestCase):
-    @mock.patch("documents.management.commands.decrypt_documents.input")
-    def test_decrypt(self, m):
-        media_dir = tempfile.mkdtemp()
-        originals_dir = Path(media_dir) / "documents" / "originals"
-        thumb_dir = Path(media_dir) / "documents" / "thumbnails"
-        originals_dir.mkdir(parents=True, exist_ok=True)
-        thumb_dir.mkdir(parents=True, exist_ok=True)
-
-        with override_settings(
-            ORIGINALS_DIR=originals_dir,
-            THUMBNAIL_DIR=thumb_dir,
-            PASSPHRASE="test",
-            FILENAME_FORMAT=None,
-        ):
-            doc = Document.objects.create(
-                checksum="82186aaa94f0b98697d704b90fd1c072",
-                title="wow",
-                filename="0000004.pdf.gpg",
-                mime_type="application/pdf",
-                storage_type=Document.STORAGE_TYPE_GPG,
-            )
-
-            shutil.copy(
-                (
-                    Path(__file__).parent
-                    / "samples"
-                    / "documents"
-                    / "originals"
-                    / "0000004.pdf.gpg"
-                ),
-                originals_dir / "0000004.pdf.gpg",
-            )
-            shutil.copy(
-                (
-                    Path(__file__).parent
-                    / "samples"
-                    / "documents"
-                    / "thumbnails"
-                    / "0000004.webp.gpg"
-                ),
-                thumb_dir / f"{doc.id:07}.webp.gpg",
-            )
-
-            call_command("decrypt_documents")
-
-            doc.refresh_from_db()
-
-            self.assertEqual(doc.storage_type, Document.STORAGE_TYPE_UNENCRYPTED)
-            self.assertEqual(doc.filename, "0000004.pdf")
-            self.assertIsFile(Path(originals_dir) / "0000004.pdf")
-            self.assertIsFile(doc.source_path)
-            self.assertIsFile(Path(thumb_dir) / f"{doc.id:07}.webp")
-            self.assertIsFile(doc.thumbnail_path)
-
-            with doc.source_file as f:
-                checksum: str = hashlib.md5(f.read()).hexdigest()
-            self.assertEqual(checksum, doc.checksum)
-
-
 class TestMakeIndex(TestCase):
     @mock.patch("documents.management.commands.document_index.index_reindex")
     def test_reindex(self, m):
@@ -86,9 +86,8 @@ class TestExportImport(
             content="Content",
             checksum="82186aaa94f0b98697d704b90fd1c072",
             title="wow_dec",
-            filename="0000004.pdf.gpg",
+            filename="0000004.pdf",
             mime_type="application/pdf",
-            storage_type=Document.STORAGE_TYPE_GPG,
         )
 
         self.note = Note.objects.create(
@@ -242,11 +241,6 @@ class TestExportImport(
             checksum = hashlib.md5(f.read()).hexdigest()
             self.assertEqual(checksum, element["fields"]["checksum"])
 
-            self.assertEqual(
-                element["fields"]["storage_type"],
-                Document.STORAGE_TYPE_UNENCRYPTED,
-            )
-
             if document_exporter.EXPORTER_ARCHIVE_NAME in element:
                 fname = (
                     self.target / element[document_exporter.EXPORTER_ARCHIVE_NAME]
@@ -436,7 +430,7 @@ class TestExportImport(
         Document.objects.create(
             checksum="AAAAAAAAAAAAAAAAA",
             title="wow",
-            filename="0000004.pdf",
+            filename="0000010.pdf",
             mime_type="application/pdf",
         )
         self.assertRaises(FileNotFoundError, call_command, "document_exporter", target)
@@ -2,6 +2,7 @@ import datetime
 import json
 import shutil
 import socket
+import tempfile
 from datetime import timedelta
 from pathlib import Path
 from typing import TYPE_CHECKING
@@ -60,6 +61,7 @@ from documents.tests.utils import DirectoriesMixin
 from documents.tests.utils import DummyProgressManager
 from documents.tests.utils import FileSystemAssertsMixin
 from documents.tests.utils import SampleDirMixin
+from documents.workflows.actions import execute_password_removal_action
 from paperless_mail.models import MailAccount
 from paperless_mail.models import MailRule

@@ -3613,6 +3615,196 @@ class TestWorkflows(

         mock_post.assert_called_once()

+    @mock.patch("documents.bulk_edit.remove_password")
+    def test_password_removal_action_attempts_multiple_passwords(
+        self,
+        mock_remove_password,
+    ):
+        """
+        GIVEN:
+            - Workflow password removal action
+            - Multiple passwords provided
+        WHEN:
+            - Document updated triggering the workflow
+        THEN:
+            - Password removal is attempted until one succeeds
+        """
+        doc = Document.objects.create(
+            title="Protected",
+            checksum="pw-checksum",
+        )
+        trigger = WorkflowTrigger.objects.create(
+            type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
+        )
+        action = WorkflowAction.objects.create(
+            type=WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL,
+            passwords="wrong, right\n extra ",
+        )
+        workflow = Workflow.objects.create(name="Password workflow")
+        workflow.triggers.add(trigger)
+        workflow.actions.add(action)
+
+        mock_remove_password.side_effect = [
+            ValueError("wrong password"),
+            "OK",
+        ]
+
+        run_workflows(trigger.type, doc)
+
+        assert mock_remove_password.call_count == 2
+        mock_remove_password.assert_has_calls(
+            [
+                mock.call(
+                    [doc.id],
+                    password="wrong",
+                    update_document=True,
+                    user=doc.owner,
+                ),
+                mock.call(
+                    [doc.id],
+                    password="right",
+                    update_document=True,
+                    user=doc.owner,
+                ),
+            ],
+        )
+
+    @mock.patch("documents.bulk_edit.remove_password")
+    def test_password_removal_action_fails_without_correct_password(
+        self,
+        mock_remove_password,
+    ):
+        """
+        GIVEN:
+            - Workflow password removal action
+            - No correct password provided
+        WHEN:
+            - Document updated triggering the workflow
+        THEN:
+            - Password removal is attempted for all passwords and fails
+        """
+        doc = Document.objects.create(
+            title="Protected",
+            checksum="pw-checksum-2",
+        )
+        trigger = WorkflowTrigger.objects.create(
+            type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
+        )
+        action = WorkflowAction.objects.create(
+            type=WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL,
+            passwords=" \n , ",
+        )
+        workflow = Workflow.objects.create(name="Password workflow missing passwords")
+        workflow.triggers.add(trigger)
+        workflow.actions.add(action)
+
+        run_workflows(trigger.type, doc)
+
+        mock_remove_password.assert_not_called()
+
+    @mock.patch("documents.bulk_edit.remove_password")
+    def test_password_removal_action_skips_without_passwords(
+        self,
+        mock_remove_password,
+    ):
+        """
+        GIVEN:
+            - Workflow password removal action with no passwords
+        WHEN:
+            - Workflow is run
+        THEN:
+            - Password removal is not attempted
+        """
+        doc = Document.objects.create(
+            title="Protected",
+            checksum="pw-checksum-2",
+        )
+        trigger = WorkflowTrigger.objects.create(
+            type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
+        )
+        action = WorkflowAction.objects.create(
+            type=WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL,
+            passwords="",
+        )
+        workflow = Workflow.objects.create(name="Password workflow missing passwords")
+        workflow.triggers.add(trigger)
+        workflow.actions.add(action)
+
+        run_workflows(trigger.type, doc)
+
+        mock_remove_password.assert_not_called()
+
+    @mock.patch("documents.bulk_edit.remove_password")
+    def test_password_removal_consumable_document_deferred(
+        self,
+        mock_remove_password,
+    ):
+        """
+        GIVEN:
+            - Workflow password removal action
+            - Simulated consumption trigger (a ConsumableDocument is used)
+        WHEN:
+            - Document consumption is finished
+        THEN:
+            - Password removal is attempted
+        """
+        action = WorkflowAction.objects.create(
+            type=WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL,
+            passwords="first, second",
+        )
+
+        temp_dir = Path(tempfile.mkdtemp())
+        original_file = temp_dir / "file.pdf"
+        original_file.write_bytes(b"pdf content")
+        consumable = ConsumableDocument(
+            source=DocumentSource.ApiUpload,
+            original_file=original_file,
+        )
+
+        execute_password_removal_action(action, consumable, logging_group=None)
+
+        mock_remove_password.assert_not_called()
+
+        mock_remove_password.side_effect = [
+            ValueError("bad password"),
+            "OK",
+        ]
+
+        doc = Document.objects.create(
+            checksum="pw-checksum-consumed",
+            title="Protected",
+        )
+
+        document_consumption_finished.send(
+            sender=self.__class__,
+            document=doc,
+        )
+
+        assert mock_remove_password.call_count == 2
+        mock_remove_password.assert_has_calls(
+            [
+                mock.call(
+                    [doc.id],
+                    password="first",
+                    update_document=True,
+                    user=doc.owner,
+                ),
+                mock.call(
+                    [doc.id],
+                    password="second",
+                    update_document=True,
+                    user=doc.owner,
+                ),
+            ],
+        )
+
+        # ensure handler disconnected after first run
+        document_consumption_finished.send(
+            sender=self.__class__,
+            document=doc,
+        )
+        assert mock_remove_password.call_count == 2
+

 class TestWebhookSend:
     def test_send_webhook_data_or_json(
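The new tests lean on `unittest.mock` side effects to simulate a wrong password followed by a correct one. A self-contained sketch of that pattern, independent of paperless-ngx (the `remove_password` mock and loop below are illustrative only):

```python
from unittest import mock

# First call raises, second call succeeds, mirroring the tests above.
remove_password = mock.Mock(side_effect=[ValueError("wrong password"), "OK"])

result = None
for candidate in ["wrong", "right"]:
    try:
        result = remove_password(candidate)
        break
    except ValueError:
        continue

assert remove_password.call_count == 2
assert result == "OK"
```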
@@ -195,7 +195,6 @@ from paperless import version
 from paperless.celery import app as celery_app
 from paperless.config import AIConfig
 from paperless.config import GeneralConfig
-from paperless.db import GnuPG
 from paperless.models import ApplicationConfiguration
 from paperless.serialisers import GroupSerializer
 from paperless.serialisers import UserSerializer
@@ -1071,10 +1070,8 @@ class DocumentViewSet(
                 doc,
             ):
                 return HttpResponseForbidden("Insufficient permissions")
-            if doc.storage_type == Document.STORAGE_TYPE_GPG:
-                handle = GnuPG.decrypted(doc.thumbnail_file)
-            else:
-                handle = doc.thumbnail_file
+            handle = doc.thumbnail_file

             return HttpResponse(handle, content_type="image/webp")
         except (FileNotFoundError, Document.DoesNotExist):
@@ -2824,9 +2821,6 @@ def serve_file(*, doc: Document, use_archive: bool, disposition: str):
     if mime_type in {"application/csv", "text/csv"} and disposition == "inline":
         mime_type = "text/plain"

-    if doc.storage_type == Document.STORAGE_TYPE_GPG:
-        file_handle = GnuPG.decrypted(file_handle)
-
     response = HttpResponse(file_handle, content_type=mime_type)
     # Firefox is not able to handle unicode characters in filename field
     # RFC 5987 addresses this issue
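With the GPG branch removed, both the thumbnail endpoint and `serve_file` simply stream the stored file handle. A minimal Django sketch of that response pattern, assuming an object exposing a readable `thumbnail_file` (the view and argument names are illustrative, not the project's API):

```python
from django.http import HttpResponse

def thumb(request, doc):
    # No decryption step remains: stream the stored thumbnail as-is.
    return HttpResponse(doc.thumbnail_file, content_type="image/webp")
```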
@@ -1,4 +1,5 @@
 import logging
+import re
 from pathlib import Path

 from django.conf import settings
@@ -14,6 +15,7 @@ from documents.models import Document
 from documents.models import DocumentType
 from documents.models import WorkflowAction
 from documents.models import WorkflowTrigger
+from documents.signals import document_consumption_finished
 from documents.templating.workflows import parse_w_workflow_placeholders
 from documents.workflows.webhooks import send_webhook

@@ -265,3 +267,74 @@ def execute_webhook_action(
             f"Error occurred sending webhook: {e}",
             extra={"group": logging_group},
         )
+
+
+def execute_password_removal_action(
+    action: WorkflowAction,
+    document: Document | ConsumableDocument,
+    logging_group,
+) -> None:
+    """
+    Try to remove a password from a document using the configured list.
+    """
+    passwords = action.passwords
+    if not passwords:
+        logger.warning(
+            "Password removal action %s has no passwords configured",
+            action.pk,
+            extra={"group": logging_group},
+        )
+        return
+
+    passwords = [
+        password.strip()
+        for password in re.split(r"[,\n]", passwords)
+        if password.strip()
+    ]
+
+    if isinstance(document, ConsumableDocument):
+        # hook the consumption-finished signal to attempt password removal later
+        def handler(sender, **kwargs):
+            consumed_document: Document = kwargs.get("document")
+            if consumed_document is not None:
+                execute_password_removal_action(
+                    action,
+                    consumed_document,
+                    logging_group,
+                )
+            document_consumption_finished.disconnect(handler)
+
+        document_consumption_finished.connect(handler, weak=False)
+        return
+
+    # import here to avoid circular dependency
+    from documents.bulk_edit import remove_password
+
+    for password in passwords:
+        try:
+            remove_password(
+                [document.id],
+                password=password,
+                update_document=True,
+                user=document.owner,
+            )
+            logger.info(
+                "Removed password from document %s using workflow action %s",
+                document.pk,
+                action.pk,
+                extra={"group": logging_group},
+            )
+            return
+        except ValueError as e:
+            logger.warning(
+                "Password removal failed for document %s with supplied password: %s",
+                document.pk,
+                e,
+                extra={"group": logging_group},
+            )
+
+    logger.error(
+        "Password removal failed for document %s after trying all provided passwords",
+        document.pk,
+        extra={"group": logging_group},
+    )
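The action stores its candidate passwords as a single free-text field and splits it on commas and newlines, trimming whitespace and dropping empty entries, before trying each candidate in order. A standalone sketch of just that parsing step (the `parse_passwords` helper name is illustrative; the patch inlines this as a list comprehension):

```python
import re

def parse_passwords(raw: str) -> list[str]:
    # Split on commas and newlines, trim whitespace, drop empty entries.
    return [p.strip() for p in re.split(r"[,\n]", raw) if p.strip()]

print(parse_passwords("wrong, right\n extra "))  # ['wrong', 'right', 'extra']
```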
File diff suppressed because it is too large
@@ -1,17 +0,0 @@
-import gnupg
-from django.conf import settings
-
-
-class GnuPG:
-    """
-    A handy singleton to use when handling encrypted files.
-    """
-
-    gpg = gnupg.GPG(gnupghome=settings.GNUPG_HOME)
-
-    @classmethod
-    def decrypted(cls, file_handle, passphrase=None):
-        if not passphrase:
-            passphrase = settings.PASSPHRASE
-
-        return cls.gpg.decrypt_file(file_handle, passphrase=passphrase).data
@@ -1203,19 +1203,6 @@ EMAIL_PARSE_DEFAULT_LAYOUT = __get_int(
     1, # MailRule.PdfLayout.TEXT_HTML but that can't be imported here
 )

-# Pre-2.x versions of Paperless stored your documents locally with GPG
-# encryption, but that is no longer the default. This behaviour is still
-# available, but it must be explicitly enabled by setting
-# `PAPERLESS_PASSPHRASE` in your environment or config file. The default is to
-# store these files unencrypted.
-#
-# Translation:
-# * If you're a new user, you can safely ignore this setting.
-# * If you're upgrading from 1.x, this must be set, OR you can run
-#   `./manage.py change_storage_type gpg unencrypted` to decrypt your files,
-#   after which you can unset this value.
-PASSPHRASE = os.getenv("PAPERLESS_PASSPHRASE")
-
 # Trigger a script after every successful document consumption?
 PRE_CONSUME_SCRIPT = os.getenv("PAPERLESS_PRE_CONSUME_SCRIPT")
 POST_CONSUME_SCRIPT = os.getenv("PAPERLESS_POST_CONSUME_SCRIPT")