Merge branch 'dev' into feature-document-versions-1218

This commit is contained in:
shamoon
2026-02-23 17:07:22 -08:00
36 changed files with 1147 additions and 167 deletions

View File

@@ -0,0 +1,29 @@
# Generated by Django 5.2.11 on 2026-02-14 19:19
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
    """Add the 'Move to trash' choice (value 6) to WorkflowAction.type."""

    # Applies on top of the previous documents migration.
    dependencies = [
        ("documents", "0011_optimize_integer_field_sizes"),
    ]

    operations = [
        # AlterField only rewrites field metadata (choices/verbose_name);
        # the column type and the default of 1 ("Assignment") are unchanged.
        migrations.AlterField(
            model_name="workflowaction",
            name="type",
            field=models.PositiveSmallIntegerField(
                choices=[
                    (1, "Assignment"),
                    (2, "Removal"),
                    (3, "Email"),
                    (4, "Webhook"),
                    (5, "Password removal"),
                    (6, "Move to trash"),
                ],
                default=1,
                verbose_name="Workflow Action Type",
            ),
        ),
    ]

View File

@@ -1439,6 +1439,10 @@ class WorkflowAction(models.Model):
5,
_("Password removal"),
)
MOVE_TO_TRASH = (
6,
_("Move to trash"),
)
type = models.PositiveSmallIntegerField(
_("Workflow Action Type"),

View File

@@ -48,6 +48,7 @@ from documents.permissions import get_objects_for_user_owner_aware
from documents.templating.utils import convert_format_str_to_template_format
from documents.workflows.actions import build_workflow_action_context
from documents.workflows.actions import execute_email_action
from documents.workflows.actions import execute_move_to_trash_action
from documents.workflows.actions import execute_password_removal_action
from documents.workflows.actions import execute_webhook_action
from documents.workflows.mutations import apply_assignment_to_document
@@ -58,6 +59,8 @@ from documents.workflows.utils import get_workflows_for_trigger
from paperless.config import AIConfig
if TYPE_CHECKING:
import uuid
from documents.classifier import DocumentClassifier
from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
@@ -733,7 +736,7 @@ def add_to_index(sender, document, **kwargs) -> None:
def run_workflows_added(
sender,
document: Document,
logging_group=None,
logging_group: uuid.UUID | None = None,
original_file=None,
**kwargs,
) -> None:
@@ -749,7 +752,7 @@ def run_workflows_added(
def run_workflows_updated(
sender,
document: Document,
logging_group=None,
logging_group: uuid.UUID | None = None,
**kwargs,
) -> None:
run_workflows(
@@ -763,7 +766,7 @@ def run_workflows(
trigger_type: WorkflowTrigger.WorkflowTriggerType,
document: Document | ConsumableDocument,
workflow_to_run: Workflow | None = None,
logging_group=None,
logging_group: uuid.UUID | None = None,
overrides: DocumentMetadataOverrides | None = None,
original_file: Path | None = None,
) -> tuple[DocumentMetadataOverrides, str] | None:
@@ -789,14 +792,33 @@ def run_workflows(
for workflow in workflows:
if not use_overrides:
# This can be called from bulk_update_documents, which may be running multiple times
# Refresh this so the matching data is fresh and instance fields are re-freshed
# Otherwise, this instance might be behind and overwrite the work another process did
document.refresh_from_db()
doc_tag_ids = list(document.tags.values_list("pk", flat=True))
if TYPE_CHECKING:
assert isinstance(document, Document)
try:
# This can be called from bulk_update_documents, which may be running multiple times
# Refresh this so the matching data is fresh and instance fields are re-freshed
# Otherwise, this instance might be behind and overwrite the work another process did
document.refresh_from_db()
doc_tag_ids = list(document.tags.values_list("pk", flat=True))
except Document.DoesNotExist:
# Document was hard deleted by a previous workflow or another process
logger.info(
"Document no longer exists, skipping remaining workflows",
extra={"group": logging_group},
)
break
# Check if document was soft deleted (moved to trash)
if document.is_deleted:
logger.info(
"Document was moved to trash, skipping remaining workflows",
extra={"group": logging_group},
)
break
if matching.document_matches_workflow(document, workflow, trigger_type):
action: WorkflowAction
has_move_to_trash_action = False
for action in workflow.actions.order_by("order", "pk"):
message = f"Applying {action} from {workflow}"
if not use_overrides:
@@ -840,6 +862,8 @@ def run_workflows(
)
elif action.type == WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL:
execute_password_removal_action(action, document, logging_group)
elif action.type == WorkflowAction.WorkflowActionType.MOVE_TO_TRASH:
has_move_to_trash_action = True
if not use_overrides:
# limit title to 128 characters
@@ -854,7 +878,12 @@ def run_workflows(
document=document if not use_overrides else None,
)
if has_move_to_trash_action:
execute_move_to_trash_action(action, document, logging_group)
if use_overrides:
if TYPE_CHECKING:
assert overrides is not None
return overrides, "\n".join(messages)

View File

@@ -896,3 +896,210 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
"Passwords are required",
str(response.data["non_field_errors"][0]),
)
def test_trash_action_validation(self) -> None:
    """
    GIVEN:
        - API request to create a workflow with a trash action
    WHEN:
        - API is called
    THEN:
        - Correct HTTP response
    """
    # Two independent workflows, each with a lone move-to-trash action,
    # should both be accepted.
    for workflow_name, workflow_order in (("Workflow 2", 1), ("Workflow 3", 2)):
        payload = {
            "name": workflow_name,
            "order": workflow_order,
            "triggers": [
                {
                    "type": WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
                    "sources": [DocumentSource.ApiUpload],
                    "filter_filename": "*",
                },
            ],
            "actions": [
                {
                    "type": WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
                },
            ],
        }
        response = self.client.post(
            self.ENDPOINT,
            json.dumps(payload),
            content_type="application/json",
        )
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_trash_action_as_last_action_valid(self) -> None:
    """
    GIVEN:
        - API request to create a workflow with multiple actions
        - Move to trash action is the last action
    WHEN:
        - API is called
    THEN:
        - Workflow is created successfully
    """
    # Ordinary mutations first, trash action last in the list.
    actions = [
        {
            "type": WorkflowAction.WorkflowActionType.ASSIGNMENT,
            "assign_title": "Assigned Title",
        },
        {
            "type": WorkflowAction.WorkflowActionType.REMOVAL,
            "remove_all_tags": True,
        },
        {
            "type": WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
        },
    ]
    payload = {
        "name": "Workflow with Move to Trash Last",
        "order": 1,
        "triggers": [
            {
                "type": WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
                "sources": [DocumentSource.ApiUpload],
                "filter_filename": "*",
            },
        ],
        "actions": actions,
    }
    response = self.client.post(
        self.ENDPOINT,
        json.dumps(payload),
        content_type="application/json",
    )
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_update_workflow_add_trash_at_end_valid(self) -> None:
    """
    GIVEN:
        - Existing workflow without trash action
    WHEN:
        - PATCH to add trash action at end
    THEN:
        - HTTP 200 success
    """
    assignment_action = {
        "type": WorkflowAction.WorkflowActionType.ASSIGNMENT,
        "assign_title": "First Action",
    }
    create_response = self.client.post(
        self.ENDPOINT,
        json.dumps(
            {
                "name": "Workflow to Add Move to Trash",
                "order": 1,
                "triggers": [
                    {
                        "type": WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
                        "sources": [DocumentSource.ApiUpload],
                        "filter_filename": "*",
                    },
                ],
                "actions": [assignment_action],
            },
        ),
        content_type="application/json",
    )
    self.assertEqual(create_response.status_code, status.HTTP_201_CREATED)
    # Append a move-to-trash action after the existing assignment action.
    patch_response = self.client.patch(
        f"{self.ENDPOINT}{create_response.data['id']}/",
        json.dumps(
            {
                "actions": [
                    assignment_action,
                    {
                        "type": WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
                    },
                ],
            },
        ),
        content_type="application/json",
    )
    self.assertEqual(patch_response.status_code, status.HTTP_200_OK)
def test_update_workflow_remove_trash_action_valid(self) -> None:
    """
    GIVEN:
        - Existing workflow with trash action
    WHEN:
        - PATCH to remove trash action
    THEN:
        - HTTP 200 success
    """
    create_response = self.client.post(
        self.ENDPOINT,
        json.dumps(
            {
                "name": "Workflow to Remove move to trash",
                "order": 1,
                "triggers": [
                    {
                        "type": WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
                        "sources": [DocumentSource.ApiUpload],
                        "filter_filename": "*",
                    },
                ],
                "actions": [
                    {
                        "type": WorkflowAction.WorkflowActionType.ASSIGNMENT,
                        "assign_title": "First Action",
                    },
                    {
                        "type": WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
                    },
                ],
            },
        ),
        content_type="application/json",
    )
    self.assertEqual(create_response.status_code, status.HTTP_201_CREATED)
    # Replace the action list with one that no longer includes the trash action.
    patch_response = self.client.patch(
        f"{self.ENDPOINT}{create_response.data['id']}/",
        json.dumps(
            {
                "actions": [
                    {
                        "type": WorkflowAction.WorkflowActionType.ASSIGNMENT,
                        "assign_title": "Only Action",
                    },
                ],
            },
        ),
        content_type="application/json",
    )
    self.assertEqual(patch_response.status_code, status.HTTP_200_OK)

View File

@@ -3,9 +3,11 @@ import json
import shutil
import socket
import tempfile
from collections.abc import Callable
from datetime import timedelta
from pathlib import Path
from typing import TYPE_CHECKING
from typing import Any
from unittest import mock
import pytest
@@ -55,6 +57,7 @@ from documents.models import WorkflowActionEmail
from documents.models import WorkflowActionWebhook
from documents.models import WorkflowRun
from documents.models import WorkflowTrigger
from documents.plugins.base import StopConsumeTaskError
from documents.serialisers import WorkflowTriggerSerializer
from documents.signals import document_consumption_finished
from documents.tests.utils import DirectoriesMixin
@@ -3914,6 +3917,427 @@ class TestWorkflows(
)
assert mock_remove_password.call_count == 2
def test_workflow_trash_action_soft_delete(self):
    """
    GIVEN:
        - Document updated workflow with delete action
    WHEN:
        - Document that matches is updated
    THEN:
        - Document is moved to trash (soft deleted)
    """
    workflow = Workflow.objects.create(
        name="Workflow 1",
        order=0,
    )
    workflow.triggers.add(
        WorkflowTrigger.objects.create(
            type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
        ),
    )
    workflow.actions.add(
        WorkflowAction.objects.create(
            type=WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
        ),
    )
    workflow.save()
    document = Document.objects.create(
        title="sample test",
        correspondent=self.c,
        original_filename="sample.pdf",
    )
    self.assertEqual(Document.objects.count(), 1)
    self.assertEqual(Document.deleted_objects.count(), 0)
    run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, document)
    # Soft delete: gone from the default manager, present in deleted_objects.
    self.assertEqual(Document.objects.count(), 0)
    self.assertEqual(Document.deleted_objects.count(), 1)
@override_settings(
    PAPERLESS_EMAIL_HOST="localhost",
    EMAIL_ENABLED=True,
    PAPERLESS_URL="http://localhost:8000",
)
@mock.patch("django.core.mail.message.EmailMessage.send")
def test_workflow_trash_with_email_action(self, mock_email_send):
    """
    GIVEN:
        - Workflow with email action, then move to trash action
    WHEN:
        - Document matches and workflow runs
    THEN:
        - Email is sent first
        - Document is moved to trash (soft deleted)
    """
    mock_email_send.return_value = 1
    workflow = Workflow.objects.create(
        name="Workflow with email then move to trash",
        order=0,
    )
    workflow.triggers.add(
        WorkflowTrigger.objects.create(
            type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
        ),
    )
    # Email action first, trash action second.
    workflow.actions.add(
        WorkflowAction.objects.create(
            type=WorkflowAction.WorkflowActionType.EMAIL,
            email=WorkflowActionEmail.objects.create(
                subject="Document deleted: {doc_title}",
                body="Document {doc_title} will be deleted",
                to="user@example.com",
                include_document=False,
            ),
        ),
        WorkflowAction.objects.create(
            type=WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
        ),
    )
    workflow.save()
    document = Document.objects.create(
        title="sample test",
        correspondent=self.c,
        original_filename="sample.pdf",
    )
    self.assertEqual(Document.objects.count(), 1)
    self.assertEqual(Document.deleted_objects.count(), 0)
    run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, document)
    mock_email_send.assert_called_once()
    # Document ends up soft deleted.
    self.assertEqual(Document.objects.count(), 0)
    self.assertEqual(Document.deleted_objects.count(), 1)
@override_settings(
    PAPERLESS_URL="http://localhost:8000",
)
@mock.patch("documents.workflows.webhooks.send_webhook.delay")
def test_workflow_trash_with_webhook_action(self, mock_webhook_delay):
    """
    GIVEN:
        - Workflow with webhook action (include_document=True), then move to trash action
    WHEN:
        - Document matches and workflow runs
    THEN:
        - Webhook .delay() is called with complete data including file bytes
        - Document is moved to trash (soft deleted)
        - Webhook task has all necessary data and doesn't rely on document existence
    """
    trigger = WorkflowTrigger.objects.create(
        type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
    )
    webhook_action = WorkflowActionWebhook.objects.create(
        use_params=True,
        params={
            "title": "{{doc_title}}",
            "message": "Document being deleted",
        },
        url="https://paperless-ngx.com/webhook",
        include_document=True,
    )
    webhook_workflow_action = WorkflowAction.objects.create(
        type=WorkflowAction.WorkflowActionType.WEBHOOK,
        webhook=webhook_action,
    )
    trash_workflow_action = WorkflowAction.objects.create(
        type=WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
    )
    w = Workflow.objects.create(
        name="Workflow with webhook then move to trash",
        order=0,
    )
    w.triggers.add(trigger)
    # Webhook action first, trash action second.
    w.actions.add(webhook_workflow_action, trash_workflow_action)
    w.save()
    # Stage a real file so include_document=True has bytes to attach.
    test_file = shutil.copy(
        self.SAMPLE_DIR / "simple.pdf",
        self.dirs.scratch_dir / "simple.pdf",
    )
    doc = Document.objects.create(
        title="sample test",
        correspondent=self.c,
        original_filename="simple.pdf",
        filename=test_file,
        mime_type="application/pdf",
    )
    self.assertEqual(Document.objects.count(), 1)
    self.assertEqual(Document.deleted_objects.count(), 0)
    run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)
    mock_webhook_delay.assert_called_once()
    # Inspect the keyword arguments the task was queued with.
    call_kwargs = mock_webhook_delay.call_args[1]
    self.assertEqual(call_kwargs["url"], "https://paperless-ngx.com/webhook")
    self.assertEqual(
        call_kwargs["data"],
        {"title": "sample test", "message": "Document being deleted"},
    )
    # files["file"] is a (name, bytes, mime_type) triple.
    self.assertIsNotNone(call_kwargs["files"])
    self.assertIn("file", call_kwargs["files"])
    self.assertEqual(call_kwargs["files"]["file"][0], "simple.pdf")
    self.assertEqual(call_kwargs["files"]["file"][2], "application/pdf")
    self.assertIsInstance(call_kwargs["files"]["file"][1], bytes)
    # Document ends up soft deleted.
    self.assertEqual(Document.objects.count(), 0)
    self.assertEqual(Document.deleted_objects.count(), 1)
@override_settings(
    PAPERLESS_EMAIL_HOST="localhost",
    EMAIL_ENABLED=True,
    PAPERLESS_URL="http://localhost:8000",
)
@mock.patch("django.core.mail.message.EmailMessage.send")
def test_workflow_trash_after_email_failure(self, mock_email_send) -> None:
    """
    GIVEN:
        - Workflow with email action (that fails), then move to trash action
    WHEN:
        - Document matches and workflow runs
        - Email action raises exception
    THEN:
        - Email failure is logged
        - Move to Trash still executes successfully (soft delete)
    """
    mock_email_send.side_effect = Exception("Email server error")
    workflow = Workflow.objects.create(
        name="Workflow with failing email then move to trash",
        order=0,
    )
    workflow.triggers.add(
        WorkflowTrigger.objects.create(
            type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
        ),
    )
    workflow.actions.add(
        WorkflowAction.objects.create(
            type=WorkflowAction.WorkflowActionType.EMAIL,
            email=WorkflowActionEmail.objects.create(
                subject="Document deleted: {doc_title}",
                body="Document {doc_title} will be deleted",
                to="user@example.com",
                include_document=False,
            ),
        ),
        WorkflowAction.objects.create(
            type=WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
        ),
    )
    workflow.save()
    document = Document.objects.create(
        title="sample test",
        correspondent=self.c,
        original_filename="sample.pdf",
    )
    self.assertEqual(Document.objects.count(), 1)
    self.assertEqual(Document.deleted_objects.count(), 0)
    # The email failure must be logged but must not abort the workflow.
    with self.assertLogs("paperless.workflows.actions", level="ERROR") as cm:
        run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, document)
        expected_str = "Error occurred sending notification email"
        self.assertIn(expected_str, cm.output[0])
    self.assertEqual(Document.objects.count(), 0)
    self.assertEqual(Document.deleted_objects.count(), 1)
def test_multiple_workflows_trash_then_assignment(self):
    """
    GIVEN:
        - Workflow 1 (order=0) with move to trash action
        - Workflow 2 (order=1) with assignment action
        - Both workflows match the same document
    WHEN:
        - Workflows run sequentially
    THEN:
        - First workflow runs and deletes document (soft delete)
        - Second workflow does not trigger (document no longer exists)
        - Logs confirm move to trash and skipping of remaining workflows
    """
    # First workflow (order=0): trashes the document.
    trigger1 = WorkflowTrigger.objects.create(
        type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
    )
    trash_workflow_action = WorkflowAction.objects.create(
        type=WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
    )
    w1 = Workflow.objects.create(
        name="Workflow 1 - Move to Trash",
        order=0,
    )
    w1.triggers.add(trigger1)
    w1.actions.add(trash_workflow_action)
    w1.save()
    # Second workflow (order=1): would assign a correspondent, but must be skipped.
    trigger2 = WorkflowTrigger.objects.create(
        type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
    )
    assignment_action = WorkflowAction.objects.create(
        type=WorkflowAction.WorkflowActionType.ASSIGNMENT,
        assign_correspondent=self.c2,
    )
    w2 = Workflow.objects.create(
        name="Workflow 2 - Assignment",
        order=1,
    )
    w2.triggers.add(trigger2)
    w2.actions.add(assignment_action)
    w2.save()
    doc = Document.objects.create(
        title="sample test",
        correspondent=self.c,
        original_filename="sample.pdf",
    )
    self.assertEqual(Document.objects.count(), 1)
    self.assertEqual(Document.deleted_objects.count(), 0)
    with self.assertLogs("paperless", level="DEBUG") as cm:
        run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)
    # Soft delete: gone from the default manager, present in deleted_objects.
    self.assertEqual(Document.objects.count(), 0)
    self.assertEqual(Document.deleted_objects.count(), 1)
    # We check logs instead of WorkflowRun.objects.count() because when the document
    # is soft-deleted, the WorkflowRun is cascade-deleted (hard delete) since it does
    # not inherit from the SoftDeleteModel. The logs confirm that the first workflow
    # executed the move to trash and remaining workflows were skipped.
    log_output = "\n".join(cm.output)
    self.assertIn("Moved document", log_output)
    self.assertIn("to trash", log_output)
    self.assertIn(
        "Document was moved to trash, skipping remaining workflows",
        log_output,
    )
def test_workflow_delete_action_during_consumption(self):
    """
    GIVEN:
        - Workflow with consumption trigger and delete action
    WHEN:
        - Document is being consumed and workflow runs
    THEN:
        - StopConsumeTaskError is raised to halt consumption
        - Original file is deleted
        - No document is created
    """
    workflow = Workflow.objects.create(
        name="Workflow Delete During Consumption",
        order=0,
    )
    workflow.triggers.add(
        WorkflowTrigger.objects.create(
            type=WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
            sources=f"{DocumentSource.ConsumeFolder}",
            filter_filename="*",
        ),
    )
    workflow.actions.add(
        WorkflowAction.objects.create(
            type=WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
        ),
    )
    workflow.save()
    # Stage a file as if it had been dropped into the consume folder.
    staged_file = Path(
        shutil.copy(
            self.SAMPLE_DIR / "simple.pdf",
            self.dirs.scratch_dir / "simple.pdf",
        ),
    )
    self.assertTrue(staged_file.exists())
    self.assertEqual(Document.objects.count(), 0)
    # Consumption flow: run_workflows is called with overrides set.
    with self.assertRaises(StopConsumeTaskError) as context:
        run_workflows(
            WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
            ConsumableDocument(
                source=DocumentSource.ConsumeFolder,
                original_file=staged_file,
            ),
            overrides=DocumentMetadataOverrides(),
        )
    self.assertIn("deleted by workflow action", str(context.exception))
    # The staged file is gone and no Document row was created.
    self.assertFalse(staged_file.exists())
    self.assertEqual(Document.objects.count(), 0)
def test_workflow_delete_action_during_consumption_with_assignment(self):
    """
    GIVEN:
        - Workflow with consumption trigger, assignment action, then delete action
    WHEN:
        - Document is being consumed and workflow runs
    THEN:
        - StopConsumeTaskError is raised to halt consumption
        - Original file is deleted
        - No document is created (even though assignment would have worked)
    """
    workflow = Workflow.objects.create(
        name="Workflow Assignment then Delete During Consumption",
        order=0,
    )
    workflow.triggers.add(
        WorkflowTrigger.objects.create(
            type=WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
            sources=f"{DocumentSource.ConsumeFolder}",
            filter_filename="*",
        ),
    )
    # Assignment action first, then the trash action that aborts consumption.
    workflow.actions.add(
        WorkflowAction.objects.create(
            type=WorkflowAction.WorkflowActionType.ASSIGNMENT,
            assign_title="This should not be applied",
            assign_correspondent=self.c,
        ),
        WorkflowAction.objects.create(
            type=WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
        ),
    )
    workflow.save()
    # Stage a file as if it had been dropped into the consume folder.
    staged_file = Path(
        shutil.copy(
            self.SAMPLE_DIR / "simple.pdf",
            self.dirs.scratch_dir / "simple2.pdf",
        ),
    )
    self.assertTrue(staged_file.exists())
    self.assertEqual(Document.objects.count(), 0)
    # Consumption flow: run_workflows is called with overrides set.
    with self.assertRaises(StopConsumeTaskError):
        run_workflows(
            WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
            ConsumableDocument(
                source=DocumentSource.ConsumeFolder,
                original_file=staged_file,
            ),
            overrides=DocumentMetadataOverrides(),
        )
    # The staged file is gone and no Document row was created.
    self.assertFalse(staged_file.exists())
    self.assertEqual(Document.objects.count(), 0)
class TestWebhookSend:
def test_send_webhook_data_or_json(
@@ -3956,13 +4380,17 @@ class TestWebhookSend:
@pytest.fixture
def resolve_to(monkeypatch):
def resolve_to(monkeypatch: pytest.MonkeyPatch) -> Callable[[str], None]:
"""
Force DNS resolution to a specific IP for any hostname.
"""
def _set(ip: str):
def fake_getaddrinfo(host, *_args, **_kwargs):
def _set(ip: str) -> None:
def fake_getaddrinfo(
host: str,
*_args: object,
**_kwargs: object,
) -> list[tuple[Any, ...]]:
return [(socket.AF_INET, None, None, "", (ip, 0))]
monkeypatch.setattr(socket, "getaddrinfo", fake_getaddrinfo)
@@ -4103,7 +4531,7 @@ class TestWebhookSecurity:
def test_strips_user_supplied_host_header(
self,
httpx_mock: HTTPXMock,
resolve_to,
resolve_to: Callable[[str], None],
) -> None:
"""
GIVEN:
@@ -4169,7 +4597,7 @@ class TestDateWorkflowLocalization(
self,
title_template: str,
expected_title: str,
):
) -> None:
"""
GIVEN:
- Document added workflow with title template using localize_date filter
@@ -4234,7 +4662,7 @@ class TestDateWorkflowLocalization(
self,
title_template: str,
expected_title: str,
):
) -> None:
"""
GIVEN:
- Document updated workflow with title template using localize_date filter
@@ -4310,7 +4738,7 @@ class TestDateWorkflowLocalization(
settings: SettingsWrapper,
title_template: str,
expected_title: str,
):
) -> None:
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
sources=f"{DocumentSource.ApiUpload}",

View File

@@ -1,5 +1,6 @@
import logging
import re
import uuid
from pathlib import Path
from django.conf import settings
@@ -15,6 +16,7 @@ from documents.models import Document
from documents.models import DocumentType
from documents.models import WorkflowAction
from documents.models import WorkflowTrigger
from documents.plugins.base import StopConsumeTaskError
from documents.signals import document_consumption_finished
from documents.templating.workflows import parse_w_workflow_placeholders
from documents.workflows.webhooks import send_webhook
@@ -338,3 +340,33 @@ def execute_password_removal_action(
document.pk,
extra={"group": logging_group},
)
def execute_move_to_trash_action(
    action: WorkflowAction,
    document: Document | ConsumableDocument,
    logging_group: uuid.UUID | None,
) -> None:
    """
    Execute a move to trash action for a workflow on an existing document or a
    document in consumption. In case of an existing document it soft-deletes
    the document. In case of consumption it aborts consumption and deletes the
    file.

    Args:
        action: The workflow action being executed. Not read by this body;
            kept so the signature matches the other execute_*_action helpers.
        document: Either a stored Document or a ConsumableDocument still
            being consumed.
        logging_group: Correlation id attached to log records via
            ``extra={"group": ...}``.

    Raises:
        StopConsumeTaskError: Always, for the consumption case, so the
            consume task halts and no Document is created.
    """
    if isinstance(document, Document):
        document.delete()
        logger.debug(
            f"Moved document {document} to trash",
            extra={"group": logging_group},
        )
    else:
        # Consumption path: remove the source file if it is still present,
        # then abort the consume task.
        if document.original_file.exists():
            document.original_file.unlink()
        logger.info(
            f"Workflow move to trash action triggered during consumption, "
            f"deleting file {document.original_file}",
            extra={"group": logging_group},
        )
        raise StopConsumeTaskError(
            "Document deleted by workflow action during consumption",
        )

View File

@@ -2,7 +2,7 @@ msgid ""
msgstr ""
"Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2026-02-16 17:32+0000\n"
"POT-Creation-Date: 2026-02-24 00:43+0000\n"
"PO-Revision-Date: 2022-02-17 04:17\n"
"Last-Translator: \n"
"Language-Team: English\n"
@@ -89,7 +89,7 @@ msgstr ""
msgid "Automatic"
msgstr ""
#: documents/models.py:66 documents/models.py:444 documents/models.py:1659
#: documents/models.py:66 documents/models.py:444 documents/models.py:1663
#: paperless_mail/models.py:23 paperless_mail/models.py:143
msgid "name"
msgstr ""
@@ -252,7 +252,7 @@ msgid "The position of this document in your physical document archive."
msgstr ""
#: documents/models.py:313 documents/models.py:688 documents/models.py:742
#: documents/models.py:1702
#: documents/models.py:1706
msgid "document"
msgstr ""
@@ -1093,193 +1093,197 @@ msgid "Password removal"
msgstr ""
#: documents/models.py:1414
msgid "Move to trash"
msgstr ""
#: documents/models.py:1418
msgid "Workflow Action Type"
msgstr ""
#: documents/models.py:1419 documents/models.py:1661
#: documents/models.py:1423 documents/models.py:1665
#: paperless_mail/models.py:145
msgid "order"
msgstr ""
#: documents/models.py:1422
#: documents/models.py:1426
msgid "assign title"
msgstr ""
#: documents/models.py:1426
#: documents/models.py:1430
msgid "Assign a document title, must be a Jinja2 template, see documentation."
msgstr ""
#: documents/models.py:1434 paperless_mail/models.py:274
#: documents/models.py:1438 paperless_mail/models.py:274
msgid "assign this tag"
msgstr ""
#: documents/models.py:1443 paperless_mail/models.py:282
#: documents/models.py:1447 paperless_mail/models.py:282
msgid "assign this document type"
msgstr ""
#: documents/models.py:1452 paperless_mail/models.py:296
#: documents/models.py:1456 paperless_mail/models.py:296
msgid "assign this correspondent"
msgstr ""
#: documents/models.py:1461
#: documents/models.py:1465
msgid "assign this storage path"
msgstr ""
#: documents/models.py:1470
#: documents/models.py:1474
msgid "assign this owner"
msgstr ""
#: documents/models.py:1477
#: documents/models.py:1481
msgid "grant view permissions to these users"
msgstr ""
#: documents/models.py:1484
#: documents/models.py:1488
msgid "grant view permissions to these groups"
msgstr ""
#: documents/models.py:1491
#: documents/models.py:1495
msgid "grant change permissions to these users"
msgstr ""
#: documents/models.py:1498
#: documents/models.py:1502
msgid "grant change permissions to these groups"
msgstr ""
#: documents/models.py:1505
#: documents/models.py:1509
msgid "assign these custom fields"
msgstr ""
#: documents/models.py:1509
#: documents/models.py:1513
msgid "custom field values"
msgstr ""
#: documents/models.py:1513
#: documents/models.py:1517
msgid "Optional values to assign to the custom fields."
msgstr ""
#: documents/models.py:1522
#: documents/models.py:1526
msgid "remove these tag(s)"
msgstr ""
#: documents/models.py:1527
#: documents/models.py:1531
msgid "remove all tags"
msgstr ""
#: documents/models.py:1534
#: documents/models.py:1538
msgid "remove these document type(s)"
msgstr ""
#: documents/models.py:1539
#: documents/models.py:1543
msgid "remove all document types"
msgstr ""
#: documents/models.py:1546
#: documents/models.py:1550
msgid "remove these correspondent(s)"
msgstr ""
#: documents/models.py:1551
#: documents/models.py:1555
msgid "remove all correspondents"
msgstr ""
#: documents/models.py:1558
#: documents/models.py:1562
msgid "remove these storage path(s)"
msgstr ""
#: documents/models.py:1563
#: documents/models.py:1567
msgid "remove all storage paths"
msgstr ""
#: documents/models.py:1570
#: documents/models.py:1574
msgid "remove these owner(s)"
msgstr ""
#: documents/models.py:1575
#: documents/models.py:1579
msgid "remove all owners"
msgstr ""
#: documents/models.py:1582
#: documents/models.py:1586
msgid "remove view permissions for these users"
msgstr ""
#: documents/models.py:1589
#: documents/models.py:1593
msgid "remove view permissions for these groups"
msgstr ""
#: documents/models.py:1596
#: documents/models.py:1600
msgid "remove change permissions for these users"
msgstr ""
#: documents/models.py:1603
#: documents/models.py:1607
msgid "remove change permissions for these groups"
msgstr ""
#: documents/models.py:1608
#: documents/models.py:1612
msgid "remove all permissions"
msgstr ""
#: documents/models.py:1615
#: documents/models.py:1619
msgid "remove these custom fields"
msgstr ""
#: documents/models.py:1620
#: documents/models.py:1624
msgid "remove all custom fields"
msgstr ""
#: documents/models.py:1629
#: documents/models.py:1633
msgid "email"
msgstr ""
#: documents/models.py:1638
#: documents/models.py:1642
msgid "webhook"
msgstr ""
#: documents/models.py:1642
#: documents/models.py:1646
msgid "passwords"
msgstr ""
#: documents/models.py:1646
#: documents/models.py:1650
msgid ""
"Passwords to try when removing PDF protection. Separate with commas or new "
"lines."
msgstr ""
#: documents/models.py:1651
#: documents/models.py:1655
msgid "workflow action"
msgstr ""
#: documents/models.py:1652
#: documents/models.py:1656
msgid "workflow actions"
msgstr ""
#: documents/models.py:1667
#: documents/models.py:1671
msgid "triggers"
msgstr ""
#: documents/models.py:1674
#: documents/models.py:1678
msgid "actions"
msgstr ""
#: documents/models.py:1677 paperless_mail/models.py:154
#: documents/models.py:1681 paperless_mail/models.py:154
msgid "enabled"
msgstr ""
#: documents/models.py:1688
#: documents/models.py:1692
msgid "workflow"
msgstr ""
#: documents/models.py:1692
#: documents/models.py:1696
msgid "workflow trigger type"
msgstr ""
#: documents/models.py:1706
#: documents/models.py:1710
msgid "date run"
msgstr ""
#: documents/models.py:1712
#: documents/models.py:1716
msgid "workflow run"
msgstr ""
#: documents/models.py:1713
#: documents/models.py:1717
msgid "workflow runs"
msgstr ""

View File

@@ -1,6 +1,6 @@
from typing import Final
__version__: Final[tuple[int, int, int]] = (2, 20, 7)
__version__: Final[tuple[int, int, int]] = (2, 20, 8)
# Version string like X.Y.Z
__full_version_str__: Final[str] = ".".join(map(str, __version__))
# Version string like X.Y

View File

@@ -23,6 +23,7 @@ def get_embedding_model() -> BaseEmbedding:
return OpenAIEmbedding(
model=config.llm_embedding_model or "text-embedding-3-small",
api_key=config.llm_api_key,
api_base=config.llm_endpoint or None,
)
case LLMEmbeddingBackend.HUGGINGFACE:
return HuggingFaceEmbedding(

View File

@@ -65,12 +65,14 @@ def test_get_embedding_model_openai(mock_ai_config):
mock_ai_config.return_value.llm_embedding_backend = LLMEmbeddingBackend.OPENAI
mock_ai_config.return_value.llm_embedding_model = "text-embedding-3-small"
mock_ai_config.return_value.llm_api_key = "test_api_key"
mock_ai_config.return_value.llm_endpoint = "http://test-url"
with patch("paperless_ai.embedding.OpenAIEmbedding") as MockOpenAIEmbedding:
model = get_embedding_model()
MockOpenAIEmbedding.assert_called_once_with(
model="text-embedding-3-small",
api_key="test_api_key",
api_base="http://test-url",
)
assert model == MockOpenAIEmbedding.return_value

View File

@@ -15,7 +15,7 @@ class Migration(migrations.Migration):
name="stop_processing",
field=models.BooleanField(
default=False,
help_text="If True, no further rules will be processed after this one if any document is consumed.",
help_text="If True, no further rules will be processed after this one if any document is queued.",
verbose_name="Stop processing further rules",
),
),

View File

@@ -272,6 +272,24 @@ class TestAPIMailAccounts(DirectoriesMixin, APITestCase):
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["success"], True)
def test_mail_account_test_existing_nonexistent_id_forbidden(self) -> None:
    """
    GIVEN:
        - An account id that does not match any existing mail account
    WHEN:
        - The mail account test endpoint is POSTed with that id
    THEN:
        - The request is rejected with HTTP 403 and a permissions message
    """
    request_body = {
        "id": 999999,
        "imap_server": "server.example.com",
        "imap_port": 443,
        "imap_security": MailAccount.ImapSecurity.SSL,
        "username": "admin",
        "password": "******",
    }
    response = self.client.post(
        f"{self.ENDPOINT}test/",
        json.dumps(request_body),
        content_type="application/json",
    )
    self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    self.assertEqual(response.content.decode(), "Insufficient permissions")
def test_get_mail_accounts_owner_aware(self) -> None:
"""
GIVEN:

View File

@@ -8,6 +8,7 @@ from datetime import timedelta
from unittest import mock
import pytest
from django.contrib.auth.models import Permission
from django.contrib.auth.models import User
from django.core.management import call_command
from django.db import DatabaseError
@@ -1734,6 +1735,10 @@ class TestMailAccountTestView(APITestCase):
username="testuser",
password="testpassword",
)
self.user.user_permissions.add(
*Permission.objects.filter(codename__in=["add_mailaccount"]),
)
self.user.save()
self.client.force_authenticate(user=self.user)
self.url = "/api/mail_accounts/test/"
@@ -1850,6 +1855,56 @@ class TestMailAccountTestView(APITestCase):
expected_str = "Unable to refresh oauth token"
self.assertIn(expected_str, error_str)
def test_mail_account_test_view_existing_forbidden_for_other_owner(self) -> None:
    """
    GIVEN:
        - A mail account owned by a different user
    WHEN:
        - The authenticated user POSTs that account's id to the test endpoint
    THEN:
        - The request is rejected with HTTP 403 and a permissions message
    """
    account_owner = User.objects.create_user(
        username="otheruser",
        password="testpassword",
    )
    owned_account = MailAccount.objects.create(
        name="Owned account",
        imap_server="imap.example.com",
        imap_port=993,
        imap_security=MailAccount.ImapSecurity.SSL,
        username="admin",
        password="secret",
        owner=account_owner,
    )
    request_body = {
        "id": owned_account.id,
        "imap_server": "imap.example.com",
        "imap_port": 993,
        "imap_security": MailAccount.ImapSecurity.SSL,
        "username": "admin",
        "password": "****",
        "is_token": False,
    }
    response = self.client.post(self.url, request_body, format="json")
    self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    self.assertEqual(response.content.decode(), "Insufficient permissions")
def test_mail_account_test_view_requires_add_permission_without_account_id(
    self,
) -> None:
    """
    GIVEN:
        - The authenticated user lacks the add_mailaccount permission
    WHEN:
        - The test endpoint is POSTed without an existing account id
    THEN:
        - The request is rejected with HTTP 403 and a permissions message
    """
    # Strip the add permission granted in setUp so the view's
    # "new connection requires add permission" branch is exercised.
    self.user.user_permissions.remove(
        *Permission.objects.filter(codename__in=["add_mailaccount"]),
    )
    self.user.save()
    request_body = {
        "imap_server": "imap.example.com",
        "imap_port": 993,
        "imap_security": MailAccount.ImapSecurity.SSL,
        "username": "admin",
        "password": "secret",
        "is_token": False,
    }
    response = self.client.post(self.url, request_body, format="json")
    self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    self.assertEqual(response.content.decode(), "Insufficient permissions")
class TestMailAccountProcess(APITestCase):
def setUp(self) -> None:

View File

@@ -86,13 +86,34 @@ class MailAccountViewSet(ModelViewSet, PassUserMixin):
request.data["name"] = datetime.datetime.now().isoformat()
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
existing_account = None
account_id = request.data.get("id")
# account exists, use the password from there instead of *** and refresh_token / expiration
# testing a new connection requires add permission
if account_id is None and not request.user.has_perms(
["paperless_mail.add_mailaccount"],
):
return HttpResponseForbidden("Insufficient permissions")
# testing an existing account requires change permission on that account
if account_id is not None:
try:
existing_account = MailAccount.objects.get(pk=account_id)
except (TypeError, ValueError, MailAccount.DoesNotExist):
return HttpResponseForbidden("Insufficient permissions")
if not has_perms_owner_aware(
request.user,
"change_mailaccount",
existing_account,
):
return HttpResponseForbidden("Insufficient permissions")
# account exists, use the password from there instead of ***
if (
len(serializer.validated_data.get("password").replace("*", "")) == 0
and request.data["id"] is not None
and existing_account is not None
):
existing_account = MailAccount.objects.get(pk=request.data["id"])
serializer.validated_data["password"] = existing_account.password
serializer.validated_data["account_type"] = existing_account.account_type
serializer.validated_data["refresh_token"] = existing_account.refresh_token
@@ -106,7 +127,8 @@ class MailAccountViewSet(ModelViewSet, PassUserMixin):
) as M:
try:
if (
account.is_token
existing_account is not None
and account.is_token
and account.expiration is not None
and account.expiration < timezone.now()
):
@@ -248,6 +270,7 @@ class OauthCallbackView(GenericAPIView):
imap_server=imap_server,
refresh_token=refresh_token,
expiration=timezone.now() + timedelta(seconds=expires_in),
owner=request.user,
defaults=defaults,
)
return HttpResponseRedirect(