Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2025-12-29 13:48:09 -06:00)

Commit: Merge branch 'dev' into dev
@@ -14,7 +14,7 @@ def settings(request):
    app_logo = (
        django_settings.APP_LOGO
        if general_config.app_logo is None or len(general_config.app_logo) == 0
-        else general_config.app_logo
+        else django_settings.BASE_URL + general_config.app_logo.lstrip("/")
    )

    return {

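Note: the effect of this one-line change, sketched with plain strings (values here are illustrative, borrowed from the test added further below; BASE_URL is assumed to end with a slash):

    BASE_URL = "/paperless/"          # stands in for django_settings.BASE_URL
    app_logo = "/logo/example.jpg"    # stands in for general_config.app_logo
    # the stored logo path's leading slash is stripped before concatenation
    print(BASE_URL + app_logo.lstrip("/"))  # -> /paperless/logo/example.jpg
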
@@ -176,9 +176,9 @@ class CustomFieldsFilter(Filter):
        if fields_with_matching_selects.count() > 0:
            for field in fields_with_matching_selects:
                options = field.extra_data.get("select_options", [])
-                for index, option in enumerate(options):
-                    if option.lower().find(value.lower()) != -1:
-                        option_ids.extend([index])
+                for _, option in enumerate(options):
+                    if option.get("label").lower().find(value.lower()) != -1:
+                        option_ids.extend([option.get("id")])
        return (
            qs.filter(custom_fields__field__name__icontains=value)
            | qs.filter(custom_fields__value_text__icontains=value)

@@ -195,19 +195,21 @@ class CustomFieldsFilter(Filter):
        return qs


-class SelectField(serializers.IntegerField):
+class SelectField(serializers.CharField):
    def __init__(self, custom_field: CustomField):
        self._options = custom_field.extra_data["select_options"]
-        super().__init__(min_value=0, max_value=len(self._options))
+        super().__init__(max_length=16)

    def to_internal_value(self, data):
-        if not isinstance(data, int):
-            # If the supplied value is not an integer,
-            # we will try to map it to an option index.
-            try:
-                data = self._options.index(data)
-            except ValueError:
-                pass
+        # If the supplied value is the option label instead of the ID
+        try:
+            data = next(
+                option.get("id")
+                for option in self._options
+                if option.get("label") == data
+            )
+        except StopIteration:
+            pass
        return super().to_internal_value(data)

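Note: a minimal sketch of the lookup the new to_internal_value performs, assuming a post-migration options list (the ids here are made up):

    options = [
        {"id": "abc-123", "label": "Option 1"},
        {"id": "def-456", "label": "Option 2"},
    ]

    def resolve(data, options):
        # A label is mapped to its option id; anything else (including a
        # value that is already an id) falls through unchanged.
        try:
            return next(o["id"] for o in options if o["label"] == data)
        except StopIteration:
            return data

    print(resolve("Option 2", options))  # -> "def-456"
    print(resolve("def-456", options))   # -> "def-456"
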
@@ -317,10 +317,8 @@ class Command(BaseCommand):

            # Check the files against the timeout
            still_waiting = {}
-            for filepath in notified_files:
-                # Time of the last inotify event for this file
-                last_event_time = notified_files[filepath]
-
+            # last_event_time is time of the last inotify event for this file
+            for filepath, last_event_time in notified_files.items():
                # Current time - last time over the configured timeout
                waited_long_enough = (
                    monotonic() - last_event_time

@@ -294,9 +294,9 @@ class Command(CryptMixin, BaseCommand):
        manifest_dict = {}

        # Build an overall manifest
-        for key in manifest_key_to_object_query:
+        for key, object_query in manifest_key_to_object_query.items():
            manifest_dict[key] = json.loads(
-                serializers.serialize("json", manifest_key_to_object_query[key]),
+                serializers.serialize("json", object_query),
            )

        self.encrypt_secret_fields(manifest_dict)

@@ -370,8 +370,8 @@ class Command(CryptMixin, BaseCommand):
        # 4.1 write primary manifest to target folder
        manifest = []
-        for key in manifest_dict:
-            manifest.extend(manifest_dict[key])
+        for key, item in manifest_dict.items():
+            manifest.extend(item)
        manifest_path = (self.target / "manifest.json").resolve()
        self.check_and_write_json(
            manifest,

@@ -34,7 +34,7 @@ from documents.settings import EXPORTER_ARCHIVE_NAME
from documents.settings import EXPORTER_CRYPTO_SETTINGS_NAME
from documents.settings import EXPORTER_FILE_NAME
from documents.settings import EXPORTER_THUMBNAIL_NAME
-from documents.signals.handlers import update_cf_instance_documents
+from documents.signals.handlers import check_paths_and_prune_custom_fields
from documents.signals.handlers import update_filename_and_move_files
from documents.utils import copy_file_with_basic_stats
from paperless import version

@@ -262,7 +262,7 @@ class Command(CryptMixin, BaseCommand):
            ),
            disable_signal(
                post_save,
-                receiver=update_cf_instance_documents,
+                receiver=check_paths_and_prune_custom_fields,
                sender=CustomField,
            ),
        ):

@@ -409,6 +409,7 @@ def document_matches_workflow(
    elif (
        trigger_type == WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED
        or trigger_type == WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED
+        or trigger_type == WorkflowTrigger.WorkflowTriggerType.SCHEDULED
    ):
        trigger_matched, reason = existing_document_matches_workflow(
            document,

@@ -0,0 +1,143 @@
# Generated by Django 5.1.1 on 2024-11-05 05:19

import django.core.validators
import django.db.models.deletion
import django.utils.timezone
from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1057_paperlesstask_owner"),
    ]

    operations = [
        migrations.AddField(
            model_name="workflowtrigger",
            name="schedule_date_custom_field",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to="documents.customfield",
                verbose_name="schedule date custom field",
            ),
        ),
        migrations.AddField(
            model_name="workflowtrigger",
            name="schedule_date_field",
            field=models.CharField(
                choices=[
                    ("added", "Added"),
                    ("created", "Created"),
                    ("modified", "Modified"),
                    ("custom_field", "Custom Field"),
                ],
                default="added",
                help_text="The field to check for a schedule trigger.",
                max_length=20,
                verbose_name="schedule date field",
            ),
        ),
        migrations.AddField(
            model_name="workflowtrigger",
            name="schedule_is_recurring",
            field=models.BooleanField(
                default=False,
                help_text="If the schedule should be recurring.",
                verbose_name="schedule is recurring",
            ),
        ),
        migrations.AddField(
            model_name="workflowtrigger",
            name="schedule_offset_days",
            field=models.PositiveIntegerField(
                default=0,
                help_text="The number of days to offset the schedule trigger by.",
                verbose_name="schedule offset days",
            ),
        ),
        migrations.AddField(
            model_name="workflowtrigger",
            name="schedule_recurring_interval_days",
            field=models.PositiveIntegerField(
                default=1,
                help_text="The number of days between recurring schedule triggers.",
                validators=[django.core.validators.MinValueValidator(1)],
                verbose_name="schedule recurring delay in days",
            ),
        ),
        migrations.AlterField(
            model_name="workflowtrigger",
            name="type",
            field=models.PositiveIntegerField(
                choices=[
                    (1, "Consumption Started"),
                    (2, "Document Added"),
                    (3, "Document Updated"),
                    (4, "Scheduled"),
                ],
                default=1,
                verbose_name="Workflow Trigger Type",
            ),
        ),
        migrations.CreateModel(
            name="WorkflowRun",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "type",
                    models.PositiveIntegerField(
                        choices=[
                            (1, "Consumption Started"),
                            (2, "Document Added"),
                            (3, "Document Updated"),
                            (4, "Scheduled"),
                        ],
                        null=True,
                        verbose_name="workflow trigger type",
                    ),
                ),
                (
                    "run_at",
                    models.DateTimeField(
                        db_index=True,
                        default=django.utils.timezone.now,
                        verbose_name="date run",
                    ),
                ),
                (
                    "document",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="workflow_runs",
                        to="documents.document",
                        verbose_name="document",
                    ),
                ),
                (
                    "workflow",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="runs",
                        to="documents.workflow",
                        verbose_name="workflow",
                    ),
                ),
            ],
            options={
                "verbose_name": "workflow run",
                "verbose_name_plural": "workflow runs",
            },
        ),
    ]

@@ -0,0 +1,79 @@
# Generated by Django 5.1.1 on 2024-11-13 05:14

from django.db import migrations
from django.db import models
from django.db import transaction
from django.utils.crypto import get_random_string


def migrate_customfield_selects(apps, schema_editor):
    """
    Migrate the custom field selects from a simple list of strings to a list of dictionaries with
    label and id. Then update all instances of the custom field to use the new format.
    """
    CustomFieldInstance = apps.get_model("documents", "CustomFieldInstance")
    CustomField = apps.get_model("documents", "CustomField")

    with transaction.atomic():
        for custom_field in CustomField.objects.filter(
            data_type="select",
        ):  # CustomField.FieldDataType.SELECT
            old_select_options = custom_field.extra_data["select_options"]
            custom_field.extra_data["select_options"] = [
                {"id": get_random_string(16), "label": value}
                for value in old_select_options
            ]
            custom_field.save()

            for instance in CustomFieldInstance.objects.filter(field=custom_field):
                if instance.value_select:
                    instance.value_select = custom_field.extra_data["select_options"][
                        int(instance.value_select)
                    ]["id"]
                    instance.save()


def reverse_migrate_customfield_selects(apps, schema_editor):
    """
    Reverse the migration of the custom field selects from a list of dictionaries with label and id
    to a simple list of strings. Then update all instances of the custom field to use the old format,
    which is just the index of the selected option.
    """
    CustomFieldInstance = apps.get_model("documents", "CustomFieldInstance")
    CustomField = apps.get_model("documents", "CustomField")

    with transaction.atomic():
        for custom_field in CustomField.objects.all():
            if custom_field.data_type == "select":  # CustomField.FieldDataType.SELECT
                old_select_options = custom_field.extra_data["select_options"]
                custom_field.extra_data["select_options"] = [
                    option["label"]
                    for option in custom_field.extra_data["select_options"]
                ]
                custom_field.save()

                for instance in CustomFieldInstance.objects.filter(field=custom_field):
                    instance.value_select = next(
                        index
                        for index, option in enumerate(old_select_options)
                        if option.get("id") == instance.value_select
                    )
                    instance.save()


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1058_workflowtrigger_schedule_date_custom_field_and_more"),
    ]

    operations = [
        migrations.AlterField(
            model_name="customfieldinstance",
            name="value_select",
            field=models.CharField(max_length=16, null=True),
        ),
        migrations.RunPython(
            migrate_customfield_selects,
            reverse_migrate_customfield_selects,
        ),
    ]

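Note: the data shapes this migration converts between, with placeholder ids (the real ones come from get_random_string(16)):

    before = ["Option 1", "Option 2"]   # value_select stored an index, e.g. "1"
    after = [
        {"id": "aB3dE5fG7hJ9kL1m", "label": "Option 1"},
        {"id": "zY8xW6vU4tS2rQ0p", "label": "Option 2"},
    ]
    # forward direction: an instance holding index "1" now holds the matching id
    new_value_select = after[int("1")]["id"]  # -> "zY8xW6vU4tS2rQ0p"
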
@@ -947,7 +947,7 @@ class CustomFieldInstance(SoftDeleteModel):
    value_document_ids = models.JSONField(null=True)

-    value_select = models.PositiveSmallIntegerField(null=True)
+    value_select = models.CharField(null=True, max_length=16)

    class Meta:
        ordering = ("created",)

@@ -962,7 +962,11 @@ class CustomFieldInstance(SoftDeleteModel):
    def __str__(self) -> str:
        value = (
-            self.field.extra_data["select_options"][self.value_select]
+            next(
+                option.get("label")
+                for option in self.field.extra_data["select_options"]
+                if option.get("id") == self.value_select
+            )
            if (
                self.field.data_type == CustomField.FieldDataType.SELECT
                and self.value_select is not None

@@ -1016,12 +1020,19 @@ class WorkflowTrigger(models.Model):
        CONSUMPTION = 1, _("Consumption Started")
        DOCUMENT_ADDED = 2, _("Document Added")
        DOCUMENT_UPDATED = 3, _("Document Updated")
+        SCHEDULED = 4, _("Scheduled")

    class DocumentSourceChoices(models.IntegerChoices):
        CONSUME_FOLDER = DocumentSource.ConsumeFolder.value, _("Consume Folder")
        API_UPLOAD = DocumentSource.ApiUpload.value, _("Api Upload")
        MAIL_FETCH = DocumentSource.MailFetch.value, _("Mail Fetch")

+    class ScheduleDateField(models.TextChoices):
+        ADDED = "added", _("Added")
+        CREATED = "created", _("Created")
+        MODIFIED = "modified", _("Modified")
+        CUSTOM_FIELD = "custom_field", _("Custom Field")
+
    type = models.PositiveIntegerField(
        _("Workflow Trigger Type"),
        choices=WorkflowTriggerType.choices,

@@ -1098,6 +1109,49 @@ class WorkflowTrigger(models.Model):
        verbose_name=_("has this correspondent"),
    )

+    schedule_offset_days = models.PositiveIntegerField(
+        _("schedule offset days"),
+        default=0,
+        help_text=_(
+            "The number of days to offset the schedule trigger by.",
+        ),
+    )
+
+    schedule_is_recurring = models.BooleanField(
+        _("schedule is recurring"),
+        default=False,
+        help_text=_(
+            "If the schedule should be recurring.",
+        ),
+    )
+
+    schedule_recurring_interval_days = models.PositiveIntegerField(
+        _("schedule recurring delay in days"),
+        default=1,
+        validators=[MinValueValidator(1)],
+        help_text=_(
+            "The number of days between recurring schedule triggers.",
+        ),
+    )
+
+    schedule_date_field = models.CharField(
+        _("schedule date field"),
+        max_length=20,
+        choices=ScheduleDateField.choices,
+        default=ScheduleDateField.ADDED,
+        help_text=_(
+            "The field to check for a schedule trigger.",
+        ),
+    )
+
+    schedule_date_custom_field = models.ForeignKey(
+        CustomField,
+        null=True,
+        blank=True,
+        on_delete=models.SET_NULL,
+        verbose_name=_("schedule date custom field"),
+    )
+
    class Meta:
        verbose_name = _("workflow trigger")
        verbose_name_plural = _("workflow triggers")

@@ -1348,3 +1402,39 @@ class Workflow(models.Model):
    def __str__(self):
        return f"Workflow: {self.name}"


+class WorkflowRun(models.Model):
+    workflow = models.ForeignKey(
+        Workflow,
+        on_delete=models.CASCADE,
+        related_name="runs",
+        verbose_name=_("workflow"),
+    )
+
+    type = models.PositiveIntegerField(
+        _("workflow trigger type"),
+        choices=WorkflowTrigger.WorkflowTriggerType.choices,
+        null=True,
+    )
+
+    document = models.ForeignKey(
+        Document,
+        null=True,
+        on_delete=models.CASCADE,
+        related_name="workflow_runs",
+        verbose_name=_("document"),
+    )
+
+    run_at = models.DateTimeField(
+        _("date run"),
+        default=timezone.now,
+        db_index=True,
+    )
+
+    class Meta:
+        verbose_name = _("workflow run")
+        verbose_name_plural = _("workflow runs")
+
+    def __str__(self):
+        return f"WorkflowRun of {self.workflow} at {self.run_at} on {self.document}"

@@ -160,7 +160,7 @@ class SetPermissionsMixin:
            },
        }
        if set_permissions is not None:
-            for action in permissions_dict:
+            for action, _ in permissions_dict.items():
                if action in set_permissions:
                    users = set_permissions[action]["users"]
                    permissions_dict[action]["users"] = self._validate_user_ids(users)

@@ -533,20 +533,27 @@ class CustomFieldSerializer(serializers.ModelSerializer):
        if (
            "data_type" in attrs
            and attrs["data_type"] == CustomField.FieldDataType.SELECT
-            and (
+        ) or (
+            self.instance
+            and self.instance.data_type == CustomField.FieldDataType.SELECT
+        ):
+            if (
                "extra_data" not in attrs
                or "select_options" not in attrs["extra_data"]
                or not isinstance(attrs["extra_data"]["select_options"], list)
                or len(attrs["extra_data"]["select_options"]) == 0
                or not all(
-                    isinstance(option, str) and len(option) > 0
+                    len(option.get("label", "")) > 0
                    for option in attrs["extra_data"]["select_options"]
                )
-            )
-        ):
-            raise serializers.ValidationError(
-                {"error": "extra_data.select_options must be a valid list"},
-            )
+            ):
+                raise serializers.ValidationError(
+                    {"error": "extra_data.select_options must be a valid list"},
+                )
+            # labels are valid, generate ids if not present
+            for option in attrs["extra_data"]["select_options"]:
+                if option.get("id") is None:
+                    option["id"] = get_random_string(length=16)
        elif (
            "data_type" in attrs
            and attrs["data_type"] == CustomField.FieldDataType.MONETARY

@@ -646,10 +653,14 @@ class CustomFieldInstanceSerializer(serializers.ModelSerializer):
        elif field.data_type == CustomField.FieldDataType.SELECT:
            select_options = field.extra_data["select_options"]
            try:
-                select_options[data["value"]]
+                next(
+                    option
+                    for option in select_options
+                    if option["id"] == data["value"]
+                )
            except Exception:
                raise serializers.ValidationError(
-                    f"Value must be index of an element in {select_options}",
+                    f"Value must be an id of an element in {select_options}",
                )
        elif field.data_type == CustomField.FieldDataType.DOCUMENTLINK:
            doc_ids = data["value"]

@@ -1772,6 +1783,11 @@ class WorkflowTriggerSerializer(serializers.ModelSerializer):
            "filter_has_tags",
            "filter_has_correspondent",
            "filter_has_document_type",
+            "schedule_offset_days",
+            "schedule_is_recurring",
+            "schedule_recurring_interval_days",
+            "schedule_date_field",
+            "schedule_date_custom_field",
        ]

    def validate(self, attrs):

@@ -37,6 +37,7 @@ from documents.models import PaperlessTask
from documents.models import Tag
from documents.models import Workflow
from documents.models import WorkflowAction
+from documents.models import WorkflowRun
from documents.models import WorkflowTrigger
from documents.permissions import get_objects_for_user_owner_aware
from documents.permissions import set_permissions_for_object

@@ -367,21 +368,6 @@ class CannotMoveFilesException(Exception):
    pass


-# should be disabled in /src/documents/management/commands/document_importer.py handle
-@receiver(models.signals.post_save, sender=CustomField)
-def update_cf_instance_documents(sender, instance: CustomField, **kwargs):
-    """
-    'Select' custom field instances get their end-user value (e.g. in file names) from the select_options in extra_data,
-    which is contained in the custom field itself. So when the field is changed, we (may) need to update the file names
-    of all documents that have this custom field.
-    """
-    if (
-        instance.data_type == CustomField.FieldDataType.SELECT
-    ):  # Only select fields, for now
-        for cf_instance in instance.fields.all():
-            update_filename_and_move_files(sender, cf_instance)
-
-
# should be disabled in /src/documents/management/commands/document_importer.py handle
@receiver(models.signals.post_save, sender=CustomFieldInstance)
@receiver(models.signals.m2m_changed, sender=Document.tags.through)

@@ -520,6 +506,34 @@ def update_filename_and_move_files(
    )


+# should be disabled in /src/documents/management/commands/document_importer.py handle
+@receiver(models.signals.post_save, sender=CustomField)
+def check_paths_and_prune_custom_fields(sender, instance: CustomField, **kwargs):
+    """
+    When a custom field is updated:
+    1. 'Select' custom field instances get their end-user value (e.g. in file names) from the select_options in extra_data,
+    which is contained in the custom field itself. So when the field is changed, we (may) need to update the file names
+    of all documents that have this custom field.
+    2. If a 'Select' field option was removed, we need to nullify the custom field instances that have the option.
+    """
+    if (
+        instance.data_type == CustomField.FieldDataType.SELECT
+    ):  # Only select fields, for now
+        for cf_instance in instance.fields.all():
+            options = instance.extra_data.get("select_options", [])
+            try:
+                next(
+                    option["label"]
+                    for option in options
+                    if option["id"] == cf_instance.value
+                )
+            except StopIteration:
+                # The value of this custom field instance is not in the select options anymore
+                cf_instance.value_select = None
+                cf_instance.save()
+            update_filename_and_move_files(sender, cf_instance)


def set_log_entry(sender, document: Document, logging_group=None, **kwargs):
    ct = ContentType.objects.get(model="document")
    user = User.objects.get(username="consumer")

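Note: the pruning rule in isolation, as a toy example (the ids are made up):

    options = [{"id": "abc-123", "label": "Option 1"}]  # "def-456" was just removed
    stored = "def-456"  # value_select of an existing instance
    if not any(option["id"] == stored for option in options):
        stored = None  # the handler nullifies value_select and saves the instance
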
@@ -917,6 +931,12 @@ def run_workflows(
            document.save()
            document.tags.set(doc_tag_ids)

+    WorkflowRun.objects.create(
+        workflow=workflow,
+        type=trigger_type,
+        document=document if not use_overrides else None,
+    )
+
    if use_overrides:
        return overrides, "\n".join(messages)

@@ -31,10 +31,14 @@ from documents.double_sided import CollatePlugin
from documents.file_handling import create_source_path_directory
from documents.file_handling import generate_unique_filename
from documents.models import Correspondent
+from documents.models import CustomFieldInstance
from documents.models import Document
from documents.models import DocumentType
from documents.models import StoragePath
from documents.models import Tag
+from documents.models import Workflow
+from documents.models import WorkflowRun
+from documents.models import WorkflowTrigger
from documents.parsers import DocumentParser
from documents.parsers import get_parser_class_for_mime_type
from documents.plugins.base import ConsumeTaskPlugin

@@ -44,6 +48,7 @@ from documents.plugins.helpers import ProgressStatusOptions
from documents.sanity_checker import SanityCheckFailedException
from documents.signals import document_updated
from documents.signals.handlers import cleanup_document_deletion
+from documents.signals.handlers import run_workflows

if settings.AUDIT_LOG_ENABLED:
    from auditlog.models import LogEntry

@@ -337,3 +342,85 @@ def empty_trash(doc_ids=None):
        cleanup_document_deletion,
        sender=Document,
    )


+@shared_task
+def check_scheduled_workflows():
+    scheduled_workflows: list[Workflow] = (
+        Workflow.objects.filter(
+            triggers__type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
+            enabled=True,
+        )
+        .distinct()
+        .prefetch_related("triggers")
+    )
+    if scheduled_workflows.count() > 0:
+        logger.debug(f"Checking {len(scheduled_workflows)} scheduled workflows")
+        for workflow in scheduled_workflows:
+            schedule_triggers = workflow.triggers.filter(
+                type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
+            )
+            trigger: WorkflowTrigger
+            for trigger in schedule_triggers:
+                documents = Document.objects.none()
+                offset_td = timedelta(days=trigger.schedule_offset_days)
+                logger.debug(
+                    f"Checking trigger {trigger} with offset {offset_td} against field: {trigger.schedule_date_field}",
+                )
+                match trigger.schedule_date_field:
+                    case WorkflowTrigger.ScheduleDateField.ADDED:
+                        documents = Document.objects.filter(
+                            added__lt=timezone.now() - offset_td,
+                        )
+                    case WorkflowTrigger.ScheduleDateField.CREATED:
+                        documents = Document.objects.filter(
+                            created__lt=timezone.now() - offset_td,
+                        )
+                    case WorkflowTrigger.ScheduleDateField.MODIFIED:
+                        documents = Document.objects.filter(
+                            modified__lt=timezone.now() - offset_td,
+                        )
+                    case WorkflowTrigger.ScheduleDateField.CUSTOM_FIELD:
+                        cf_instances = CustomFieldInstance.objects.filter(
+                            field=trigger.schedule_date_custom_field,
+                            value_date__lt=timezone.now() - offset_td,
+                        )
+                        documents = Document.objects.filter(
+                            id__in=cf_instances.values_list("document", flat=True),
+                        )
+                if documents.count() > 0:
+                    logger.debug(
+                        f"Found {documents.count()} documents for trigger {trigger}",
+                    )
+                    for document in documents:
+                        workflow_runs = WorkflowRun.objects.filter(
+                            document=document,
+                            type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
+                            workflow=workflow,
+                        ).order_by("-run_at")
+                        if not trigger.schedule_is_recurring and workflow_runs.exists():
+                            # schedule is non-recurring and the workflow has already been run
+                            logger.debug(
+                                f"Skipping document {document} for non-recurring workflow {workflow} as it has already been run",
+                            )
+                            continue
+                        elif (
+                            trigger.schedule_is_recurring
+                            and workflow_runs.exists()
+                            and (
+                                workflow_runs.last().run_at
+                                > timezone.now()
+                                - timedelta(
+                                    days=trigger.schedule_recurring_interval_days,
+                                )
+                            )
+                        ):
+                            # schedule is recurring but the last run was within the number of recurring interval days
+                            logger.debug(
+                                f"Skipping document {document} for recurring workflow {workflow} as the last run was within the recurring interval",
+                            )
+                            continue
+                        run_workflows(
+                            WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
+                            document,
+                        )

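Note: the two skip rules above, condensed into one function using plain datetimes instead of model instances (a sketch, not the task itself):

    from datetime import datetime, timedelta

    def should_skip(is_recurring, last_run_at, interval_days, now):
        if last_run_at is None:
            return False  # never run before -> eligible
        if not is_recurring:
            return True   # non-recurring and already run once
        # recurring: skip while the last run is still inside the interval window
        return last_run_at > now - timedelta(days=interval_days)

    now = datetime(2024, 11, 13)
    print(should_skip(True, now - timedelta(days=3), 7, now))   # True: too recent
    print(should_skip(True, now - timedelta(days=10), 7, now))  # False: due again
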
@@ -253,7 +253,11 @@ def get_custom_fields_context(
        ):
            options = field_instance.field.extra_data["select_options"]
            value = pathvalidate.sanitize_filename(
-                options[int(field_instance.value)],
+                next(
+                    option["label"]
+                    for option in options
+                    if option["id"] == field_instance.value
+                ),
                replacement_text="-",
            )
        else:

@@ -1,7 +1,9 @@
import json
from unittest import mock

+from auditlog.models import LogEntry
from django.contrib.auth.models import User
+from django.test import override_settings
from guardian.shortcuts import assign_perm
from rest_framework import status
from rest_framework.test import APITestCase

@@ -51,8 +53,12 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
        self.doc3.tags.add(self.t2)
        self.doc4.tags.add(self.t1, self.t2)
        self.sp1 = StoragePath.objects.create(name="sp1", path="Something/{checksum}")
-        self.cf1 = CustomField.objects.create(name="cf1", data_type="text")
-        self.cf2 = CustomField.objects.create(name="cf2", data_type="text")
+        self.cf1 = CustomField.objects.create(name="cf1", data_type="string")
+        self.cf2 = CustomField.objects.create(name="cf2", data_type="string")
+
+    def setup_mock(self, m, method_name, return_value="OK"):
+        m.return_value = return_value
+        m.__name__ = method_name

    @mock.patch("documents.bulk_edit.bulk_update_documents.delay")
    def test_api_set_correspondent(self, bulk_update_task_mock):

@@ -178,7 +184,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):

    @mock.patch("documents.serialisers.bulk_edit.modify_tags")
    def test_api_modify_tags(self, m):
-        m.return_value = "OK"
+        self.setup_mock(m, "modify_tags")
        response = self.client.post(
            "/api/documents/bulk_edit/",
            json.dumps(

@@ -211,7 +217,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
            - API returns HTTP 400
            - modify_tags is not called
        """
-        m.return_value = "OK"
+        self.setup_mock(m, "modify_tags")
        response = self.client.post(
            "/api/documents/bulk_edit/",
            json.dumps(

@@ -230,7 +236,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):

    @mock.patch("documents.serialisers.bulk_edit.modify_custom_fields")
    def test_api_modify_custom_fields(self, m):
-        m.return_value = "OK"
+        self.setup_mock(m, "modify_custom_fields")
        response = self.client.post(
            "/api/documents/bulk_edit/",
            json.dumps(

@@ -263,8 +269,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
            - API returns HTTP 400
            - modify_custom_fields is not called
        """
-        m.return_value = "OK"
-
+        self.setup_mock(m, "modify_custom_fields")
        # Missing add_custom_fields
        response = self.client.post(
            "/api/documents/bulk_edit/",

@@ -359,7 +364,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):

    @mock.patch("documents.serialisers.bulk_edit.delete")
    def test_api_delete(self, m):
-        m.return_value = "OK"
+        self.setup_mock(m, "delete")
        response = self.client.post(
            "/api/documents/bulk_edit/",
            json.dumps(

@@ -383,8 +388,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
        THEN:
            - set_storage_path is called with correct document IDs and storage_path ID
        """
-        m.return_value = "OK"
-
+        self.setup_mock(m, "set_storage_path")
        response = self.client.post(
            "/api/documents/bulk_edit/",
            json.dumps(

@@ -414,8 +418,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
        THEN:
            - set_storage_path is called with correct document IDs and None storage_path
        """
-        m.return_value = "OK"
-
+        self.setup_mock(m, "set_storage_path")
        response = self.client.post(
            "/api/documents/bulk_edit/",
            json.dumps(

@@ -728,7 +731,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):

    @mock.patch("documents.serialisers.bulk_edit.set_permissions")
    def test_set_permissions(self, m):
-        m.return_value = "OK"
+        self.setup_mock(m, "set_permissions")
        user1 = User.objects.create(username="user1")
        user2 = User.objects.create(username="user2")
        permissions = {

@@ -763,7 +766,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):

    @mock.patch("documents.serialisers.bulk_edit.set_permissions")
    def test_set_permissions_merge(self, m):
-        m.return_value = "OK"
+        self.setup_mock(m, "set_permissions")
        user1 = User.objects.create(username="user1")
        user2 = User.objects.create(username="user2")
        permissions = {

@@ -823,7 +826,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
        THEN:
            - User is not able to change permissions
        """
-        m.return_value = "OK"
+        self.setup_mock(m, "set_permissions")
        self.doc1.owner = User.objects.get(username="temp_admin")
        self.doc1.save()
        user1 = User.objects.create(username="user1")

@@ -875,7 +878,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
        THEN:
            - set_storage_path only called if user can edit all docs
        """
-        m.return_value = "OK"
+        self.setup_mock(m, "set_storage_path")
        self.doc1.owner = User.objects.get(username="temp_admin")
        self.doc1.save()
        user1 = User.objects.create(username="user1")

@@ -919,8 +922,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):

    @mock.patch("documents.serialisers.bulk_edit.rotate")
    def test_rotate(self, m):
-        m.return_value = "OK"
-
+        self.setup_mock(m, "rotate")
        response = self.client.post(
            "/api/documents/bulk_edit/",
            json.dumps(

@@ -974,8 +976,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):

    @mock.patch("documents.serialisers.bulk_edit.merge")
    def test_merge(self, m):
-        m.return_value = "OK"
-
+        self.setup_mock(m, "merge")
        response = self.client.post(
            "/api/documents/bulk_edit/",
            json.dumps(

@@ -1003,8 +1004,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
        user1 = User.objects.create(username="user1")
        self.client.force_authenticate(user=user1)

-        m.return_value = "OK"
-
+        self.setup_mock(m, "merge")
        response = self.client.post(
            "/api/documents/bulk_edit/",
            json.dumps(

@@ -1053,8 +1053,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
        THEN:
            - The API fails with a correct error code
        """
-        m.return_value = "OK"
-
+        self.setup_mock(m, "merge")
        response = self.client.post(
            "/api/documents/bulk_edit/",
            json.dumps(

@@ -1074,8 +1073,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):

    @mock.patch("documents.serialisers.bulk_edit.split")
    def test_split(self, m):
-        m.return_value = "OK"
-
+        self.setup_mock(m, "split")
        response = self.client.post(
            "/api/documents/bulk_edit/",
            json.dumps(

@@ -1165,8 +1163,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):

    @mock.patch("documents.serialisers.bulk_edit.delete_pages")
    def test_delete_pages(self, m):
-        m.return_value = "OK"
-
+        self.setup_mock(m, "delete_pages")
        response = self.client.post(
            "/api/documents/bulk_edit/",
            json.dumps(

@@ -1254,3 +1251,87 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn(b"pages must be a list of integers", response.content)

+    @override_settings(AUDIT_LOG_ENABLED=True)
+    def test_bulk_edit_audit_log_enabled_simple_field(self):
+        """
+        GIVEN:
+            - Audit log is enabled
+        WHEN:
+            - API to bulk edit documents is called
+        THEN:
+            - Audit log is created
+        """
+        LogEntry.objects.all().delete()
+        response = self.client.post(
+            "/api/documents/bulk_edit/",
+            json.dumps(
+                {
+                    "documents": [self.doc1.id],
+                    "method": "set_correspondent",
+                    "parameters": {"correspondent": self.c2.id},
+                },
+            ),
+            content_type="application/json",
+        )
+
+        self.assertEqual(response.status_code, status.HTTP_200_OK)
+        self.assertEqual(LogEntry.objects.filter(object_pk=self.doc1.id).count(), 1)
+
+    @override_settings(AUDIT_LOG_ENABLED=True)
+    def test_bulk_edit_audit_log_enabled_tags(self):
+        """
+        GIVEN:
+            - Audit log is enabled
+        WHEN:
+            - API to bulk edit tags is called
+        THEN:
+            - Audit log is created
+        """
+        LogEntry.objects.all().delete()
+        response = self.client.post(
+            "/api/documents/bulk_edit/",
+            json.dumps(
+                {
+                    "documents": [self.doc1.id],
+                    "method": "modify_tags",
+                    "parameters": {
+                        "add_tags": [self.t1.id],
+                        "remove_tags": [self.t2.id],
+                    },
+                },
+            ),
+            content_type="application/json",
+        )
+
+        self.assertEqual(response.status_code, status.HTTP_200_OK)
+        self.assertEqual(LogEntry.objects.filter(object_pk=self.doc1.id).count(), 1)
+
+    @override_settings(AUDIT_LOG_ENABLED=True)
+    def test_bulk_edit_audit_log_enabled_custom_fields(self):
+        """
+        GIVEN:
+            - Audit log is enabled
+        WHEN:
+            - API to bulk edit custom fields is called
+        THEN:
+            - Audit log is created
+        """
+        LogEntry.objects.all().delete()
+        response = self.client.post(
+            "/api/documents/bulk_edit/",
+            json.dumps(
+                {
+                    "documents": [self.doc1.id],
+                    "method": "modify_custom_fields",
+                    "parameters": {
+                        "add_custom_fields": [self.cf1.id],
+                        "remove_custom_fields": [],
+                    },
+                },
+            ),
+            content_type="application/json",
+        )
+
+        self.assertEqual(response.status_code, status.HTTP_200_OK)
+        self.assertEqual(LogEntry.objects.filter(object_pk=self.doc1.id).count(), 2)

@@ -1,5 +1,6 @@
import json
from datetime import date
+from unittest.mock import ANY

from django.contrib.auth.models import Permission
from django.contrib.auth.models import User

@@ -61,7 +62,10 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
                    "data_type": "select",
                    "name": "Select Field",
                    "extra_data": {
-                        "select_options": ["Option 1", "Option 2"],
+                        "select_options": [
+                            {"label": "Option 1", "id": "abc-123"},
+                            {"label": "Option 2", "id": "def-456"},
+                        ],
                    },
                },
            ),

@@ -73,7 +77,10 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):

        self.assertCountEqual(
            data["extra_data"]["select_options"],
-            ["Option 1", "Option 2"],
+            [
+                {"label": "Option 1", "id": "abc-123"},
+                {"label": "Option 2", "id": "def-456"},
+            ],
        )

    def test_create_custom_field_nonunique_name(self):

@@ -138,6 +145,133 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
        )
        self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)

+    def test_custom_field_select_unique_ids(self):
+        """
+        GIVEN:
+            - Nothing
+            - Existing custom field
+        WHEN:
+            - API request to create custom field with select options without id
+        THEN:
+            - Unique ids are generated for each option
+        """
+        resp = self.client.post(
+            self.ENDPOINT,
+            json.dumps(
+                {
+                    "data_type": "select",
+                    "name": "Select Field",
+                    "extra_data": {
+                        "select_options": [
+                            {"label": "Option 1"},
+                            {"label": "Option 2"},
+                        ],
+                    },
+                },
+            ),
+            content_type="application/json",
+        )
+        self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
+
+        data = resp.json()
+
+        self.assertCountEqual(
+            data["extra_data"]["select_options"],
+            [
+                {"label": "Option 1", "id": ANY},
+                {"label": "Option 2", "id": ANY},
+            ],
+        )
+
+        # Add a new option
+        resp = self.client.patch(
+            f"{self.ENDPOINT}{data['id']}/",
+            json.dumps(
+                {
+                    "extra_data": {
+                        "select_options": data["extra_data"]["select_options"]
+                        + [{"label": "Option 3"}],
+                    },
+                },
+            ),
+            content_type="application/json",
+        )
+        self.assertEqual(resp.status_code, status.HTTP_200_OK)
+
+        data = resp.json()
+
+        self.assertCountEqual(
+            data["extra_data"]["select_options"],
+            [
+                {"label": "Option 1", "id": ANY},
+                {"label": "Option 2", "id": ANY},
+                {"label": "Option 3", "id": ANY},
+            ],
+        )
+
+    def test_custom_field_select_options_pruned(self):
+        """
+        GIVEN:
+            - Select custom field exists and document instance with one of the options
+        WHEN:
+            - API request to remove an option from the select field
+        THEN:
+            - The option is removed from the field
+            - The option is removed from the document instance
+        """
+        custom_field_select = CustomField.objects.create(
+            name="Select Field",
+            data_type=CustomField.FieldDataType.SELECT,
+            extra_data={
+                "select_options": [
+                    {"label": "Option 1", "id": "abc-123"},
+                    {"label": "Option 2", "id": "def-456"},
+                    {"label": "Option 3", "id": "ghi-789"},
+                ],
+            },
+        )
+
+        doc = Document.objects.create(
+            title="WOW",
+            content="the content",
+            checksum="123",
+            mime_type="application/pdf",
+        )
+        CustomFieldInstance.objects.create(
+            document=doc,
+            field=custom_field_select,
+            value_text="abc-123",
+        )
+
+        resp = self.client.patch(
+            f"{self.ENDPOINT}{custom_field_select.id}/",
+            json.dumps(
+                {
+                    "extra_data": {
+                        "select_options": [
+                            {"label": "Option 1", "id": "abc-123"},
+                            {"label": "Option 3", "id": "ghi-789"},
+                        ],
+                    },
+                },
+            ),
+            content_type="application/json",
+        )
+        self.assertEqual(resp.status_code, status.HTTP_200_OK)
+
+        data = resp.json()
+
+        self.assertCountEqual(
+            data["extra_data"]["select_options"],
+            [
+                {"label": "Option 1", "id": "abc-123"},
+                {"label": "Option 3", "id": "ghi-789"},
+            ],
+        )
+
+        doc.refresh_from_db()
+        self.assertEqual(doc.custom_fields.first().value, None)
+
    def test_create_custom_field_monetary_validation(self):
        """
        GIVEN:

@@ -261,7 +395,10 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
            name="Test Custom Field Select",
            data_type=CustomField.FieldDataType.SELECT,
            extra_data={
-                "select_options": ["Option 1", "Option 2"],
+                "select_options": [
+                    {"label": "Option 1", "id": "abc-123"},
+                    {"label": "Option 2", "id": "def-456"},
+                ],
            },
        )

@@ -309,7 +446,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
                    },
                    {
                        "field": custom_field_select.id,
-                        "value": 0,
+                        "value": "abc-123",
                    },
                ],
            },

@@ -332,7 +469,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
                {"field": custom_field_monetary.id, "value": "EUR11.10"},
                {"field": custom_field_monetary2.id, "value": "11.1"},
                {"field": custom_field_documentlink.id, "value": [doc2.id]},
-                {"field": custom_field_select.id, "value": 0},
+                {"field": custom_field_select.id, "value": "abc-123"},
            ],
        )

@@ -722,7 +859,10 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
            name="Test Custom Field SELECT",
            data_type=CustomField.FieldDataType.SELECT,
            extra_data={
-                "select_options": ["Option 1", "Option 2"],
+                "select_options": [
+                    {"label": "Option 1", "id": "abc-123"},
+                    {"label": "Option 2", "id": "def-456"},
+                ],
            },
        )

@@ -730,7 +870,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
            f"/api/documents/{doc.id}/",
            data={
                "custom_fields": [
-                    {"field": custom_field_select.id, "value": 3},
+                    {"field": custom_field_select.id, "value": "not an option"},
                ],
            },
            format="json",

@@ -657,13 +657,16 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
            name="Test Custom Field Select",
            data_type=CustomField.FieldDataType.SELECT,
            extra_data={
-                "select_options": ["Option 1", "Choice 2"],
+                "select_options": [
+                    {"label": "Option 1", "id": "abc123"},
+                    {"label": "Choice 2", "id": "def456"},
+                ],
            },
        )
        CustomFieldInstance.objects.create(
            document=doc1,
            field=custom_field_select,
-            value_select=1,
+            value_select="def456",
        )

        r = self.client.get("/api/documents/?custom_fields__icontains=choice")

@@ -46,7 +46,13 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):

        # Add some options to the select_field
        select = self.custom_fields["select_field"]
-        select.extra_data = {"select_options": ["A", "B", "C"]}
+        select.extra_data = {
+            "select_options": [
+                {"label": "A", "id": "abc-123"},
+                {"label": "B", "id": "def-456"},
+                {"label": "C", "id": "ghi-789"},
+            ],
+        }
        select.save()

        # Now we will create some test documents

@@ -122,9 +128,9 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):

        # CustomField.FieldDataType.SELECT
        self._create_document(select_field=None)
-        self._create_document(select_field=0)
-        self._create_document(select_field=1)
-        self._create_document(select_field=2)
+        self._create_document(select_field="abc-123")
+        self._create_document(select_field="def-456")
+        self._create_document(select_field="ghi-789")

    def _create_document(self, **kwargs):
        title = str(kwargs)

@@ -296,18 +302,18 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
        )

    def test_select(self):
-        # For select fields, you can either specify the index
+        # For select fields, you can either specify the id of the option
        # or the name of the option. They function exactly the same.
        self._assert_query_match_predicate(
-            ["select_field", "exact", 1],
+            ["select_field", "exact", "def-456"],
            lambda document: "select_field" in document
-            and document["select_field"] == 1,
+            and document["select_field"] == "def-456",
        )
        # This is the same as:
        self._assert_query_match_predicate(
            ["select_field", "exact", "B"],
            lambda document: "select_field" in document
-            and document["select_field"] == 1,
+            and document["select_field"] == "def-456",
        )

        # ==========================================================#

@@ -522,9 +528,9 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):

    def test_invalid_value(self):
        self._assert_validation_error(
-            json.dumps(["select_field", "exact", "not an option"]),
+            json.dumps(["select_field", "exact", []]),
            ["custom_field_query", "2"],
-            "integer",
+            "string",
        )

    def test_invalid_logical_operator(self):

@@ -544,7 +544,11 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
            name="test",
            data_type=CustomField.FieldDataType.SELECT,
            extra_data={
-                "select_options": ["apple", "banana", "cherry"],
+                "select_options": [
+                    {"label": "apple", "id": "abc123"},
+                    {"label": "banana", "id": "def456"},
+                    {"label": "cherry", "id": "ghi789"},
+                ],
            },
        )
        doc = Document.objects.create(

@@ -555,14 +559,22 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
            archive_checksum="B",
            mime_type="application/pdf",
        )
-        CustomFieldInstance.objects.create(field=cf, document=doc, value_select=0)
+        CustomFieldInstance.objects.create(
+            field=cf,
+            document=doc,
+            value_select="abc123",
+        )

        self.assertEqual(generate_filename(doc), "document_apple.pdf")

        # handler should not have been called
        self.assertEqual(m.call_count, 0)
        cf.extra_data = {
-            "select_options": ["aubergine", "banana", "cherry"],
+            "select_options": [
+                {"label": "aubergine", "id": "abc123"},
+                {"label": "banana", "id": "def456"},
+                {"label": "cherry", "id": "ghi789"},
+            ],
        }
        cf.save()
        self.assertEqual(generate_filename(doc), "document_aubergine.pdf")

@@ -1373,13 +1385,18 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
        cf2 = CustomField.objects.create(
            name="Select Field",
            data_type=CustomField.FieldDataType.SELECT,
-            extra_data={"select_options": ["ChoiceOne", "ChoiceTwo"]},
+            extra_data={
+                "select_options": [
+                    {"label": "ChoiceOne", "id": "abc=123"},
+                    {"label": "ChoiceTwo", "id": "def-456"},
+                ],
+            },
        )

        cfi1 = CustomFieldInstance.objects.create(
            document=doc_a,
            field=cf2,
-            value_select=0,
+            value_select="abc=123",
        )

        cfi = CustomFieldInstance.objects.create(

src/documents/tests/test_migration_custom_field_selects.py (new file, 87 lines)

@@ -0,0 +1,87 @@
from unittest.mock import ANY

from documents.tests.utils import TestMigrations


class TestMigrateCustomFieldSelects(TestMigrations):
    migrate_from = "1058_workflowtrigger_schedule_date_custom_field_and_more"
    migrate_to = "1059_alter_customfieldinstance_value_select"

    def setUpBeforeMigration(self, apps):
        CustomField = apps.get_model("documents.CustomField")
        self.old_format = CustomField.objects.create(
            name="cf1",
            data_type="select",
            extra_data={"select_options": ["Option 1", "Option 2", "Option 3"]},
        )
        Document = apps.get_model("documents.Document")
        doc = Document.objects.create(title="doc1")
        CustomFieldInstance = apps.get_model("documents.CustomFieldInstance")
        self.old_instance = CustomFieldInstance.objects.create(
            field=self.old_format,
            value_select=0,
            document=doc,
        )

    def test_migrate_old_to_new_select_fields(self):
        self.old_format.refresh_from_db()
        self.old_instance.refresh_from_db()

        self.assertEqual(
            self.old_format.extra_data["select_options"],
            [
                {"label": "Option 1", "id": ANY},
                {"label": "Option 2", "id": ANY},
                {"label": "Option 3", "id": ANY},
            ],
        )

        self.assertEqual(
            self.old_instance.value_select,
            self.old_format.extra_data["select_options"][0]["id"],
        )


class TestMigrationCustomFieldSelectsReverse(TestMigrations):
    migrate_from = "1059_alter_customfieldinstance_value_select"
    migrate_to = "1058_workflowtrigger_schedule_date_custom_field_and_more"

    def setUpBeforeMigration(self, apps):
        CustomField = apps.get_model("documents.CustomField")
        self.new_format = CustomField.objects.create(
            name="cf1",
            data_type="select",
            extra_data={
                "select_options": [
                    {"label": "Option 1", "id": "id1"},
                    {"label": "Option 2", "id": "id2"},
                    {"label": "Option 3", "id": "id3"},
                ],
            },
        )
        Document = apps.get_model("documents.Document")
        doc = Document.objects.create(title="doc1")
        CustomFieldInstance = apps.get_model("documents.CustomFieldInstance")
        self.new_instance = CustomFieldInstance.objects.create(
            field=self.new_format,
            value_select="id1",
            document=doc,
        )

    def test_migrate_new_to_old_select_fields(self):
        self.new_format.refresh_from_db()
        self.new_instance.refresh_from_db()

        self.assertEqual(
            self.new_format.extra_data["select_options"],
            [
                "Option 1",
                "Option 2",
                "Option 3",
            ],
        )

        self.assertEqual(
            self.new_instance.value_select,
            0,
        )

@@ -6,12 +6,14 @@ from django.conf import settings
from django.contrib.auth.models import Permission
from django.contrib.auth.models import User
from django.test import TestCase
+from django.test import override_settings
from django.utils import timezone
from rest_framework import status

from documents.models import Document
from documents.models import ShareLink
from documents.tests.utils import DirectoriesMixin
+from paperless.models import ApplicationConfiguration


class TestViews(DirectoriesMixin, TestCase):

@@ -67,6 +69,26 @@ class TestViews(DirectoriesMixin, TestCase):
            f"frontend/{language_actual}/main.js",
        )

+    @override_settings(BASE_URL="/paperless/")
+    def test_index_app_logo_with_base_url(self):
+        """
+        GIVEN:
+            - Existing config with app_logo specified
+        WHEN:
+            - Index page is loaded
+        THEN:
+            - app_logo is prefixed with BASE_URL
+        """
+        config = ApplicationConfiguration.objects.first()
+        config.app_logo = "/logo/example.jpg"
+        config.save()
+        self.client.force_login(self.user)
+        response = self.client.get("/")
+        self.assertEqual(
+            response.context["APP_LOGO"],
+            f"/paperless{config.app_logo}",
+        )
+
    def test_share_link_views(self):
        """
        GIVEN:

@@ -29,6 +29,7 @@ from documents.models import StoragePath
from documents.models import Tag
from documents.models import Workflow
from documents.models import WorkflowAction
+from documents.models import WorkflowRun
from documents.models import WorkflowTrigger
from documents.signals import document_consumption_finished
from documents.tests.utils import DirectoriesMixin

@@ -1306,6 +1307,275 @@ class TestWorkflows(DirectoriesMixin, FileSystemAssertsMixin, APITestCase):
        # group2 should have been added
        self.assertIn(self.group2, group_perms)

    def test_workflow_scheduled_trigger_created(self):
        """
        GIVEN:
            - Existing workflow with SCHEDULED trigger against the created field and action that assigns owner
            - Existing doc that matches the trigger
        WHEN:
            - Scheduled workflows are checked
        THEN:
            - Workflow runs, document owner is updated
        """
        trigger = WorkflowTrigger.objects.create(
            type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
            schedule_offset_days=1,
            schedule_date_field="created",
        )
        action = WorkflowAction.objects.create(
            assign_title="Doc assign owner",
            assign_owner=self.user2,
        )
        w = Workflow.objects.create(
            name="Workflow 1",
            order=0,
        )
        w.triggers.add(trigger)
        w.actions.add(action)
        w.save()

        now = timezone.localtime(timezone.now())
        created = now - timedelta(weeks=520)
        doc = Document.objects.create(
            title="sample test",
            correspondent=self.c,
            original_filename="sample.pdf",
            created=created,
        )

        tasks.check_scheduled_workflows()

        doc.refresh_from_db()
        self.assertEqual(doc.owner, self.user2)

    def test_workflow_scheduled_trigger_added(self):
        """
        GIVEN:
            - Existing workflow with SCHEDULED trigger against the added field and action that assigns owner
            - Existing doc that matches the trigger
        WHEN:
            - Scheduled workflows are checked
        THEN:
            - Workflow runs, document owner is updated
        """
        trigger = WorkflowTrigger.objects.create(
            type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
            schedule_offset_days=1,
            schedule_date_field=WorkflowTrigger.ScheduleDateField.ADDED,
        )
        action = WorkflowAction.objects.create(
            assign_title="Doc assign owner",
            assign_owner=self.user2,
        )
        w = Workflow.objects.create(
            name="Workflow 1",
            order=0,
        )
        w.triggers.add(trigger)
        w.actions.add(action)
        w.save()

        added = timezone.now() - timedelta(days=365)
        doc = Document.objects.create(
            title="sample test",
            correspondent=self.c,
            original_filename="sample.pdf",
            added=added,
        )

        tasks.check_scheduled_workflows()

        doc.refresh_from_db()
        self.assertEqual(doc.owner, self.user2)

    @mock.patch("documents.models.Document.objects.filter", autospec=True)
    def test_workflow_scheduled_trigger_modified(self, mock_filter):
        """
        GIVEN:
            - Existing workflow with SCHEDULED trigger against the modified field and action that assigns owner
            - Existing doc that matches the trigger
        WHEN:
            - Scheduled workflows are checked
        THEN:
            - Workflow runs, document owner is updated
        """
        # we have to mock because the modified field is auto_now
        mock_filter.return_value = Document.objects.all()
        trigger = WorkflowTrigger.objects.create(
            type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
            schedule_offset_days=1,
            schedule_date_field=WorkflowTrigger.ScheduleDateField.MODIFIED,
        )
        action = WorkflowAction.objects.create(
            assign_title="Doc assign owner",
            assign_owner=self.user2,
        )
        w = Workflow.objects.create(
            name="Workflow 1",
            order=0,
        )
        w.triggers.add(trigger)
        w.actions.add(action)
        w.save()

        doc = Document.objects.create(
            title="sample test",
            correspondent=self.c,
            original_filename="sample.pdf",
        )

        tasks.check_scheduled_workflows()

        doc.refresh_from_db()
        self.assertEqual(doc.owner, self.user2)

    def test_workflow_scheduled_trigger_custom_field(self):
        """
        GIVEN:
            - Existing workflow with SCHEDULED trigger against a custom field and action that assigns owner
            - Existing doc that matches the trigger
        WHEN:
            - Scheduled workflows are checked
        THEN:
            - Workflow runs, document owner is updated
        """
        trigger = WorkflowTrigger.objects.create(
            type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
            schedule_offset_days=1,
            schedule_date_field=WorkflowTrigger.ScheduleDateField.CUSTOM_FIELD,
            schedule_date_custom_field=self.cf1,
        )
        action = WorkflowAction.objects.create(
            assign_title="Doc assign owner",
            assign_owner=self.user2,
        )
        w = Workflow.objects.create(
            name="Workflow 1",
            order=0,
        )
        w.triggers.add(trigger)
        w.actions.add(action)
        w.save()

        doc = Document.objects.create(
            title="sample test",
            correspondent=self.c,
            original_filename="sample.pdf",
        )
        CustomFieldInstance.objects.create(
            document=doc,
            field=self.cf1,
            value_date=timezone.now() - timedelta(days=2),
        )

        tasks.check_scheduled_workflows()

        doc.refresh_from_db()
        self.assertEqual(doc.owner, self.user2)

    def test_workflow_scheduled_already_run(self):
        """
        GIVEN:
            - Existing workflow with SCHEDULED trigger
            - Existing doc that has already had the workflow run
        WHEN:
            - Scheduled workflows are checked
        THEN:
            - Workflow does not run again
        """
        trigger = WorkflowTrigger.objects.create(
            type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
            schedule_offset_days=1,
            schedule_date_field=WorkflowTrigger.ScheduleDateField.CREATED,
        )
        action = WorkflowAction.objects.create(
            assign_title="Doc assign owner",
            assign_owner=self.user2,
        )
        w = Workflow.objects.create(
            name="Workflow 1",
            order=0,
        )
        w.triggers.add(trigger)
        w.actions.add(action)
        w.save()

        doc = Document.objects.create(
            title="sample test",
            correspondent=self.c,
            original_filename="sample.pdf",
            created=timezone.now() - timedelta(days=2),
        )

        wr = WorkflowRun.objects.create(
            workflow=w,
            document=doc,
            type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
            run_at=timezone.now(),
        )
        self.assertEqual(
            str(wr),
            f"WorkflowRun of {w} at {wr.run_at} on {doc}",
        )  # coverage

        tasks.check_scheduled_workflows()

        doc.refresh_from_db()
        self.assertIsNone(doc.owner)

    def test_workflow_scheduled_trigger_too_early(self):
        """
        GIVEN:
            - Existing workflow with SCHEDULED trigger and recurring interval of 7 days
            - Workflow run date is 6 days ago
        WHEN:
            - Scheduled workflows are checked
        THEN:
            - Workflow does not run because the recurring interval has not yet elapsed
        """
        trigger = WorkflowTrigger.objects.create(
            type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
            schedule_offset_days=30,
            schedule_date_field=WorkflowTrigger.ScheduleDateField.CREATED,
            schedule_is_recurring=True,
            schedule_recurring_interval_days=7,
        )
        action = WorkflowAction.objects.create(
            assign_title="Doc assign owner",
            assign_owner=self.user2,
        )
        w = Workflow.objects.create(
            name="Workflow 1",
            order=0,
        )
        w.triggers.add(trigger)
        w.actions.add(action)
        w.save()

        doc = Document.objects.create(
            title="sample test",
            correspondent=self.c,
            original_filename="sample.pdf",
            created=timezone.now() - timedelta(days=40),
        )

        WorkflowRun.objects.create(
            workflow=w,
            document=doc,
            type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
            run_at=timezone.now() - timedelta(days=6),
        )

        with self.assertLogs(level="DEBUG") as cm:
            tasks.check_scheduled_workflows()
            self.assertIn(
                "last run was within the recurring interval",
                " ".join(cm.output),
            )

        doc.refresh_from_db()
        self.assertIsNone(doc.owner)

    def test_workflow_enabled_disabled(self):
        trigger = WorkflowTrigger.objects.create(
            type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
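
An aside for orientation: the scheduled-trigger tests above all exercise the same eligibility rule, i.e. fire once schedule_offset_days past the configured date field has elapsed, and skip when a prior WorkflowRun exists unless the trigger is recurring and the recurring interval has passed. A minimal sketch of that rule follows; the helper name is_schedule_due and its signature are hypothetical, only the schedule_* field names come from the tests.

from datetime import datetime, timedelta

def is_schedule_due(trigger, reference_date: datetime, last_run_at: datetime | None, now: datetime) -> bool:
    # Hypothetical sketch, not the project's implementation.
    # Fire only after the configured offset past the reference date.
    if now < reference_date + timedelta(days=trigger.schedule_offset_days):
        return False
    if last_run_at is not None:
        if not trigger.schedule_is_recurring:
            # Non-recurring triggers run at most once per document
            return False
        if now - last_run_at < timedelta(days=trigger.schedule_recurring_interval_days):
            # "last run was within the recurring interval"
            return False
    return True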
@@ -1354,7 +1624,7 @@ class TestWorkflows(DirectoriesMixin, FileSystemAssertsMixin, APITestCase):

    def test_new_trigger_type_raises_exception(self):
        trigger = WorkflowTrigger.objects.create(
            type=4,
            type=99,
        )
        action = WorkflowAction.objects.create(
            assign_title="Doc assign owner",
@@ -1370,7 +1640,7 @@ class TestWorkflows(DirectoriesMixin, FileSystemAssertsMixin, APITestCase):
        doc = Document.objects.create(
            title="test",
        )
        self.assertRaises(Exception, document_matches_workflow, doc, w, 4)
        self.assertRaises(Exception, document_matches_workflow, doc, w, 99)

    def test_removal_action_document_updated_workflow(self):
        """
@@ -26,11 +26,13 @@ from django.db.models import Case
from django.db.models import Count
from django.db.models import IntegerField
from django.db.models import Max
from django.db.models import Model
from django.db.models import Q
from django.db.models import Sum
from django.db.models import When
from django.db.models.functions import Length
from django.db.models.functions import Lower
from django.db.models.manager import Manager
from django.http import Http404
from django.http import HttpResponse
from django.http import HttpResponseBadRequest
@@ -426,7 +428,7 @@ class DocumentViewSet(
    )

    def file_response(self, pk, request, disposition):
        doc = Document.objects.select_related("owner").get(id=pk)
        doc = Document.global_objects.select_related("owner").get(id=pk)
        if request.user is not None and not has_perms_owner_aware(
            request.user,
            "view_document",
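
The switch from objects to global_objects reads like a soft-delete manager split: the default manager hides trashed documents, while the global manager also returns them, so file responses keep working for documents in the trash. A generic sketch of that pattern, with illustrative class and manager names rather than paperless-ngx code:

from django.db import models

class NotDeletedManager(models.Manager):
    def get_queryset(self):
        # Default manager: hide soft-deleted rows
        return super().get_queryset().filter(deleted_at__isnull=True)

class SoftDeleteModel(models.Model):
    deleted_at = models.DateTimeField(null=True, blank=True)

    objects = NotDeletedManager()      # excludes trashed rows
    global_objects = models.Manager()  # includes trashed rows

    class Meta:
        abstract = True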
@@ -961,6 +963,22 @@ class SavedViewViewSet(ModelViewSet, PassUserMixin):


class BulkEditView(PassUserMixin):
    MODIFIED_FIELD_BY_METHOD = {
        "set_correspondent": "correspondent",
        "set_document_type": "document_type",
        "set_storage_path": "storage_path",
        "add_tag": "tags",
        "remove_tag": "tags",
        "modify_tags": "tags",
        "modify_custom_fields": "custom_fields",
        "set_permissions": None,
        "delete": "deleted_at",
        "rotate": "checksum",
        "delete_pages": "checksum",
        "split": None,
        "merge": None,
    }

    permission_classes = (IsAuthenticated,)
    serializer_class = BulkEditSerializer
    parser_classes = (parsers.JSONParser,)
@@ -1013,8 +1031,53 @@ class BulkEditView(PassUserMixin):
                return HttpResponseForbidden("Insufficient permissions")

        try:
            modified_field = self.MODIFIED_FIELD_BY_METHOD[method.__name__]
            if settings.AUDIT_LOG_ENABLED and modified_field:
                old_documents = {
                    obj["pk"]: obj
                    for obj in Document.objects.filter(pk__in=documents).values(
                        "pk",
                        "correspondent",
                        "document_type",
                        "storage_path",
                        "tags",
                        "custom_fields",
                        "deleted_at",
                        "checksum",
                    )
                }

            # TODO: parameter validation
            result = method(documents, **parameters)

            if settings.AUDIT_LOG_ENABLED and modified_field:
                new_documents = Document.objects.filter(pk__in=documents)
                for doc in new_documents:
                    old_value = old_documents[doc.pk][modified_field]
                    new_value = getattr(doc, modified_field)

                    if isinstance(new_value, Model):
                        # correspondent, document type, etc.
                        new_value = new_value.pk
                    elif isinstance(new_value, Manager):
                        # tags, custom fields
                        new_value = list(new_value.values_list("pk", flat=True))

                    LogEntry.objects.log_create(
                        instance=doc,
                        changes={
                            modified_field: [
                                old_value,
                                new_value,
                            ],
                        },
                        action=LogEntry.Action.UPDATE,
                        actor=user,
                        additional_data={
                            "reason": f"Bulk edit: {method.__name__}",
                        },
                    )

            return Response({"result": result})
        except Exception as e:
            logger.warning(f"An error occurred performing bulk edit: {e!s}")
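
For context, a hedged usage sketch of the bulk-edit endpoint this auditing wraps: one POST edits several documents at once, and with AUDIT_LOG_ENABLED the loop above should record one UPDATE LogEntry per document carrying the old and new value of the touched field. Host, token, and ids below are placeholders.

import requests

# Placeholder host, token and ids; the payload shape follows the
# documents/method/parameters contract of BulkEditSerializer
resp = requests.post(
    "http://localhost:8000/api/documents/bulk_edit/",
    json={
        "documents": [10, 11],
        "method": "set_correspondent",
        "parameters": {"correspondent": 2},
    },
    headers={"Authorization": "Token <api-token>"},
)
resp.raise_for_status()
print(resp.json())  # e.g. {"result": "OK"} on success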
@@ -1546,6 +1609,12 @@ class StoragePathViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
    filterset_class = StoragePathFilterSet
    ordering_fields = ("name", "path", "matching_algorithm", "match", "document_count")

    def get_permissions(self):
        if self.action == "test":
            # Test action does not require object level permissions
            self.permission_classes = (IsAuthenticated,)
        return super().get_permissions()

    def destroy(self, request, *args, **kwargs):
        """
        When a storage path is deleted, see if documents
@@ -1562,17 +1631,12 @@ class StoragePathViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):

        return response


class StoragePathTestView(GenericAPIView):
    """
    Test storage path against a document
    """

    permission_classes = [IsAuthenticated]
    serializer_class = StoragePathTestSerializer

    def post(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
    @action(methods=["post"], detail=False)
    def test(self, request):
        """
        Test storage path against a document
        """
        serializer = StoragePathTestSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        document = serializer.validated_data.get("document")
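
With StoragePathTestView folded into the viewset as a detail=False action, the test route should resolve under the viewset's own prefix, presumably /api/storage_paths/test/ depending on router registration. A hedged call sketch; host, token, ids, and the exact payload fields (inferred from the serializer name and the "document" lookup above) are assumptions:

import requests

# Placeholder host/token/ids; "path" is an assumed field of
# StoragePathTestSerializer alongside the "document" reference read above
resp = requests.post(
    "http://localhost:8000/api/storage_paths/test/",
    json={"document": 10, "path": "{{ created_year }}/{{ title }}"},
    headers={"Authorization": "Token <api-token>"},
)
print(resp.json())  # expected: the storage path rendered for that document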