Merge branch 'dev' into feature-8271
@@ -24,7 +24,7 @@ from documents.models import StoragePath
 from documents.permissions import set_permissions_for_object
 from documents.tasks import bulk_update_documents
 from documents.tasks import consume_file
-from documents.tasks import update_document_archive_file
+from documents.tasks import update_document_content_maybe_archive_file

 logger: logging.Logger = logging.getLogger("paperless.bulk_edit")

@@ -191,7 +191,7 @@ def delete(doc_ids: list[int]) -> Literal["OK"]:

 def reprocess(doc_ids: list[int]) -> Literal["OK"]:
     for document_id in doc_ids:
-        update_document_archive_file.delay(
+        update_document_content_maybe_archive_file.delay(
             document_id=document_id,
         )

@@ -245,7 +245,7 @@ def rotate(doc_ids: list[int], degrees: int) -> Literal["OK"]:
             doc.checksum = hashlib.md5(doc.source_path.read_bytes()).hexdigest()
             doc.save()
             rotate_tasks.append(
-                update_document_archive_file.s(
+                update_document_content_maybe_archive_file.s(
                     document_id=doc.id,
                 ),
             )
@@ -423,7 +423,7 @@ def delete_pages(doc_ids: list[int], pages: list[int]) -> Literal["OK"]:
         if doc.page_count is not None:
             doc.page_count = doc.page_count - len(pages)
         doc.save()
-        update_document_archive_file.delay(document_id=doc.id)
+        update_document_content_maybe_archive_file.delay(document_id=doc.id)
         logger.info(f"Deleted pages {pages} from document {doc.id}")
     except Exception as e:
         logger.exception(f"Error deleting pages from document {doc.id}: {e}")
@@ -14,7 +14,7 @@ def settings(request):
     app_logo = (
         django_settings.APP_LOGO
         if general_config.app_logo is None or len(general_config.app_logo) == 0
-        else general_config.app_logo
+        else django_settings.BASE_URL + general_config.app_logo.lstrip("/")
    )

    return {
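[Note] This context-processor change makes a configured logo resolve correctly when Paperless-ngx is served under a sub-path: the stored path is prefixed with BASE_URL, and lstrip("/") avoids a doubled slash. A standalone sketch of the expression, using hypothetical stand-ins for the settings objects:

    # Minimal sketch; Settings/Config are hypothetical stand-ins for
    # django_settings and general_config in the real context processor.
    class Settings:
        APP_LOGO = "/static/frontend/assets/logo.svg"
        BASE_URL = "/paperless/"

    class Config:
        app_logo = "/logo/example.jpg"

    django_settings, general_config = Settings(), Config()
    app_logo = (
        django_settings.APP_LOGO
        if general_config.app_logo is None or len(general_config.app_logo) == 0
        else django_settings.BASE_URL + general_config.app_logo.lstrip("/")
    )
    print(app_logo)  # -> /paperless/logo/example.jpg (BASE_URL applied exactly once)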
@@ -9,7 +9,7 @@ from django.core.management.base import BaseCommand
 from documents.management.commands.mixins import MultiProcessMixin
 from documents.management.commands.mixins import ProgressBarMixin
 from documents.models import Document
-from documents.tasks import update_document_archive_file
+from documents.tasks import update_document_content_maybe_archive_file

 logger = logging.getLogger("paperless.management.archiver")

@@ -77,13 +77,13 @@ class Command(MultiProcessMixin, ProgressBarMixin, BaseCommand):

         if self.process_count == 1:
             for doc_id in document_ids:
-                update_document_archive_file(doc_id)
+                update_document_content_maybe_archive_file(doc_id)
         else:  # pragma: no cover
             with multiprocessing.Pool(self.process_count) as pool:
                 list(
                     tqdm.tqdm(
                         pool.imap_unordered(
-                            update_document_archive_file,
+                            update_document_content_maybe_archive_file,
                             document_ids,
                         ),
                         total=len(document_ids),
@@ -317,10 +317,8 @@ class Command(BaseCommand):

             # Check the files against the timeout
             still_waiting = {}
-            for filepath in notified_files:
-                # Time of the last inotify event for this file
-                last_event_time = notified_files[filepath]
-
+            # last_event_time is time of the last inotify event for this file
+            for filepath, last_event_time in notified_files.items():
                 # Current time - last time over the configured timeout
                 waited_long_enough = (
                     monotonic() - last_event_time
@@ -8,6 +8,7 @@ from pathlib import Path
 from typing import TYPE_CHECKING

 import tqdm
+from allauth.mfa.models import Authenticator
 from allauth.socialaccount.models import SocialAccount
 from allauth.socialaccount.models import SocialApp
 from allauth.socialaccount.models import SocialToken
@@ -81,6 +82,18 @@ class Command(CryptMixin, BaseCommand):
             ),
         )

+        parser.add_argument(
+            "-cj",
+            "--compare-json",
+            default=False,
+            action="store_true",
+            help=(
+                "Compare json file checksums when determining whether to "
+                "export a json file or not (manifest or metadata). "
+                "If not specified, the file is always exported."
+            ),
+        )
+
         parser.add_argument(
             "-d",
             "--delete",
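[Note] With the new flag, the exporter skips rewriting manifest.json and metadata.json when their content is unchanged, so file timestamps survive repeated exports. A hedged sketch of invoking it, assuming the management command is registered as "document_exporter" (the command name itself is not shown in this diff):

    # Sketch only; "document_exporter" is assumed from the surrounding project.
    from django.core.management import call_command

    call_command("document_exporter", "/path/to/export", compare_json=True)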
@@ -177,6 +190,7 @@ class Command(CryptMixin, BaseCommand):
         self.target = Path(options["target"]).resolve()
         self.split_manifest: bool = options["split_manifest"]
         self.compare_checksums: bool = options["compare_checksums"]
+        self.compare_json: bool = options["compare_json"]
         self.use_filename_format: bool = options["use_filename_format"]
         self.use_folder_prefix: bool = options["use_folder_prefix"]
         self.delete: bool = options["delete"]
@@ -270,6 +284,7 @@ class Command(CryptMixin, BaseCommand):
             "social_accounts": SocialAccount.objects.all(),
             "social_apps": SocialApp.objects.all(),
             "social_tokens": SocialToken.objects.all(),
+            "authenticators": Authenticator.objects.all(),
         }

         if settings.AUDIT_LOG_ENABLED:
@@ -279,9 +294,9 @@ class Command(CryptMixin, BaseCommand):
         manifest_dict = {}

         # Build an overall manifest
-        for key in manifest_key_to_object_query:
+        for key, object_query in manifest_key_to_object_query.items():
             manifest_dict[key] = json.loads(
-                serializers.serialize("json", manifest_key_to_object_query[key]),
+                serializers.serialize("json", object_query),
             )

         self.encrypt_secret_fields(manifest_dict)
@@ -341,12 +356,11 @@ class Command(CryptMixin, BaseCommand):
                     manifest_dict["custom_field_instances"],
                 ),
             )
-            manifest_name.write_text(
-                json.dumps(content, indent=2, ensure_ascii=False),
-                encoding="utf-8",
+            self.check_and_write_json(
+                content,
+                manifest_name,
             )
             if manifest_name in self.files_in_export_dir:
                 self.files_in_export_dir.remove(manifest_name)

             # These were exported already
             if self.split_manifest:
@@ -356,15 +370,13 @@ class Command(CryptMixin, BaseCommand):

         # 4.1 write primary manifest to target folder
         manifest = []
-        for key in manifest_dict:
-            manifest.extend(manifest_dict[key])
+        for key, item in manifest_dict.items():
+            manifest.extend(item)
         manifest_path = (self.target / "manifest.json").resolve()
-        manifest_path.write_text(
-            json.dumps(manifest, indent=2, ensure_ascii=False),
-            encoding="utf-8",
+        self.check_and_write_json(
+            manifest,
+            manifest_path,
         )
         if manifest_path in self.files_in_export_dir:
             self.files_in_export_dir.remove(manifest_path)

         # 4.2 write version information to target folder
         extra_metadata_path = (self.target / "metadata.json").resolve()
@@ -376,16 +388,11 @@ class Command(CryptMixin, BaseCommand):
         # Django stores most of these in the field itself, we store them once here
         if self.passphrase:
             metadata.update(self.get_crypt_params())
-        extra_metadata_path.write_text(
-            json.dumps(
-                metadata,
-                indent=2,
-                ensure_ascii=False,
-            ),
-            encoding="utf-8",
+        self.check_and_write_json(
+            metadata,
+            extra_metadata_path,
         )
         if extra_metadata_path in self.files_in_export_dir:
             self.files_in_export_dir.remove(extra_metadata_path)

         if self.delete:
             # 5. Remove files which we did not explicitly export in this run
@@ -514,6 +521,35 @@ class Command(CryptMixin, BaseCommand):
                 archive_target,
             )

+    def check_and_write_json(
+        self,
+        content: list[dict] | dict,
+        target: Path,
+    ):
+        """
+        Writes the source content to the target json file.
+        If --compare-json arg was used, don't write to target file if
+        the file exists and checksum is identical to content checksum.
+        This preserves the file timestamps when no changes are made.
+        """
+
+        target = target.resolve()
+        perform_write = True
+        if target in self.files_in_export_dir:
+            self.files_in_export_dir.remove(target)
+            if self.compare_json:
+                target_checksum = hashlib.md5(target.read_bytes()).hexdigest()
+                src_str = json.dumps(content, indent=2, ensure_ascii=False)
+                src_checksum = hashlib.md5(src_str.encode("utf-8")).hexdigest()
+                if src_checksum == target_checksum:
+                    perform_write = False
+
+        if perform_write:
+            target.write_text(
+                json.dumps(content, indent=2, ensure_ascii=False),
+                encoding="utf-8",
+            )
+
     def check_and_copy(
         self,
         source: Path,
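[Note] check_and_write_json serializes the content once, hashes it, and skips the write when the target file already holds identical bytes, which keeps mtimes stable for tools that sync the export directory incrementally. A standalone sketch of the same compare-then-write pattern (not the exporter's actual method):

    import hashlib
    import json
    from pathlib import Path

    def write_json_if_changed(content, target: Path) -> bool:
        """Write content as indented JSON unless the file already matches."""
        src_str = json.dumps(content, indent=2, ensure_ascii=False)
        if target.exists():
            src_checksum = hashlib.md5(src_str.encode("utf-8")).hexdigest()
            target_checksum = hashlib.md5(target.read_bytes()).hexdigest()
            if src_checksum == target_checksum:
                return False  # unchanged: leave the old timestamp intact
        target.write_text(src_str, encoding="utf-8")
        return True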
@@ -409,6 +409,7 @@ def document_matches_workflow(
     elif (
         trigger_type == WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED
         or trigger_type == WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED
+        or trigger_type == WorkflowTrigger.WorkflowTriggerType.SCHEDULED
     ):
         trigger_matched, reason = existing_document_matches_workflow(
             document,
@@ -1,6 +1,7 @@
 # Generated by Django 4.2.13 on 2024-06-28 17:57

 import django.db.models.deletion
+from django.conf import settings
 from django.db import migrations
 from django.db import models

@@ -43,7 +44,7 @@ class Migration(migrations.Migration):
             name="content",
             field=models.TextField(
                 blank=True,
-                db_index=True,
+                db_index=("mysql" not in settings.DATABASES["default"]["ENGINE"]),
                 help_text="The raw, text-only data of the document. This field is primarily used for searching.",
             ),
         ),
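[Note] The db_index change presumably works around MySQL/MariaDB's requirement of an explicit key-prefix length when indexing unbounded TEXT columns, so the index is only created on non-MySQL engines. A small sketch of how the expression evaluates (engine strings are illustrative):

    for engine in ("django.db.backends.postgresql", "django.db.backends.mysql"):
        print(engine, "->", "mysql" not in engine)
    # django.db.backends.postgresql -> True (index created)
    # django.db.backends.mysql -> False (index skipped)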
src/documents/migrations/1057_paperlesstask_owner.py (new file, 28 lines)
@@ -0,0 +1,28 @@
+# Generated by Django 5.1.1 on 2024-11-04 21:56
+
+import django.db.models.deletion
+from django.conf import settings
+from django.db import migrations
+from django.db import models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("documents", "1056_customfieldinstance_deleted_at_and_more"),
+        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="paperlesstask",
+            name="owner",
+            field=models.ForeignKey(
+                blank=True,
+                default=None,
+                null=True,
+                on_delete=django.db.models.deletion.SET_NULL,
+                to=settings.AUTH_USER_MODEL,
+                verbose_name="owner",
+            ),
+        ),
+    ]
@@ -0,0 +1,143 @@
+# Generated by Django 5.1.1 on 2024-11-05 05:19
+
+import django.core.validators
+import django.db.models.deletion
+import django.utils.timezone
+from django.db import migrations
+from django.db import models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("documents", "1057_paperlesstask_owner"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="workflowtrigger",
+            name="schedule_date_custom_field",
+            field=models.ForeignKey(
+                blank=True,
+                null=True,
+                on_delete=django.db.models.deletion.SET_NULL,
+                to="documents.customfield",
+                verbose_name="schedule date custom field",
+            ),
+        ),
+        migrations.AddField(
+            model_name="workflowtrigger",
+            name="schedule_date_field",
+            field=models.CharField(
+                choices=[
+                    ("added", "Added"),
+                    ("created", "Created"),
+                    ("modified", "Modified"),
+                    ("custom_field", "Custom Field"),
+                ],
+                default="added",
+                help_text="The field to check for a schedule trigger.",
+                max_length=20,
+                verbose_name="schedule date field",
+            ),
+        ),
+        migrations.AddField(
+            model_name="workflowtrigger",
+            name="schedule_is_recurring",
+            field=models.BooleanField(
+                default=False,
+                help_text="If the schedule should be recurring.",
+                verbose_name="schedule is recurring",
+            ),
+        ),
+        migrations.AddField(
+            model_name="workflowtrigger",
+            name="schedule_offset_days",
+            field=models.PositiveIntegerField(
+                default=0,
+                help_text="The number of days to offset the schedule trigger by.",
+                verbose_name="schedule offset days",
+            ),
+        ),
+        migrations.AddField(
+            model_name="workflowtrigger",
+            name="schedule_recurring_interval_days",
+            field=models.PositiveIntegerField(
+                default=1,
+                help_text="The number of days between recurring schedule triggers.",
+                validators=[django.core.validators.MinValueValidator(1)],
+                verbose_name="schedule recurring delay in days",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="workflowtrigger",
+            name="type",
+            field=models.PositiveIntegerField(
+                choices=[
+                    (1, "Consumption Started"),
+                    (2, "Document Added"),
+                    (3, "Document Updated"),
+                    (4, "Scheduled"),
+                ],
+                default=1,
+                verbose_name="Workflow Trigger Type",
+            ),
+        ),
+        migrations.CreateModel(
+            name="WorkflowRun",
+            fields=[
+                (
+                    "id",
+                    models.AutoField(
+                        auto_created=True,
+                        primary_key=True,
+                        serialize=False,
+                        verbose_name="ID",
+                    ),
+                ),
+                (
+                    "type",
+                    models.PositiveIntegerField(
+                        choices=[
+                            (1, "Consumption Started"),
+                            (2, "Document Added"),
+                            (3, "Document Updated"),
+                            (4, "Scheduled"),
+                        ],
+                        null=True,
+                        verbose_name="workflow trigger type",
+                    ),
+                ),
+                (
+                    "run_at",
+                    models.DateTimeField(
+                        db_index=True,
+                        default=django.utils.timezone.now,
+                        verbose_name="date run",
+                    ),
+                ),
+                (
+                    "document",
+                    models.ForeignKey(
+                        null=True,
+                        on_delete=django.db.models.deletion.CASCADE,
+                        related_name="workflow_runs",
+                        to="documents.document",
+                        verbose_name="document",
+                    ),
+                ),
+                (
+                    "workflow",
+                    models.ForeignKey(
+                        on_delete=django.db.models.deletion.CASCADE,
+                        related_name="runs",
+                        to="documents.workflow",
+                        verbose_name="workflow",
+                    ),
+                ),
+            ],
+            options={
+                "verbose_name": "workflow run",
+                "verbose_name_plural": "workflow runs",
+            },
+        ),
+    ]
@@ -641,7 +641,7 @@ class UiSettings(models.Model):
         return self.user.username


-class PaperlessTask(models.Model):
+class PaperlessTask(ModelWithOwner):
     ALL_STATES = sorted(states.ALL_STATES)
     TASK_STATE_CHOICES = sorted(zip(ALL_STATES, ALL_STATES))
@@ -1020,12 +1020,19 @@ class WorkflowTrigger(models.Model):
         CONSUMPTION = 1, _("Consumption Started")
         DOCUMENT_ADDED = 2, _("Document Added")
         DOCUMENT_UPDATED = 3, _("Document Updated")
+        SCHEDULED = 4, _("Scheduled")

     class DocumentSourceChoices(models.IntegerChoices):
         CONSUME_FOLDER = DocumentSource.ConsumeFolder.value, _("Consume Folder")
         API_UPLOAD = DocumentSource.ApiUpload.value, _("Api Upload")
         MAIL_FETCH = DocumentSource.MailFetch.value, _("Mail Fetch")

+    class ScheduleDateField(models.TextChoices):
+        ADDED = "added", _("Added")
+        CREATED = "created", _("Created")
+        MODIFIED = "modified", _("Modified")
+        CUSTOM_FIELD = "custom_field", _("Custom Field")
+
     type = models.PositiveIntegerField(
         _("Workflow Trigger Type"),
         choices=WorkflowTriggerType.choices,
@@ -1102,6 +1109,49 @@ class WorkflowTrigger(models.Model):
         verbose_name=_("has this correspondent"),
     )

+    schedule_offset_days = models.PositiveIntegerField(
+        _("schedule offset days"),
+        default=0,
+        help_text=_(
+            "The number of days to offset the schedule trigger by.",
+        ),
+    )
+
+    schedule_is_recurring = models.BooleanField(
+        _("schedule is recurring"),
+        default=False,
+        help_text=_(
+            "If the schedule should be recurring.",
+        ),
+    )
+
+    schedule_recurring_interval_days = models.PositiveIntegerField(
+        _("schedule recurring delay in days"),
+        default=1,
+        validators=[MinValueValidator(1)],
+        help_text=_(
+            "The number of days between recurring schedule triggers.",
+        ),
+    )
+
+    schedule_date_field = models.CharField(
+        _("schedule date field"),
+        max_length=20,
+        choices=ScheduleDateField.choices,
+        default=ScheduleDateField.ADDED,
+        help_text=_(
+            "The field to check for a schedule trigger.",
+        ),
+    )
+
+    schedule_date_custom_field = models.ForeignKey(
+        CustomField,
+        null=True,
+        blank=True,
+        on_delete=models.SET_NULL,
+        verbose_name=_("schedule date custom field"),
+    )
+
     class Meta:
         verbose_name = _("workflow trigger")
         verbose_name_plural = _("workflow triggers")
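[Note] Together these fields describe when a scheduled trigger fires: a reference date (added/created/modified or a date custom field), an offset in days, and optional recurrence with a minimum interval. A sketch of configuring one (values are illustrative only):

    from documents.models import Workflow, WorkflowTrigger

    trigger = WorkflowTrigger.objects.create(
        type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
        schedule_date_field=WorkflowTrigger.ScheduleDateField.CREATED,
        schedule_offset_days=365,             # fire a year after the created date
        schedule_is_recurring=True,           # keep firing instead of running once
        schedule_recurring_interval_days=30,  # but at most every 30 days per document
    )
    workflow = Workflow.objects.create(name="Yearly review", order=0)
    workflow.triggers.add(trigger)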
@@ -1352,3 +1402,39 @@ class Workflow(models.Model):

     def __str__(self):
         return f"Workflow: {self.name}"
+
+
+class WorkflowRun(models.Model):
+    workflow = models.ForeignKey(
+        Workflow,
+        on_delete=models.CASCADE,
+        related_name="runs",
+        verbose_name=_("workflow"),
+    )
+
+    type = models.PositiveIntegerField(
+        _("workflow trigger type"),
+        choices=WorkflowTrigger.WorkflowTriggerType.choices,
+        null=True,
+    )
+
+    document = models.ForeignKey(
+        Document,
+        null=True,
+        on_delete=models.CASCADE,
+        related_name="workflow_runs",
+        verbose_name=_("document"),
+    )
+
+    run_at = models.DateTimeField(
+        _("date run"),
+        default=timezone.now,
+        db_index=True,
+    )
+
+    class Meta:
+        verbose_name = _("workflow run")
+        verbose_name_plural = _("workflow runs")
+
+    def __str__(self):
+        return f"WorkflowRun of {self.workflow} at {self.run_at} on {self.document}"
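[Note] WorkflowRun is the bookkeeping record that lets the scheduler decide whether a document is due again. A sketch of the gating query (mirroring check_scheduled_workflows further down in this diff; workflow and document are assumed to be in scope):

    from documents.models import WorkflowRun, WorkflowTrigger

    last_run = (
        WorkflowRun.objects.filter(
            workflow=workflow,
            document=document,
            type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
        )
        .order_by("-run_at")
        .first()
    )
    already_ran = last_run is not None  # non-recurring triggers skip when True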
@@ -160,7 +160,7 @@ class SetPermissionsMixin:
             },
         }
         if set_permissions is not None:
-            for action in permissions_dict:
+            for action, _ in permissions_dict.items():
                 if action in set_permissions:
                     users = set_permissions[action]["users"]
                     permissions_dict[action]["users"] = self._validate_user_ids(users)
@@ -1578,7 +1578,7 @@ class UiSettingsViewSerializer(serializers.ModelSerializer):
         return ui_settings


-class TasksViewSerializer(serializers.ModelSerializer):
+class TasksViewSerializer(OwnedObjectSerializer):
     class Meta:
         model = PaperlessTask
         depth = 1
@@ -1593,6 +1593,7 @@ class TasksViewSerializer(serializers.ModelSerializer):
             "result",
             "acknowledged",
             "related_document",
+            "owner",
         )

     type = serializers.SerializerMethodField()
@@ -1782,6 +1783,11 @@ class WorkflowTriggerSerializer(serializers.ModelSerializer):
             "filter_has_tags",
             "filter_has_correspondent",
             "filter_has_document_type",
+            "schedule_offset_days",
+            "schedule_is_recurring",
+            "schedule_recurring_interval_days",
+            "schedule_date_field",
+            "schedule_date_custom_field",
         ]

     def validate(self, attrs):
@@ -37,6 +37,7 @@ from documents.models import PaperlessTask
 from documents.models import Tag
 from documents.models import Workflow
 from documents.models import WorkflowAction
+from documents.models import WorkflowRun
 from documents.models import WorkflowTrigger
 from documents.permissions import get_objects_for_user_owner_aware
 from documents.permissions import set_permissions_for_object
@@ -899,6 +900,7 @@ def run_workflows(
             "triggers",
         )
         .order_by("order")
+        .distinct()
     )

     for workflow in workflows:
@@ -928,6 +930,12 @@ def run_workflows(
                 document.save()
                 document.tags.set(doc_tag_ids)

+        WorkflowRun.objects.create(
+            workflow=workflow,
+            type=trigger_type,
+            document=document if not use_overrides else None,
+        )
+
     if use_overrides:
         return overrides, "\n".join(messages)
@@ -951,9 +959,10 @@ def before_task_publish_handler(sender=None, headers=None, body=None, **kwargs):
         close_old_connections()

         task_args = body[0]
-        input_doc, _ = task_args
+        input_doc, overrides = task_args

         task_file_name = input_doc.original_file.name
+        user_id = overrides.owner_id if overrides else None

         PaperlessTask.objects.create(
             task_id=headers["id"],
@@ -964,6 +973,7 @@ def before_task_publish_handler(sender=None, headers=None, body=None, **kwargs):
             date_created=timezone.now(),
             date_started=None,
             date_done=None,
+            owner_id=user_id,
         )
     except Exception:  # pragma: no cover
         # Don't let an exception in the signal handlers prevent
@@ -31,10 +31,14 @@ from documents.double_sided import CollatePlugin
 from documents.file_handling import create_source_path_directory
 from documents.file_handling import generate_unique_filename
 from documents.models import Correspondent
+from documents.models import CustomFieldInstance
 from documents.models import Document
 from documents.models import DocumentType
 from documents.models import StoragePath
 from documents.models import Tag
+from documents.models import Workflow
+from documents.models import WorkflowRun
+from documents.models import WorkflowTrigger
 from documents.parsers import DocumentParser
 from documents.parsers import get_parser_class_for_mime_type
 from documents.plugins.base import ConsumeTaskPlugin
@@ -44,6 +48,7 @@ from documents.plugins.helpers import ProgressStatusOptions
 from documents.sanity_checker import SanityCheckFailedException
 from documents.signals import document_updated
 from documents.signals.handlers import cleanup_document_deletion
+from documents.signals.handlers import run_workflows

 if settings.AUDIT_LOG_ENABLED:
     from auditlog.models import LogEntry
@@ -206,9 +211,10 @@ def bulk_update_documents(document_ids):


 @shared_task
-def update_document_archive_file(document_id):
+def update_document_content_maybe_archive_file(document_id):
     """
-    Re-creates the archive file of a document, including new OCR content and thumbnail
+    Re-creates OCR content and thumbnail for a document, and archive file if
+    it exists.
     """
     document = Document.objects.get(id=document_id)
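[Note] The rename reflects the task's actual behavior: OCR content and the thumbnail are always regenerated, while the archive file is only rebuilt when the parser produces one. The call signature is unchanged, so callers only swap the name:

    from documents.tasks import update_document_content_maybe_archive_file

    # doc is any existing Document instance (illustrative)
    update_document_content_maybe_archive_file.delay(document_id=doc.pk)  # async, as in bulk_edit
    update_document_content_maybe_archive_file(doc.pk)  # synchronous, as in the archiver command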
@@ -234,8 +240,9 @@ def update_document_content_maybe_archive_file(document_id):
             document.get_public_filename(),
         )

-        if parser.get_archive_path():
-            with transaction.atomic():
+        with transaction.atomic():
+            oldDocument = Document.objects.get(pk=document.pk)
+            if parser.get_archive_path():
                 with open(parser.get_archive_path(), "rb") as f:
                     checksum = hashlib.md5(f.read()).hexdigest()
                 # I'm going to save first so that in case the file move
@@ -246,7 +253,6 @@ def update_document_content_maybe_archive_file(document_id):
                     document,
                     archive_filename=True,
                 )
-                oldDocument = Document.objects.get(pk=document.pk)
                 Document.objects.filter(pk=document.pk).update(
                     archive_checksum=checksum,
                     content=parser.get_text(),
@@ -268,24 +274,41 @@ def update_document_content_maybe_archive_file(document_id):
                         ],
                     },
                     additional_data={
-                        "reason": "Update document archive file",
+                        "reason": "Update document content",
                     },
                     action=LogEntry.Action.UPDATE,
                 )
+            else:
+                Document.objects.filter(pk=document.pk).update(
+                    content=parser.get_text(),
+                )
+
+                if settings.AUDIT_LOG_ENABLED:
+                    LogEntry.objects.log_create(
+                        instance=oldDocument,
+                        changes={
+                            "content": [oldDocument.content, parser.get_text()],
+                        },
+                        additional_data={
+                            "reason": "Update document content",
+                        },
+                        action=LogEntry.Action.UPDATE,
+                    )

-            with FileLock(settings.MEDIA_LOCK):
-                create_source_path_directory(document.archive_path)
-                shutil.move(parser.get_archive_path(), document.archive_path)
-                shutil.move(thumbnail, document.thumbnail_path)
+            with FileLock(settings.MEDIA_LOCK):
+                if parser.get_archive_path():
+                    create_source_path_directory(document.archive_path)
+                    shutil.move(parser.get_archive_path(), document.archive_path)
+                shutil.move(thumbnail, document.thumbnail_path)

-            document.refresh_from_db()
-            logger.info(
-                f"Updating index for document {document_id} ({document.archive_checksum})",
-            )
-            with index.open_index_writer() as writer:
-                index.update_document(writer, document)
+        document.refresh_from_db()
+        logger.info(
+            f"Updating index for document {document_id} ({document.archive_checksum})",
+        )
+        with index.open_index_writer() as writer:
+            index.update_document(writer, document)

-            clear_document_caches(document.pk)
+        clear_document_caches(document.pk)

     except Exception:
         logger.exception(
@@ -319,3 +342,85 @@ def empty_trash(doc_ids=None):
             cleanup_document_deletion,
             sender=Document,
         )
+
+
+@shared_task
+def check_scheduled_workflows():
+    scheduled_workflows: list[Workflow] = (
+        Workflow.objects.filter(
+            triggers__type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
+            enabled=True,
+        )
+        .distinct()
+        .prefetch_related("triggers")
+    )
+    if scheduled_workflows.count() > 0:
+        logger.debug(f"Checking {len(scheduled_workflows)} scheduled workflows")
+        for workflow in scheduled_workflows:
+            schedule_triggers = workflow.triggers.filter(
+                type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
+            )
+            trigger: WorkflowTrigger
+            for trigger in schedule_triggers:
+                documents = Document.objects.none()
+                offset_td = timedelta(days=trigger.schedule_offset_days)
+                logger.debug(
+                    f"Checking trigger {trigger} with offset {offset_td} against field: {trigger.schedule_date_field}",
+                )
+                match trigger.schedule_date_field:
+                    case WorkflowTrigger.ScheduleDateField.ADDED:
+                        documents = Document.objects.filter(
+                            added__lt=timezone.now() - offset_td,
+                        )
+                    case WorkflowTrigger.ScheduleDateField.CREATED:
+                        documents = Document.objects.filter(
+                            created__lt=timezone.now() - offset_td,
+                        )
+                    case WorkflowTrigger.ScheduleDateField.MODIFIED:
+                        documents = Document.objects.filter(
+                            modified__lt=timezone.now() - offset_td,
+                        )
+                    case WorkflowTrigger.ScheduleDateField.CUSTOM_FIELD:
+                        cf_instances = CustomFieldInstance.objects.filter(
+                            field=trigger.schedule_date_custom_field,
+                            value_date__lt=timezone.now() - offset_td,
+                        )
+                        documents = Document.objects.filter(
+                            id__in=cf_instances.values_list("document", flat=True),
+                        )
+                if documents.count() > 0:
+                    logger.debug(
+                        f"Found {documents.count()} documents for trigger {trigger}",
+                    )
+                    for document in documents:
+                        workflow_runs = WorkflowRun.objects.filter(
+                            document=document,
+                            type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
+                            workflow=workflow,
+                        ).order_by("-run_at")
+                        if not trigger.schedule_is_recurring and workflow_runs.exists():
+                            # schedule is non-recurring and the workflow has already been run
+                            logger.debug(
+                                f"Skipping document {document} for non-recurring workflow {workflow} as it has already been run",
+                            )
+                            continue
+                        elif (
+                            trigger.schedule_is_recurring
+                            and workflow_runs.exists()
+                            and (
+                                workflow_runs.last().run_at
+                                > timezone.now()
+                                - timedelta(
+                                    days=trigger.schedule_recurring_interval_days,
+                                )
+                            )
+                        ):
+                            # schedule is recurring but the last run was within the number of recurring interval days
+                            logger.debug(
+                                f"Skipping document {document} for recurring workflow {workflow} as the last run was within the recurring interval",
+                            )
+                            continue
+                        run_workflows(
+                            WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
+                            document,
+                        )
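[Note] check_scheduled_workflows is a polling task: nothing in this hunk schedules it, so it needs a periodic entry (e.g. celery beat) elsewhere in the codebase. A hedged sketch of such wiring; the actual schedule name and interval used by paperless-ngx may differ:

    from datetime import timedelta

    CELERY_BEAT_SCHEDULE = {
        "check-scheduled-workflows": {
            "task": "documents.tasks.check_scheduled_workflows",
            "schedule": timedelta(hours=1),  # assumption: hourly polling
        },
    }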
src/documents/templates/mfa/authenticate.html (new file, 35 lines)
@@ -0,0 +1,35 @@
+{% extends "paperless-ngx/base.html" %}
+{% load i18n %}
+{% load allauth %}
+{% load allauth static %}
+
+{% block head_title %}
+  {% trans "Paperless-ngx Two-Factor Authentication" %}
+{% endblock head_title %}
+
+{% block form_top_content %}
+  <p>
+    {% blocktranslate %}Your account is protected by two-factor authentication. Please enter an authenticator code:{% endblocktranslate %}
+  </p>
+{% endblock form_top_content %}
+
+{% block form_content %}
+  {% translate "Code" as i18n_code %}
+  <div class="form-floating">
+    <input type="code" name="code" id="inputCode" autocomplete="one-time-code" placeholder="{{ i18n_code }}" class="form-control" required>
+    <label for="inputCode">{{ i18n_code }}</label>
+  </div>
+  <div class="d-grid mt-3">
+    <button class="btn btn-lg btn-primary" type="submit">{% translate "Sign in" %}</button>
+    <button class="btn btn-lg btn-secondary mt-2" type="submit" form="logout-from-stage">{% translate "Cancel" %}</button>
+  </div>
+{% endblock form_content %}
+
+{% block after_form_content %}
+  <form id="logout-from-stage"
+        method="post"
+        action="{% url 'account_logout' %}">
+    <input type="hidden" name="next" value="{% url 'account_login' %}">
+    {% csrf_token %}
+  </form>
+{% endblock after_form_content %}
@@ -1,7 +1,9 @@
 import json
 from unittest import mock

+from auditlog.models import LogEntry
 from django.contrib.auth.models import User
+from django.test import override_settings
 from guardian.shortcuts import assign_perm
 from rest_framework import status
 from rest_framework.test import APITestCase
@@ -51,8 +53,12 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.doc3.tags.add(self.t2)
         self.doc4.tags.add(self.t1, self.t2)
         self.sp1 = StoragePath.objects.create(name="sp1", path="Something/{checksum}")
-        self.cf1 = CustomField.objects.create(name="cf1", data_type="text")
-        self.cf2 = CustomField.objects.create(name="cf2", data_type="text")
+        self.cf1 = CustomField.objects.create(name="cf1", data_type="string")
+        self.cf2 = CustomField.objects.create(name="cf2", data_type="string")
+
+    def setup_mock(self, m, method_name, return_value="OK"):
+        m.return_value = return_value
+        m.__name__ = method_name

     @mock.patch("documents.bulk_edit.bulk_update_documents.delay")
     def test_api_set_correspondent(self, bulk_update_task_mock):
@@ -178,7 +184,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):

     @mock.patch("documents.serialisers.bulk_edit.modify_tags")
     def test_api_modify_tags(self, m):
-        m.return_value = "OK"
+        self.setup_mock(m, "modify_tags")
         response = self.client.post(
             "/api/documents/bulk_edit/",
             json.dumps(
@@ -211,7 +217,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
             - API returns HTTP 400
             - modify_tags is not called
         """
-        m.return_value = "OK"
+        self.setup_mock(m, "modify_tags")
         response = self.client.post(
             "/api/documents/bulk_edit/",
             json.dumps(
@@ -230,7 +236,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):

     @mock.patch("documents.serialisers.bulk_edit.modify_custom_fields")
     def test_api_modify_custom_fields(self, m):
-        m.return_value = "OK"
+        self.setup_mock(m, "modify_custom_fields")
         response = self.client.post(
             "/api/documents/bulk_edit/",
             json.dumps(
@@ -263,8 +269,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
             - API returns HTTP 400
             - modify_custom_fields is not called
         """
-        m.return_value = "OK"
-
+        self.setup_mock(m, "modify_custom_fields")
         # Missing add_custom_fields
         response = self.client.post(
             "/api/documents/bulk_edit/",
@@ -359,7 +364,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):

     @mock.patch("documents.serialisers.bulk_edit.delete")
     def test_api_delete(self, m):
-        m.return_value = "OK"
+        self.setup_mock(m, "delete")
         response = self.client.post(
             "/api/documents/bulk_edit/",
             json.dumps(
@@ -383,8 +388,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         THEN:
             - set_storage_path is called with correct document IDs and storage_path ID
         """
-        m.return_value = "OK"
-
+        self.setup_mock(m, "set_storage_path")
         response = self.client.post(
             "/api/documents/bulk_edit/",
             json.dumps(
@@ -414,8 +418,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         THEN:
             - set_storage_path is called with correct document IDs and None storage_path
         """
-        m.return_value = "OK"
-
+        self.setup_mock(m, "set_storage_path")
         response = self.client.post(
             "/api/documents/bulk_edit/",
             json.dumps(
@@ -728,7 +731,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):

     @mock.patch("documents.serialisers.bulk_edit.set_permissions")
     def test_set_permissions(self, m):
-        m.return_value = "OK"
+        self.setup_mock(m, "set_permissions")
         user1 = User.objects.create(username="user1")
         user2 = User.objects.create(username="user2")
         permissions = {
@@ -763,7 +766,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):

     @mock.patch("documents.serialisers.bulk_edit.set_permissions")
     def test_set_permissions_merge(self, m):
-        m.return_value = "OK"
+        self.setup_mock(m, "set_permissions")
         user1 = User.objects.create(username="user1")
         user2 = User.objects.create(username="user2")
         permissions = {
@@ -823,7 +826,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         THEN:
             - User is not able to change permissions
         """
-        m.return_value = "OK"
+        self.setup_mock(m, "set_permissions")
         self.doc1.owner = User.objects.get(username="temp_admin")
         self.doc1.save()
         user1 = User.objects.create(username="user1")
@@ -875,7 +878,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         THEN:
             - set_storage_path only called if user can edit all docs
         """
-        m.return_value = "OK"
+        self.setup_mock(m, "set_storage_path")
         self.doc1.owner = User.objects.get(username="temp_admin")
         self.doc1.save()
         user1 = User.objects.create(username="user1")
@@ -919,8 +922,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):

     @mock.patch("documents.serialisers.bulk_edit.rotate")
     def test_rotate(self, m):
-        m.return_value = "OK"
-
+        self.setup_mock(m, "rotate")
         response = self.client.post(
             "/api/documents/bulk_edit/",
             json.dumps(
@@ -974,8 +976,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):

     @mock.patch("documents.serialisers.bulk_edit.merge")
     def test_merge(self, m):
-        m.return_value = "OK"
-
+        self.setup_mock(m, "merge")
         response = self.client.post(
             "/api/documents/bulk_edit/",
             json.dumps(
@@ -1003,8 +1004,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         user1 = User.objects.create(username="user1")
         self.client.force_authenticate(user=user1)

-        m.return_value = "OK"
-
+        self.setup_mock(m, "merge")
         response = self.client.post(
             "/api/documents/bulk_edit/",
             json.dumps(
@@ -1053,8 +1053,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         THEN:
             - The API fails with a correct error code
         """
-        m.return_value = "OK"
-
+        self.setup_mock(m, "merge")
         response = self.client.post(
             "/api/documents/bulk_edit/",
             json.dumps(
@@ -1074,8 +1073,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):

     @mock.patch("documents.serialisers.bulk_edit.split")
     def test_split(self, m):
-        m.return_value = "OK"
-
+        self.setup_mock(m, "split")
         response = self.client.post(
             "/api/documents/bulk_edit/",
             json.dumps(
@@ -1165,8 +1163,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):

     @mock.patch("documents.serialisers.bulk_edit.delete_pages")
     def test_delete_pages(self, m):
-        m.return_value = "OK"
-
+        self.setup_mock(m, "delete_pages")
         response = self.client.post(
             "/api/documents/bulk_edit/",
             json.dumps(
@@ -1254,3 +1251,87 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):

         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertIn(b"pages must be a list of integers", response.content)
+
+    @override_settings(AUDIT_LOG_ENABLED=True)
+    def test_bulk_edit_audit_log_enabled_simple_field(self):
+        """
+        GIVEN:
+            - Audit log is enabled
+        WHEN:
+            - API to bulk edit documents is called
+        THEN:
+            - Audit log is created
+        """
+        LogEntry.objects.all().delete()
+        response = self.client.post(
+            "/api/documents/bulk_edit/",
+            json.dumps(
+                {
+                    "documents": [self.doc1.id],
+                    "method": "set_correspondent",
+                    "parameters": {"correspondent": self.c2.id},
+                },
+            ),
+            content_type="application/json",
+        )
+
+        self.assertEqual(response.status_code, status.HTTP_200_OK)
+        self.assertEqual(LogEntry.objects.filter(object_pk=self.doc1.id).count(), 1)
+
+    @override_settings(AUDIT_LOG_ENABLED=True)
+    def test_bulk_edit_audit_log_enabled_tags(self):
+        """
+        GIVEN:
+            - Audit log is enabled
+        WHEN:
+            - API to bulk edit tags is called
+        THEN:
+            - Audit log is created
+        """
+        LogEntry.objects.all().delete()
+        response = self.client.post(
+            "/api/documents/bulk_edit/",
+            json.dumps(
+                {
+                    "documents": [self.doc1.id],
+                    "method": "modify_tags",
+                    "parameters": {
+                        "add_tags": [self.t1.id],
+                        "remove_tags": [self.t2.id],
+                    },
+                },
+            ),
+            content_type="application/json",
+        )
+
+        self.assertEqual(response.status_code, status.HTTP_200_OK)
+        self.assertEqual(LogEntry.objects.filter(object_pk=self.doc1.id).count(), 1)
+
+    @override_settings(AUDIT_LOG_ENABLED=True)
+    def test_bulk_edit_audit_log_enabled_custom_fields(self):
+        """
+        GIVEN:
+            - Audit log is enabled
+        WHEN:
+            - API to bulk edit custom fields is called
+        THEN:
+            - Audit log is created
+        """
+        LogEntry.objects.all().delete()
+        response = self.client.post(
+            "/api/documents/bulk_edit/",
+            json.dumps(
+                {
+                    "documents": [self.doc1.id],
+                    "method": "modify_custom_fields",
+                    "parameters": {
+                        "add_custom_fields": [self.cf1.id],
+                        "remove_custom_fields": [],
+                    },
+                },
+            ),
+            content_type="application/json",
+        )
+
+        self.assertEqual(response.status_code, status.HTTP_200_OK)
+        self.assertEqual(LogEntry.objects.filter(object_pk=self.doc1.id).count(), 2)
@@ -1,5 +1,6 @@
 import json

+from allauth.mfa.models import Authenticator
 from django.contrib.auth.models import Group
 from django.contrib.auth.models import Permission
 from django.contrib.auth.models import User
@@ -601,6 +602,59 @@ class TestApiUser(DirectoriesMixin, APITestCase):
         self.assertEqual(returned_user2.first_name, "Updated Name 2")
         self.assertNotEqual(returned_user2.password, initial_password)

+    def test_deactivate_totp(self):
+        """
+        GIVEN:
+            - Existing user account with TOTP enabled
+        WHEN:
+            - API request by a superuser is made to deactivate TOTP
+            - API request by a regular user is made to deactivate TOTP
+        THEN:
+            - TOTP is deactivated, if exists
+            - Regular user is forbidden from deactivating TOTP
+        """
+
+        user1 = User.objects.create(
+            username="testuser",
+            password="test",
+            first_name="Test",
+            last_name="User",
+        )
+        Authenticator.objects.create(
+            user=user1,
+            type=Authenticator.Type.TOTP,
+            data={},
+        )
+
+        response = self.client.post(
+            f"{self.ENDPOINT}{user1.pk}/deactivate_totp/",
+        )
+
+        self.assertEqual(response.status_code, status.HTTP_200_OK)
+        self.assertEqual(Authenticator.objects.filter(user=user1).count(), 0)
+
+        # fail if already deactivated
+        response = self.client.post(
+            f"{self.ENDPOINT}{user1.pk}/deactivate_totp/",
+        )
+        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+        regular_user = User.objects.create_user(username="regular_user")
+        regular_user.user_permissions.add(
+            *Permission.objects.all(),
+        )
+        self.client.force_authenticate(regular_user)
+        Authenticator.objects.create(
+            user=user1,
+            type=Authenticator.Type.TOTP,
+            data={},
+        )
+
+        response = self.client.post(
+            f"{self.ENDPOINT}{user1.pk}/deactivate_totp/",
+        )
+        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
+

 class TestApiGroup(DirectoriesMixin, APITestCase):
     ENDPOINT = "/api/groups/"
@@ -1,5 +1,6 @@
 from unittest import mock

+from allauth.mfa.models import Authenticator
 from allauth.socialaccount.models import SocialAccount
 from allauth.socialaccount.models import SocialApp
 from django.contrib.auth.models import User
@@ -299,3 +300,82 @@ class TestApiProfile(DirectoriesMixin, APITestCase):
             len(self.user.socialaccount_set.filter(pk=social_account_id)),
             0,
         )
+
+
+class TestApiTOTPViews(APITestCase):
+    ENDPOINT = "/api/profile/totp/"
+
+    def setUp(self):
+        super().setUp()
+
+        self.user = User.objects.create_superuser(username="temp_admin")
+        self.client.force_authenticate(user=self.user)
+
+    def test_get_totp(self):
+        """
+        GIVEN:
+            - Existing user account
+        WHEN:
+            - API request is made to TOTP endpoint
+        THEN:
+            - TOTP is generated
+        """
+        response = self.client.get(
+            self.ENDPOINT,
+        )
+
+        self.assertEqual(response.status_code, status.HTTP_200_OK)
+        self.assertIn("qr_svg", response.data)
+        self.assertIn("secret", response.data)
+
+    @mock.patch("allauth.mfa.totp.internal.auth.validate_totp_code")
+    def test_activate_totp(self, mock_validate_totp_code):
+        """
+        GIVEN:
+            - Existing user account
+        WHEN:
+            - API request is made to activate TOTP
+        THEN:
+            - TOTP is activated, recovery codes are returned
+        """
+        mock_validate_totp_code.return_value = True
+
+        response = self.client.post(
+            self.ENDPOINT,
+            data={
+                "secret": "123",
+                "code": "456",
+            },
+        )
+
+        self.assertEqual(response.status_code, status.HTTP_200_OK)
+        self.assertTrue(Authenticator.objects.filter(user=self.user).exists())
+        self.assertIn("recovery_codes", response.data)
+
+    def test_deactivate_totp(self):
+        """
+        GIVEN:
+            - Existing user account with TOTP enabled
+        WHEN:
+            - API request is made to deactivate TOTP
+        THEN:
+            - TOTP is deactivated
+        """
+        Authenticator.objects.create(
+            user=self.user,
+            type=Authenticator.Type.TOTP,
+            data={},
+        )
+
+        response = self.client.delete(
+            self.ENDPOINT,
+        )
+
+        self.assertEqual(response.status_code, status.HTTP_200_OK)
+        self.assertEqual(Authenticator.objects.filter(user=self.user).count(), 0)
+
+        # test fails
+        response = self.client.delete(
+            self.ENDPOINT,
+        )
+        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
@@ -1,6 +1,7 @@
 import uuid

 import celery
+from django.contrib.auth.models import Permission
 from django.contrib.auth.models import User
 from rest_framework import status
 from rest_framework.test import APITestCase
@@ -11,7 +12,6 @@ from documents.tests.utils import DirectoriesMixin

 class TestTasks(DirectoriesMixin, APITestCase):
     ENDPOINT = "/api/tasks/"
-    ENDPOINT_ACKNOWLEDGE = "/api/acknowledge_tasks/"

     def setUp(self):
         super().setUp()
@@ -125,7 +125,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
         self.assertEqual(len(response.data), 1)

         response = self.client.post(
-            self.ENDPOINT_ACKNOWLEDGE,
+            self.ENDPOINT + "acknowledge/",
             {"tasks": [task.id]},
         )
         self.assertEqual(response.status_code, status.HTTP_200_OK)
@@ -133,6 +133,52 @@ class TestTasks(DirectoriesMixin, APITestCase):
         response = self.client.get(self.ENDPOINT)
         self.assertEqual(len(response.data), 0)

+    def test_tasks_owner_aware(self):
+        """
+        GIVEN:
+            - Existing PaperlessTasks with owner and with no owner
+        WHEN:
+            - API call is made to get tasks
+        THEN:
+            - Only tasks with no owner or request user are returned
+        """
+
+        regular_user = User.objects.create_user(username="test")
+        regular_user.user_permissions.add(*Permission.objects.all())
+        self.client.logout()
+        self.client.force_authenticate(user=regular_user)
+
+        task1 = PaperlessTask.objects.create(
+            task_id=str(uuid.uuid4()),
+            task_file_name="task_one.pdf",
+            owner=self.user,
+        )
+
+        task2 = PaperlessTask.objects.create(
+            task_id=str(uuid.uuid4()),
+            task_file_name="task_two.pdf",
+        )
+
+        task3 = PaperlessTask.objects.create(
+            task_id=str(uuid.uuid4()),
+            task_file_name="task_three.pdf",
+            owner=regular_user,
+        )
+
+        response = self.client.get(self.ENDPOINT)
+
+        self.assertEqual(response.status_code, status.HTTP_200_OK)
+        self.assertEqual(len(response.data), 2)
+        self.assertEqual(response.data[0]["task_id"], task3.task_id)
+        self.assertEqual(response.data[1]["task_id"], task2.task_id)
+
+        acknowledge_response = self.client.post(
+            self.ENDPOINT + "acknowledge/",
+            {"tasks": [task1.id, task2.id, task3.id]},
+        )
+        self.assertEqual(acknowledge_response.status_code, status.HTTP_200_OK)
+        self.assertEqual(acknowledge_response.data, {"result": 2})
+
     def test_task_result_no_error(self):
         """
         GIVEN:
@@ -607,7 +607,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
         mock_consume_file.assert_not_called()

     @mock.patch("documents.tasks.bulk_update_documents.si")
-    @mock.patch("documents.tasks.update_document_archive_file.s")
+    @mock.patch("documents.tasks.update_document_content_maybe_archive_file.s")
     @mock.patch("celery.chord.delay")
     def test_rotate(self, mock_chord, mock_update_document, mock_update_documents):
         """
@@ -626,7 +626,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
         self.assertEqual(result, "OK")

     @mock.patch("documents.tasks.bulk_update_documents.si")
-    @mock.patch("documents.tasks.update_document_archive_file.s")
+    @mock.patch("documents.tasks.update_document_content_maybe_archive_file.s")
     @mock.patch("pikepdf.Pdf.save")
     def test_rotate_with_error(
         self,
@@ -654,7 +654,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
         mock_update_archive_file.assert_not_called()

     @mock.patch("documents.tasks.bulk_update_documents.si")
-    @mock.patch("documents.tasks.update_document_archive_file.s")
+    @mock.patch("documents.tasks.update_document_content_maybe_archive_file.s")
     @mock.patch("celery.chord.delay")
     def test_rotate_non_pdf(
         self,
@@ -680,7 +680,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
         mock_chord.assert_called_once()
         self.assertEqual(result, "OK")

-    @mock.patch("documents.tasks.update_document_archive_file.delay")
+    @mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
     @mock.patch("pikepdf.Pdf.save")
     def test_delete_pages(self, mock_pdf_save, mock_update_archive_file):
         """
@@ -705,7 +705,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
         self.doc2.refresh_from_db()
         self.assertEqual(self.doc2.page_count, expected_page_count)

-    @mock.patch("documents.tasks.update_document_archive_file.delay")
+    @mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
     @mock.patch("pikepdf.Pdf.save")
     def test_delete_pages_with_error(self, mock_pdf_save, mock_update_archive_file):
         """
@@ -13,7 +13,7 @@ from django.test import override_settings

 from documents.file_handling import generate_filename
 from documents.models import Document
-from documents.tasks import update_document_archive_file
+from documents.tasks import update_document_content_maybe_archive_file
 from documents.tests.utils import DirectoriesMixin
 from documents.tests.utils import FileSystemAssertsMixin
@@ -46,7 +46,7 @@ class TestArchiver(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             os.path.join(self.dirs.originals_dir, f"{doc.id:07}.pdf"),
         )

-        update_document_archive_file(doc.pk)
+        update_document_content_maybe_archive_file(doc.pk)

         doc = Document.objects.get(id=doc.id)

@@ -63,7 +63,7 @@ class TestArchiver(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         doc.save()
         shutil.copy(sample_file, doc.source_path)

-        update_document_archive_file(doc.pk)
+        update_document_content_maybe_archive_file(doc.pk)

         doc = Document.objects.get(id=doc.id)

@@ -94,8 +94,8 @@ class TestArchiver(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             os.path.join(self.dirs.originals_dir, "document_01.pdf"),
         )

-        update_document_archive_file(doc2.pk)
-        update_document_archive_file(doc1.pk)
+        update_document_content_maybe_archive_file(doc2.pk)
+        update_document_content_maybe_archive_file(doc1.pk)

         doc1 = Document.objects.get(id=doc1.id)
         doc2 = Document.objects.get(id=doc2.id)
@@ -153,6 +153,7 @@ class TestExportImport(
         *,
         use_filename_format=False,
         compare_checksums=False,
+        compare_json=False,
         delete=False,
         no_archive=False,
         no_thumbnail=False,
@@ -165,6 +166,8 @@ class TestExportImport(
             args += ["--use-filename-format"]
         if compare_checksums:
             args += ["--compare-checksums"]
+        if compare_json:
+            args += ["--compare-json"]
         if delete:
             args += ["--delete"]
         if no_archive:
@@ -340,6 +343,10 @@ class TestExportImport(
         self.assertNotEqual(st_mtime_1, st_mtime_2)
         self.assertNotEqual(st_mtime_2, st_mtime_3)

+        self._do_export(compare_json=True)
+        st_mtime_4 = os.stat(os.path.join(self.target, "manifest.json")).st_mtime
+        self.assertEqual(st_mtime_3, st_mtime_4)
+
     def test_update_export_changed_checksum(self):
         shutil.rmtree(os.path.join(self.dirs.media_dir, "documents"))
         shutil.copytree(
@@ -5,6 +5,7 @@ import celery
 from django.test import TestCase

 from documents.data_models import ConsumableDocument
+from documents.data_models import DocumentMetadataOverrides
 from documents.data_models import DocumentSource
 from documents.models import PaperlessTask
 from documents.signals.handlers import before_task_publish_handler
@@ -48,7 +49,10 @@ class TestTaskSignalHandler(DirectoriesMixin, TestCase):
                     source=DocumentSource.ConsumeFolder,
                     original_file="/consume/hello-999.pdf",
                 ),
-                None,
+                DocumentMetadataOverrides(
+                    title="Hello world",
+                    owner_id=1,
+                ),
             ),
             # kwargs
             {},
@@ -65,6 +69,7 @@ class TestTaskSignalHandler(DirectoriesMixin, TestCase):
         self.assertEqual(headers["id"], task.task_id)
         self.assertEqual("hello-999.pdf", task.task_file_name)
         self.assertEqual("documents.tasks.consume_file", task.task_name)
+        self.assertEqual(1, task.owner_id)
         self.assertEqual(celery.states.PENDING, task.status)

     def test_task_prerun_handler(self):
@@ -1,5 +1,7 @@
 import os
+import shutil
 from datetime import timedelta
+from pathlib import Path
 from unittest import mock

 from django.conf import settings
@@ -184,3 +186,75 @@ class TestEmptyTrashTask(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
|
||||
|
||||
tasks.empty_trash()
|
||||
self.assertEqual(Document.global_objects.count(), 0)
|
||||
|
||||
|
||||
class TestUpdateContent(DirectoriesMixin, TestCase):
|
||||
def test_update_content_maybe_archive_file(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Existing document with archive file
|
||||
WHEN:
|
||||
- Update content task is called
|
||||
THEN:
|
||||
- Document is reprocessed, content and checksum are updated
|
||||
"""
|
||||
sample1 = self.dirs.scratch_dir / "sample.pdf"
|
||||
shutil.copy(
|
||||
Path(__file__).parent
|
||||
/ "samples"
|
||||
/ "documents"
|
||||
/ "originals"
|
||||
/ "0000001.pdf",
|
||||
sample1,
|
||||
)
|
||||
sample1_archive = self.dirs.archive_dir / "sample_archive.pdf"
|
||||
shutil.copy(
|
||||
Path(__file__).parent
|
||||
/ "samples"
|
||||
/ "documents"
|
||||
/ "originals"
|
||||
/ "0000001.pdf",
|
||||
sample1_archive,
|
||||
)
|
||||
doc = Document.objects.create(
|
||||
title="test",
|
||||
content="my document",
|
||||
checksum="wow",
|
||||
archive_checksum="wow",
|
||||
filename=sample1,
|
||||
mime_type="application/pdf",
|
||||
archive_filename=sample1_archive,
|
||||
)
|
||||
|
||||
tasks.update_document_content_maybe_archive_file(doc.pk)
|
||||
self.assertNotEqual(Document.objects.get(pk=doc.pk).content, "test")
|
||||
self.assertNotEqual(Document.objects.get(pk=doc.pk).archive_checksum, "wow")
|
||||
|
||||
def test_update_content_maybe_archive_file_no_archive(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Existing document without archive file
|
||||
WHEN:
|
||||
- Update content task is called
|
||||
THEN:
|
||||
- Document is reprocessed, content is updated
|
||||
"""
|
||||
sample1 = self.dirs.scratch_dir / "sample.pdf"
|
||||
shutil.copy(
|
||||
Path(__file__).parent
|
||||
/ "samples"
|
||||
/ "documents"
|
||||
/ "originals"
|
||||
/ "0000001.pdf",
|
||||
sample1,
|
||||
)
|
||||
doc = Document.objects.create(
|
||||
title="test",
|
||||
content="my document",
|
||||
checksum="wow",
|
||||
filename=sample1,
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
|
||||
tasks.update_document_content_maybe_archive_file(doc.pk)
|
||||
self.assertNotEqual(Document.objects.get(pk=doc.pk).content, "test")
|
||||
|
||||
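The two tests above pin down the semantics behind the task's new name: content is always re-extracted, while the archive checksum only changes when the document already has an archive file. A minimal sketch of one control flow consistent with these tests; parse_and_update and regenerate_archive are hypothetical helper names, since the real implementation in documents/tasks.py is not shown in this diff:

from documents.models import Document

def update_document_content_maybe_archive_file(document_id: int) -> None:
    # Sketch only; helper names below are illustrative, not the shipped code.
    doc = Document.objects.get(pk=document_id)
    parse_and_update(doc)  # refresh content (and checksum) from the original file
    if doc.archive_filename is not None:
        # the "maybe": an archive is only rebuilt when one already exists
        regenerate_archive(doc)
    doc.save()
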
@@ -6,12 +6,14 @@ from django.conf import settings
from django.contrib.auth.models import Permission
from django.contrib.auth.models import User
from django.test import TestCase
from django.test import override_settings
from django.utils import timezone
from rest_framework import status

from documents.models import Document
from documents.models import ShareLink
from documents.tests.utils import DirectoriesMixin
from paperless.models import ApplicationConfiguration


class TestViews(DirectoriesMixin, TestCase):
@@ -67,6 +69,26 @@ class TestViews(DirectoriesMixin, TestCase):
f"frontend/{language_actual}/main.js",
)

@override_settings(BASE_URL="/paperless/")
def test_index_app_logo_with_base_url(self):
"""
GIVEN:
- Existing config with app_logo specified
WHEN:
- Index page is loaded
THEN:
- app_logo is prefixed with BASE_URL
"""
config = ApplicationConfiguration.objects.first()
config.app_logo = "/logo/example.jpg"
config.save()
self.client.force_login(self.user)
response = self.client.get("/")
self.assertEqual(
response.context["APP_LOGO"],
f"/paperless{config.app_logo}",
)

def test_share_link_views(self):
"""
GIVEN:

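The expected value f"/paperless{config.app_logo}" only comes out right because BASE_URL ends with a slash and the stored logo path's leading slash has to be dropped before joining. A one-line sketch of a join that produces exactly what the test asserts:

def prefix_app_logo(base_url: str, app_logo: str) -> str:
    # base_url is assumed to always end in "/", as in the override above
    return base_url + app_logo.lstrip("/")

assert prefix_app_logo("/paperless/", "/logo/example.jpg") == "/paperless/logo/example.jpg"
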
@@ -29,6 +29,7 @@ from documents.models import StoragePath
from documents.models import Tag
from documents.models import Workflow
from documents.models import WorkflowAction
from documents.models import WorkflowRun
from documents.models import WorkflowTrigger
from documents.signals import document_consumption_finished
from documents.tests.utils import DirectoriesMixin
@@ -1306,6 +1307,275 @@ class TestWorkflows(DirectoriesMixin, FileSystemAssertsMixin, APITestCase):
# group2 should have been added
self.assertIn(self.group2, group_perms)

def test_workflow_scheduled_trigger_created(self):
"""
GIVEN:
- Existing workflow with SCHEDULED trigger against the created field and action that assigns owner
- Existing doc that matches the trigger
WHEN:
- Scheduled workflows are checked
THEN:
- Workflow runs, document owner is updated
"""
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
schedule_offset_days=1,
schedule_date_field="created",
)
action = WorkflowAction.objects.create(
assign_title="Doc assign owner",
assign_owner=self.user2,
)
w = Workflow.objects.create(
name="Workflow 1",
order=0,
)
w.triggers.add(trigger)
w.actions.add(action)
w.save()

now = timezone.localtime(timezone.now())
created = now - timedelta(weeks=520)
doc = Document.objects.create(
title="sample test",
correspondent=self.c,
original_filename="sample.pdf",
created=created,
)

tasks.check_scheduled_workflows()

doc.refresh_from_db()
self.assertEqual(doc.owner, self.user2)

def test_workflow_scheduled_trigger_added(self):
"""
GIVEN:
- Existing workflow with SCHEDULED trigger against the added field and action that assigns owner
- Existing doc that matches the trigger
WHEN:
- Scheduled workflows are checked
THEN:
- Workflow runs, document owner is updated
"""
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
schedule_offset_days=1,
schedule_date_field=WorkflowTrigger.ScheduleDateField.ADDED,
)
action = WorkflowAction.objects.create(
assign_title="Doc assign owner",
assign_owner=self.user2,
)
w = Workflow.objects.create(
name="Workflow 1",
order=0,
)
w.triggers.add(trigger)
w.actions.add(action)
w.save()

added = timezone.now() - timedelta(days=365)
doc = Document.objects.create(
title="sample test",
correspondent=self.c,
original_filename="sample.pdf",
added=added,
)

tasks.check_scheduled_workflows()

doc.refresh_from_db()
self.assertEqual(doc.owner, self.user2)

@mock.patch("documents.models.Document.objects.filter", autospec=True)
def test_workflow_scheduled_trigger_modified(self, mock_filter):
"""
GIVEN:
- Existing workflow with SCHEDULED trigger against the modified field and action that assigns owner
- Existing doc that matches the trigger
WHEN:
- Scheduled workflows are checked
THEN:
- Workflow runs, document owner is updated
"""
# we have to mock because modified field is auto_now
mock_filter.return_value = Document.objects.all()
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
schedule_offset_days=1,
schedule_date_field=WorkflowTrigger.ScheduleDateField.MODIFIED,
)
action = WorkflowAction.objects.create(
assign_title="Doc assign owner",
assign_owner=self.user2,
)
w = Workflow.objects.create(
name="Workflow 1",
order=0,
)
w.triggers.add(trigger)
w.actions.add(action)
w.save()

doc = Document.objects.create(
title="sample test",
correspondent=self.c,
original_filename="sample.pdf",
)

tasks.check_scheduled_workflows()

doc.refresh_from_db()
self.assertEqual(doc.owner, self.user2)

def test_workflow_scheduled_trigger_custom_field(self):
"""
GIVEN:
- Existing workflow with SCHEDULED trigger against a custom field and action that assigns owner
- Existing doc that matches the trigger
WHEN:
- Scheduled workflows are checked
THEN:
- Workflow runs, document owner is updated
"""
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
schedule_offset_days=1,
schedule_date_field=WorkflowTrigger.ScheduleDateField.CUSTOM_FIELD,
schedule_date_custom_field=self.cf1,
)
action = WorkflowAction.objects.create(
assign_title="Doc assign owner",
assign_owner=self.user2,
)
w = Workflow.objects.create(
name="Workflow 1",
order=0,
)
w.triggers.add(trigger)
w.actions.add(action)
w.save()

doc = Document.objects.create(
title="sample test",
correspondent=self.c,
original_filename="sample.pdf",
)
CustomFieldInstance.objects.create(
document=doc,
field=self.cf1,
value_date=timezone.now() - timedelta(days=2),
)

tasks.check_scheduled_workflows()

doc.refresh_from_db()
self.assertEqual(doc.owner, self.user2)

def test_workflow_scheduled_already_run(self):
"""
GIVEN:
- Existing workflow with SCHEDULED trigger
- Existing doc that has already had the workflow run
WHEN:
- Scheduled workflows are checked
THEN:
- Workflow does not run again
"""
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
schedule_offset_days=1,
schedule_date_field=WorkflowTrigger.ScheduleDateField.CREATED,
)
action = WorkflowAction.objects.create(
assign_title="Doc assign owner",
assign_owner=self.user2,
)
w = Workflow.objects.create(
name="Workflow 1",
order=0,
)
w.triggers.add(trigger)
w.actions.add(action)
w.save()

doc = Document.objects.create(
title="sample test",
correspondent=self.c,
original_filename="sample.pdf",
created=timezone.now() - timedelta(days=2),
)

wr = WorkflowRun.objects.create(
workflow=w,
document=doc,
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
run_at=timezone.now(),
)
self.assertEqual(
str(wr),
f"WorkflowRun of {w} at {wr.run_at} on {doc}",
) # coverage

tasks.check_scheduled_workflows()

doc.refresh_from_db()
self.assertIsNone(doc.owner)

def test_workflow_scheduled_trigger_too_early(self):
"""
GIVEN:
- Existing workflow with SCHEDULED trigger and recurring interval of 7 days
- Workflow run date is 6 days ago
WHEN:
- Scheduled workflows are checked
THEN:
- Workflow does not run, as the last run was within the recurring interval
"""
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
|
||||
schedule_offset_days=30,
|
||||
schedule_date_field=WorkflowTrigger.ScheduleDateField.CREATED,
|
||||
schedule_is_recurring=True,
|
||||
schedule_recurring_interval_days=7,
|
||||
)
|
||||
action = WorkflowAction.objects.create(
|
||||
assign_title="Doc assign owner",
|
||||
assign_owner=self.user2,
|
||||
)
|
||||
w = Workflow.objects.create(
|
||||
name="Workflow 1",
|
||||
order=0,
|
||||
)
|
||||
w.triggers.add(trigger)
|
||||
w.actions.add(action)
|
||||
w.save()
|
||||
|
||||
doc = Document.objects.create(
|
||||
title="sample test",
|
||||
correspondent=self.c,
|
||||
original_filename="sample.pdf",
|
||||
created=timezone.now() - timedelta(days=40),
|
||||
)
|
||||
|
||||
WorkflowRun.objects.create(
|
||||
workflow=w,
|
||||
document=doc,
|
||||
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
|
||||
run_at=timezone.now() - timedelta(days=6),
|
||||
)
|
||||
|
||||
with self.assertLogs(level="DEBUG") as cm:
|
||||
tasks.check_scheduled_workflows()
|
||||
self.assertIn(
|
||||
"last run was within the recurring interval",
|
||||
" ".join(cm.output),
|
||||
)
|
||||
|
||||
doc.refresh_from_db()
|
||||
self.assertIsNone(doc.owner)
|
||||
|
||||
def test_workflow_enabled_disabled(self):
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
|
||||
@@ -1354,7 +1624,7 @@ class TestWorkflows(DirectoriesMixin, FileSystemAssertsMixin, APITestCase):
|
||||
|
||||
def test_new_trigger_type_raises_exception(self):
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=4,
|
||||
type=99,
|
||||
)
|
||||
action = WorkflowAction.objects.create(
|
||||
assign_title="Doc assign owner",
|
||||
@@ -1370,7 +1640,7 @@ class TestWorkflows(DirectoriesMixin, FileSystemAssertsMixin, APITestCase):
|
||||
doc = Document.objects.create(
|
||||
title="test",
|
||||
)
|
||||
self.assertRaises(Exception, document_matches_workflow, doc, w, 4)
|
||||
self.assertRaises(Exception, document_matches_workflow, doc, w, 99)
|
||||
|
||||
def test_removal_action_document_updated_workflow(self):
|
||||
"""
|
||||
|
||||
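check_scheduled_workflows() itself is not part of this diff, but the trigger fields the tests set (schedule_offset_days, schedule_is_recurring, schedule_recurring_interval_days) imply roughly the following per-document decision. A sketch under those assumptions; get_schedule_date and last_run_at are illustrative stand-ins, not the shipped code:

from datetime import timedelta

from django.utils import timezone

def should_run(trigger, doc, last_run_at) -> bool:
    # Sketch of the decision the scheduled-trigger tests above exercise.
    base = get_schedule_date(doc, trigger)  # created/added/modified/custom field
    if base is None:
        return False
    # fire only once the offset has elapsed past the date field...
    if timezone.now() < base + timedelta(days=trigger.schedule_offset_days):
        return False
    if last_run_at is not None:
        if not trigger.schedule_is_recurring:
            return False  # one-shot: a prior WorkflowRun blocks re-running
        # ...and recurring triggers skip while the last run is within the interval
        if timezone.now() - last_run_at < timedelta(
            days=trigger.schedule_recurring_interval_days,
        ):
            return False
    return True
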
@@ -26,11 +26,13 @@ from django.db.models import Case
from django.db.models import Count
from django.db.models import IntegerField
from django.db.models import Max
from django.db.models import Model
from django.db.models import Q
from django.db.models import Sum
from django.db.models import When
from django.db.models.functions import Length
from django.db.models.functions import Lower
from django.db.models.manager import Manager
from django.http import Http404
from django.http import HttpResponse
from django.http import HttpResponseBadRequest
@@ -426,7 +428,7 @@ class DocumentViewSet(
)

def file_response(self, pk, request, disposition):
doc = Document.objects.select_related("owner").get(id=pk)
doc = Document.global_objects.select_related("owner").get(id=pk)
if request.user is not None and not has_perms_owner_aware(
request.user,
"view_document",
@@ -961,6 +963,22 @@ class SavedViewViewSet(ModelViewSet, PassUserMixin):


class BulkEditView(PassUserMixin):
MODIFIED_FIELD_BY_METHOD = {
"set_correspondent": "correspondent",
"set_document_type": "document_type",
"set_storage_path": "storage_path",
"add_tag": "tags",
"remove_tag": "tags",
"modify_tags": "tags",
"modify_custom_fields": "custom_fields",
"set_permissions": None,
"delete": "deleted_at",
"rotate": "checksum",
"delete_pages": "checksum",
"split": None,
"merge": None,
}

permission_classes = (IsAuthenticated,)
serializer_class = BulkEditSerializer
parser_classes = (parsers.JSONParser,)
@@ -1013,8 +1031,53 @@ class BulkEditView(PassUserMixin):
return HttpResponseForbidden("Insufficient permissions")

try:
modified_field = self.MODIFIED_FIELD_BY_METHOD[method.__name__]
if settings.AUDIT_LOG_ENABLED and modified_field:
old_documents = {
obj["pk"]: obj
for obj in Document.objects.filter(pk__in=documents).values(
"pk",
"correspondent",
"document_type",
"storage_path",
"tags",
"custom_fields",
"deleted_at",
"checksum",
)
}

# TODO: parameter validation
result = method(documents, **parameters)

if settings.AUDIT_LOG_ENABLED and modified_field:
new_documents = Document.objects.filter(pk__in=documents)
for doc in new_documents:
old_value = old_documents[doc.pk][modified_field]
new_value = getattr(doc, modified_field)

if isinstance(new_value, Model):
# correspondent, document type, etc.
new_value = new_value.pk
elif isinstance(new_value, Manager):
# tags, custom fields
new_value = list(new_value.values_list("pk", flat=True))

LogEntry.objects.log_create(
instance=doc,
changes={
modified_field: [
old_value,
new_value,
],
},
action=LogEntry.Action.UPDATE,
actor=user,
additional_data={
"reason": f"Bulk edit: {method.__name__}",
},
)

return Response({"result": result})
except Exception as e:
logger.warning(f"An error occurred performing bulk edit: {e!s}")
@@ -1546,6 +1609,12 @@ class StoragePathViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
filterset_class = StoragePathFilterSet
ordering_fields = ("name", "path", "matching_algorithm", "match", "document_count")

def get_permissions(self):
if self.action == "test":
# Test action does not require object level permissions
self.permission_classes = (IsAuthenticated,)
return super().get_permissions()

def destroy(self, request, *args, **kwargs):
"""
When a storage path is deleted, see if documents
@@ -1562,17 +1631,12 @@ class StoragePathViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):

return response


class StoragePathTestView(GenericAPIView):
"""
Test storage path against a document
"""

permission_classes = [IsAuthenticated]
serializer_class = StoragePathTestSerializer

def post(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
@action(methods=["post"], detail=False)
def test(self, request):
"""
Test storage path against a document
"""
serializer = StoragePathTestSerializer(data=request.data)
serializer.is_valid(raise_exception=True)

document = serializer.validated_data.get("document")
@@ -1705,6 +1769,7 @@ class RemoteVersionView(GenericAPIView):
class TasksViewSet(ReadOnlyModelViewSet):
permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
serializer_class = TasksViewSerializer
filter_backends = (ObjectOwnedOrGrantedPermissionsFilter,)

def get_queryset(self):
queryset = (
@@ -1719,19 +1784,17 @@ class TasksViewSet(ReadOnlyModelViewSet):
queryset = PaperlessTask.objects.filter(task_id=task_id)
return queryset


class AcknowledgeTasksView(GenericAPIView):
permission_classes = (IsAuthenticated,)
serializer_class = AcknowledgeTasksViewSerializer

def post(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
@action(methods=["post"], detail=False)
def acknowledge(self, request):
serializer = AcknowledgeTasksViewSerializer(data=request.data)
serializer.is_valid(raise_exception=True)

tasks = serializer.validated_data.get("tasks")
task_ids = serializer.validated_data.get("tasks")

try:
result = PaperlessTask.objects.filter(id__in=tasks).update(
tasks = PaperlessTask.objects.filter(id__in=task_ids)
if request.user is not None and not request.user.is_superuser:
tasks = tasks.filter(owner=request.user) | tasks.filter(owner=None)
result = tasks.update(
acknowledged=True,
)
return Response({"result": result})

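In the audit-log block above, old and new values are normalized to primary keys before being logged, so a bulk add_tag call ends up recording a before/after list of tag ids. An illustrative payload; the pk values are invented for the example:

# Shape of the arguments handed to LogEntry.objects.log_create for "add_tag";
# the ids below are hypothetical.
changes = {
    "tags": [
        [1, 2],     # old_value: tag pks before the bulk edit
        [1, 2, 5],  # new_value: tag pks after adding the tag with pk=5
    ],
}
additional_data = {"reason": "Bulk edit: add_tag"}
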
@@ -2,7 +2,7 @@ msgid ""
msgstr ""
"Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-10-19 22:56-0700\n"
"POT-Creation-Date: 2024-10-19 23:22-0700\n"
"PO-Revision-Date: 2022-02-17 04:17\n"
"Last-Translator: \n"
"Language-Team: English\n"
@@ -1039,6 +1039,7 @@ msgid "Password"
msgstr ""

#: documents/templates/account/login.html:30
#: documents/templates/mfa/authenticate.html:23
msgid "Sign in"
msgstr ""

@@ -1161,6 +1162,24 @@ msgstr ""
msgid "Here's a link to the docs."
msgstr ""

#: documents/templates/mfa/authenticate.html:7
msgid "Paperless-ngx Two-Factor Authentication"
msgstr ""

#: documents/templates/mfa/authenticate.html:12
msgid ""
"Your account is protected by two-factor authentication. Please enter an "
"authenticator code:"
msgstr ""

#: documents/templates/mfa/authenticate.html:17
msgid "Code"
msgstr ""

#: documents/templates/mfa/authenticate.html:24
msgid "Cancel"
msgstr ""

#: documents/templates/paperless-ngx/base.html:58
msgid "Share link was not found."
msgstr ""
@@ -1366,139 +1385,139 @@ msgstr ""
msgid "paperless application settings"
msgstr ""

#: paperless/settings.py:684
#: paperless/settings.py:687
msgid "English (US)"
msgstr ""

#: paperless/settings.py:685
#: paperless/settings.py:688
msgid "Arabic"
msgstr ""

#: paperless/settings.py:686
#: paperless/settings.py:689
msgid "Afrikaans"
msgstr ""

#: paperless/settings.py:687
#: paperless/settings.py:690
msgid "Belarusian"
msgstr ""

#: paperless/settings.py:688
#: paperless/settings.py:691
msgid "Bulgarian"
msgstr ""

#: paperless/settings.py:689
#: paperless/settings.py:692
msgid "Catalan"
msgstr ""

#: paperless/settings.py:690
#: paperless/settings.py:693
msgid "Czech"
msgstr ""

#: paperless/settings.py:691
#: paperless/settings.py:694
msgid "Danish"
msgstr ""

#: paperless/settings.py:692
#: paperless/settings.py:695
msgid "German"
msgstr ""

#: paperless/settings.py:693
#: paperless/settings.py:696
msgid "Greek"
msgstr ""

#: paperless/settings.py:694
#: paperless/settings.py:697
msgid "English (GB)"
msgstr ""

#: paperless/settings.py:695
#: paperless/settings.py:698
msgid "Spanish"
msgstr ""

#: paperless/settings.py:696
#: paperless/settings.py:699
msgid "Finnish"
msgstr ""

#: paperless/settings.py:697
#: paperless/settings.py:700
msgid "French"
msgstr ""

#: paperless/settings.py:698
#: paperless/settings.py:701
msgid "Hungarian"
msgstr ""

#: paperless/settings.py:699
#: paperless/settings.py:702
msgid "Italian"
msgstr ""

#: paperless/settings.py:700
#: paperless/settings.py:703
msgid "Japanese"
msgstr ""

#: paperless/settings.py:701
#: paperless/settings.py:704
msgid "Korean"
msgstr ""

#: paperless/settings.py:702
#: paperless/settings.py:705
msgid "Luxembourgish"
msgstr ""

#: paperless/settings.py:703
#: paperless/settings.py:706
msgid "Norwegian"
msgstr ""

#: paperless/settings.py:704
#: paperless/settings.py:707
msgid "Dutch"
msgstr ""

#: paperless/settings.py:705
#: paperless/settings.py:708
msgid "Polish"
msgstr ""

#: paperless/settings.py:706
#: paperless/settings.py:709
msgid "Portuguese (Brazil)"
msgstr ""

#: paperless/settings.py:707
#: paperless/settings.py:710
msgid "Portuguese"
msgstr ""

#: paperless/settings.py:708
#: paperless/settings.py:711
msgid "Romanian"
msgstr ""

#: paperless/settings.py:709
#: paperless/settings.py:712
msgid "Russian"
msgstr ""

#: paperless/settings.py:710
#: paperless/settings.py:713
msgid "Slovak"
msgstr ""

#: paperless/settings.py:711
#: paperless/settings.py:714
msgid "Slovenian"
msgstr ""

#: paperless/settings.py:712
#: paperless/settings.py:715
msgid "Serbian"
msgstr ""

#: paperless/settings.py:713
#: paperless/settings.py:716
msgid "Swedish"
msgstr ""

#: paperless/settings.py:714
#: paperless/settings.py:717
msgid "Turkish"
msgstr ""

#: paperless/settings.py:715
#: paperless/settings.py:718
msgid "Ukrainian"
msgstr ""

#: paperless/settings.py:716
#: paperless/settings.py:719
msgid "Chinese Simplified"
msgstr ""

#: paperless/urls.py:254
#: paperless/urls.py:268
msgid "Paperless-ngx administration"
msgstr ""


@@ -5,9 +5,9 @@ from paperless.checks import paths_check
from paperless.checks import settings_values_check

__all__ = [
"celery_app",
"audit_log_check",
"binaries_check",
"celery_app",
"paths_check",
"settings_values_check",
"audit_log_check",
]

@@ -1,5 +1,6 @@
import logging

from allauth.mfa.adapter import get_adapter as get_mfa_adapter
from allauth.socialaccount.models import SocialAccount
from django.contrib.auth.models import Group
from django.contrib.auth.models import Permission
@@ -32,6 +33,11 @@ class UserSerializer(serializers.ModelSerializer):
required=False,
)
inherited_permissions = serializers.SerializerMethodField()
is_mfa_enabled = serializers.SerializerMethodField()

def get_is_mfa_enabled(self, user: User):
mfa_adapter = get_mfa_adapter()
return mfa_adapter.is_mfa_enabled(user)

class Meta:
model = User
@@ -49,6 +55,7 @@ class UserSerializer(serializers.ModelSerializer):
"groups",
"user_permissions",
"inherited_permissions",
"is_mfa_enabled",
)

def get_inherited_permissions(self, obj):
@@ -130,6 +137,11 @@ class ProfileSerializer(serializers.ModelSerializer):
read_only=True,
source="socialaccount_set",
)
is_mfa_enabled = serializers.SerializerMethodField()

def get_is_mfa_enabled(self, user: User):
mfa_adapter = get_mfa_adapter()
return mfa_adapter.is_mfa_enabled(user)

class Meta:
model = User
@@ -141,6 +153,7 @@ class ProfileSerializer(serializers.ModelSerializer):
"auth_token",
"social_accounts",
"has_usable_password",
"is_mfa_enabled",
)



@@ -216,6 +216,17 @@ def _parse_beat_schedule() -> dict:
"expires": 23.0 * 60.0 * 60.0,
},
},
{
"name": "Check and run scheduled workflows",
"env_key": "PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON",
# Default hourly at 5 minutes past the hour
"env_default": "5 */1 * * *",
"task": "documents.tasks.check_scheduled_workflows",
"options": {
# 1 minute before default schedule sends again
"expires": 59.0 * 60.0,
},
},
]
for task in tasks:
# Either get the environment setting or use the default
@@ -316,6 +327,7 @@ INSTALLED_APPS = [
"allauth",
"allauth.account",
"allauth.socialaccount",
"allauth.mfa",
*env_apps,
]

@@ -332,7 +344,7 @@ REST_FRAMEWORK = {
"DEFAULT_VERSION": "1",
# Make sure these are ordered and that the most recent version appears
# last
"ALLOWED_VERSIONS": ["1", "2", "3", "4", "5"],
"ALLOWED_VERSIONS": ["1", "2", "3", "4", "5", "6"],
}

if DEBUG:
@@ -458,6 +470,8 @@ SOCIALACCOUNT_PROVIDERS = json.loads(
os.getenv("PAPERLESS_SOCIALACCOUNT_PROVIDERS", "{}"),
)

MFA_TOTP_ISSUER = "Paperless-ngx"

ACCOUNT_EMAIL_SUBJECT_PREFIX = "[Paperless-ngx] "

DISABLE_REGULAR_LOGIN = __get_boolean("PAPERLESS_DISABLE_REGULAR_LOGIN")

@@ -157,6 +157,7 @@ class TestCeleryScheduleParsing(TestCase):
INDEX_EXPIRE_TIME = 23.0 * 60.0 * 60.0
SANITY_EXPIRE_TIME = ((7.0 * 24.0) - 1.0) * 60.0 * 60.0
EMPTY_TRASH_EXPIRE_TIME = 23.0 * 60.0 * 60.0
RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME = 59.0 * 60.0

def test_schedule_configuration_default(self):
"""
@@ -196,6 +197,11 @@ class TestCeleryScheduleParsing(TestCase):
"schedule": crontab(minute=0, hour="1"),
"options": {"expires": self.EMPTY_TRASH_EXPIRE_TIME},
},
"Check and run scheduled workflows": {
"task": "documents.tasks.check_scheduled_workflows",
"schedule": crontab(minute="5", hour="*/1"),
"options": {"expires": self.RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME},
},
},
schedule,
)
@@ -243,6 +249,11 @@ class TestCeleryScheduleParsing(TestCase):
"schedule": crontab(minute=0, hour="1"),
"options": {"expires": self.EMPTY_TRASH_EXPIRE_TIME},
},
"Check and run scheduled workflows": {
"task": "documents.tasks.check_scheduled_workflows",
"schedule": crontab(minute="5", hour="*/1"),
"options": {"expires": self.RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME},
},
},
schedule,
)
@@ -282,6 +293,11 @@ class TestCeleryScheduleParsing(TestCase):
"schedule": crontab(minute=0, hour="1"),
"options": {"expires": self.EMPTY_TRASH_EXPIRE_TIME},
},
"Check and run scheduled workflows": {
"task": "documents.tasks.check_scheduled_workflows",
"schedule": crontab(minute="5", hour="*/1"),
"options": {"expires": self.RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME},
},
},
schedule,
)
@@ -303,6 +319,7 @@ class TestCeleryScheduleParsing(TestCase):
"PAPERLESS_SANITY_TASK_CRON": "disable",
"PAPERLESS_INDEX_TASK_CRON": "disable",
"PAPERLESS_EMPTY_TRASH_TASK_CRON": "disable",
"PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON": "disable",
},
):
schedule = _parse_beat_schedule()

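The new beat entry's default of "5 */1 * * *" and the crontab(minute="5", hour="*/1") objects the tests expect are the same schedule in two notations; a sketch of the standard five-field mapping the parser presumably performs:

from celery.schedules import crontab

def cron_to_crontab(expr: str) -> crontab:
    # standard cron field order: minute hour day-of-month month day-of-week
    minute, hour, dom, month, dow = expr.split()
    return crontab(
        minute=minute,
        hour=hour,
        day_of_month=dom,
        month_of_year=month,
        day_of_week=dow,
    )

# "5 */1 * * *" -> five minutes past every hour
assert cron_to_crontab("5 */1 * * *") == crontab(minute="5", hour="*/1")
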
@@ -1,6 +1,7 @@
import os

from allauth.account import views as allauth_account_views
from allauth.mfa.base import views as allauth_mfa_views
from allauth.socialaccount import views as allauth_social_account_views
from allauth.urls import build_provider_urlpatterns
from django.conf import settings
@@ -10,14 +11,12 @@ from django.contrib.auth.decorators import login_required
from django.urls import path
from django.urls import re_path
from django.utils.translation import gettext_lazy as _
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.csrf import ensure_csrf_cookie
from django.views.generic import RedirectView
from django.views.static import serve
from rest_framework.authtoken import views
from rest_framework.routers import DefaultRouter

from documents.views import AcknowledgeTasksView
from documents.views import BulkDownloadView
from documents.views import BulkEditObjectsView
from documents.views import BulkEditView
@@ -35,7 +34,6 @@ from documents.views import SelectionDataView
from documents.views import SharedLinkView
from documents.views import ShareLinkViewSet
from documents.views import StatisticsView
from documents.views import StoragePathTestView
from documents.views import StoragePathViewSet
from documents.views import SystemStatusView
from documents.views import TagViewSet
@@ -54,8 +52,8 @@ from paperless.views import GenerateAuthTokenView
from paperless.views import GroupViewSet
from paperless.views import ProfileView
from paperless.views import SocialAccountProvidersView
from paperless.views import TOTPView
from paperless.views import UserViewSet
from paperless_mail.views import MailAccountTestView
from paperless_mail.views import MailAccountViewSet
from paperless_mail.views import MailRuleViewSet
from paperless_mail.views import OauthCallbackView
@@ -93,72 +91,102 @@ urlpatterns = [
namespace="rest_framework",
),
),
re_path(
"^search/autocomplete/",
SearchAutoCompleteView.as_view(),
name="autocomplete",
),
re_path(
"^search/",
GlobalSearchView.as_view(),
name="global_search",
),
re_path("^statistics/", StatisticsView.as_view(), name="statistics"),
re_path(
"^documents/post_document/",
PostDocumentView.as_view(),
name="post_document",
include(
[
re_path(
"^$",
GlobalSearchView.as_view(),
name="global_search",
),
re_path(
"^autocomplete/",
SearchAutoCompleteView.as_view(),
name="autocomplete",
),
],
),
),
re_path(
"^documents/bulk_edit/",
BulkEditView.as_view(),
name="bulk_edit",
"^statistics/",
StatisticsView.as_view(),
name="statistics",
),
re_path(
"^documents/selection_data/",
SelectionDataView.as_view(),
name="selection_data",
"^documents/",
include(
[
re_path(
"^post_document/",
PostDocumentView.as_view(),
name="post_document",
),
re_path(
"^bulk_edit/",
BulkEditView.as_view(),
name="bulk_edit",
),
re_path(
"^bulk_download/",
BulkDownloadView.as_view(),
name="bulk_download",
),
re_path(
"^selection_data/",
SelectionDataView.as_view(),
name="selection_data",
),
],
),
),
re_path(
"^documents/bulk_download/",
BulkDownloadView.as_view(),
name="bulk_download",
"^bulk_edit_objects/",
BulkEditObjectsView.as_view(),
name="bulk_edit_objects",
),
re_path(
"^remote_version/",
RemoteVersionView.as_view(),
name="remoteversion",
),
re_path("^ui_settings/", UiSettingsView.as_view(), name="ui_settings"),
re_path(
"^acknowledge_tasks/",
AcknowledgeTasksView.as_view(),
name="acknowledge_tasks",
),
re_path(
"^mail_accounts/test/",
MailAccountTestView.as_view(),
name="mail_accounts_test",
),
path("token/", views.obtain_auth_token),
re_path(
"^bulk_edit_objects/",
BulkEditObjectsView.as_view(),
name="bulk_edit_objects",
),
path("profile/generate_auth_token/", GenerateAuthTokenView.as_view()),
path(
"profile/disconnect_social_account/",
DisconnectSocialAccountView.as_view(),
"^ui_settings/",
UiSettingsView.as_view(),
name="ui_settings",
),
path(
"profile/social_account_providers/",
SocialAccountProvidersView.as_view(),
"token/",
views.obtain_auth_token,
),
re_path(
"^profile/",
ProfileView.as_view(),
name="profile_view",
include(
[
re_path(
"^$",
ProfileView.as_view(),
name="profile_view",
),
path(
"generate_auth_token/",
GenerateAuthTokenView.as_view(),
),
path(
"disconnect_social_account/",
DisconnectSocialAccountView.as_view(),
),
path(
"social_account_providers/",
SocialAccountProvidersView.as_view(),
),
path(
"totp/",
TOTPView.as_view(),
name="totp_view",
),
],
),
),
re_path(
"^status/",
@@ -170,11 +198,6 @@ urlpatterns = [
TrashView.as_view(),
name="trash",
),
re_path(
"^storage_paths/test/",
StoragePathTestView.as_view(),
name="storage_paths_test",
),
re_path(
r"^oauth/callback/",
OauthCallbackView.as_view(),
@@ -212,14 +235,6 @@ urlpatterns = [
],
),
),
re_path(
r"^push$",
csrf_exempt(
RedirectView.as_view(
url=settings.BASE_URL + "api/documents/post_document/",
),
),
),
# Frontend assets TODO: this is pretty bad, but it works.
path(
"assets/<path:path>",
@@ -296,6 +311,12 @@ urlpatterns = [
),
),
*build_provider_urlpatterns(),
# mfa, see allauth/mfa/base/urls.py
path(
"2fa/authenticate/",
allauth_mfa_views.authenticate,
name="mfa_authenticate",
),
],
),
),

@@ -1,6 +1,6 @@
from typing import Final

__version__: Final[tuple[int, int, int]] = (2, 13, 4)
__version__: Final[tuple[int, int, int]] = (2, 13, 5)
# Version string like X.Y.Z
__full_version_str__: Final[str] = ".".join(map(str, __version__))
# Version string like X.Y

@@ -1,6 +1,12 @@
import os
from collections import OrderedDict

from allauth.mfa import signals
from allauth.mfa.adapter import get_adapter as get_mfa_adapter
from allauth.mfa.base.internal.flows import delete_and_cleanup
from allauth.mfa.models import Authenticator
from allauth.mfa.recovery_codes.internal.flows import auto_generate_recovery_codes
from allauth.mfa.totp.internal import auth as totp_auth
from allauth.socialaccount.adapter import get_adapter
from allauth.socialaccount.models import SocialAccount
from django.contrib.auth.models import Group
@@ -8,9 +14,12 @@ from django.contrib.auth.models import User
from django.db.models.functions import Lower
from django.http import HttpResponse
from django.http import HttpResponseBadRequest
from django.http import HttpResponseForbidden
from django.http import HttpResponseNotFound
from django.views.generic import View
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.authtoken.models import Token
from rest_framework.decorators import action
from rest_framework.filters import OrderingFilter
from rest_framework.generics import GenericAPIView
from rest_framework.pagination import PageNumberPagination
@@ -100,6 +109,24 @@ class UserViewSet(ModelViewSet):
filterset_class = UserFilterSet
ordering_fields = ("username",)

@action(detail=True, methods=["post"])
def deactivate_totp(self, request, pk=None):
request_user = request.user
user = User.objects.get(pk=pk)
if not request_user.is_superuser and request_user != user:
return HttpResponseForbidden(
"You do not have permission to deactivate TOTP for this user",
)
authenticator = Authenticator.objects.filter(
user=user,
type=Authenticator.Type.TOTP,
).first()
if authenticator is not None:
delete_and_cleanup(request, authenticator)
return Response(True)
else:
return HttpResponseNotFound("TOTP not found")


class GroupViewSet(ModelViewSet):
model = Group
@@ -145,6 +172,76 @@ class ProfileView(GenericAPIView):
return Response(serializer.to_representation(user))


class TOTPView(GenericAPIView):
"""
TOTP views
"""

permission_classes = [IsAuthenticated]

def get(self, request, *args, **kwargs):
"""
Generates a new TOTP secret and returns the URL and SVG
"""
user = self.request.user
mfa_adapter = get_mfa_adapter()
secret = totp_auth.get_totp_secret(regenerate=True)
url = mfa_adapter.build_totp_url(user, secret)
svg = mfa_adapter.build_totp_svg(url)
return Response(
{
"url": url,
"qr_svg": svg,
"secret": secret,
},
)

def post(self, request, *args, **kwargs):
"""
Validates a TOTP code and activates the TOTP authenticator
"""
valid = totp_auth.validate_totp_code(
request.data["secret"],
request.data["code"],
)
recovery_codes = None
if valid:
auth = totp_auth.TOTP.activate(
request.user,
request.data["secret"],
).instance
signals.authenticator_added.send(
sender=Authenticator,
request=request,
user=request.user,
authenticator=auth,
)
rc_auth: Authenticator = auto_generate_recovery_codes(request)
if rc_auth:
recovery_codes = rc_auth.wrap().get_unused_codes()
return Response(
{
"success": valid,
"recovery_codes": recovery_codes,
},
)

def delete(self, request, *args, **kwargs):
"""
Deactivates the TOTP authenticator
"""
user = self.request.user
authenticator = Authenticator.objects.filter(
user=user,
type=Authenticator.Type.TOTP,
).first()
if authenticator is not None:
delete_and_cleanup(request, authenticator)
return Response(True)
else:
return HttpResponseNotFound("TOTP not found")


class GenerateAuthTokenView(GenericAPIView):
"""
Generates (or re-generates) an auth token, requires a logged in user

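TOTPView above yields a two-step enrollment flow: GET hands out a fresh secret (plus the otpauth:// URL and a QR SVG), and POST confirms it with a live code, returning recovery codes on success. A hedged client sketch; the /api/profile/totp/ path is inferred from the profile/ include in urls.py, and the session is assumed to already carry valid authentication:

import requests

session = requests.Session()  # assumed to be authenticated already
url = "http://localhost:8000/api/profile/totp/"  # inferred path, not confirmed here

# Step 1: fetch a new secret; response: {"url": ..., "qr_svg": ..., "secret": ...}
setup = session.get(url).json()

# Step 2: the user scans the QR code, then the client posts the secret back
# together with a current authenticator code.
confirm = session.post(
    url,
    json={"secret": setup["secret"], "code": "123456"},
).json()
# on success: {"success": True, "recovery_codes": [...]}
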
@@ -6,6 +6,7 @@ from django.http import HttpResponseBadRequest
from django.http import HttpResponseRedirect
from django.utils import timezone
from httpx_oauth.oauth2 import GetAccessTokenError
from rest_framework.decorators import action
from rest_framework.generics import GenericAPIView
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
@@ -34,22 +35,14 @@ class MailAccountViewSet(ModelViewSet, PassUserMixin):
permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
filter_backends = (ObjectOwnedOrGrantedPermissionsFilter,)

def get_permissions(self):
if self.action == "test":
# Test action does not require object level permissions
self.permission_classes = (IsAuthenticated,)
return super().get_permissions()

class MailRuleViewSet(ModelViewSet, PassUserMixin):
model = MailRule

queryset = MailRule.objects.all().order_by("order")
serializer_class = MailRuleSerializer
pagination_class = StandardPagination
permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
filter_backends = (ObjectOwnedOrGrantedPermissionsFilter,)


class MailAccountTestView(GenericAPIView):
permission_classes = (IsAuthenticated,)
serializer_class = MailAccountSerializer

def post(self, request, *args, **kwargs):
@action(methods=["post"], detail=False)
def test(self, request):
logger = logging.getLogger("paperless_mail")
request.data["name"] = datetime.datetime.now().isoformat()
serializer = self.get_serializer(data=request.data)
@@ -95,6 +88,16 @@ class MailAccountTestView(GenericAPIView):
return HttpResponseBadRequest("Unable to connect to server")


class MailRuleViewSet(ModelViewSet, PassUserMixin):
model = MailRule

queryset = MailRule.objects.all().order_by("order")
serializer_class = MailRuleSerializer
pagination_class = StandardPagination
permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
filter_backends = (ObjectOwnedOrGrantedPermissionsFilter,)


class OauthCallbackView(GenericAPIView):
permission_classes = (IsAuthenticated,)


@@ -2,4 +2,4 @@
from paperless_tesseract.checks import check_default_language_available
from paperless_tesseract.checks import get_tesseract_langs

__all__ = ["get_tesseract_langs", "check_default_language_available"]
__all__ = ["check_default_language_available", "get_tesseract_langs"]
