Merge branch 'dev' into feature-websockets-status
src/documents/admin.py

@@ -4,7 +4,8 @@ from django.utils.safestring import mark_safe
 from whoosh.writing import AsyncWriter
 
 from . import index
-from .models import Correspondent, Document, DocumentType, Log, Tag
+from .models import Correspondent, Document, DocumentType, Log, Tag, \
+    SavedView, SavedViewFilterRule
 
 
 class CorrespondentAdmin(admin.ModelAdmin):
@@ -17,8 +18,6 @@ class CorrespondentAdmin(admin.ModelAdmin):
     list_filter = ("matching_algorithm",)
     list_editable = ("match", "matching_algorithm")
 
-    readonly_fields = ("slug",)
-
 
 class TagAdmin(admin.ModelAdmin):
 
@@ -31,8 +30,6 @@ class TagAdmin(admin.ModelAdmin):
     list_filter = ("colour", "matching_algorithm")
     list_editable = ("colour", "match", "matching_algorithm")
 
-    readonly_fields = ("slug", )
-
 
 class DocumentTypeAdmin(admin.ModelAdmin):
 
@@ -44,13 +41,16 @@ class DocumentTypeAdmin(admin.ModelAdmin):
     list_filter = ("matching_algorithm",)
     list_editable = ("match", "matching_algorithm")
 
-    readonly_fields = ("slug",)
-
 
 class DocumentAdmin(admin.ModelAdmin):
 
     search_fields = ("correspondent__name", "title", "content", "tags__name")
-    readonly_fields = ("added", "mime_type", "storage_type", "filename")
+    readonly_fields = (
+        "added",
+        "modified",
+        "mime_type",
+        "storage_type",
+        "filename")
 
     list_display_links = ("title",)
 
@@ -69,7 +69,7 @@ class DocumentAdmin(admin.ModelAdmin):
 
     filter_horizontal = ("tags",)
 
-    ordering = ["-created", "correspondent"]
+    ordering = ["-created"]
 
     date_hierarchy = "created"
 
@@ -101,7 +101,7 @@ class DocumentAdmin(admin.ModelAdmin):
         for tag in obj.tags.all():
             r += self._html_tag(
                 "span",
-                tag.slug + ", "
+                tag.name + ", "
             )
         return r
 
@@ -132,8 +132,22 @@ class LogAdmin(admin.ModelAdmin):
     list_display_links = ("created", "message")
 
 
+class RuleInline(admin.TabularInline):
+    model = SavedViewFilterRule
+
+
+class SavedViewAdmin(admin.ModelAdmin):
+
+    list_display = ("name", "user")
+
+    inlines = [
+        RuleInline
+    ]
+
+
 admin.site.register(Correspondent, CorrespondentAdmin)
 admin.site.register(Tag, TagAdmin)
 admin.site.register(DocumentType, DocumentTypeAdmin)
 admin.site.register(Document, DocumentAdmin)
 admin.site.register(Log, LogAdmin)
+admin.site.register(SavedView, SavedViewAdmin)
src/documents/apps.py

@@ -1,34 +1,30 @@
 from django.apps import AppConfig
+from django.utils.translation import gettext_lazy as _
 
 
 class DocumentsConfig(AppConfig):
 
     name = "documents"
 
-    def ready(self):
+    verbose_name = _("Documents")
 
-        from .signals import document_consumption_started
+    def ready(self):
         from .signals import document_consumption_finished
         from .signals.handlers import (
             add_inbox_tags,
-            run_pre_consume_script,
-            run_post_consume_script,
             set_log_entry,
             set_correspondent,
             set_document_type,
             set_tags,
             add_to_index
-
         )
 
-        document_consumption_started.connect(run_pre_consume_script)
-
         document_consumption_finished.connect(add_inbox_tags)
         document_consumption_finished.connect(set_correspondent)
         document_consumption_finished.connect(set_document_type)
         document_consumption_finished.connect(set_tags)
         document_consumption_finished.connect(set_log_entry)
         document_consumption_finished.connect(add_to_index)
-        document_consumption_finished.connect(run_post_consume_script)
 
         AppConfig.ready(self)
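Not part of the commit: the hunk above removes the pre/post-consume script hookups from the signal handlers (they move into the consumer below) but keeps the document_consumption_finished signal as the extension point. A minimal sketch of attaching a custom receiver to that signal; "my_handler" is hypothetical, and the keyword arguments mirror the handlers listed above:

    from documents.signals import document_consumption_finished

    def my_handler(sender, document=None, logging_group=None, **kwargs):
        # Runs once per successfully consumed document.
        print(f"Consumed: {document}")

    document_consumption_finished.connect(my_handler)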
src/documents/bulk_edit.py (new file, 107 lines)

import itertools

from django.db.models import Q
from django_q.tasks import async_task
from whoosh.writing import AsyncWriter

from documents import index
from documents.models import Document, Correspondent, DocumentType


def set_correspondent(doc_ids, correspondent):
    if correspondent:
        correspondent = Correspondent.objects.get(id=correspondent)

    qs = Document.objects.filter(
        Q(id__in=doc_ids) & ~Q(correspondent=correspondent))
    affected_docs = [doc.id for doc in qs]
    qs.update(correspondent=correspondent)

    async_task(
        "documents.tasks.bulk_update_documents", document_ids=affected_docs)

    return "OK"


def set_document_type(doc_ids, document_type):
    if document_type:
        document_type = DocumentType.objects.get(id=document_type)

    qs = Document.objects.filter(
        Q(id__in=doc_ids) & ~Q(document_type=document_type))
    affected_docs = [doc.id for doc in qs]
    qs.update(document_type=document_type)

    async_task(
        "documents.tasks.bulk_update_documents", document_ids=affected_docs)

    return "OK"


def add_tag(doc_ids, tag):

    qs = Document.objects.filter(Q(id__in=doc_ids) & ~Q(tags__id=tag))
    affected_docs = [doc.id for doc in qs]

    DocumentTagRelationship = Document.tags.through

    DocumentTagRelationship.objects.bulk_create([
        DocumentTagRelationship(
            document_id=doc, tag_id=tag) for doc in affected_docs
    ])

    async_task(
        "documents.tasks.bulk_update_documents", document_ids=affected_docs)

    return "OK"


def remove_tag(doc_ids, tag):

    qs = Document.objects.filter(Q(id__in=doc_ids) & Q(tags__id=tag))
    affected_docs = [doc.id for doc in qs]

    DocumentTagRelationship = Document.tags.through

    DocumentTagRelationship.objects.filter(
        Q(document_id__in=affected_docs) &
        Q(tag_id=tag)
    ).delete()

    async_task(
        "documents.tasks.bulk_update_documents", document_ids=affected_docs)

    return "OK"


def modify_tags(doc_ids, add_tags, remove_tags):
    qs = Document.objects.filter(id__in=doc_ids)
    affected_docs = [doc.id for doc in qs]

    DocumentTagRelationship = Document.tags.through

    DocumentTagRelationship.objects.filter(
        document_id__in=affected_docs,
        tag_id__in=remove_tags,
    ).delete()

    DocumentTagRelationship.objects.bulk_create([DocumentTagRelationship(
        document_id=doc, tag_id=tag) for (doc, tag) in itertools.product(
            affected_docs, add_tags)
    ], ignore_conflicts=True)

    async_task(
        "documents.tasks.bulk_update_documents", document_ids=affected_docs)

    return "OK"


def delete(doc_ids):
    Document.objects.filter(id__in=doc_ids).delete()

    ix = index.open_index()
    with AsyncWriter(ix) as writer:
        for id in doc_ids:
            index.remove_document_by_id(writer, id)

    return "OK"
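Not part of the commit: the new bulk_edit helpers take primary keys rather than model instances, so they can be dispatched from views or django-q tasks. A hypothetical call sequence (all IDs made up):

    from documents import bulk_edit

    bulk_edit.set_correspondent(doc_ids=[1, 2, 3], correspondent=4)
    bulk_edit.modify_tags(doc_ids=[1, 2, 3], add_tags=[7], remove_tags=[8])
    bulk_edit.delete(doc_ids=[9])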
src/documents/checks.py

@@ -2,6 +2,7 @@ import textwrap
 
 from django.conf import settings
 from django.core.checks import Error, register
+from django.core.exceptions import FieldError
 from django.db.utils import OperationalError, ProgrammingError
 
 from documents.signals import document_consumer_declaration
@@ -16,7 +17,7 @@ def changed_password_check(app_configs, **kwargs):
     try:
         encrypted_doc = Document.objects.filter(
             storage_type=Document.STORAGE_TYPE_GPG).first()
-    except (OperationalError, ProgrammingError):
+    except (OperationalError, ProgrammingError, FieldError):
         return []  # No documents table yet
 
     if encrypted_doc:
@@ -50,6 +51,6 @@ def parser_check(app_configs, **kwargs):
 
     if len(parsers) == 0:
         return [Error("No parsers found. This is a bug. The consumer won't be "
-                      "able to onsume any documents without parsers.")]
+                      "able to consume any documents without parsers.")]
     else:
         return []
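Not part of the commit: changed_password_check and parser_check use Django's system check framework, which runs registered callables at startup and on manage.py check. A minimal sketch of that pattern; the check itself is hypothetical:

    from django.core.checks import Error, register

    @register()
    def example_check(app_configs, **kwargs):
        errors = []
        # Inspect settings or the database here; append Error(...) on problems.
        # Returning an empty list means the check passed.
        return errors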
src/documents/consumer.py

@@ -1,7 +1,7 @@
 import datetime
 import hashlib
 import logging
 import os
+from subprocess import Popen
 
 import magic
 from asgiref.sync import async_to_sync
@@ -10,13 +10,15 @@ from django.conf import settings
 from django.db import transaction
 from django.db.models import Q
 from django.utils import timezone
+from filelock import FileLock
 from rest_framework.reverse import reverse
 
 from .classifier import DocumentClassifier, IncompatibleClassifierVersionError
-from .file_handling import create_source_path_directory
+from .file_handling import create_source_path_directory, \
+    generate_unique_filename
 from .loggers import LoggingMixin
 from .models import Document, FileInfo, Correspondent, DocumentType, Tag
-from .parsers import ParseError, get_parser_class_for_mime_type, \
-    get_supported_file_extensions, parse_date
+from .parsers import ParseError, get_parser_class_for_mime_type, parse_date
 from .signals import (
     document_consumption_finished,
     document_consumption_started
@@ -61,6 +63,10 @@ class Consumer(LoggingMixin):
 
     def pre_check_file_exists(self):
         if not os.path.isfile(self.path):
+            self.log(
+                "error",
+                "Cannot consume {}: It is not a file.".format(self.path)
+            )
             self._fail("File not found")
 
     def pre_check_duplicate(self):
@@ -69,6 +75,10 @@ class Consumer(LoggingMixin):
         if Document.objects.filter(Q(checksum=checksum) | Q(archive_checksum=checksum)).exists():  # NOQA: E501
             if settings.CONSUMER_DELETE_DUPLICATES:
                 os.unlink(self.path)
+            self.log(
+                "error",
+                "Not consuming {}: It is a duplicate.".format(self.filename)
+            )
             self._fail("Document is a duplicate")
 
     def pre_check_directories(self):
@@ -77,6 +87,39 @@ class Consumer(LoggingMixin):
         os.makedirs(settings.ORIGINALS_DIR, exist_ok=True)
         os.makedirs(settings.ARCHIVE_DIR, exist_ok=True)
 
+    def run_pre_consume_script(self):
+        if not settings.PRE_CONSUME_SCRIPT:
+            return
+
+        try:
+            Popen((settings.PRE_CONSUME_SCRIPT, self.path)).wait()
+        except Exception as e:
+            raise ConsumerError(
+                f"Error while executing pre-consume script: {e}"
+            )
+
+    def run_post_consume_script(self, document):
+        if not settings.POST_CONSUME_SCRIPT:
+            return
+
+        try:
+            Popen((
+                settings.POST_CONSUME_SCRIPT,
+                str(document.pk),
+                document.get_public_filename(),
+                os.path.normpath(document.source_path),
+                os.path.normpath(document.thumbnail_path),
+                reverse("document-download", kwargs={"pk": document.pk}),
+                reverse("document-thumb", kwargs={"pk": document.pk}),
+                str(document.correspondent),
+                str(",".join(document.tags.all().values_list(
+                    "name", flat=True)))
+            )).wait()
+        except Exception as e:
+            raise ConsumerError(
+                f"Error while executing post-consume script: {e}"
+            )
+
     def try_consume_file(self,
                          path,
                          override_filename=None,
@@ -109,19 +152,20 @@ class Consumer(LoggingMixin):
         self.pre_check_directories()
         self.pre_check_duplicate()
 
-        self.log("info", "Consuming {}".format(self.filename))
+        self.log("info", f"Consuming {self.filename}")
 
         # Determine the parser class.
 
         mime_type = magic.from_file(self.path, mime=True)
 
+        self.log("debug", f"Detected mime type: {mime_type}")
+
         parser_class = get_parser_class_for_mime_type(mime_type)
         if not parser_class:
-            self._fail("No parsers abvailable")
+            self._fail(f"Unsupported mime type {mime_type}")
         else:
             self.log("debug",
-                     f"Parser: {parser_class.__name__} "
-                     f"based on mime type {mime_type}")
+                     f"Parser: {parser_class.__name__}")
 
         # Notify all listeners that we're going to do some work.
 
@@ -131,6 +175,8 @@ class Consumer(LoggingMixin):
             logging_group=self.logging_group
         )
 
+        self.run_pre_consume_script()
+
        def progress_callback(current_progress, max_progress, message):
            # recalculate progress to be within 20 and 80
            p = int((current_progress / max_progress) * 50 + 20)
@@ -149,7 +195,7 @@ class Consumer(LoggingMixin):
            self._send_progress(self.filename, 20, 100, 'WORKING',
                                'Parsing document...')
            self.log("debug", "Parsing {}...".format(self.filename))
-           document_parser.parse(self.path, mime_type)
+           document_parser.parse(self.path, mime_type, self.filename)
 
            self.log("debug", f"Generating thumbnail for {self.filename}...")
            self._send_progress(self.filename, 70, 100, 'WORKING',
@@ -181,9 +227,10 @@ class Consumer(LoggingMixin):
        try:
            classifier = DocumentClassifier()
            classifier.reload()
-       except (FileNotFoundError, IncompatibleClassifierVersionError) as e:
-           logging.getLogger(__name__).warning(
-               "Cannot classify documents: {}.".format(e))
+       except (OSError, EOFError, IncompatibleClassifierVersionError) as e:
+           self.log(
+               "warning",
+               f"Cannot classify documents: {e}.")
            classifier = None
        self._send_progress(self.filename, 95, 100, 'WORKING',
                            'Storing the document...')
@@ -211,36 +258,34 @@ class Consumer(LoggingMixin):
 
                # After everything is in the database, copy the files into
                # place. If this fails, we'll also rollback the transaction.
-
-               # TODO: not required, since this is done by the file handling
-               # logic
-               create_source_path_directory(document.source_path)
-
-               self._write(document.storage_type,
-                           self.path, document.source_path)
-
-               self._write(document.storage_type,
-                           thumbnail, document.thumbnail_path)
-
-               if archive_path and os.path.isfile(archive_path):
-                   self._write(document.storage_type,
-                               archive_path, document.archive_path)
-
-                   with open(archive_path, 'rb') as f:
-                       document.archive_checksum = hashlib.md5(
-                           f.read()).hexdigest()
-                       document.save()
-
-               # Afte performing all database operations and moving files
-               # into place, tell paperless where the file is.
-               document.filename = os.path.basename(document.source_path)
-               # Saving the document now will trigger the filename handling
-               # logic.
-               document.save()
+               with FileLock(settings.MEDIA_LOCK):
+                   document.filename = generate_unique_filename(
+                       document, settings.ORIGINALS_DIR)
+                   create_source_path_directory(document.source_path)
+
+                   self._write(document.storage_type,
+                               self.path, document.source_path)
+
+                   self._write(document.storage_type,
+                               thumbnail, document.thumbnail_path)
+
+                   if archive_path and os.path.isfile(archive_path):
+                       create_source_path_directory(document.archive_path)
+                       self._write(document.storage_type,
+                                   archive_path, document.archive_path)
+
+                       with open(archive_path, 'rb') as f:
+                           document.archive_checksum = hashlib.md5(
+                               f.read()).hexdigest()
+
+               # Don't save with the lock active. Saving will cause the file
+               # renaming logic to acquire the lock as well.
+               document.save()
 
                # Delete the file only if it was successfully consumed
                self.log("debug", "Deleting file {}".format(self.path))
                os.unlink(self.path)
 
            except Exception as e:
                self.log(
                    "error",
@@ -251,6 +296,8 @@ class Consumer(LoggingMixin):
            finally:
                document_parser.cleanup()
 
+       self.run_post_consume_script(document)
+
        self.log(
            "info",
            "Document {} consumption finished".format(document)
@@ -278,8 +325,7 @@ class Consumer(LoggingMixin):
        with open(self.path, "rb") as f:
            document = Document.objects.create(
                correspondent=file_info.correspondent,
-               title=file_info.title,
+               title=(self.override_title or file_info.title)[:127],
                content=text,
                mime_type=mime_type,
                checksum=hashlib.md5(f.read()).hexdigest(),
@@ -288,20 +334,13 @@ class Consumer(LoggingMixin):
                storage_type=storage_type
            )
 
-           relevant_tags = set(file_info.tags)
-           if relevant_tags:
-               tag_names = ", ".join([t.slug for t in relevant_tags])
-               self.log("debug", "Tagging with {}".format(tag_names))
-               document.tags.add(*relevant_tags)
-
            self.apply_overrides(document)
 
            document.save()
 
            return document
 
    def apply_overrides(self, document):
        if self.override_title:
            document.title = self.override_title
 
        if self.override_correspondent_id:
            document.correspondent = Correspondent.objects.get(
                pk=self.override_correspondent_id)
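Not part of the commit: run_post_consume_script() above passes eight positional arguments to whatever executable POST_CONSUME_SCRIPT points at. A hypothetical receiving script, mirroring that argument order:

    #!/usr/bin/env python3
    import sys

    # Argument order as passed by run_post_consume_script() above.
    (script, doc_pk, public_filename, source_path, thumbnail_path,
     download_url, thumb_url, correspondent, tag_names) = sys.argv

    print(f"document {doc_pk} stored at {source_path}, tags: {tag_names}")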
src/documents/file_handling.py

@@ -8,6 +8,12 @@ from django.conf import settings
 from django.template.defaultfilters import slugify
 
 
+class defaultdictNoStr(defaultdict):
+
+    def __str__(self):
+        raise ValueError("Don't use {tags} directly.")
+
+
 def create_source_path_directory(source_path):
     os.makedirs(os.path.dirname(source_path), exist_ok=True)
 
@@ -70,13 +76,35 @@ def many_to_dictionary(field):
     return mydictionary
 
 
-def generate_filename(doc):
+def generate_unique_filename(doc, root):
+    counter = 0
+
+    while True:
+        new_filename = generate_filename(doc, counter)
+        if new_filename == doc.filename:
+            # still the same as before.
+            return new_filename
+
+        if os.path.exists(os.path.join(root, new_filename)):
+            counter += 1
+        else:
+            return new_filename
+
+
+def generate_filename(doc, counter=0):
     path = ""
 
     try:
         if settings.PAPERLESS_FILENAME_FORMAT is not None:
-            tags = defaultdict(lambda: slugify(None),
-                               many_to_dictionary(doc.tags))
+            tags = defaultdictNoStr(lambda: slugify(None),
+                                    many_to_dictionary(doc.tags))
+
+            tag_list = pathvalidate.sanitize_filename(
+                ",".join(sorted(
+                    [tag.name for tag in doc.tags.all()]
+                )),
+                replacement_text="-"
+            )
 
         if doc.correspondent:
             correspondent = pathvalidate.sanitize_filename(
@@ -99,24 +127,28 @@ def generate_filename(doc):
                document_type=document_type,
                created=datetime.date.isoformat(doc.created),
                created_year=doc.created.year if doc.created else "none",
-               created_month=doc.created.month if doc.created else "none",
-               created_day=doc.created.day if doc.created else "none",
+               created_month=f"{doc.created.month:02}" if doc.created else "none",  # NOQA: E501
+               created_day=f"{doc.created.day:02}" if doc.created else "none",
                added=datetime.date.isoformat(doc.added),
                added_year=doc.added.year if doc.added else "none",
-               added_month=doc.added.month if doc.added else "none",
-               added_day=doc.added.day if doc.added else "none",
+               added_month=f"{doc.added.month:02}" if doc.added else "none",
+               added_day=f"{doc.added.day:02}" if doc.added else "none",
                tags=tags,
-           )
+               tag_list=tag_list
+           ).strip()
 
        path = path.strip(os.sep)
 
    except (ValueError, KeyError, IndexError):
        logging.getLogger(__name__).warning(
            f"Invalid PAPERLESS_FILENAME_FORMAT: "
            f"{settings.PAPERLESS_FILENAME_FORMAT}, falling back to default")
 
-    # Always append the primary key to guarantee uniqueness of filename
+    counter_str = f"_{counter:02}" if counter else ""
    if len(path) > 0:
-        filename = "%s-%07i%s" % (path, doc.pk, doc.file_type)
+        filename = f"{path}{counter_str}{doc.file_type}"
    else:
-        filename = "%07i%s" % (doc.pk, doc.file_type)
+        filename = f"{doc.pk:07}{counter_str}{doc.file_type}"
 
    # Append .gpg for encrypted files
    if doc.storage_type == doc.STORAGE_TYPE_GPG:
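Not part of the commit: an illustrative walk-through of the new naming logic, under an example PAPERLESS_FILENAME_FORMAT of "{created_year}/{correspondent}/{title}". generate_filename() renders the format string, and generate_unique_filename() bumps the counter while a clash exists under the given root; all output values below are made up:

    from django.conf import settings
    from documents.file_handling import (generate_filename,
                                         generate_unique_filename)
    from documents.models import Document

    doc = Document.objects.first()  # any existing document
    print(generate_filename(doc))             # e.g. "2020/ACME/invoice.pdf"
    print(generate_filename(doc, counter=1))  # e.g. "2020/ACME/invoice_01.pdf"
    print(generate_unique_filename(doc, settings.ORIGINALS_DIR))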
src/documents/filters.py

@@ -4,7 +4,7 @@ from .models import Correspondent, Document, Tag, DocumentType, Log
 
 CHAR_KWARGS = ["istartswith", "iendswith", "icontains", "iexact"]
 ID_KWARGS = ["in", "exact"]
-INT_KWARGS = ["exact", "gt", "gte", "lt", "lte"]
+INT_KWARGS = ["exact", "gt", "gte", "lt", "lte", "isnull"]
 DATE_KWARGS = ["year", "month", "day", "date__gt", "gt", "date__lt", "lt"]
 
 
@@ -37,6 +37,10 @@ class DocumentTypeFilterSet(FilterSet):
 
 class TagsFilter(Filter):
 
+    def __init__(self, exclude=False):
+        super(TagsFilter, self).__init__()
+        self.exclude = exclude
+
     def filter(self, qs, value):
         if not value:
             return qs
@@ -47,7 +51,10 @@ class TagsFilter(Filter):
             return qs
 
         for tag_id in tag_ids:
-            qs = qs.filter(tags__id=tag_id)
+            if self.exclude:
+                qs = qs.exclude(tags__id=tag_id)
+            else:
+                qs = qs.filter(tags__id=tag_id)
 
         return qs
 
@@ -74,6 +81,8 @@ class DocumentFilterSet(FilterSet):
 
     tags__id__all = TagsFilter()
 
+    tags__id__none = TagsFilter(exclude=True)
+
     is_in_inbox = InboxFilter()
 
     class Meta:
@@ -89,12 +98,14 @@ class DocumentFilterSet(FilterSet):
             "added": DATE_KWARGS,
             "modified": DATE_KWARGS,
 
+            "correspondent": ["isnull"],
             "correspondent__id": ID_KWARGS,
             "correspondent__name": CHAR_KWARGS,
 
             "tags__id": ID_KWARGS,
             "tags__name": CHAR_KWARGS,
 
+            "document_type": ["isnull"],
             "document_type__id": ID_KWARGS,
             "document_type__name": CHAR_KWARGS,
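Not part of the commit: the new filters surface in the document API as query parameters. Hypothetical queries (all IDs made up), plus the equivalent direct FilterSet use:

    # /api/documents/?tags__id__all=3,4          documents carrying tags 3 and 4
    # /api/documents/?tags__id__none=5           documents without tag 5
    # /api/documents/?correspondent__isnull=true
    from documents.filters import DocumentFilterSet
    from documents.models import Document

    filtered = DocumentFilterSet(
        {"tags__id__none": "5"}, queryset=Document.objects.all()).qs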
src/documents/index.py

@@ -3,7 +3,7 @@ import os
 from contextlib import contextmanager
 
 from django.conf import settings
-from whoosh import highlight
+from whoosh import highlight, classify, query
 from whoosh.fields import Schema, TEXT, NUMERIC, KEYWORD, DATETIME
 from whoosh.highlight import Formatter, get_text
 from whoosh.index import create_in, exists_in, open_dir
@@ -20,32 +20,37 @@ class JsonFormatter(Formatter):
         self.seen = {}
 
     def format_token(self, text, token, replace=False):
-        seen = self.seen
         ttext = self._text(get_text(text, token, replace))
-        if ttext in seen:
-            termnum = seen[ttext]
-        else:
-            termnum = len(seen)
-            seen[ttext] = termnum
-
-        return {'text': ttext, 'term': termnum}
+        return {'text': ttext, 'highlight': 'true'}
 
     def format_fragment(self, fragment, replace=False):
         output = []
         index = fragment.startchar
         text = fragment.text
+        amend_token = None
         for t in fragment.matches:
             if t.startchar is None:
                 continue
             if t.startchar < index:
                 continue
             if t.startchar > index:
-                output.append({'text': text[index:t.startchar]})
-            output.append(self.format_token(text, t, replace))
+                text_inbetween = text[index:t.startchar]
+                if amend_token and t.startchar - index < 10:
+                    amend_token['text'] += text_inbetween
+                else:
+                    output.append({'text': text_inbetween,
+                                   'highlight': False})
+                    amend_token = None
+            token = self.format_token(text, t, replace)
+            if amend_token:
+                amend_token['text'] += token['text']
+            else:
+                output.append(token)
+                amend_token = token
             index = t.endchar
         if index < fragment.endchar:
-            output.append({'text': text[index:fragment.endchar]})
+            output.append({'text': text[index:fragment.endchar],
+                           'highlight': False})
         return output
 
     def format(self, fragments, replace=False):
@@ -82,11 +87,6 @@ def open_index(recreate=False):
 
 
 def update_document(writer, doc):
-    # TODO: this line caused many issues all around, since:
-    # We need to make sure that this method does not get called with
-    # deserialized documents (i.e, document objects that don't come from
-    # Django's ORM interfaces directly.
-    logger.debug("Indexing {}...".format(doc))
     tags = ",".join([t.name for t in doc.tags.all()])
     writer.update_document(
         id=doc.pk,
@@ -102,9 +102,11 @@ def update_document(writer, doc):
 
 
 def remove_document(writer, doc):
-    # TODO: see above.
-    logger.debug("Removing {} from index...".format(doc))
-    writer.delete_by_term('id', doc.pk)
+    remove_document_by_id(writer, doc.pk)
+
+
+def remove_document_by_id(writer, doc_id):
+    writer.delete_by_term('id', doc_id)
 
 
 def add_or_update_document(document):
@@ -120,22 +122,42 @@ def remove_document_from_index(document):
 
 
 @contextmanager
-def query_page(ix, querystring, page):
+def query_page(ix, page, querystring, more_like_doc_id, more_like_doc_content):
     searcher = ix.searcher()
     try:
-        qp = MultifieldParser(
-            ["content", "title", "correspondent", "tag", "type"],
-            ix.schema)
-        qp.add_plugin(DateParserPlugin())
+        if querystring:
+            qp = MultifieldParser(
+                ["content", "title", "correspondent", "tag", "type"],
+                ix.schema)
+            qp.add_plugin(DateParserPlugin())
+            str_q = qp.parse(querystring)
+            corrected = searcher.correct_query(str_q, querystring)
+        else:
+            str_q = None
+            corrected = None
+
+        if more_like_doc_id:
+            docnum = searcher.document_number(id=more_like_doc_id)
+            kts = searcher.key_terms_from_text(
+                'content', more_like_doc_content, numterms=20,
+                model=classify.Bo1Model, normalize=False)
+            more_like_q = query.Or(
+                [query.Term('content', word, boost=weight)
+                 for word, weight in kts])
+            result_page = searcher.search_page(
+                more_like_q, page, filter=str_q, mask={docnum})
+        elif str_q:
+            result_page = searcher.search_page(str_q, page)
+        else:
+            raise ValueError(
+                "Either querystring or more_like_doc_id is required."
+            )
 
-        q = qp.parse(querystring)
-        result_page = searcher.search_page(q, page)
         result_page.results.fragmenter = highlight.ContextFragmenter(
            surround=50)
        result_page.results.formatter = JsonFormatter()
 
-        corrected = searcher.correct_query(q, querystring)
-        if corrected.query != q:
+        if corrected and corrected.query != str_q:
            corrected_query = corrected.string
        else:
            corrected_query = None
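Not part of the commit: a sketch of calling the reworked query_page() signature. Exactly one of querystring and more_like_doc_id must be given; the yielded value is whatever the context manager produces (in the surrounding code, a Whoosh results page plus a spelling correction, but that yield is outside this hunk, so treat it as an assumption):

    from documents import index

    ix = index.open_index()
    with index.query_page(ix, 1, "invoice", None, None) as result:
        pass  # iterate search hits here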
src/documents/management/commands/decrypt_documents.py

@@ -2,7 +2,6 @@ import os
 
 from django.conf import settings
 from django.core.management.base import BaseCommand, CommandError
-from termcolor import colored as coloured
 
 from documents.models import Document
 from paperless.db import GnuPG
@@ -26,16 +25,14 @@ class Command(BaseCommand):
     def handle(self, *args, **options):
 
         try:
-            print(coloured(
+            print(
                 "\n\nWARNING: This script is going to work directly on your "
                 "document originals, so\nWARNING: you probably shouldn't run "
                 "this unless you've got a recent backup\nWARNING: handy. It "
                 "*should* work without a hitch, but be safe and backup your\n"
                 "WARNING: stuff first.\n\nHit Ctrl+C to exit now, or Enter to "
-                "continue.\n\n",
-                "yellow",
-                attrs=("bold",)
-            ))
+                "continue.\n\n"
+            )
             __ = input()
         except KeyboardInterrupt:
             return
@@ -57,8 +54,8 @@ class Command(BaseCommand):
 
         for document in encrypted_files:
 
-            print(coloured("Decrypting {}".format(
-                document).encode('utf-8'), "green"))
+            print("Decrypting {}".format(
+                document).encode('utf-8'))
 
             old_paths = [document.source_path, document.thumbnail_path]
 
@@ -82,7 +79,8 @@ class Command(BaseCommand):
             with open(document.thumbnail_path, "wb") as f:
                 f.write(raw_thumb)
 
-            document.save(update_fields=("storage_type", "filename"))
+            Document.objects.filter(id=document.id).update(
+                storage_type=document.storage_type, filename=document.filename)
 
             for path in old_paths:
                 os.unlink(path)
src/documents/management/commands/document_consumer.py

@@ -29,10 +29,9 @@ def _tags_from_path(filepath):
     path_parts = Path(filepath).relative_to(
                 settings.CONSUMPTION_DIR).parent.parts
     for part in path_parts:
-        tag_ids.add(Tag.objects.get_or_create(
-            slug=slugify(part),
-            defaults={"name": part},
-        )[0].pk)
+        tag_ids.add(Tag.objects.get_or_create(name__iexact=part, defaults={
+            "name": part
+        })[0].pk)
 
     return tag_ids
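Not part of the commit: switching the lookup from slug to name__iexact makes directory-based tagging case-insensitive, so consumption subfolders named "Invoices" and "invoices" resolve to the same tag. The equivalent standalone call (tag name made up):

    from documents.models import Tag

    tag, created = Tag.objects.get_or_create(
        name__iexact="invoices", defaults={"name": "invoices"})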
src/documents/management/commands/document_importer.py

@@ -1,16 +1,31 @@
 import json
+import logging
 import os
 import shutil
+from contextlib import contextmanager
 
+import tqdm
 from django.conf import settings
 from django.core.management import call_command
 from django.core.management.base import BaseCommand, CommandError
+from django.db.models.signals import post_save, m2m_changed
+from filelock import FileLock
 
 from documents.models import Document
 from documents.settings import EXPORTER_FILE_NAME, EXPORTER_THUMBNAIL_NAME, \
     EXPORTER_ARCHIVE_NAME
-from ...file_handling import generate_filename, create_source_path_directory
+from ...file_handling import create_source_path_directory
 from ...mixins import Renderable
+from ...signals.handlers import update_filename_and_move_files
+
+
+@contextmanager
+def disable_signal(sig, receiver, sender):
+    try:
+        sig.disconnect(receiver=receiver, sender=sender)
+        yield
+    finally:
+        sig.connect(receiver=receiver, sender=sender)
 
 
 class Command(Renderable, BaseCommand):
@@ -30,6 +45,8 @@ class Command(Renderable, BaseCommand):
 
     def handle(self, *args, **options):
 
+        logging.getLogger().handlers[0].level = logging.ERROR
+
         self.source = options["source"]
 
         if not os.path.exists(self.source):
@@ -45,11 +62,19 @@ class Command(Renderable, BaseCommand):
             self.manifest = json.load(f)
 
         self._check_manifest()
+        with disable_signal(post_save,
+                            receiver=update_filename_and_move_files,
+                            sender=Document):
+            with disable_signal(m2m_changed,
+                                receiver=update_filename_and_move_files,
+                                sender=Document.tags.through):
+                # Fill up the database with whatever is in the manifest
+                call_command("loaddata", manifest_path)
 
-        # Fill up the database with whatever is in the manifest
-        call_command("loaddata", manifest_path)
+                self._import_files_from_manifest()
 
-        self._import_files_from_manifest()
+        print("Updating search index...")
+        call_command('document_index', 'reindex')
 
     @staticmethod
     def _check_manifest_exists(path):
@@ -93,10 +118,13 @@ class Command(Renderable, BaseCommand):
         os.makedirs(settings.THUMBNAIL_DIR, exist_ok=True)
         os.makedirs(settings.ARCHIVE_DIR, exist_ok=True)
 
-        for record in self.manifest:
+        print("Copy files into paperless...")
 
-            if not record["model"] == "documents.document":
-                continue
+        manifest_documents = list(filter(
+            lambda r: r["model"] == "documents.document",
+            self.manifest))
+
+        for record in tqdm.tqdm(manifest_documents):
 
             document = Document.objects.get(pk=record["pk"])
 
@@ -114,17 +142,16 @@ class Command(Renderable, BaseCommand):
 
             document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
 
-            document.filename = generate_filename(document)
+            with FileLock(settings.MEDIA_LOCK):
+                if os.path.isfile(document.source_path):
+                    raise FileExistsError(document.source_path)
 
-            if os.path.isfile(document.source_path):
-                raise FileExistsError(document.source_path)
+                create_source_path_directory(document.source_path)
 
-            create_source_path_directory(document.source_path)
-
-            print(f"Moving {document_path} to {document.source_path}")
-            shutil.copy(document_path, document.source_path)
-            shutil.copy(thumbnail_path, document.thumbnail_path)
-            if archive_path:
-                shutil.copy(archive_path, document.archive_path)
+                shutil.copy(document_path, document.source_path)
+                shutil.copy(thumbnail_path, document.thumbnail_path)
+                if archive_path:
+                    create_source_path_directory(document.archive_path)
+                    shutil.copy(archive_path, document.archive_path)
 
             document.save()
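Not part of the commit: disable_signal() is a small reusable pattern for suppressing a receiver during bulk writes. A hedged usage sketch, reusing the helper defined in the hunk above:

    from django.db.models.signals import post_save

    from documents.models import Document
    from documents.signals.handlers import update_filename_and_move_files

    with disable_signal(post_save,
                        receiver=update_filename_and_move_files,
                        sender=Document):
        pass  # bulk writes here won't trigger the rename handler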
src/documents/management/commands/document_index.py

@@ -1,4 +1,5 @@
 from django.core.management import BaseCommand
+from django.db import transaction
 
 from documents.mixins import Renderable
 from documents.tasks import index_reindex, index_optimize
@@ -18,8 +19,8 @@ class Command(Renderable, BaseCommand):
     def handle(self, *args, **options):
 
         self.verbosity = options["verbosity"]
 
-        if options['command'] == 'reindex':
-            index_reindex()
-        elif options['command'] == 'optimize':
-            index_optimize()
+        with transaction.atomic():
+            if options['command'] == 'reindex':
+                index_reindex()
+            elif options['command'] == 'optimize':
+                index_optimize()
src/documents/management/commands/document_renamer.py

@@ -1,4 +1,8 @@
+import logging
+
+import tqdm
 from django.core.management.base import BaseCommand
+from django.db.models.signals import post_save
 
 from documents.models import Document
 from ...mixins import Renderable
@@ -18,6 +22,7 @@ class Command(Renderable, BaseCommand):
 
         self.verbosity = options["verbosity"]
 
-        for document in Document.objects.all():
-            # Saving the document again will generate a new filename and rename
-            document.save()
+        logging.getLogger().handlers[0].level = logging.ERROR
+
+        for document in tqdm.tqdm(Document.objects.all()):
+            post_save.send(Document, instance=document)
src/documents/management/commands/document_retagger.py

@@ -73,7 +73,7 @@ class Command(Renderable, BaseCommand):
         classifier = DocumentClassifier()
         try:
             classifier.reload()
-        except (FileNotFoundError, IncompatibleClassifierVersionError) as e:
+        except (OSError, EOFError, IncompatibleClassifierVersionError) as e:
             logging.getLogger(__name__).warning(
                 f"Cannot classify documents: {e}.")
             classifier = None
src/documents/management/commands/document_thumbnails.py (new file, 68 lines)

import logging
import multiprocessing
import shutil

import tqdm
from django import db
from django.core.management.base import BaseCommand

from documents.models import Document
from ...mixins import Renderable
from ...parsers import get_parser_class_for_mime_type


def _process_document(doc_in):
    document = Document.objects.get(id=doc_in)
    parser = get_parser_class_for_mime_type(document.mime_type)(
        logging_group=None)
    try:
        thumb = parser.get_optimised_thumbnail(
            document.source_path, document.mime_type)

        shutil.move(thumb, document.thumbnail_path)
    finally:
        parser.cleanup()


class Command(Renderable, BaseCommand):

    help = """
        This will regenerate the thumbnails for all documents.
    """.replace("    ", "")

    def __init__(self, *args, **kwargs):
        self.verbosity = 0
        BaseCommand.__init__(self, *args, **kwargs)

    def add_arguments(self, parser):
        parser.add_argument(
            "-d", "--document",
            default=None,
            type=int,
            required=False,
            help="Specify the ID of a document, and this command will only "
                 "run on this specific document."
        )

    def handle(self, *args, **options):

        self.verbosity = options["verbosity"]

        logging.getLogger().handlers[0].level = logging.ERROR

        if options['document']:
            documents = Document.objects.filter(pk=options['document'])
        else:
            documents = Document.objects.all()

        ids = [doc.id for doc in documents]

        # Note to future self: this prevents django from reusing database
        # connections between processes, which is bad and does not work
        # with postgres.
        db.connections.close_all()

        with multiprocessing.Pool() as pool:
            list(tqdm.tqdm(
                pool.imap_unordered(_process_document, ids), total=len(ids)
            ))
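Not part of the commit: example invocations of the new command (the document ID is made up):

    # python manage.py document_thumbnails          # regenerate all thumbnails
    # python manage.py document_thumbnails -d 42    # only document 42
    from django.core.management import call_command

    call_command("document_thumbnails", document=42)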
@@ -6,13 +6,18 @@ import magic
 from django.conf import settings
 from django.db import migrations, models
 
+from paperless.db import GnuPG
+
+STORAGE_TYPE_UNENCRYPTED = "unencrypted"
+STORAGE_TYPE_GPG = "gpg"
+
 
 def source_path(self):
     if self.filename:
         fname = str(self.filename)
     else:
         fname = "{:07}.{}".format(self.pk, self.file_type)
-    if self.storage_type == self.STORAGE_TYPE_GPG:
+    if self.storage_type == STORAGE_TYPE_GPG:
         fname += ".gpg"
 
     return os.path.join(
@@ -26,9 +31,18 @@ def add_mime_types(apps, schema_editor):
     documents = Document.objects.all()
 
     for d in documents:
-        d.mime_type = magic.from_file(source_path(d), mime=True)
+        f = open(source_path(d), "rb")
+        if d.storage_type == STORAGE_TYPE_GPG:
+            data = GnuPG.decrypted(f)
+        else:
+            data = f.read(1024)
+
+        d.mime_type = magic.from_buffer(data, mime=True)
         d.save()
 
+        f.close()
+
 
 def add_file_extensions(apps, schema_editor):
     Document = apps.get_model("documents", "Document")
src/documents/migrations/1006_auto_20201208_2209.py (new file, 25 lines)

# Generated by Django 3.1.4 on 2020-12-08 22:09

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('documents', '1005_checksums'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='correspondent',
            name='slug',
        ),
        migrations.RemoveField(
            model_name='documenttype',
            name='slug',
        ),
        migrations.RemoveField(
            model_name='tag',
            name='slug',
        ),
    ]
src/documents/migrations/1007_savedview_savedviewfilterrule.py (new file, 37 lines)

# Generated by Django 3.1.4 on 2020-12-12 14:41

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('documents', '1006_auto_20201208_2209'),
    ]

    operations = [
        migrations.CreateModel(
            name='SavedView',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=128)),
                ('show_on_dashboard', models.BooleanField()),
                ('show_in_sidebar', models.BooleanField()),
                ('sort_field', models.CharField(max_length=128)),
                ('sort_reverse', models.BooleanField(default=False)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='SavedViewFilterRule',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('rule_type', models.PositiveIntegerField(choices=[(0, 'Title contains'), (1, 'Content contains'), (2, 'ASN is'), (3, 'Correspondent is'), (4, 'Document type is'), (5, 'Is in inbox'), (6, 'Has tag'), (7, 'Has any tag'), (8, 'Created before'), (9, 'Created after'), (10, 'Created year is'), (11, 'Created month is'), (12, 'Created day is'), (13, 'Added before'), (14, 'Added after'), (15, 'Modified before'), (16, 'Modified after'), (17, 'Does not have tag')])),
                ('value', models.CharField(max_length=128)),
                ('saved_view', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='filter_rules', to='documents.savedview')),
            ],
        ),
    ]
src/documents/migrations/1008_auto_20201216_1736.py (new file, 34 lines)

# Generated by Django 3.1.4 on 2020-12-16 17:36

from django.db import migrations
import django.db.models.functions.text


class Migration(migrations.Migration):

    dependencies = [
        ('documents', '1007_savedview_savedviewfilterrule'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='correspondent',
            options={'ordering': (django.db.models.functions.text.Lower('name'),)},
        ),
        migrations.AlterModelOptions(
            name='document',
            options={'ordering': ('-created',)},
        ),
        migrations.AlterModelOptions(
            name='documenttype',
            options={'ordering': (django.db.models.functions.text.Lower('name'),)},
        ),
        migrations.AlterModelOptions(
            name='savedview',
            options={'ordering': (django.db.models.functions.text.Lower('name'),)},
        ),
        migrations.AlterModelOptions(
            name='tag',
            options={'ordering': (django.db.models.functions.text.Lower('name'),)},
        ),
    ]
src/documents/migrations/1009_auto_20201216_2005.py (new file, 29 lines)

# Generated by Django 3.1.4 on 2020-12-16 20:05

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('documents', '1008_auto_20201216_1736'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='correspondent',
            options={'ordering': ('name',)},
        ),
        migrations.AlterModelOptions(
            name='documenttype',
            options={'ordering': ('name',)},
        ),
        migrations.AlterModelOptions(
            name='savedview',
            options={'ordering': ('name',)},
        ),
        migrations.AlterModelOptions(
            name='tag',
            options={'ordering': ('name',)},
        ),
    ]
src/documents/migrations/1010_auto_20210101_2159.py (new file, 18 lines)

# Generated by Django 3.1.4 on 2021-01-01 21:59

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('documents', '1009_auto_20201216_2005'),
    ]

    operations = [
        migrations.AlterField(
            model_name='savedviewfilterrule',
            name='value',
            field=models.CharField(blank=True, max_length=128, null=True),
        ),
    ]
src/documents/migrations/1011_auto_20210101_2340.py (new file, 250 lines)

# Generated by Django 3.1.4 on 2021-01-01 23:40

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('documents', '1010_auto_20210101_2159'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='correspondent',
            options={'ordering': ('name',), 'verbose_name': 'correspondent', 'verbose_name_plural': 'correspondents'},
        ),
        migrations.AlterModelOptions(
            name='document',
            options={'ordering': ('-created',), 'verbose_name': 'document', 'verbose_name_plural': 'documents'},
        ),
        migrations.AlterModelOptions(
            name='documenttype',
            options={'verbose_name': 'document type', 'verbose_name_plural': 'document types'},
        ),
        migrations.AlterModelOptions(
            name='log',
            options={'ordering': ('-created',), 'verbose_name': 'log', 'verbose_name_plural': 'logs'},
        ),
        migrations.AlterModelOptions(
            name='savedview',
            options={'ordering': ('name',), 'verbose_name': 'saved view', 'verbose_name_plural': 'saved views'},
        ),
        migrations.AlterModelOptions(
            name='savedviewfilterrule',
            options={'verbose_name': 'filter rule', 'verbose_name_plural': 'filter rules'},
        ),
        migrations.AlterModelOptions(
            name='tag',
            options={'verbose_name': 'tag', 'verbose_name_plural': 'tags'},
        ),
        migrations.AlterField(
            model_name='correspondent',
            name='is_insensitive',
            field=models.BooleanField(default=True, verbose_name='is insensitive'),
        ),
        migrations.AlterField(
            model_name='correspondent',
            name='match',
            field=models.CharField(blank=True, max_length=256, verbose_name='match'),
        ),
        migrations.AlterField(
            model_name='correspondent',
            name='matching_algorithm',
            field=models.PositiveIntegerField(choices=[(1, 'Any word'), (2, 'All words'), (3, 'Exact match'), (4, 'Regular expression'), (5, 'Fuzzy word'), (6, 'Automatic')], default=1, verbose_name='matching algorithm'),
        ),
        migrations.AlterField(
            model_name='correspondent',
            name='name',
            field=models.CharField(max_length=128, unique=True, verbose_name='name'),
        ),
        migrations.AlterField(
            model_name='document',
            name='added',
            field=models.DateTimeField(db_index=True, default=django.utils.timezone.now, editable=False, verbose_name='added'),
        ),
        migrations.AlterField(
            model_name='document',
            name='archive_checksum',
            field=models.CharField(blank=True, editable=False, help_text='The checksum of the archived document.', max_length=32, null=True, verbose_name='archive checksum'),
        ),
        migrations.AlterField(
            model_name='document',
            name='archive_serial_number',
            field=models.IntegerField(blank=True, db_index=True, help_text='The position of this document in your physical document archive.', null=True, unique=True, verbose_name='archive serial number'),
        ),
        migrations.AlterField(
            model_name='document',
            name='checksum',
            field=models.CharField(editable=False, help_text='The checksum of the original document.', max_length=32, unique=True, verbose_name='checksum'),
        ),
        migrations.AlterField(
            model_name='document',
            name='content',
            field=models.TextField(blank=True, help_text='The raw, text-only data of the document. This field is primarily used for searching.', verbose_name='content'),
        ),
        migrations.AlterField(
            model_name='document',
            name='correspondent',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='documents', to='documents.correspondent', verbose_name='correspondent'),
        ),
        migrations.AlterField(
            model_name='document',
            name='created',
            field=models.DateTimeField(db_index=True, default=django.utils.timezone.now, verbose_name='created'),
        ),
        migrations.AlterField(
            model_name='document',
            name='document_type',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='documents', to='documents.documenttype', verbose_name='document type'),
        ),
        migrations.AlterField(
            model_name='document',
            name='filename',
            field=models.FilePathField(default=None, editable=False, help_text='Current filename in storage', max_length=1024, null=True, verbose_name='filename'),
        ),
        migrations.AlterField(
            model_name='document',
            name='mime_type',
            field=models.CharField(editable=False, max_length=256, verbose_name='mime type'),
        ),
        migrations.AlterField(
            model_name='document',
            name='modified',
            field=models.DateTimeField(auto_now=True, db_index=True, verbose_name='modified'),
        ),
        migrations.AlterField(
            model_name='document',
            name='storage_type',
            field=models.CharField(choices=[('unencrypted', 'Unencrypted'), ('gpg', 'Encrypted with GNU Privacy Guard')], default='unencrypted', editable=False, max_length=11, verbose_name='storage type'),
        ),
        migrations.AlterField(
            model_name='document',
            name='tags',
            field=models.ManyToManyField(blank=True, related_name='documents', to='documents.Tag', verbose_name='tags'),
        ),
        migrations.AlterField(
            model_name='document',
            name='title',
            field=models.CharField(blank=True, db_index=True, max_length=128, verbose_name='title'),
        ),
        migrations.AlterField(
            model_name='documenttype',
            name='is_insensitive',
            field=models.BooleanField(default=True, verbose_name='is insensitive'),
        ),
        migrations.AlterField(
            model_name='documenttype',
            name='match',
            field=models.CharField(blank=True, max_length=256, verbose_name='match'),
        ),
        migrations.AlterField(
            model_name='documenttype',
            name='matching_algorithm',
            field=models.PositiveIntegerField(choices=[(1, 'Any word'), (2, 'All words'), (3, 'Exact match'), (4, 'Regular expression'), (5, 'Fuzzy word'), (6, 'Automatic')], default=1, verbose_name='matching algorithm'),
        ),
        migrations.AlterField(
            model_name='documenttype',
            name='name',
            field=models.CharField(max_length=128, unique=True, verbose_name='name'),
        ),
        migrations.AlterField(
            model_name='log',
            name='created',
            field=models.DateTimeField(auto_now_add=True, verbose_name='created'),
        ),
        migrations.AlterField(
            model_name='log',
            name='group',
            field=models.UUIDField(blank=True, null=True, verbose_name='group'),
        ),
        migrations.AlterField(
            model_name='log',
            name='level',
            field=models.PositiveIntegerField(choices=[(10, 'debug'), (20, 'information'), (30, 'warning'), (40, 'error'), (50, 'critical')], default=20, verbose_name='level'),
        ),
        migrations.AlterField(
            model_name='log',
            name='message',
            field=models.TextField(verbose_name='message'),
        ),
        migrations.AlterField(
            model_name='savedview',
            name='name',
            field=models.CharField(max_length=128, verbose_name='name'),
        ),
        migrations.AlterField(
            model_name='savedview',
            name='show_in_sidebar',
            field=models.BooleanField(verbose_name='show in sidebar'),
        ),
        migrations.AlterField(
            model_name='savedview',
            name='show_on_dashboard',
            field=models.BooleanField(verbose_name='show on dashboard'),
        ),
        migrations.AlterField(
            model_name='savedview',
            name='sort_field',
            field=models.CharField(max_length=128, verbose_name='sort field'),
        ),
        migrations.AlterField(
            model_name='savedview',
            name='sort_reverse',
            field=models.BooleanField(default=False, verbose_name='sort reverse'),
        ),
        migrations.AlterField(
            model_name='savedview',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='user'),
        ),
        migrations.AlterField(
            model_name='savedviewfilterrule',
            name='rule_type',
            field=models.PositiveIntegerField(choices=[(0, 'title contains'), (1, 'content contains'), (2, 'ASN is'), (3, 'correspondent is'), (4, 'document type is'), (5, 'is in inbox'), (6, 'has tag'), (7, 'has any tag'), (8, 'created before'), (9, 'created after'), (10, 'created year is'), (11, 'created month is'), (12, 'created day is'), (13, 'added before'), (14, 'added after'), (15, 'modified before'), (16, 'modified after'), (17, 'does not have tag')], verbose_name='rule type'),
        ),
        migrations.AlterField(
            model_name='savedviewfilterrule',
            name='saved_view',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='filter_rules', to='documents.savedview', verbose_name='saved view'),
        ),
        migrations.AlterField(
            model_name='savedviewfilterrule',
            name='value',
            field=models.CharField(blank=True, max_length=128, null=True, verbose_name='value'),
        ),
        migrations.AlterField(
            model_name='tag',
            name='colour',
            field=models.PositiveIntegerField(choices=[(1, '#a6cee3'), (2, '#1f78b4'), (3, '#b2df8a'), (4, '#33a02c'), (5, '#fb9a99'), (6, '#e31a1c'), (7, '#fdbf6f'), (8, '#ff7f00'), (9, '#cab2d6'), (10, '#6a3d9a'), (11, '#b15928'), (12, '#000000'), (13, '#cccccc')], default=1, verbose_name='color'),
        ),
        migrations.AlterField(
            model_name='tag',
            name='is_inbox_tag',
            field=models.BooleanField(default=False, help_text='Marks this tag as an inbox tag: All newly consumed documents will be tagged with inbox tags.', verbose_name='is inbox tag'),
        ),
        migrations.AlterField(
            model_name='tag',
            name='is_insensitive',
            field=models.BooleanField(default=True, verbose_name='is insensitive'),
        ),
        migrations.AlterField(
            model_name='tag',
            name='match',
            field=models.CharField(blank=True, max_length=256, verbose_name='match'),
        ),
        migrations.AlterField(
            model_name='tag',
            name='matching_algorithm',
            field=models.PositiveIntegerField(choices=[(1, 'Any word'), (2, 'All words'), (3, 'Exact match'), (4, 'Regular expression'), (5, 'Fuzzy word'), (6, 'Automatic')], default=1, verbose_name='matching algorithm'),
        ),
        migrations.AlterField(
            model_name='tag',
            name='name',
            field=models.CharField(max_length=128, unique=True, verbose_name='name'),
        ),
    ]
@@ -9,9 +9,11 @@ import pathvalidate

import dateutil.parser
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.utils import timezone
from django.utils.text import slugify

from django.utils.translation import gettext_lazy as _

from documents.file_handling import archive_name_from_filename
from documents.parsers import get_default_file_extension
@@ -27,37 +29,31 @@ class MatchingModel(models.Model):
    MATCH_AUTO = 6

    MATCHING_ALGORITHMS = (
        (MATCH_ANY, "Any"),
        (MATCH_ALL, "All"),
        (MATCH_LITERAL, "Literal"),
        (MATCH_REGEX, "Regular Expression"),
        (MATCH_FUZZY, "Fuzzy Match"),
        (MATCH_AUTO, "Automatic Classification"),
        (MATCH_ANY, _("Any word")),
        (MATCH_ALL, _("All words")),
        (MATCH_LITERAL, _("Exact match")),
        (MATCH_REGEX, _("Regular expression")),
        (MATCH_FUZZY, _("Fuzzy word")),
        (MATCH_AUTO, _("Automatic")),
    )

    name = models.CharField(max_length=128, unique=True)
    slug = models.SlugField(blank=True, editable=False)
    name = models.CharField(
        _("name"),
        max_length=128, unique=True)

    match = models.CharField(
        _("match"),
        max_length=256, blank=True)

    match = models.CharField(max_length=256, blank=True)
    matching_algorithm = models.PositiveIntegerField(
        _("matching algorithm"),
        choices=MATCHING_ALGORITHMS,
        default=MATCH_ANY,
        help_text=(
            "Which algorithm you want to use when matching text to the OCR'd "
            "PDF. Here, \"any\" looks for any occurrence of any word "
            "provided in the PDF, while \"all\" requires that every word "
            "provided appear in the PDF, albeit not in the order provided. A "
            "\"literal\" match means that the text you enter must appear in "
            "the PDF exactly as you've entered it, and \"regular expression\" "
            "uses a regex to match the PDF. (If you don't know what a regex "
            "is, you probably don't want this option.) Finally, a \"fuzzy "
            "match\" looks for words or phrases that are mostly—but not "
            "exactly—the same, which can be useful for matching against "
            "documents containing imperfections that foil accurate OCR."
        )
        default=MATCH_ANY
    )

    is_insensitive = models.BooleanField(default=True)
    is_insensitive = models.BooleanField(
        _("is insensitive"),
        default=True)

    class Meta:
        abstract = True
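The move from bare strings to gettext_lazy (imported as _) in the choices above is what makes these labels translatable: the proxy object defers translation until the choice is rendered for a particular request. A minimal sketch of the same pattern, using an illustrative model that is not part of this diff:

from django.db import models
from django.utils.translation import gettext_lazy as _


class Invoice(models.Model):
    # _() returns a lazy proxy, so the label is resolved in the
    # active language at render time, not once at import time.
    STATUS_CHOICES = (
        (0, _("open")),
        (1, _("paid")),
    )

    status = models.PositiveIntegerField(
        _("status"), choices=STATUS_CHOICES, default=0)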
@@ -69,7 +65,6 @@ class MatchingModel(models.Model):
    def save(self, *args, **kwargs):

        self.match = self.match.lower()
        self.slug = slugify(self.name)

        models.Model.save(self, *args, **kwargs)

@@ -82,6 +77,8 @@ class Correspondent(MatchingModel):

    class Meta:
        ordering = ("name",)
        verbose_name = _("correspondent")
        verbose_name_plural = _("correspondents")


class Tag(MatchingModel):
@@ -102,18 +99,27 @@ class Tag(MatchingModel):
        (13, "#cccccc")
    )

    colour = models.PositiveIntegerField(choices=COLOURS, default=1)
    colour = models.PositiveIntegerField(
        _("color"),
        choices=COLOURS, default=1)

    is_inbox_tag = models.BooleanField(
        _("is inbox tag"),
        default=False,
        help_text="Marks this tag as an inbox tag: All newly consumed "
                  "documents will be tagged with inbox tags."
        help_text=_("Marks this tag as an inbox tag: All newly consumed "
                    "documents will be tagged with inbox tags.")
    )

    class Meta:
        verbose_name = _("tag")
        verbose_name_plural = _("tags")


class DocumentType(MatchingModel):

    pass
    class Meta:
        verbose_name = _("document type")
        verbose_name_plural = _("document types")


class Document(models.Model):
@@ -121,8 +127,8 @@ class Document(models.Model):
    STORAGE_TYPE_UNENCRYPTED = "unencrypted"
    STORAGE_TYPE_GPG = "gpg"
    STORAGE_TYPES = (
        (STORAGE_TYPE_UNENCRYPTED, "Unencrypted"),
        (STORAGE_TYPE_GPG, "Encrypted with GNU Privacy Guard")
        (STORAGE_TYPE_UNENCRYPTED, _("Unencrypted")),
        (STORAGE_TYPE_GPG, _("Encrypted with GNU Privacy Guard"))
    )

    correspondent = models.ForeignKey(
@@ -130,54 +136,68 @@ class Document(models.Model):
        blank=True,
        null=True,
        related_name="documents",
        on_delete=models.SET_NULL
        on_delete=models.SET_NULL,
        verbose_name=_("correspondent")
    )

    title = models.CharField(max_length=128, blank=True, db_index=True)
    title = models.CharField(
        _("title"),
        max_length=128, blank=True, db_index=True)

    document_type = models.ForeignKey(
        DocumentType,
        blank=True,
        null=True,
        related_name="documents",
        on_delete=models.SET_NULL
        on_delete=models.SET_NULL,
        verbose_name=_("document type")
    )

    content = models.TextField(
        _("content"),
        blank=True,
        help_text="The raw, text-only data of the document. This field is "
                  "primarily used for searching."
        help_text=_("The raw, text-only data of the document. This field is "
                    "primarily used for searching.")
    )

    mime_type = models.CharField(
        _("mime type"),
        max_length=256,
        editable=False
    )

    tags = models.ManyToManyField(
        Tag, related_name="documents", blank=True)
        Tag, related_name="documents", blank=True,
        verbose_name=_("tags")
    )

    checksum = models.CharField(
        _("checksum"),
        max_length=32,
        editable=False,
        unique=True,
        help_text="The checksum of the original document."
        help_text=_("The checksum of the original document.")
    )

    archive_checksum = models.CharField(
        _("archive checksum"),
        max_length=32,
        editable=False,
        blank=True,
        null=True,
        help_text="The checksum of the archived document."
        help_text=_("The checksum of the archived document.")
    )

    created = models.DateTimeField(
        _("created"),
        default=timezone.now, db_index=True)

    modified = models.DateTimeField(
        _("modified"),
        auto_now=True, editable=False, db_index=True)

    storage_type = models.CharField(
        _("storage type"),
        max_length=11,
        choices=STORAGE_TYPES,
        default=STORAGE_TYPE_UNENCRYPTED,
@@ -185,27 +205,32 @@ class Document(models.Model):
    )

    added = models.DateTimeField(
        _("added"),
        default=timezone.now, editable=False, db_index=True)

    filename = models.FilePathField(
        _("filename"),
        max_length=1024,
        editable=False,
        default=None,
        null=True,
        help_text="Current filename in storage"
        help_text=_("Current filename in storage")
    )

    archive_serial_number = models.IntegerField(
        _("archive serial number"),
        blank=True,
        null=True,
        unique=True,
        db_index=True,
        help_text="The position of this document in your physical document "
                  "archive."
        help_text=_("The position of this document in your physical document "
                    "archive.")
    )

    class Meta:
        ordering = ("correspondent", "title")
        ordering = ("-created",)
        verbose_name = _("document")
        verbose_name_plural = _("documents")

    def __str__(self):
        created = datetime.date.isoformat(self.created)
@@ -221,7 +246,7 @@ class Document(models.Model):
        else:
            fname = "{:07}{}".format(self.pk, self.file_type)
        if self.storage_type == self.STORAGE_TYPE_GPG:
            fname += ".gpg"
            fname += ".gpg"  # pragma: no cover

        return os.path.join(
            settings.ORIGINALS_DIR,
@@ -287,76 +312,116 @@
class Log(models.Model):

    LEVELS = (
        (logging.DEBUG, "Debugging"),
        (logging.INFO, "Informational"),
        (logging.WARNING, "Warning"),
        (logging.ERROR, "Error"),
        (logging.CRITICAL, "Critical"),
        (logging.DEBUG, _("debug")),
        (logging.INFO, _("information")),
        (logging.WARNING, _("warning")),
        (logging.ERROR, _("error")),
        (logging.CRITICAL, _("critical")),
    )

    group = models.UUIDField(blank=True, null=True)
    message = models.TextField()
    level = models.PositiveIntegerField(choices=LEVELS, default=logging.INFO)
    created = models.DateTimeField(auto_now_add=True)
    group = models.UUIDField(
        _("group"),
        blank=True, null=True)

    message = models.TextField(_("message"))

    level = models.PositiveIntegerField(
        _("level"),
        choices=LEVELS, default=logging.INFO)

    created = models.DateTimeField(_("created"), auto_now_add=True)

    class Meta:
        ordering = ("-created",)
        verbose_name = _("log")
        verbose_name_plural = _("logs")

    def __str__(self):
        return self.message


class SavedView(models.Model):

    class Meta:

        ordering = ("name",)
        verbose_name = _("saved view")
        verbose_name_plural = _("saved views")

    user = models.ForeignKey(User, on_delete=models.CASCADE,
                             verbose_name=_("user"))
    name = models.CharField(
        _("name"),
        max_length=128)

    show_on_dashboard = models.BooleanField(
        _("show on dashboard"),
    )
    show_in_sidebar = models.BooleanField(
        _("show in sidebar"),
    )

    sort_field = models.CharField(
        _("sort field"),
        max_length=128)
    sort_reverse = models.BooleanField(
        _("sort reverse"),
        default=False)


class SavedViewFilterRule(models.Model):
    RULE_TYPES = [
        (0, _("title contains")),
        (1, _("content contains")),
        (2, _("ASN is")),
        (3, _("correspondent is")),
        (4, _("document type is")),
        (5, _("is in inbox")),
        (6, _("has tag")),
        (7, _("has any tag")),
        (8, _("created before")),
        (9, _("created after")),
        (10, _("created year is")),
        (11, _("created month is")),
        (12, _("created day is")),
        (13, _("added before")),
        (14, _("added after")),
        (15, _("modified before")),
        (16, _("modified after")),
        (17, _("does not have tag")),
    ]

    saved_view = models.ForeignKey(
        SavedView,
        on_delete=models.CASCADE,
        related_name="filter_rules",
        verbose_name=_("saved view")
    )

    rule_type = models.PositiveIntegerField(
        _("rule type"),
        choices=RULE_TYPES)

    value = models.CharField(
        _("value"),
        max_length=128,
        blank=True,
        null=True)

    class Meta:
        verbose_name = _("filter rule")
        verbose_name_plural = _("filter rules")


# TODO: why is this in the models file?
class FileInfo:

    # This epic regex *almost* worked for our needs, so I'm keeping it here for
    # posterity, in the hopes that we might find a way to make it work one day.
    ALMOST_REGEX = re.compile(
        r"^((?P<date>\d\d\d\d\d\d\d\d\d\d\d\d\d\dZ){separator})?"
        r"((?P<correspondent>{non_separated_word}+){separator})??"
        r"(?P<title>{non_separated_word}+)"
        r"({separator}(?P<tags>[a-z,0-9-]+))?"
        r"\.(?P<extension>[a-zA-Z.-]+)$".format(
            separator=r"\s+-\s+",
            non_separated_word=r"([\w,. ]|([^\s]-))"
        )
    )
    REGEXES = OrderedDict([
        ("created-correspondent-title-tags", re.compile(
            r"^(?P<created>\d\d\d\d\d\d\d\d(\d\d\d\d\d\d)?Z) - "
            r"(?P<correspondent>.*) - "
            r"(?P<title>.*) - "
            r"(?P<tags>[a-z0-9\-,]*)$",
            flags=re.IGNORECASE
        )),
        ("created-title-tags", re.compile(
            r"^(?P<created>\d\d\d\d\d\d\d\d(\d\d\d\d\d\d)?Z) - "
            r"(?P<title>.*) - "
            r"(?P<tags>[a-z0-9\-,]*)$",
            flags=re.IGNORECASE
        )),
        ("created-correspondent-title", re.compile(
            r"^(?P<created>\d\d\d\d\d\d\d\d(\d\d\d\d\d\d)?Z) - "
            r"(?P<correspondent>.*) - "
            r"(?P<title>.*)$",
            flags=re.IGNORECASE
        )),
        ("created-title", re.compile(
            r"^(?P<created>\d\d\d\d\d\d\d\d(\d\d\d\d\d\d)?Z) - "
            r"(?P<title>.*)$",
            flags=re.IGNORECASE
        )),
        ("correspondent-title-tags", re.compile(
            r"(?P<correspondent>.*) - "
            r"(?P<title>.*) - "
            r"(?P<tags>[a-z0-9\-,]*)$",
            flags=re.IGNORECASE
        )),
        ("correspondent-title", re.compile(
            r"(?P<correspondent>.*) - "
            r"(?P<title>.*)?$",
            flags=re.IGNORECASE
        )),
        ("title", re.compile(
            r"(?P<title>.*)$",
            flags=re.IGNORECASE
@@ -379,28 +444,10 @@ class FileInfo:
        except ValueError:
            return None

    @classmethod
    def _get_correspondent(cls, name):
        if not name:
            return None
        return Correspondent.objects.get_or_create(name=name, defaults={
            "slug": slugify(name)
        })[0]

    @classmethod
    def _get_title(cls, title):
        return title

    @classmethod
    def _get_tags(cls, tags):
        r = []
        for t in tags.split(","):
            r.append(Tag.objects.get_or_create(
                slug=slugify(t),
                defaults={"name": t}
            )[0])
        return tuple(r)

    @classmethod
    def _mangle_property(cls, properties, name):
        if name in properties:
@@ -410,15 +457,6 @@ class FileInfo:

    @classmethod
    def from_filename(cls, filename):
        """
        We use a crude naming convention to make handling the correspondent,
        title, and tags easier:
          "<date> - <correspondent> - <title> - <tags>"
          "<correspondent> - <title> - <tags>"
          "<correspondent> - <title>"
          "<title>"
        """

        # Mutate filename in-place before parsing its components
        # by applying at most one of the configured transformations.
        for (pattern, repl) in settings.FILENAME_PARSE_TRANSFORMS:
@@ -449,7 +487,5 @@ class FileInfo:
        if m:
            properties = m.groupdict()
            cls._mangle_property(properties, "created")
            cls._mangle_property(properties, "correspondent")
            cls._mangle_property(properties, "title")
            cls._mangle_property(properties, "tags")
        return cls(**properties)

@@ -117,6 +117,7 @@ def run_convert(input_file,
                trim=False,
                type=None,
                depth=None,
                auto_orient=False,
                extra=None,
                logging_group=None):

@@ -134,6 +135,7 @@ def run_convert(input_file,
    args += ['-trim'] if trim else []
    args += ['-type', str(type)] if type else []
    args += ['-depth', str(depth)] if depth else []
    args += ['-auto-orient'] if auto_orient else []
    args += [input_file, output_file]

    logger.debug("Execute: " + " ".join(args), extra={'group': logging_group})
@@ -142,6 +144,53 @@ def run_convert(input_file,
        raise ParseError("Convert failed at {}".format(args))


def make_thumbnail_from_pdf(in_path, temp_dir, logging_group=None):
    """
    The thumbnail of a PDF is just a 500px wide image of the first page.
    """
    out_path = os.path.join(temp_dir, "convert.png")

    # Run convert to get a decent thumbnail
    try:
        run_convert(density=300,
                    scale="500x5000>",
                    alpha="remove",
                    strip=True,
                    trim=False,
                    auto_orient=True,
                    input_file="{}[0]".format(in_path),
                    output_file=out_path,
                    logging_group=logging_group)
    except ParseError:
        # if convert fails, fall back to extracting
        # the first PDF page as a PNG using Ghostscript
        logger.warning(
            "Thumbnail generation with ImageMagick failed, falling back "
            "to ghostscript. Check your /etc/ImageMagick-x/policy.xml!",
            extra={'group': logging_group}
        )
        gs_out_path = os.path.join(temp_dir, "gs_out.png")
        cmd = [settings.GS_BINARY,
               "-q",
               "-sDEVICE=pngalpha",
               "-o", gs_out_path,
               in_path]
        if not subprocess.Popen(cmd).wait() == 0:
            raise ParseError("Thumbnail (gs) failed at {}".format(cmd))
        # then run convert on the output from gs
        run_convert(density=300,
                    scale="500x5000>",
                    alpha="remove",
                    strip=True,
                    trim=False,
                    auto_orient=True,
                    input_file=gs_out_path,
                    output_file=out_path,
                    logging_group=logging_group)

    return out_path

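For context, a usage sketch of the function above; the paths are illustrative. Many ImageMagick installations ship a policy.xml that forbids reading PDFs, which is exactly the failure the Ghostscript fallback covers:

# Hypothetical call; in_path and temp_dir are made-up values.
thumb = make_thumbnail_from_pdf(
    in_path="/consume/invoice.pdf",
    temp_dir="/tmp/paperless-scratch",
)
# thumb points at a PNG of page 1. The "500x5000>" geometry only
# shrinks images larger than the given size; it never enlarges them.
print(thumb)  # /tmp/paperless-scratch/convert.png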
def parse_date(filename, text):
    """
    Returns the date of the document.
@@ -163,8 +212,6 @@ def parse_date(filename, text):

    date = None

    next_year = timezone.now().year + 5  # Arbitrary 5 year future limit

    # if filename date parsing is enabled, search there first:
    if settings.FILENAME_DATE_ORDER:
        for m in re.finditer(DATE_REGEX, filename):
@@ -176,7 +223,7 @@ def parse_date(filename, text):
                # Skip all matches that do not parse to a proper date
                continue

            if date is not None and next_year > date.year > 1900:
            if date and date.year > 1900 and date <= timezone.now():
                return date

    # Iterate through all regex matches in text and try to parse the date
@@ -189,7 +236,7 @@ def parse_date(filename, text):
            # Skip all matches that do not parse to a proper date
            continue

        if date is not None and next_year > date.year > 1900:
        if date and date.year > 1900 and date <= timezone.now():
            break
    else:
        date = None
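The new condition replaces the old five-year-ahead window with a strict "not in the future" rule. A small illustration of the behavioral difference, with a made-up date:

from datetime import timedelta

from django.utils import timezone

date = timezone.now() + timedelta(days=30)

# old check: accepted dates up to roughly five years ahead
next_year = timezone.now().year + 5
old_ok = date is not None and next_year > date.year > 1900  # True

# new check: anything after "now" is rejected
new_ok = bool(date and date.year > 1900 and date <= timezone.now())  # False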
@@ -210,6 +257,7 @@ class DocumentParser(LoggingMixin):
    def __init__(self, logging_group, progress_callback):
        super().__init__()
        self.logging_group = logging_group
        os.makedirs(settings.SCRATCH_DIR, exist_ok=True)
        self.tempdir = tempfile.mkdtemp(
            prefix="paperless-", dir=settings.SCRATCH_DIR)

@@ -218,7 +266,10 @@ class DocumentParser(LoggingMixin):
        self.date = None
        self.progress_callback = progress_callback

    def parse(self, document_path, mime_type):
    def extract_metadata(self, document_path, mime_type):
        return []

    def parse(self, document_path, mime_type, file_name=None):
        raise NotImplementedError()

    def get_archive_path(self):

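parse() now also receives the original file name, and extract_metadata() gives parsers an optional hook that defaults to an empty list. A hypothetical subclass, only to show the contract; the class and the metadata keys are illustrative, not taken from this diff:

class PlainTextParser(DocumentParser):
    # Illustrative subclass, not part of this commit.

    def parse(self, document_path, mime_type, file_name=None):
        with open(document_path) as f:
            self.text = f.read()

    def extract_metadata(self, document_path, mime_type):
        # one dict per metadata entry; this shape is an assumption
        return [{"namespace": "", "prefix": "", "key": "encoding",
                 "value": "utf-8"}]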
@@ -46,6 +46,10 @@ def check_sanity():
        for f in files:
            present_files.append(os.path.normpath(os.path.join(root, f)))

    lockfile = os.path.normpath(settings.MEDIA_LOCK)
    if lockfile in present_files:
        present_files.remove(lockfile)

    for doc in Document.objects.all():
        # Check sanity of the thumbnail
        if not os.path.isfile(doc.thumbnail_path):

@@ -1,17 +1,46 @@
import magic
from pathvalidate import validate_filename, ValidationError
from django.utils.text import slugify
from rest_framework import serializers
from rest_framework.fields import SerializerMethodField

from .models import Correspondent, Tag, Document, Log, DocumentType
from . import bulk_edit
from .models import Correspondent, Tag, Document, Log, DocumentType, \
    SavedView, SavedViewFilterRule
from .parsers import is_mime_type_supported


class CorrespondentSerializer(serializers.HyperlinkedModelSerializer):
# https://www.django-rest-framework.org/api-guide/serializers/#example
class DynamicFieldsModelSerializer(serializers.ModelSerializer):
    """
    A ModelSerializer that takes an additional `fields` argument that
    controls which fields should be displayed.
    """

    def __init__(self, *args, **kwargs):
        # Don't pass the 'fields' arg up to the superclass
        fields = kwargs.pop('fields', None)

        # Instantiate the superclass normally
        super(DynamicFieldsModelSerializer, self).__init__(*args, **kwargs)

        if fields is not None:
            # Drop any fields that are not specified in the `fields` argument.
            allowed = set(fields)
            existing = set(self.fields)
            for field_name in existing - allowed:
                self.fields.pop(field_name)

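This serializer is what backs the new ?fields= query parameter on the document endpoints: the view hands the parsed parameter in as fields=..., and every field not named there is dropped before rendering. A minimal usage sketch (the instance is illustrative):

# Assuming DocumentSerializer subclasses DynamicFieldsModelSerializer:
serializer = DocumentSerializer(doc, fields=("id", "title"))
# serializer.data now contains only "id" and "title". Unknown names
# in `fields` are silently ignored, which matches the API tests below.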
class CorrespondentSerializer(serializers.ModelSerializer):

    document_count = serializers.IntegerField(read_only=True)

    last_correspondence = serializers.DateTimeField(read_only=True)

    def get_slug(self, obj):
        return slugify(obj.name)
    slug = SerializerMethodField()

    class Meta:
        model = Correspondent
        fields = (
@@ -26,10 +55,14 @@ class CorrespondentSerializer(serializers.HyperlinkedModelSerializer):
    )


class DocumentTypeSerializer(serializers.HyperlinkedModelSerializer):
class DocumentTypeSerializer(serializers.ModelSerializer):

    document_count = serializers.IntegerField(read_only=True)

    def get_slug(self, obj):
        return slugify(obj.name)
    slug = SerializerMethodField()

    class Meta:
        model = DocumentType
        fields = (
@@ -43,10 +76,14 @@ class DocumentTypeSerializer(serializers.HyperlinkedModelSerializer):
    )


class TagSerializer(serializers.HyperlinkedModelSerializer):
class TagSerializer(serializers.ModelSerializer):

    document_count = serializers.IntegerField(read_only=True)

    def get_slug(self, obj):
        return slugify(obj.name)
    slug = SerializerMethodField()

    class Meta:
        model = Tag
        fields = (
@@ -77,12 +114,24 @@ class DocumentTypeField(serializers.PrimaryKeyRelatedField):
        return DocumentType.objects.all()


class DocumentSerializer(serializers.ModelSerializer):
class DocumentSerializer(DynamicFieldsModelSerializer):

    correspondent = CorrespondentField(allow_null=True)
    tags = TagsField(many=True)
    document_type = DocumentTypeField(allow_null=True)

    original_file_name = SerializerMethodField()
    archived_file_name = SerializerMethodField()

    def get_original_file_name(self, obj):
        return obj.get_public_filename()

    def get_archived_file_name(self, obj):
        if obj.archive_checksum:
            return obj.get_public_filename(archive=True)
        else:
            return None

    class Meta:
        model = Document
        depth = 1
@@ -96,7 +145,9 @@ class DocumentSerializer(serializers.ModelSerializer):
        "created",
        "modified",
        "added",
        "archive_serial_number"
        "archive_serial_number",
        "original_file_name",
        "archived_file_name",
    )


@@ -113,6 +164,177 @@ class LogSerializer(serializers.ModelSerializer):
    )


class SavedViewFilterRuleSerializer(serializers.ModelSerializer):

    class Meta:
        model = SavedViewFilterRule
        fields = ["rule_type", "value"]


class SavedViewSerializer(serializers.ModelSerializer):

    filter_rules = SavedViewFilterRuleSerializer(many=True)

    class Meta:
        model = SavedView
        depth = 1
        fields = ["id", "name", "show_on_dashboard", "show_in_sidebar",
                  "sort_field", "sort_reverse", "filter_rules"]

    def update(self, instance, validated_data):
        if 'filter_rules' in validated_data:
            rules_data = validated_data.pop('filter_rules')
        else:
            rules_data = None
        super(SavedViewSerializer, self).update(instance, validated_data)
        if rules_data is not None:
            SavedViewFilterRule.objects.filter(saved_view=instance).delete()
            for rule_data in rules_data:
                SavedViewFilterRule.objects.create(
                    saved_view=instance, **rule_data)
        return instance

    def create(self, validated_data):
        rules_data = validated_data.pop('filter_rules')
        saved_view = SavedView.objects.create(**validated_data)
        for rule_data in rules_data:
            SavedViewFilterRule.objects.create(
                saved_view=saved_view, **rule_data)
        return saved_view

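Taken together, create() and update() mean a saved view is always written along with its rules in a single request, and update() replaces the whole rule set instead of merging. The JSON shape a client would send (values are illustrative):

payload = {
    "name": "my inbox",
    "show_on_dashboard": True,
    "show_in_sidebar": True,
    "sort_field": "created",
    "sort_reverse": False,
    "filter_rules": [
        {"rule_type": 5, "value": None},  # rule type 5: "is in inbox"
    ],
}
# POST /api/saved_views/ creates the view plus one SavedViewFilterRule;
# a later PUT with "filter_rules": [] deletes all existing rules.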
class BulkEditSerializer(serializers.Serializer):

    documents = serializers.ListField(
        child=serializers.IntegerField(),
        label="Documents",
        write_only=True
    )

    method = serializers.ChoiceField(
        choices=[
            "set_correspondent",
            "set_document_type",
            "add_tag",
            "remove_tag",
            "modify_tags",
            "delete"
        ],
        label="Method",
        write_only=True,
    )

    parameters = serializers.DictField(allow_empty=True)

    def _validate_document_id_list(self, documents, name="documents"):
        if not type(documents) == list:
            raise serializers.ValidationError(f"{name} must be a list")
        if not all([type(i) == int for i in documents]):
            raise serializers.ValidationError(
                f"{name} must be a list of integers")
        count = Document.objects.filter(id__in=documents).count()
        if not count == len(documents):
            raise serializers.ValidationError(
                f"Some documents in {name} don't exist or were "
                f"specified twice.")

    def _validate_tag_id_list(self, tags, name="tags"):
        if not type(tags) == list:
            raise serializers.ValidationError(f"{name} must be a list")
        if not all([type(i) == int for i in tags]):
            raise serializers.ValidationError(
                f"{name} must be a list of integers")
        count = Tag.objects.filter(id__in=tags).count()
        if not count == len(tags):
            raise serializers.ValidationError(
                f"Some tags in {name} don't exist or were specified twice.")

    def validate_documents(self, documents):
        self._validate_document_id_list(documents)
        return documents

    def validate_method(self, method):
        if method == "set_correspondent":
            return bulk_edit.set_correspondent
        elif method == "set_document_type":
            return bulk_edit.set_document_type
        elif method == "add_tag":
            return bulk_edit.add_tag
        elif method == "remove_tag":
            return bulk_edit.remove_tag
        elif method == "modify_tags":
            return bulk_edit.modify_tags
        elif method == "delete":
            return bulk_edit.delete
        else:
            raise serializers.ValidationError("Unsupported method.")

    def _validate_parameters_tags(self, parameters):
        if 'tag' in parameters:
            tag_id = parameters['tag']
            try:
                Tag.objects.get(id=tag_id)
            except Tag.DoesNotExist:
                raise serializers.ValidationError("Tag does not exist")
        else:
            raise serializers.ValidationError("tag not specified")

    def _validate_parameters_document_type(self, parameters):
        if 'document_type' in parameters:
            document_type_id = parameters['document_type']
            if document_type_id is None:
                # None is ok
                return
            try:
                DocumentType.objects.get(id=document_type_id)
            except DocumentType.DoesNotExist:
                raise serializers.ValidationError(
                    "Document type does not exist")
        else:
            raise serializers.ValidationError("document_type not specified")

    def _validate_parameters_correspondent(self, parameters):
        if 'correspondent' in parameters:
            correspondent_id = parameters['correspondent']
            if correspondent_id is None:
                return
            try:
                Correspondent.objects.get(id=correspondent_id)
            except Correspondent.DoesNotExist:
                raise serializers.ValidationError(
                    "Correspondent does not exist")
        else:
            raise serializers.ValidationError("correspondent not specified")

    def _validate_parameters_modify_tags(self, parameters):
        if "add_tags" in parameters:
            self._validate_tag_id_list(parameters['add_tags'], "add_tags")
        else:
            raise serializers.ValidationError("add_tags not specified")

        if "remove_tags" in parameters:
            self._validate_tag_id_list(parameters['remove_tags'],
                                       "remove_tags")
        else:
            raise serializers.ValidationError("remove_tags not specified")

    def validate(self, attrs):

        method = attrs['method']
        parameters = attrs['parameters']

        if method == bulk_edit.set_correspondent:
            self._validate_parameters_correspondent(parameters)
        elif method == bulk_edit.set_document_type:
            self._validate_parameters_document_type(parameters)
        elif method == bulk_edit.add_tag or method == bulk_edit.remove_tag:
            self._validate_parameters_tags(parameters)
        elif method == bulk_edit.modify_tags:
            self._validate_parameters_modify_tags(parameters)

        return attrs

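A request that survives this validation chain therefore looks like the sketch below (ids are illustrative). Note that validate_method() swaps the method name for the actual bulk_edit function before validate() dispatches on it:

payload = {
    "documents": [1, 2, 3],
    "method": "modify_tags",
    "parameters": {
        "add_tags": [4],
        "remove_tags": [5],
    },
}
# Posted to the bulk edit endpoint; the exact URL is not part of this
# hunk, so treat the route as an assumption.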
class PostDocumentSerializer(serializers.Serializer):

    document = serializers.FileField(
@@ -151,12 +373,6 @@ class PostDocumentSerializer(serializers.Serializer):
    )

    def validate_document(self, document):

        try:
            validate_filename(document.name)
        except ValidationError:
            raise serializers.ValidationError("Invalid filename.")

        document_data = document.file.read()
        mime_type = magic.from_buffer(document_data, mime=True)

@@ -190,3 +406,11 @@ class PostDocumentSerializer(serializers.Serializer):
            return [tag.id for tag in tags]
        else:
            return None


class SelectionDataSerializer(serializers.Serializer):

    documents = serializers.ListField(
        required=True,
        child=serializers.IntegerField()
    )

@@ -7,13 +7,15 @@ from django.contrib.admin.models import ADDITION, LogEntry
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.db import models, DatabaseError
from django.db.models import Q
from django.dispatch import receiver
from django.utils import timezone
from rest_framework.reverse import reverse
from filelock import FileLock

from .. import index, matching
from ..file_handling import delete_empty_directories, generate_filename, \
    create_source_path_directory, archive_name_from_filename
from ..file_handling import delete_empty_directories, \
    create_source_path_directory, archive_name_from_filename, \
    generate_unique_filename
from ..models import Document, Tag


@@ -119,11 +121,14 @@ def set_tags(sender,
             classifier=None,
             replace=False,
             **kwargs):

    if replace:
        document.tags.clear()
        current_tags = set([])
    else:
        current_tags = set(document.tags.all())
        Document.tags.through.objects.filter(document=document).exclude(
            Q(tag__is_inbox_tag=True)).exclude(
            Q(tag__match="") & ~Q(tag__matching_algorithm=Tag.MATCH_AUTO)
        ).delete()

    current_tags = set(document.tags.all())

    matched_tags = matching.match_tags(document.content, classifier)

@@ -134,64 +139,39 @@ def set_tags(sender,

    message = 'Tagging "{}" with "{}"'
    logger(
        message.format(document, ", ".join([t.slug for t in relevant_tags])),
        message.format(document, ", ".join([t.name for t in relevant_tags])),
        logging_group
    )

    document.tags.add(*relevant_tags)

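The reworked replace branch above no longer wipes every tag: the through-table delete spares inbox tags and manually assigned tags (those with an empty match and a non-auto algorithm), so only assignments that matching could have produced are cleared before matching runs again. An equivalent formulation of that delete, for readability:

from django.db.models import Q

matching_created = ~(
    Q(tag__is_inbox_tag=True) |
    (Q(tag__match="") & ~Q(tag__matching_algorithm=Tag.MATCH_AUTO))
)
# chained .exclude(A).exclude(B) above == .filter(~A & ~B) here
Document.tags.through.objects.filter(
    matching_created, document=document).delete()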
def run_pre_consume_script(sender, filename, **kwargs):

    if not settings.PRE_CONSUME_SCRIPT:
        return

    Popen((settings.PRE_CONSUME_SCRIPT, filename)).wait()


def run_post_consume_script(sender, document, **kwargs):

    if not settings.POST_CONSUME_SCRIPT:
        return

    Popen((
        settings.POST_CONSUME_SCRIPT,
        str(document.pk),
        document.get_public_filename(),
        os.path.normpath(document.source_path),
        os.path.normpath(document.thumbnail_path),
        reverse("document-download", kwargs={"pk": document.pk}),
        reverse("document-thumb", kwargs={"pk": document.pk}),
        str(document.correspondent),
        str(",".join(document.tags.all().values_list("slug", flat=True)))
    )).wait()


@receiver(models.signals.post_delete, sender=Document)
def cleanup_document_deletion(sender, instance, using, **kwargs):
    for f in (instance.source_path,
              instance.archive_path,
              instance.thumbnail_path):
        if os.path.isfile(f):
            try:
                os.unlink(f)
                logging.getLogger(__name__).debug(
                    f"Deleted file {f}.")
            except OSError as e:
                logging.getLogger(__name__).warning(
                    f"While deleting document {str(instance)}, the file "
                    f"{f} could not be deleted: {e}"
                )
    with FileLock(settings.MEDIA_LOCK):
        for f in (instance.source_path,
                  instance.archive_path,
                  instance.thumbnail_path):
            if os.path.isfile(f):
                try:
                    os.unlink(f)
                    logging.getLogger(__name__).debug(
                        f"Deleted file {f}.")
                except OSError as e:
                    logging.getLogger(__name__).warning(
                        f"While deleting document {str(instance)}, the file "
                        f"{f} could not be deleted: {e}"
                    )

    delete_empty_directories(
        os.path.dirname(instance.source_path),
        root=settings.ORIGINALS_DIR
    )
        delete_empty_directories(
            os.path.dirname(instance.source_path),
            root=settings.ORIGINALS_DIR
        )

    delete_empty_directories(
        os.path.dirname(instance.archive_path),
        root=settings.ARCHIVE_DIR
    )
        delete_empty_directories(
            os.path.dirname(instance.archive_path),
            root=settings.ARCHIVE_DIR
        )

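Both the deletion handler above and the renaming handler below now wrap all file system work in a lock on settings.MEDIA_LOCK, so the consumer and the signal handlers cannot race each other inside the media directory. The pattern, reduced to its core (the function call is a placeholder):

from filelock import FileLock

with FileLock(settings.MEDIA_LOCK):
    # exclusive across processes: blocks until the lock file is free,
    # then releases it when the block exits
    move_or_delete_files()  # placeholder for the real work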
def validate_move(instance, old_path, new_path):
@@ -226,81 +206,87 @@ def update_filename_and_move_files(sender, instance, **kwargs):
        # This will in turn cause this logic to move the file where it belongs.
        return

    old_filename = instance.filename
    new_filename = generate_filename(instance)
    with FileLock(settings.MEDIA_LOCK):
        old_filename = instance.filename
        new_filename = generate_unique_filename(
            instance, settings.ORIGINALS_DIR)

    if new_filename == instance.filename:
        # Don't do anything if its the same.
        return

    old_source_path = instance.source_path
    new_source_path = os.path.join(settings.ORIGINALS_DIR, new_filename)

    if not validate_move(instance, old_source_path, new_source_path):
        return

    # archive files are optional, archive checksum tells us if we have one,
    # since this is None for documents without archived files.
    if instance.archive_checksum:
        new_archive_filename = archive_name_from_filename(new_filename)
        old_archive_path = instance.archive_path
        new_archive_path = os.path.join(settings.ARCHIVE_DIR,
                                        new_archive_filename)

        if not validate_move(instance, old_archive_path, new_archive_path):
        if new_filename == instance.filename:
            # Don't do anything if its the same.
            return

        create_source_path_directory(new_archive_path)
    else:
        old_archive_path = None
        new_archive_path = None
        old_source_path = instance.source_path
        new_source_path = os.path.join(settings.ORIGINALS_DIR, new_filename)

    create_source_path_directory(new_source_path)
        if not validate_move(instance, old_source_path, new_source_path):
            return

    try:
        os.rename(old_source_path, new_source_path)
        # archive files are optional, archive checksum tells us if we have one,
        # since this is None for documents without archived files.
        if instance.archive_checksum:
            os.rename(old_archive_path, new_archive_path)
        instance.filename = new_filename
        # Don't save here to prevent infinite recursion.
        Document.objects.filter(pk=instance.pk).update(filename=new_filename)
            new_archive_filename = archive_name_from_filename(new_filename)
            old_archive_path = instance.archive_path
            new_archive_path = os.path.join(settings.ARCHIVE_DIR,
                                            new_archive_filename)

        logging.getLogger(__name__).debug(
            f"Moved file {old_source_path} to {new_source_path}.")
            if not validate_move(instance, old_archive_path, new_archive_path):
                return

        if instance.archive_checksum:
            logging.getLogger(__name__).debug(
                f"Moved file {old_archive_path} to {new_archive_path}.")
            create_source_path_directory(new_archive_path)
        else:
            old_archive_path = None
            new_archive_path = None

        create_source_path_directory(new_source_path)

    except OSError as e:
        instance.filename = old_filename
        # this happens when we can't move a file. If that's the case for the
        # archive file, we try our best to revert the changes.
        try:
            os.rename(old_source_path, new_source_path)
            if instance.archive_checksum:
                os.rename(old_archive_path, new_archive_path)
            instance.filename = new_filename

            # Don't save() here to prevent infinite recursion.
            Document.objects.filter(pk=instance.pk).update(
                filename=new_filename)

        except OSError as e:
            instance.filename = old_filename
            # this happens when we can't move a file. If that's the case for
            # the archive file, we try our best to revert the changes.
            # no need to save the instance, the update() has not happened yet.
            try:
                os.rename(new_source_path, old_source_path)
                os.rename(new_archive_path, old_archive_path)
            except Exception as e:
                # This is fine, since:
                # A: if we managed to move source from A to B, we will also
                #    manage to move it from B to A. If not, we have a serious
                #    issue that's going to get caught by the sanity checker.
                #    All files remain in place and will never be overwritten,
                #    so this is not the end of the world.
                # B: if moving the original file failed, nothing has changed
                #    anyway.
                pass
        except DatabaseError as e:
            # this happens after moving files, so move them back into place.
            # since moving them once succeeded, it's very likely going to
            # succeed again.
            os.rename(new_source_path, old_source_path)
        os.rename(new_archive_path, old_archive_path)
    except Exception as e:
        # This is fine, since:
        # A: if we managed to move source from A to B, we will also manage
        #    to move it from B to A. If not, we have a serious issue
        #    that's going to get caught by the sanity checker.
        #    all files remain in place and will never be overwritten,
        #    so this is not the end of the world.
        # B: if moving the original file failed, nothing has changed anyway.
        pass
    except DatabaseError as e:
        os.rename(new_source_path, old_source_path)
            if instance.archive_checksum:
                os.rename(new_archive_path, old_archive_path)
            instance.filename = old_filename
        if instance.archive_checksum:
            os.rename(new_archive_path, old_archive_path)
        instance.filename = old_filename
            # again, no need to save the instance, since the actual update()
            # operation failed.

    if not os.path.isfile(old_source_path):
        delete_empty_directories(os.path.dirname(old_source_path),
                                 root=settings.ORIGINALS_DIR)
        # finally, remove any empty sub folders. This will do nothing if
        # something has failed above.
        if not os.path.isfile(old_source_path):
            delete_empty_directories(os.path.dirname(old_source_path),
                                     root=settings.ORIGINALS_DIR)

    if old_archive_path and not os.path.isfile(old_archive_path):
        delete_empty_directories(os.path.dirname(old_archive_path),
                                 root=settings.ARCHIVE_DIR)
        if old_archive_path and not os.path.isfile(old_archive_path):
            delete_empty_directories(os.path.dirname(old_archive_path),
                                     root=settings.ARCHIVE_DIR)


def set_log_entry(sender, document=None, logging_group=None, **kwargs):

@@ -1,6 +1,8 @@
import logging

import tqdm
from django.conf import settings
from django.db.models.signals import post_save
from whoosh.writing import AsyncWriter

from documents import index, sanity_checker
@@ -23,7 +25,7 @@ def index_reindex():
    ix = index.open_index(recreate=True)

    with AsyncWriter(ix) as writer:
        for document in documents:
        for document in tqdm.tqdm(documents):
            index.update_document(writer, document)


@@ -33,9 +35,9 @@ def train_classifier():
    try:
        # load the classifier, since we might not have to train it again.
        classifier.reload()
    except (FileNotFoundError, IncompatibleClassifierVersionError):
    except (OSError, EOFError, IncompatibleClassifierVersionError):
        # This is what we're going to fix here.
        pass
    classifier = DocumentClassifier()

    try:
        if classifier.train():
@@ -86,3 +88,16 @@ def sanity_check():
        raise SanityFailedError(messages)
    else:
        return "No issues detected."


def bulk_update_documents(document_ids):
    documents = Document.objects.filter(id__in=document_ids)

    ix = index.open_index()

    for doc in documents:
        post_save.send(Document, instance=doc, created=False)

    with AsyncWriter(ix) as writer:
        for doc in documents:
            index.update_document(writer, doc)

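bulk_update_documents() re-fires post_save for each affected document and then refreshes the search index in a single AsyncWriter pass, which is much cheaper than indexing one document at a time. It is meant to be queued from the bulk edit code rather than called inline; presumably along these lines (the call site is an assumption, mirrored by the tests mocking documents.bulk_edit.async_task):

from django_q.tasks import async_task

# queue an index refresh for the documents a bulk edit just touched
async_task("documents.tasks.bulk_update_documents",
           document_ids=[1, 2, 3])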
@@ -5,15 +5,20 @@
<html lang="en">
<head>
  <meta charset="utf-8">
  <title>PaperlessUi</title>
  <title>Paperless-ng</title>
  <base href="/">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <meta name="username" content="{{username}}">
  <meta name="full_name" content="{{full_name}}">
  <meta name="cookie_prefix" content="{{cookie_prefix}}">
  <link rel="icon" type="image/x-icon" href="favicon.ico">
  <link rel="stylesheet" href="{% static 'frontend/styles.css' %}"></head>
  <link rel="manifest" href="{% static webmanifest %}">
  <link rel="stylesheet" href="{% static styles_css %}">
</head>
<body>
  <app-root>Loading...</app-root>
  <script src="{% static 'frontend/runtime.js' %}" defer></script>
  <script src="{% static 'frontend/polyfills.js' %}" defer></script>
  <script src="{% static 'frontend/main.js' %}" defer></script>
  <script src="{% static runtime_js %}" defer></script>
  <script src="{% static polyfills_js %}" defer></script>
  <script src="{% static main_js %}" defer></script>
</body>
</html>

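The template above stops hard-coding 'frontend/...' asset paths and resolves them from context variables instead, which allows the backend to pick a frontend build (for example a localized one) at request time. The view would then supply something like the following; the values here are purely illustrative:

context = {
    "styles_css": "frontend/en-US/styles.css",
    "runtime_js": "frontend/en-US/runtime.js",
    "polyfills_js": "frontend/en-US/polyfills.js",
    "main_js": "frontend/en-US/main.js",
    "webmanifest": "frontend/en-US/manifest.webmanifest",
}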
BIN src/documents/tests/samples/test_with_bom.pdf (new file; binary file not shown)

src/documents/tests/test_admin.py (new file, 57 lines)
@@ -0,0 +1,57 @@
from unittest import mock

from django.contrib.admin.sites import AdminSite
from django.test import TestCase
from django.utils import timezone

from documents.admin import DocumentAdmin
from documents.models import Document, Tag


class TestDocumentAdmin(TestCase):

    def setUp(self) -> None:
        self.doc_admin = DocumentAdmin(model=Document, admin_site=AdminSite())

    @mock.patch("documents.admin.index.add_or_update_document")
    def test_save_model(self, m):
        doc = Document.objects.create(title="test")
        doc.title = "new title"
        self.doc_admin.save_model(None, doc, None, None)
        self.assertEqual(Document.objects.get(id=doc.id).title, "new title")
        m.assert_called_once()

    def test_tags(self):
        doc = Document.objects.create(title="test")
        doc.tags.create(name="t1")
        doc.tags.create(name="t2")

        self.assertEqual(self.doc_admin.tags_(doc), "<span >t1, </span><span >t2, </span>")

    def test_tags_empty(self):
        doc = Document.objects.create(title="test")

        self.assertEqual(self.doc_admin.tags_(doc), "")

    @mock.patch("documents.admin.index.remove_document")
    def test_delete_model(self, m):
        doc = Document.objects.create(title="test")
        self.doc_admin.delete_model(None, doc)
        self.assertRaises(Document.DoesNotExist, Document.objects.get, id=doc.id)
        m.assert_called_once()

    @mock.patch("documents.admin.index.remove_document")
    def test_delete_queryset(self, m):
        for i in range(42):
            Document.objects.create(title="Many documents with the same title", checksum=f"{i:02}")

        self.assertEqual(Document.objects.count(), 42)

        self.doc_admin.delete_queryset(None, Document.objects.all())

        self.assertEqual(m.call_count, 42)
        self.assertEqual(Document.objects.count(), 0)

    def test_created(self):
        doc = Document.objects.create(title="test", created=timezone.datetime(2020, 4, 12))
        self.assertEqual(self.doc_admin.created_(doc), "2020-04-12")
@@ -1,14 +1,15 @@
import json
import os
import shutil
import tempfile
from unittest import mock

from django.contrib.auth.models import User
from pathvalidate import ValidationError
from rest_framework.test import APITestCase
from whoosh.writing import AsyncWriter

from documents import index
from documents.models import Document, Correspondent, DocumentType, Tag
from documents import index, bulk_edit
from documents.models import Document, Correspondent, DocumentType, Tag, SavedView
from documents.tests.utils import DirectoriesMixin


@@ -17,8 +18,8 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):
    def setUp(self):
        super(TestDocumentApi, self).setUp()

        user = User.objects.create_superuser(username="temp_admin")
        self.client.force_login(user=user)
        self.user = User.objects.create_superuser(username="temp_admin")
        self.client.force_login(user=self.user)

    def testDocuments(self):

@@ -63,6 +64,58 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):

        self.assertEqual(len(Document.objects.all()), 0)

    def test_document_fields(self):
        c = Correspondent.objects.create(name="c", pk=41)
        dt = DocumentType.objects.create(name="dt", pk=63)
        tag = Tag.objects.create(name="t", pk=85)
        doc = Document.objects.create(title="WOW", content="the content", correspondent=c, document_type=dt, checksum="123", mime_type="application/pdf")

        response = self.client.get("/api/documents/", format='json')
        self.assertEqual(response.status_code, 200)
        results_full = response.data['results']
        self.assertTrue("content" in results_full[0])
        self.assertTrue("id" in results_full[0])

        response = self.client.get("/api/documents/?fields=id", format='json')
        self.assertEqual(response.status_code, 200)
        results = response.data['results']
        self.assertFalse("content" in results[0])
        self.assertTrue("id" in results[0])
        self.assertEqual(len(results[0]), 1)

        response = self.client.get("/api/documents/?fields=content", format='json')
        self.assertEqual(response.status_code, 200)
        results = response.data['results']
        self.assertTrue("content" in results[0])
        self.assertFalse("id" in results[0])
        self.assertEqual(len(results[0]), 1)

        response = self.client.get("/api/documents/?fields=id,content", format='json')
        self.assertEqual(response.status_code, 200)
        results = response.data['results']
        self.assertTrue("content" in results[0])
        self.assertTrue("id" in results[0])
        self.assertEqual(len(results[0]), 2)

        response = self.client.get("/api/documents/?fields=id,conteasdnt", format='json')
        self.assertEqual(response.status_code, 200)
        results = response.data['results']
        self.assertFalse("content" in results[0])
        self.assertTrue("id" in results[0])
        self.assertEqual(len(results[0]), 1)

        response = self.client.get("/api/documents/?fields=", format='json')
        self.assertEqual(response.status_code, 200)
        results = response.data['results']
        self.assertEqual(results_full, results)

        response = self.client.get("/api/documents/?fields=dgfhs", format='json')
        self.assertEqual(response.status_code, 200)
        results = response.data['results']
        self.assertEqual(len(results[0]), 0)


    def test_document_actions(self):

        _, filename = tempfile.mkstemp(dir=self.dirs.originals_dir)
@@ -169,15 +222,13 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):
        self.assertEqual(response.status_code, 200)
        results = response.data['results']
        self.assertEqual(len(results), 2)
        self.assertEqual(results[0]['id'], doc2.id)
        self.assertEqual(results[1]['id'], doc3.id)
        self.assertCountEqual([results[0]['id'], results[1]['id']], [doc2.id, doc3.id])

        response = self.client.get("/api/documents/?tags__id__in={},{}".format(tag_inbox.id, tag_3.id))
        self.assertEqual(response.status_code, 200)
        results = response.data['results']
        self.assertEqual(len(results), 2)
        self.assertEqual(results[0]['id'], doc1.id)
        self.assertEqual(results[1]['id'], doc3.id)
        self.assertCountEqual([results[0]['id'], results[1]['id']], [doc1.id, doc3.id])

        response = self.client.get("/api/documents/?tags__id__all={},{}".format(tag_2.id, tag_3.id))
        self.assertEqual(response.status_code, 200)
@@ -195,6 +246,23 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):
        results = response.data['results']
        self.assertEqual(len(results), 3)

        response = self.client.get("/api/documents/?tags__id__none={}".format(tag_3.id))
        self.assertEqual(response.status_code, 200)
        results = response.data['results']
        self.assertEqual(len(results), 2)
        self.assertCountEqual([results[0]['id'], results[1]['id']], [doc1.id, doc2.id])

        response = self.client.get("/api/documents/?tags__id__none={},{}".format(tag_3.id, tag_2.id))
        self.assertEqual(response.status_code, 200)
        results = response.data['results']
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['id'], doc1.id)

        response = self.client.get("/api/documents/?tags__id__none={},{}".format(tag_2.id, tag_inbox.id))
        self.assertEqual(response.status_code, 200)
        results = response.data['results']
        self.assertEqual(len(results), 0)

    def test_search_no_query(self):
        response = self.client.get("/api/search/")
        results = response.data['results']
@@ -336,6 +404,25 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):

        self.assertEqual(correction, None)

    def test_search_more_like(self):
        d1 = Document.objects.create(title="invoice", content="the thing i bought at a shop and paid with bank account", checksum="A", pk=1)
        d2 = Document.objects.create(title="bank statement 1", content="things i paid for in august", pk=2, checksum="B")
        d3 = Document.objects.create(title="bank statement 3", content="things i paid for in september", pk=3, checksum="C")
        with AsyncWriter(index.open_index()) as writer:
            index.update_document(writer, d1)
            index.update_document(writer, d2)
            index.update_document(writer, d3)

        response = self.client.get(f"/api/search/?more_like={d2.id}")

        self.assertEqual(response.status_code, 200)

        results = response.data['results']

        self.assertEqual(len(results), 2)
        self.assertEqual(results[0]['id'], d3.id)
        self.assertEqual(results[1]['id'], d1.id)

    def test_statistics(self):

        doc1 = Document.objects.create(title="none1", checksum="A")
@@ -384,16 +471,6 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):
        self.assertEqual(response.status_code, 400)
        m.assert_not_called()

    @mock.patch("documents.views.async_task")
    @mock.patch("documents.serialisers.validate_filename")
    def test_upload_invalid_filename(self, validate_filename, async_task):
        validate_filename.side_effect = ValidationError()
        with open(os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"), "rb") as f:
            response = self.client.post("/api/documents/post_document/", {"document": f})
        self.assertEqual(response.status_code, 400)

        async_task.assert_not_called()

    @mock.patch("documents.views.async_task")
    def test_upload_with_title(self, async_task):
        with open(os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"), "rb") as f:
@@ -475,3 +552,450 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):
|
||||
self.assertEqual(response.status_code, 400)
|
||||
|
||||
async_task.assert_not_called()
|
||||
|
||||
def test_get_metadata(self):
|
||||
doc = Document.objects.create(title="test", filename="file.pdf", mime_type="image/png", archive_checksum="A")
|
||||
|
||||
        shutil.copy(os.path.join(os.path.dirname(__file__), "samples", "documents", "thumbnails", "0000001.png"), doc.source_path)
        shutil.copy(os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"), doc.archive_path)

        response = self.client.get(f"/api/documents/{doc.pk}/metadata/")
        self.assertEqual(response.status_code, 200)

        meta = response.data

        self.assertEqual(meta['original_mime_type'], "image/png")
        self.assertTrue(meta['has_archive_version'])
        self.assertEqual(len(meta['original_metadata']), 0)
        self.assertGreater(len(meta['archive_metadata']), 0)

    def test_get_metadata_no_archive(self):
        doc = Document.objects.create(title="test", filename="file.pdf", mime_type="application/pdf")

        shutil.copy(os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"), doc.source_path)

        response = self.client.get(f"/api/documents/{doc.pk}/metadata/")
        self.assertEqual(response.status_code, 200)

        meta = response.data

        self.assertEqual(meta['original_mime_type'], "application/pdf")
        self.assertFalse(meta['has_archive_version'])
        self.assertGreater(len(meta['original_metadata']), 0)
        self.assertIsNone(meta['archive_metadata'])

    def test_saved_views(self):
        u1 = User.objects.create_user("user1")
        u2 = User.objects.create_user("user2")

        v1 = SavedView.objects.create(user=u1, name="test1", sort_field="", show_on_dashboard=False, show_in_sidebar=False)
        v2 = SavedView.objects.create(user=u2, name="test2", sort_field="", show_on_dashboard=False, show_in_sidebar=False)
        v3 = SavedView.objects.create(user=u2, name="test3", sort_field="", show_on_dashboard=False, show_in_sidebar=False)

        response = self.client.get("/api/saved_views/")
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data['count'], 0)

        self.assertEqual(self.client.get(f"/api/saved_views/{v1.id}/").status_code, 404)

        self.client.force_login(user=u1)

        response = self.client.get("/api/saved_views/")
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data['count'], 1)

        self.assertEqual(self.client.get(f"/api/saved_views/{v1.id}/").status_code, 200)

        self.client.force_login(user=u2)

        response = self.client.get("/api/saved_views/")
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data['count'], 2)

        self.assertEqual(self.client.get(f"/api/saved_views/{v1.id}/").status_code, 404)

    def test_create_update_patch(self):

        u1 = User.objects.create_user("user1")

        view = {
            "name": "test",
            "show_on_dashboard": True,
            "show_in_sidebar": True,
            "sort_field": "created2",
            "filter_rules": [
                {
                    "rule_type": 4,
                    "value": "test"
                }
            ]
        }

        response = self.client.post("/api/saved_views/", view, format='json')
        self.assertEqual(response.status_code, 201)

        v1 = SavedView.objects.get(name="test")
        self.assertEqual(v1.sort_field, "created2")
        self.assertEqual(v1.filter_rules.count(), 1)
        self.assertEqual(v1.user, self.user)

        response = self.client.patch(f"/api/saved_views/{v1.id}/", {
            "show_in_sidebar": False
        }, format='json')

        v1 = SavedView.objects.get(id=v1.id)
        self.assertEqual(response.status_code, 200)
        self.assertFalse(v1.show_in_sidebar)
        self.assertEqual(v1.filter_rules.count(), 1)

        view['filter_rules'] = [{
            "rule_type": 12,
            "value": "secret"
        }]

        response = self.client.put(f"/api/saved_views/{v1.id}/", view, format='json')
        self.assertEqual(response.status_code, 200)

        v1 = SavedView.objects.get(id=v1.id)
        self.assertEqual(v1.filter_rules.count(), 1)
        self.assertEqual(v1.filter_rules.first().value, "secret")

        view['filter_rules'] = []

        response = self.client.put(f"/api/saved_views/{v1.id}/", view, format='json')
        self.assertEqual(response.status_code, 200)

        v1 = SavedView.objects.get(id=v1.id)
        self.assertEqual(v1.filter_rules.count(), 0)


class TestBulkEdit(DirectoriesMixin, APITestCase):

    def setUp(self):
        super(TestBulkEdit, self).setUp()

        user = User.objects.create_superuser(username="temp_admin")
        self.client.force_login(user=user)

        patcher = mock.patch('documents.bulk_edit.async_task')
        self.async_task = patcher.start()
        self.addCleanup(patcher.stop)
        self.c1 = Correspondent.objects.create(name="c1")
        self.c2 = Correspondent.objects.create(name="c2")
        self.dt1 = DocumentType.objects.create(name="dt1")
        self.dt2 = DocumentType.objects.create(name="dt2")
        self.t1 = Tag.objects.create(name="t1")
        self.t2 = Tag.objects.create(name="t2")
        self.doc1 = Document.objects.create(checksum="A", title="A")
        self.doc2 = Document.objects.create(checksum="B", title="B", correspondent=self.c1, document_type=self.dt1)
        self.doc3 = Document.objects.create(checksum="C", title="C", correspondent=self.c2, document_type=self.dt2)
        self.doc4 = Document.objects.create(checksum="D", title="D")
        self.doc5 = Document.objects.create(checksum="E", title="E")
        self.doc2.tags.add(self.t1)
        self.doc3.tags.add(self.t2)
        self.doc4.tags.add(self.t1, self.t2)

    def test_set_correspondent(self):
        self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 1)
        bulk_edit.set_correspondent([self.doc1.id, self.doc2.id, self.doc3.id], self.c2.id)
        self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 3)
        self.async_task.assert_called_once()
        args, kwargs = self.async_task.call_args
        self.assertCountEqual(kwargs['document_ids'], [self.doc1.id, self.doc2.id])

    def test_unset_correspondent(self):
        self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 1)
        bulk_edit.set_correspondent([self.doc1.id, self.doc2.id, self.doc3.id], None)
        self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 0)
        self.async_task.assert_called_once()
        args, kwargs = self.async_task.call_args
        self.assertCountEqual(kwargs['document_ids'], [self.doc2.id, self.doc3.id])

    def test_set_document_type(self):
        self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 1)
        bulk_edit.set_document_type([self.doc1.id, self.doc2.id, self.doc3.id], self.dt2.id)
        self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 3)
        self.async_task.assert_called_once()
        args, kwargs = self.async_task.call_args
        self.assertCountEqual(kwargs['document_ids'], [self.doc1.id, self.doc2.id])

    def test_unset_document_type(self):
        self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 1)
        bulk_edit.set_document_type([self.doc1.id, self.doc2.id, self.doc3.id], None)
        self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 0)
        self.async_task.assert_called_once()
        args, kwargs = self.async_task.call_args
        self.assertCountEqual(kwargs['document_ids'], [self.doc2.id, self.doc3.id])

    def test_add_tag(self):
        self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 2)
        bulk_edit.add_tag([self.doc1.id, self.doc2.id, self.doc3.id, self.doc4.id], self.t1.id)
        self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 4)
        self.async_task.assert_called_once()
        args, kwargs = self.async_task.call_args
        self.assertCountEqual(kwargs['document_ids'], [self.doc1.id, self.doc3.id])

    def test_remove_tag(self):
        self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 2)
        bulk_edit.remove_tag([self.doc1.id, self.doc3.id, self.doc4.id], self.t1.id)
        self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 1)
        self.async_task.assert_called_once()
        args, kwargs = self.async_task.call_args
        self.assertCountEqual(kwargs['document_ids'], [self.doc4.id])

    def test_modify_tags(self):
        tag_unrelated = Tag.objects.create(name="unrelated")
        self.doc2.tags.add(tag_unrelated)
        self.doc3.tags.add(tag_unrelated)
        bulk_edit.modify_tags([self.doc2.id, self.doc3.id], add_tags=[self.t2.id], remove_tags=[self.t1.id])

        self.assertCountEqual(list(self.doc2.tags.all()), [self.t2, tag_unrelated])
        self.assertCountEqual(list(self.doc3.tags.all()), [self.t2, tag_unrelated])

        self.async_task.assert_called_once()
        args, kwargs = self.async_task.call_args
        # TODO: doc3 should not be affected, but the query for that is rather complicated
        self.assertCountEqual(kwargs['document_ids'], [self.doc2.id, self.doc3.id])

    def test_delete(self):
        self.assertEqual(Document.objects.count(), 5)
        bulk_edit.delete([self.doc1.id, self.doc2.id])
        self.assertEqual(Document.objects.count(), 3)
        self.assertCountEqual([doc.id for doc in Document.objects.all()], [self.doc3.id, self.doc4.id, self.doc5.id])

    @mock.patch("documents.serialisers.bulk_edit.set_correspondent")
    def test_api_set_correspondent(self, m):
        m.return_value = "OK"
        response = self.client.post("/api/documents/bulk_edit/", json.dumps({
            "documents": [self.doc1.id],
            "method": "set_correspondent",
            "parameters": {"correspondent": self.c1.id}
        }), content_type='application/json')
        self.assertEqual(response.status_code, 200)
        m.assert_called_once()
        args, kwargs = m.call_args
        self.assertEqual(args[0], [self.doc1.id])
        self.assertEqual(kwargs['correspondent'], self.c1.id)

    @mock.patch("documents.serialisers.bulk_edit.set_correspondent")
    def test_api_unset_correspondent(self, m):
        m.return_value = "OK"
        response = self.client.post("/api/documents/bulk_edit/", json.dumps({
            "documents": [self.doc1.id],
            "method": "set_correspondent",
            "parameters": {"correspondent": None}
        }), content_type='application/json')
        self.assertEqual(response.status_code, 200)
        m.assert_called_once()
        args, kwargs = m.call_args
        self.assertEqual(args[0], [self.doc1.id])
        self.assertIsNone(kwargs['correspondent'])

    @mock.patch("documents.serialisers.bulk_edit.set_document_type")
    def test_api_set_type(self, m):
        m.return_value = "OK"
        response = self.client.post("/api/documents/bulk_edit/", json.dumps({
            "documents": [self.doc1.id],
            "method": "set_document_type",
            "parameters": {"document_type": self.dt1.id}
        }), content_type='application/json')
        self.assertEqual(response.status_code, 200)
        m.assert_called_once()
        args, kwargs = m.call_args
        self.assertEqual(args[0], [self.doc1.id])
        self.assertEqual(kwargs['document_type'], self.dt1.id)

    @mock.patch("documents.serialisers.bulk_edit.set_document_type")
    def test_api_unset_type(self, m):
        m.return_value = "OK"
        response = self.client.post("/api/documents/bulk_edit/", json.dumps({
            "documents": [self.doc1.id],
            "method": "set_document_type",
            "parameters": {"document_type": None}
        }), content_type='application/json')
        self.assertEqual(response.status_code, 200)
        m.assert_called_once()
        args, kwargs = m.call_args
        self.assertEqual(args[0], [self.doc1.id])
        self.assertIsNone(kwargs['document_type'])

    @mock.patch("documents.serialisers.bulk_edit.add_tag")
    def test_api_add_tag(self, m):
        m.return_value = "OK"
        response = self.client.post("/api/documents/bulk_edit/", json.dumps({
            "documents": [self.doc1.id],
            "method": "add_tag",
            "parameters": {"tag": self.t1.id}
        }), content_type='application/json')
        self.assertEqual(response.status_code, 200)
        m.assert_called_once()
        args, kwargs = m.call_args
        self.assertEqual(args[0], [self.doc1.id])
        self.assertEqual(kwargs['tag'], self.t1.id)

    @mock.patch("documents.serialisers.bulk_edit.remove_tag")
    def test_api_remove_tag(self, m):
        m.return_value = "OK"
        response = self.client.post("/api/documents/bulk_edit/", json.dumps({
            "documents": [self.doc1.id],
            "method": "remove_tag",
            "parameters": {"tag": self.t1.id}
        }), content_type='application/json')
        self.assertEqual(response.status_code, 200)
        m.assert_called_once()
        args, kwargs = m.call_args
        self.assertEqual(args[0], [self.doc1.id])
        self.assertEqual(kwargs['tag'], self.t1.id)

    @mock.patch("documents.serialisers.bulk_edit.modify_tags")
    def test_api_modify_tags(self, m):
        m.return_value = "OK"
        response = self.client.post("/api/documents/bulk_edit/", json.dumps({
            "documents": [self.doc1.id, self.doc3.id],
            "method": "modify_tags",
            "parameters": {"add_tags": [self.t1.id], "remove_tags": [self.t2.id]}
        }), content_type='application/json')
        self.assertEqual(response.status_code, 200)
        m.assert_called_once()
        args, kwargs = m.call_args
        self.assertListEqual(args[0], [self.doc1.id, self.doc3.id])
        self.assertEqual(kwargs['add_tags'], [self.t1.id])
        self.assertEqual(kwargs['remove_tags'], [self.t2.id])

    @mock.patch("documents.serialisers.bulk_edit.delete")
    def test_api_delete(self, m):
        m.return_value = "OK"
        response = self.client.post("/api/documents/bulk_edit/", json.dumps({
            "documents": [self.doc1.id],
            "method": "delete",
            "parameters": {}
        }), content_type='application/json')
        self.assertEqual(response.status_code, 200)
        m.assert_called_once()
        args, kwargs = m.call_args
        self.assertEqual(args[0], [self.doc1.id])
        self.assertEqual(len(kwargs), 0)

    def test_api_invalid_doc(self):
        self.assertEqual(Document.objects.count(), 5)
        response = self.client.post("/api/documents/bulk_edit/", json.dumps({
            "documents": [-235],
            "method": "delete",
            "parameters": {}
        }), content_type='application/json')
        self.assertEqual(response.status_code, 400)
        self.assertEqual(Document.objects.count(), 5)

    def test_api_invalid_method(self):
        self.assertEqual(Document.objects.count(), 5)
        response = self.client.post("/api/documents/bulk_edit/", json.dumps({
            "documents": [self.doc2.id],
            "method": "exterminate",
            "parameters": {}
        }), content_type='application/json')
        self.assertEqual(response.status_code, 400)
        self.assertEqual(Document.objects.count(), 5)

    def test_api_invalid_correspondent(self):
        self.assertEqual(self.doc2.correspondent, self.c1)
        response = self.client.post("/api/documents/bulk_edit/", json.dumps({
            "documents": [self.doc2.id],
            "method": "set_correspondent",
            "parameters": {'correspondent': 345657}
        }), content_type='application/json')
        self.assertEqual(response.status_code, 400)

        doc2 = Document.objects.get(id=self.doc2.id)
        self.assertEqual(doc2.correspondent, self.c1)

    def test_api_invalid_document_type(self):
        self.assertEqual(self.doc2.document_type, self.dt1)
        response = self.client.post("/api/documents/bulk_edit/", json.dumps({
            "documents": [self.doc2.id],
            "method": "set_document_type",
            "parameters": {'document_type': 345657}
        }), content_type='application/json')
        self.assertEqual(response.status_code, 400)

        doc2 = Document.objects.get(id=self.doc2.id)
        self.assertEqual(doc2.document_type, self.dt1)

    def test_api_add_invalid_tag(self):
        self.assertEqual(list(self.doc2.tags.all()), [self.t1])
        response = self.client.post("/api/documents/bulk_edit/", json.dumps({
            "documents": [self.doc2.id],
            "method": "add_tag",
            "parameters": {'tag': 345657}
        }), content_type='application/json')
        self.assertEqual(response.status_code, 400)

        self.assertEqual(list(self.doc2.tags.all()), [self.t1])

    def test_api_delete_invalid_tag(self):
        self.assertEqual(list(self.doc2.tags.all()), [self.t1])
        response = self.client.post("/api/documents/bulk_edit/", json.dumps({
            "documents": [self.doc2.id],
            "method": "remove_tag",
            "parameters": {'tag': 345657}
        }), content_type='application/json')
        self.assertEqual(response.status_code, 400)

        self.assertEqual(list(self.doc2.tags.all()), [self.t1])

    def test_api_modify_invalid_tags(self):
        self.assertEqual(list(self.doc2.tags.all()), [self.t1])
        response = self.client.post("/api/documents/bulk_edit/", json.dumps({
            "documents": [self.doc2.id],
            "method": "modify_tags",
            "parameters": {'add_tags': [self.t2.id, 1657], "remove_tags": [1123123]}
        }), content_type='application/json')
        self.assertEqual(response.status_code, 400)

    def test_api_selection_data_empty(self):
        response = self.client.post("/api/documents/selection_data/", json.dumps({
            "documents": []
        }), content_type='application/json')
        self.assertEqual(response.status_code, 200)
        for field, Entity in [('selected_correspondents', Correspondent), ('selected_tags', Tag), ('selected_document_types', DocumentType)]:
            self.assertEqual(len(response.data[field]), Entity.objects.count())
            for correspondent in response.data[field]:
                self.assertEqual(correspondent['document_count'], 0)
            self.assertCountEqual(
                map(lambda c: c['id'], response.data[field]),
                map(lambda c: c['id'], Entity.objects.values('id')))

    def test_api_selection_data(self):
        response = self.client.post("/api/documents/selection_data/", json.dumps({
            "documents": [self.doc1.id, self.doc2.id, self.doc4.id, self.doc5.id]
        }), content_type='application/json')
        self.assertEqual(response.status_code, 200)

        self.assertCountEqual(response.data['selected_correspondents'], [{"id": self.c1.id, "document_count": 1}, {"id": self.c2.id, "document_count": 0}])
        self.assertCountEqual(response.data['selected_tags'], [{"id": self.t1.id, "document_count": 2}, {"id": self.t2.id, "document_count": 1}])
        self.assertCountEqual(response.data['selected_document_types'], [{"id": self.c1.id, "document_count": 1}, {"id": self.c2.id, "document_count": 0}])


class TestApiAuth(APITestCase):

    def test_auth_required(self):

        d = Document.objects.create(title="Test")

        self.assertEqual(self.client.get("/api/documents/").status_code, 401)

        self.assertEqual(self.client.get(f"/api/documents/{d.id}/").status_code, 401)
        self.assertEqual(self.client.get(f"/api/documents/{d.id}/download/").status_code, 401)
        self.assertEqual(self.client.get(f"/api/documents/{d.id}/preview/").status_code, 401)
        self.assertEqual(self.client.get(f"/api/documents/{d.id}/thumb/").status_code, 401)

        self.assertEqual(self.client.get("/api/tags/").status_code, 401)
        self.assertEqual(self.client.get("/api/correspondents/").status_code, 401)
        self.assertEqual(self.client.get("/api/document_types/").status_code, 401)

        self.assertEqual(self.client.get("/api/logs/").status_code, 401)
        self.assertEqual(self.client.get("/api/saved_views/").status_code, 401)

        self.assertEqual(self.client.get("/api/search/").status_code, 401)
        self.assertEqual(self.client.get("/api/search/auto_complete/").status_code, 401)
        self.assertEqual(self.client.get("/api/documents/bulk_edit/").status_code, 401)
        self.assertEqual(self.client.get("/api/documents/selection_data/").status_code, 401)

@@ -1,9 +1,12 @@
import unittest
from unittest import mock

from django.core.checks import Error
from django.test import TestCase

from .factories import DocumentFactory
from ..checks import changed_password_check
from .. import document_consumer_declaration
from ..checks import changed_password_check, parser_check
from ..models import Document


@@ -15,3 +18,13 @@ class ChecksTestCase(TestCase):
    def test_changed_password_check_no_encryption(self):
        DocumentFactory.create(storage_type=Document.STORAGE_TYPE_UNENCRYPTED)
        self.assertEqual(changed_password_check(None), [])

    def test_parser_check(self):

        self.assertEqual(parser_check(None), [])

        with mock.patch('documents.checks.document_consumer_declaration.send') as m:
            m.return_value = []

            self.assertEqual(parser_check(None), [Error("No parsers found. This is a bug. The consumer won't be "
                                                        "able to consume any documents without parsers.")])

@@ -27,83 +27,8 @@ class TestAttributes(TestCase):

        self.assertEqual(file_info.title, title, filename)

        self.assertEqual(tuple([t.slug for t in file_info.tags]), tags, filename)
        self.assertEqual(tuple([t.name for t in file_info.tags]), tags, filename)

    def test_guess_attributes_from_name0(self):
        self._test_guess_attributes_from_name(
            "Sender - Title.pdf", "Sender", "Title", ())

    def test_guess_attributes_from_name1(self):
        self._test_guess_attributes_from_name(
            "Spaced Sender - Title.pdf", "Spaced Sender", "Title", ())

    def test_guess_attributes_from_name2(self):
        self._test_guess_attributes_from_name(
            "Sender - Spaced Title.pdf", "Sender", "Spaced Title", ())

    def test_guess_attributes_from_name3(self):
        self._test_guess_attributes_from_name(
            "Dashed-Sender - Title.pdf", "Dashed-Sender", "Title", ())

    def test_guess_attributes_from_name4(self):
        self._test_guess_attributes_from_name(
            "Sender - Dashed-Title.pdf", "Sender", "Dashed-Title", ())

    def test_guess_attributes_from_name5(self):
        self._test_guess_attributes_from_name(
            "Sender - Title - tag1,tag2,tag3.pdf",
            "Sender",
            "Title",
            self.TAGS
        )

    def test_guess_attributes_from_name6(self):
        self._test_guess_attributes_from_name(
            "Spaced Sender - Title - tag1,tag2,tag3.pdf",
            "Spaced Sender",
            "Title",
            self.TAGS
        )

    def test_guess_attributes_from_name7(self):
        self._test_guess_attributes_from_name(
            "Sender - Spaced Title - tag1,tag2,tag3.pdf",
            "Sender",
            "Spaced Title",
            self.TAGS
        )

    def test_guess_attributes_from_name8(self):
        self._test_guess_attributes_from_name(
            "Dashed-Sender - Title - tag1,tag2,tag3.pdf",
            "Dashed-Sender",
            "Title",
            self.TAGS
        )

    def test_guess_attributes_from_name9(self):
        self._test_guess_attributes_from_name(
            "Sender - Dashed-Title - tag1,tag2,tag3.pdf",
            "Sender",
            "Dashed-Title",
            self.TAGS
        )

    def test_guess_attributes_from_name10(self):
        self._test_guess_attributes_from_name(
            "Σενδερ - Τιτλε - tag1,tag2,tag3.pdf",
            "Σενδερ",
            "Τιτλε",
            self.TAGS
        )

    def test_guess_attributes_from_name_when_correspondent_empty(self):
        self._test_guess_attributes_from_name(
            ' - weird empty correspondent but should not break.pdf',
            None,
            'weird empty correspondent but should not break',
            ()
        )

    def test_guess_attributes_from_name_when_title_starts_with_dash(self):
        self._test_guess_attributes_from_name(
@@ -121,28 +46,6 @@ class TestAttributes(TestCase):
            ()
        )

    def test_guess_attributes_from_name_when_title_is_empty(self):
        self._test_guess_attributes_from_name(
            'weird correspondent but should not break - .pdf',
            'weird correspondent but should not break',
            '',
            ()
        )

    def test_case_insensitive_tag_creation(self):
        """
        Tags should be detected and created as lower case.
        :return:
        """

        filename = "Title - Correspondent - tAg1,TAG2.pdf"
        self.assertEqual(len(FileInfo.from_filename(filename).tags), 2)

        path = "Title - Correspondent - tag1,tag2.pdf"
        self.assertEqual(len(FileInfo.from_filename(filename).tags), 2)

        self.assertEqual(Tag.objects.all().count(), 2)


class TestFieldPermutations(TestCase):

@@ -188,7 +91,7 @@ class TestFieldPermutations(TestCase):
            self.assertEqual(info.tags, (), filename)
        else:
            self.assertEqual(
                [t.slug for t in info.tags], tags.split(','),
                [t.name for t in info.tags], tags.split(','),
                filename
            )

@@ -199,69 +102,7 @@ class TestFieldPermutations(TestCase):
            filename = template.format(**spec)
            self._test_guessed_attributes(filename, **spec)

    def test_title_and_correspondent(self):
        template = '{correspondent} - {title}.pdf'
        for correspondent in self.valid_correspondents:
            for title in self.valid_titles:
                spec = dict(correspondent=correspondent, title=title)
                filename = template.format(**spec)
                self._test_guessed_attributes(filename, **spec)

    def test_title_and_correspondent_and_tags(self):
        template = '{correspondent} - {title} - {tags}.pdf'
        for correspondent in self.valid_correspondents:
            for title in self.valid_titles:
                for tags in self.valid_tags:
                    spec = dict(correspondent=correspondent, title=title,
                                tags=tags)
                    filename = template.format(**spec)
                    self._test_guessed_attributes(filename, **spec)

    def test_created_and_correspondent_and_title_and_tags(self):

        template = (
            "{created} - "
            "{correspondent} - "
            "{title} - "
            "{tags}.pdf"
        )

        for created in self.valid_dates:
            for correspondent in self.valid_correspondents:
                for title in self.valid_titles:
                    for tags in self.valid_tags:
                        spec = {
                            "created": created,
                            "correspondent": correspondent,
                            "title": title,
                            "tags": tags,
                        }
                        self._test_guessed_attributes(
                            template.format(**spec), **spec)

    def test_created_and_correspondent_and_title(self):

        template = "{created} - {correspondent} - {title}.pdf"

        for created in self.valid_dates:
            for correspondent in self.valid_correspondents:
                for title in self.valid_titles:

                    # Skip cases where title looks like a tag as we can't
                    # accommodate such cases.
                    if title.lower() == title:
                        continue

                    spec = {
                        "created": created,
                        "correspondent": correspondent,
                        "title": title
                    }
                    self._test_guessed_attributes(
                        template.format(**spec), **spec)

    def test_created_and_title(self):

        template = "{created} - {title}.pdf"

        for created in self.valid_dates:
@@ -273,21 +114,6 @@ class TestFieldPermutations(TestCase):
            self._test_guessed_attributes(
                template.format(**spec), **spec)

    def test_created_and_title_and_tags(self):

        template = "{created} - {title} - {tags}.pdf"

        for created in self.valid_dates:
            for title in self.valid_titles:
                for tags in self.valid_tags:
                    spec = {
                        "created": created,
                        "title": title,
                        "tags": tags
                    }
                    self._test_guessed_attributes(
                        template.format(**spec), **spec)

    def test_invalid_date_format(self):
        info = FileInfo.from_filename("06112017Z - title.pdf")
        self.assertEqual(info.title, "title")
@@ -336,32 +162,6 @@ class TestFieldPermutations(TestCase):
        info = FileInfo.from_filename(filename)
        self.assertEqual(info.title, "anotherall")

        # Complex transformation without date in replacement string
        with self.settings(
                FILENAME_PARSE_TRANSFORMS=[(exact_patt, repl1)]):
            info = FileInfo.from_filename(filename)
            self.assertEqual(info.title, "0001")
            self.assertEqual(len(info.tags), 2)
            self.assertEqual(info.tags[0].slug, "tag1")
            self.assertEqual(info.tags[1].slug, "tag2")
            self.assertIsNone(info.created)

        # Complex transformation with date in replacement string
        with self.settings(
                FILENAME_PARSE_TRANSFORMS=[
                    (none_patt, "none.gif"),
                    (exact_patt, repl2),  # <-- matches
                    (exact_patt, repl1),
                    (all_patt, "all.gif")]):
            info = FileInfo.from_filename(filename)
            self.assertEqual(info.title, "0001")
            self.assertEqual(len(info.tags), 2)
            self.assertEqual(info.tags[0].slug, "tag1")
            self.assertEqual(info.tags[1].slug, "tag2")
            self.assertEqual(info.created.year, 2019)
            self.assertEqual(info.created.month, 9)
            self.assertEqual(info.created.day, 8)


class DummyParser(DocumentParser):

@@ -377,7 +177,7 @@ class DummyParser(DocumentParser):
    def get_optimised_thumbnail(self, document_path, mime_type):
        return self.fake_thumb

    def parse(self, document_path, mime_type):
    def parse(self, document_path, mime_type, file_name=None):
        self.text = "The Text"


@@ -394,7 +194,7 @@ class FaultyParser(DocumentParser):
    def get_optimised_thumbnail(self, document_path, mime_type):
        return self.fake_thumb

    def parse(self, document_path, mime_type):
    def parse(self, document_path, mime_type, file_name=None):
        raise ParseError("Does not compute.")


@@ -476,15 +276,13 @@ class TestConsumer(DirectoriesMixin, TestCase):

    def testOverrideFilename(self):
        filename = self.get_test_file()
        override_filename = "My Bank - Statement for November.pdf"
        override_filename = "Statement for November.pdf"

        document = self.consumer.try_consume_file(filename, override_filename=override_filename)

        self.assertEqual(document.correspondent.name, "My Bank")
        self.assertEqual(document.title, "Statement for November")

    def testOverrideTitle(self):

        document = self.consumer.try_consume_file(self.get_test_file(), override_title="Override Title")
        self.assertEqual(document.title, "Override Title")

@@ -552,7 +350,7 @@ class TestConsumer(DirectoriesMixin, TestCase):
        try:
            self.consumer.try_consume_file(self.get_test_file())
        except ConsumerError as e:
            self.assertTrue("No parsers abvailable for" in str(e))
            self.assertEqual("Unsupported mime type application/pdf of file sample.pdf", str(e))
            return

        self.fail("Should throw exception")
@@ -594,14 +392,13 @@ class TestConsumer(DirectoriesMixin, TestCase):
    def testFilenameHandling(self):
        filename = self.get_test_file()

        document = self.consumer.try_consume_file(filename, override_filename="Bank - Test.pdf", override_title="new docs")
        document = self.consumer.try_consume_file(filename, override_title="new docs")

        self.assertEqual(document.title, "new docs")
        self.assertEqual(document.correspondent.name, "Bank")
        self.assertEqual(document.filename, "Bank/new docs-0000001.pdf")
        self.assertEqual(document.filename, "none/new docs.pdf")

    @override_settings(PAPERLESS_FILENAME_FORMAT="{correspondent}/{title}")
    @mock.patch("documents.signals.handlers.generate_filename")
    @mock.patch("documents.signals.handlers.generate_unique_filename")
    def testFilenameHandlingUnstableFormat(self, m):

        filenames = ["this", "that", "now this", "i cant decide"]
@@ -611,16 +408,15 @@ class TestConsumer(DirectoriesMixin, TestCase):
            filenames.insert(0, f)
            return f

        m.side_effect = lambda f: get_filename()
        m.side_effect = lambda f, root: get_filename()

        filename = self.get_test_file()

        Tag.objects.create(name="test", is_inbox_tag=True)

        document = self.consumer.try_consume_file(filename, override_filename="Bank - Test.pdf", override_title="new docs")
        document = self.consumer.try_consume_file(filename, override_title="new docs")

        self.assertEqual(document.title, "new docs")
        self.assertEqual(document.correspondent.name, "Bank")
        self.assertIsNotNone(os.path.isfile(document.title))
        self.assertTrue(os.path.isfile(document.source_path))

@@ -642,3 +438,81 @@ class TestConsumer(DirectoriesMixin, TestCase):
        self.assertEqual(document.document_type, dtype)
        self.assertIn(t1, document.tags.all())
        self.assertNotIn(t2, document.tags.all())

    @override_settings(CONSUMER_DELETE_DUPLICATES=True)
    def test_delete_duplicate(self):
        dst = self.get_test_file()
        self.assertTrue(os.path.isfile(dst))
        doc = self.consumer.try_consume_file(dst)

        self.assertFalse(os.path.isfile(dst))
        self.assertIsNotNone(doc)

        dst = self.get_test_file()
        self.assertTrue(os.path.isfile(dst))
        self.assertRaises(ConsumerError, self.consumer.try_consume_file, dst)
        self.assertFalse(os.path.isfile(dst))

    @override_settings(CONSUMER_DELETE_DUPLICATES=False)
    def test_no_delete_duplicate(self):
        dst = self.get_test_file()
        self.assertTrue(os.path.isfile(dst))
        doc = self.consumer.try_consume_file(dst)

        self.assertFalse(os.path.isfile(dst))
        self.assertIsNotNone(doc)

        dst = self.get_test_file()
        self.assertTrue(os.path.isfile(dst))
        self.assertRaises(ConsumerError, self.consumer.try_consume_file, dst)
        self.assertTrue(os.path.isfile(dst))


class PostConsumeTestCase(TestCase):

    @mock.patch("documents.consumer.Popen")
    @override_settings(POST_CONSUME_SCRIPT=None)
    def test_no_post_consume_script(self, m):
        doc = Document.objects.create(title="Test", mime_type="application/pdf")
        tag1 = Tag.objects.create(name="a")
        tag2 = Tag.objects.create(name="b")
        doc.tags.add(tag1)
        doc.tags.add(tag2)

        Consumer().run_post_consume_script(doc)

        m.assert_not_called()

    @mock.patch("documents.consumer.Popen")
    @override_settings(POST_CONSUME_SCRIPT="script")
    def test_post_consume_script_simple(self, m):
        doc = Document.objects.create(title="Test", mime_type="application/pdf")

        Consumer().run_post_consume_script(doc)

        m.assert_called_once()

    @mock.patch("documents.consumer.Popen")
    @override_settings(POST_CONSUME_SCRIPT="script")
    def test_post_consume_script_with_correspondent(self, m):
        c = Correspondent.objects.create(name="my_bank")
        doc = Document.objects.create(title="Test", mime_type="application/pdf", correspondent=c)
        tag1 = Tag.objects.create(name="a")
        tag2 = Tag.objects.create(name="b")
        doc.tags.add(tag1)
        doc.tags.add(tag2)

        Consumer().run_post_consume_script(doc)

        m.assert_called_once()

        args, kwargs = m.call_args

        command = args[0]

        self.assertEqual(command[0], "script")
        self.assertEqual(command[1], str(doc.pk))
        self.assertEqual(command[5], f"/api/documents/{doc.pk}/download/")
        self.assertEqual(command[6], f"/api/documents/{doc.pk}/thumb/")
        self.assertEqual(command[7], "my_bank")
        self.assertCountEqual(command[8].split(","), ["a", "b"])

@@ -1,15 +1,20 @@
import datetime
import hashlib
import os
import random
import uuid
from pathlib import Path
from unittest import mock

from django.conf import settings
from django.db import DatabaseError
from django.test import TestCase, override_settings
from django.utils import timezone

from .utils import DirectoriesMixin
from ..file_handling import generate_filename, create_source_path_directory, delete_empty_directories
from ..models import Document, Correspondent
from ..file_handling import generate_filename, create_source_path_directory, delete_empty_directories, \
    generate_unique_filename
from ..models import Document, Correspondent, Tag, DocumentType


class TestFileHandling(DirectoriesMixin, TestCase):
@@ -40,13 +45,13 @@ class TestFileHandling(DirectoriesMixin, TestCase):
        document.filename = generate_filename(document)

        # Ensure that filename is properly generated
        self.assertEqual(document.filename, "none/none-{:07d}.pdf".format(document.pk))
        self.assertEqual(document.filename, "none/none.pdf")

        # Enable encryption and check again
        document.storage_type = Document.STORAGE_TYPE_GPG
        document.filename = generate_filename(document)
        self.assertEqual(document.filename,
                         "none/none-{:07d}.pdf.gpg".format(document.pk))
                         "none/none.pdf.gpg")

        document.save()

@@ -62,7 +67,7 @@ class TestFileHandling(DirectoriesMixin, TestCase):
        # Check proper handling of files
        self.assertEqual(os.path.isdir(settings.ORIGINALS_DIR + "/test"), True)
        self.assertEqual(os.path.isdir(settings.ORIGINALS_DIR + "/none"), False)
        self.assertEqual(os.path.isfile(settings.ORIGINALS_DIR + "/test/test-{:07d}.pdf.gpg".format(document.pk)), True)
        self.assertEqual(os.path.isfile(settings.ORIGINALS_DIR + "/test/test.pdf.gpg"), True)

    @override_settings(PAPERLESS_FILENAME_FORMAT="{correspondent}/{correspondent}")
    def test_file_renaming_missing_permissions(self):
@@ -74,12 +79,12 @@ class TestFileHandling(DirectoriesMixin, TestCase):
        # Ensure that filename is properly generated
        document.filename = generate_filename(document)
        self.assertEqual(document.filename,
                         "none/none-{:07d}.pdf".format(document.pk))
                         "none/none.pdf")
        create_source_path_directory(document.source_path)
        Path(document.source_path).touch()

        # Test source_path
        self.assertEqual(document.source_path, settings.ORIGINALS_DIR + "/none/none-{:07d}.pdf".format(document.pk))
        self.assertEqual(document.source_path, settings.ORIGINALS_DIR + "/none/none.pdf")

        # Make the folder read- and execute-only (no writing and no renaming)
        os.chmod(settings.ORIGINALS_DIR + "/none", 0o555)
@@ -89,8 +94,8 @@ class TestFileHandling(DirectoriesMixin, TestCase):
        document.save()

        # Check proper handling of files
        self.assertEqual(os.path.isfile(settings.ORIGINALS_DIR + "/none/none-{:07d}.pdf".format(document.pk)), True)
        self.assertEqual(document.filename, "none/none-{:07d}.pdf".format(document.pk))
        self.assertEqual(os.path.isfile(settings.ORIGINALS_DIR + "/none/none.pdf"), True)
        self.assertEqual(document.filename, "none/none.pdf")

        os.chmod(settings.ORIGINALS_DIR + "/none", 0o777)

@@ -108,7 +113,7 @@ class TestFileHandling(DirectoriesMixin, TestCase):
        # Ensure that filename is properly generated
        document.filename = generate_filename(document)
        self.assertEqual(document.filename,
                         "none/none-{:07d}.pdf".format(document.pk))
                         "none/none.pdf")
        create_source_path_directory(document.source_path)
        Path(document.source_path).touch()

@@ -125,8 +130,8 @@ class TestFileHandling(DirectoriesMixin, TestCase):

        # Check proper handling of files
        self.assertTrue(os.path.isfile(document.source_path))
        self.assertEqual(os.path.isfile(settings.ORIGINALS_DIR + "/none/none-{:07d}.pdf".format(document.pk)), True)
        self.assertEqual(document.filename, "none/none-{:07d}.pdf".format(document.pk))
        self.assertEqual(os.path.isfile(settings.ORIGINALS_DIR + "/none/none.pdf"), True)
        self.assertEqual(document.filename, "none/none.pdf")

    @override_settings(PAPERLESS_FILENAME_FORMAT="{correspondent}/{correspondent}")
    def test_document_delete(self):
@@ -138,7 +143,7 @@ class TestFileHandling(DirectoriesMixin, TestCase):
        # Ensure that filename is properly generated
        document.filename = generate_filename(document)
        self.assertEqual(document.filename,
                         "none/none-{:07d}.pdf".format(document.pk))
                         "none/none.pdf")

        create_source_path_directory(document.source_path)
        Path(document.source_path).touch()
@@ -146,7 +151,7 @@ class TestFileHandling(DirectoriesMixin, TestCase):
        # Ensure file deletion after delete
        pk = document.pk
        document.delete()
        self.assertEqual(os.path.isfile(settings.ORIGINALS_DIR + "/none/none-{:07d}.pdf".format(pk)), False)
        self.assertEqual(os.path.isfile(settings.ORIGINALS_DIR + "/none/none.pdf"), False)
        self.assertEqual(os.path.isdir(settings.ORIGINALS_DIR + "/none"), False)

    @override_settings(PAPERLESS_FILENAME_FORMAT="{correspondent}/{correspondent}")
@@ -168,7 +173,7 @@ class TestFileHandling(DirectoriesMixin, TestCase):
        # Ensure that filename is properly generated
        document.filename = generate_filename(document)
        self.assertEqual(document.filename,
                         "none/none-{:07d}.pdf".format(document.pk))
                         "none/none.pdf")

        create_source_path_directory(document.source_path)

@@ -185,6 +190,17 @@ class TestFileHandling(DirectoriesMixin, TestCase):
        self.assertEqual(os.path.isdir(settings.ORIGINALS_DIR + "/none"), True)
        self.assertTrue(os.path.isfile(important_file))

    @override_settings(PAPERLESS_FILENAME_FORMAT="{document_type} - {title}")
    def test_document_type(self):
        dt = DocumentType.objects.create(name="my_doc_type")
        d = Document.objects.create(title="the_doc", mime_type="application/pdf")

        self.assertEqual(generate_filename(d), "none - the_doc.pdf")

        d.document_type = dt

        self.assertEqual(generate_filename(d), "my_doc_type - the_doc.pdf")

    @override_settings(PAPERLESS_FILENAME_FORMAT="{tags[type]}")
    def test_tags_with_underscore(self):
        document = Document()
@@ -199,7 +215,7 @@ class TestFileHandling(DirectoriesMixin, TestCase):

        # Ensure that filename is properly generated
        self.assertEqual(generate_filename(document),
                         "demo-{:07d}.pdf".format(document.pk))
                         "demo.pdf")

    @override_settings(PAPERLESS_FILENAME_FORMAT="{tags[type]}")
    def test_tags_with_dash(self):
@@ -215,7 +231,7 @@ class TestFileHandling(DirectoriesMixin, TestCase):

        # Ensure that filename is properly generated
        self.assertEqual(generate_filename(document),
                         "demo-{:07d}.pdf".format(document.pk))
                         "demo.pdf")

    @override_settings(PAPERLESS_FILENAME_FORMAT="{tags[type]}")
    def test_tags_malformed(self):
@@ -231,7 +247,7 @@ class TestFileHandling(DirectoriesMixin, TestCase):

        # Ensure that filename is properly generated
        self.assertEqual(generate_filename(document),
                         "none-{:07d}.pdf".format(document.pk))
                         "none.pdf")

    @override_settings(PAPERLESS_FILENAME_FORMAT="{tags[0]}")
    def test_tags_all(self):
@@ -246,7 +262,7 @@ class TestFileHandling(DirectoriesMixin, TestCase):

        # Ensure that filename is properly generated
        self.assertEqual(generate_filename(document),
                         "demo-{:07d}.pdf".format(document.pk))
                         "demo.pdf")

    @override_settings(PAPERLESS_FILENAME_FORMAT="{tags[1]}")
    def test_tags_out_of_bounds(self):
@@ -261,7 +277,58 @@ class TestFileHandling(DirectoriesMixin, TestCase):

        # Ensure that filename is properly generated
        self.assertEqual(generate_filename(document),
                         "none-{:07d}.pdf".format(document.pk))
                         "none.pdf")

    @override_settings(PAPERLESS_FILENAME_FORMAT="{tags}")
    def test_tags_without_args(self):
        document = Document()
        document.mime_type = "application/pdf"
        document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
        document.save()

        self.assertEqual(generate_filename(document), f"{document.pk:07}.pdf")

    @override_settings(PAPERLESS_FILENAME_FORMAT="{title} {tag_list}")
    def test_tag_list(self):
        doc = Document.objects.create(title="doc1", mime_type="application/pdf")
        doc.tags.create(name="tag2")
        doc.tags.create(name="tag1")

        self.assertEqual(generate_filename(doc), "doc1 tag1,tag2.pdf")

        doc = Document.objects.create(title="doc2", checksum="B", mime_type="application/pdf")

        self.assertEqual(generate_filename(doc), "doc2.pdf")

    @override_settings(PAPERLESS_FILENAME_FORMAT="//etc/something/{title}")
    def test_filename_relative(self):
        doc = Document.objects.create(title="doc1", mime_type="application/pdf")
        doc.filename = generate_filename(doc)
        doc.save()

        self.assertEqual(doc.source_path, os.path.join(settings.ORIGINALS_DIR, "etc", "something", "doc1.pdf"))

    @override_settings(PAPERLESS_FILENAME_FORMAT="{created_year}-{created_month}-{created_day}")
    def test_created_year_month_day(self):
        d1 = timezone.make_aware(datetime.datetime(2020, 3, 6, 1, 1, 1))
        doc1 = Document.objects.create(title="doc1", mime_type="application/pdf", created=d1)

        self.assertEqual(generate_filename(doc1), "2020-03-06.pdf")

        doc1.created = timezone.make_aware(datetime.datetime(2020, 11, 16, 1, 1, 1))

        self.assertEqual(generate_filename(doc1), "2020-11-16.pdf")

    @override_settings(PAPERLESS_FILENAME_FORMAT="{added_year}-{added_month}-{added_day}")
    def test_added_year_month_day(self):
        d1 = timezone.make_aware(datetime.datetime(232, 1, 9, 1, 1, 1))
        doc1 = Document.objects.create(title="doc1", mime_type="application/pdf", added=d1)

        self.assertEqual(generate_filename(doc1), "232-01-09.pdf")

        doc1.added = timezone.make_aware(datetime.datetime(2020, 11, 16, 1, 1, 1))

        self.assertEqual(generate_filename(doc1), "2020-11-16.pdf")

    @override_settings(PAPERLESS_FILENAME_FORMAT="{correspondent}/{correspondent}/{correspondent}")
    def test_nested_directory_cleanup(self):
@@ -272,7 +339,7 @@ class TestFileHandling(DirectoriesMixin, TestCase):

        # Ensure that filename is properly generated
        document.filename = generate_filename(document)
        self.assertEqual(document.filename, "none/none/none-{:07d}.pdf".format(document.pk))
        self.assertEqual(document.filename, "none/none/none.pdf")
        create_source_path_directory(document.source_path)
        Path(document.source_path).touch()

@@ -282,7 +349,7 @@ class TestFileHandling(DirectoriesMixin, TestCase):
        pk = document.pk
        document.delete()

        self.assertEqual(os.path.isfile(settings.ORIGINALS_DIR + "/none/none/none-{:07d}.pdf".format(pk)), False)
        self.assertEqual(os.path.isfile(settings.ORIGINALS_DIR + "/none/none/none.pdf"), False)
        self.assertEqual(os.path.isdir(settings.ORIGINALS_DIR + "/none/none"), False)
        self.assertEqual(os.path.isdir(settings.ORIGINALS_DIR + "/none"), False)
        self.assertEqual(os.path.isdir(settings.ORIGINALS_DIR), True)
@@ -330,6 +397,48 @@ class TestFileHandling(DirectoriesMixin, TestCase):

        self.assertEqual(generate_filename(document), "0000001.pdf")

    @override_settings(PAPERLESS_FILENAME_FORMAT="{title}")
    def test_duplicates(self):
        document = Document.objects.create(mime_type="application/pdf", title="qwe", checksum="A", pk=1)
        document2 = Document.objects.create(mime_type="application/pdf", title="qwe", checksum="B", pk=2)
        Path(document.source_path).touch()
        Path(document2.source_path).touch()
        document.filename = "0000001.pdf"
        document.save()

        self.assertTrue(os.path.isfile(document.source_path))
        self.assertEqual(document.filename, "qwe.pdf")

        document2.filename = "0000002.pdf"
        document2.save()

        self.assertTrue(os.path.isfile(document.source_path))
        self.assertEqual(document2.filename, "qwe_01.pdf")

        # saving should not change the file names.

        document.save()

        self.assertTrue(os.path.isfile(document.source_path))
        self.assertEqual(document.filename, "qwe.pdf")

        document2.save()

        self.assertTrue(os.path.isfile(document.source_path))
        self.assertEqual(document2.filename, "qwe_01.pdf")

        document.delete()

        self.assertFalse(os.path.isfile(document.source_path))

        # filename free, should remove _01 suffix

        document2.save()

        self.assertTrue(os.path.isfile(document.source_path))
        self.assertEqual(document2.filename, "qwe.pdf")


class TestFileHandlingWithArchive(DirectoriesMixin, TestCase):

@@ -358,15 +467,14 @@ class TestFileHandlingWithArchive(DirectoriesMixin, TestCase):
        self.assertFalse(os.path.isfile(archive))
        self.assertTrue(os.path.isfile(doc.source_path))
        self.assertTrue(os.path.isfile(doc.archive_path))
        self.assertEqual(doc.source_path, os.path.join(settings.ORIGINALS_DIR, "none", "my_doc-0000001.pdf"))
        self.assertEqual(doc.archive_path, os.path.join(settings.ARCHIVE_DIR, "none", "my_doc-0000001.pdf"))
        self.assertEqual(doc.source_path, os.path.join(settings.ORIGINALS_DIR, "none", "my_doc.pdf"))
        self.assertEqual(doc.archive_path, os.path.join(settings.ARCHIVE_DIR, "none", "my_doc.pdf"))

    @override_settings(PAPERLESS_FILENAME_FORMAT="{correspondent}/{title}")
    def test_move_archive_gone(self):
        original = os.path.join(settings.ORIGINALS_DIR, "0000001.pdf")
        archive = os.path.join(settings.ARCHIVE_DIR, "0000001.pdf")
        Path(original).touch()
        #Path(archive).touch()
        doc = Document.objects.create(mime_type="application/pdf", title="my_doc", filename="0000001.pdf", checksum="A", archive_checksum="B")

        self.assertTrue(os.path.isfile(original))
@@ -381,7 +489,7 @@ class TestFileHandlingWithArchive(DirectoriesMixin, TestCase):
        Path(original).touch()
        Path(archive).touch()
        os.makedirs(os.path.join(settings.ARCHIVE_DIR, "none"))
        Path(os.path.join(settings.ARCHIVE_DIR, "none", "my_doc-0000001.pdf")).touch()
        Path(os.path.join(settings.ARCHIVE_DIR, "none", "my_doc.pdf")).touch()
        doc = Document.objects.create(mime_type="application/pdf", title="my_doc", filename="0000001.pdf", checksum="A", archive_checksum="B")

        self.assertTrue(os.path.isfile(original))
@@ -494,14 +602,35 @@ class TestFilenameGeneration(TestCase):
    def test_invalid_characters(self):

        doc = Document.objects.create(title="This. is the title.", mime_type="application/pdf", pk=1, checksum="1")
        self.assertEqual(generate_filename(doc), "This. is the title-0000001.pdf")
        self.assertEqual(generate_filename(doc), "This. is the title.pdf")

        doc = Document.objects.create(title="my\\invalid/../title:yay", mime_type="application/pdf", pk=2, checksum="2")
        self.assertEqual(generate_filename(doc), "my-invalid-..-title-yay-0000002.pdf")
        self.assertEqual(generate_filename(doc), "my-invalid-..-title-yay.pdf")

    @override_settings(
        PAPERLESS_FILENAME_FORMAT="{created}"
    )
    def test_date(self):
        doc = Document.objects.create(title="does not matter", created=datetime.datetime(2020,5,21, 7,36,51, 153), mime_type="application/pdf", pk=2, checksum="2")
        self.assertEqual(generate_filename(doc), "2020-05-21-0000002.pdf")
        doc = Document.objects.create(title="does not matter", created=timezone.make_aware(datetime.datetime(2020,5,21, 7,36,51, 153)), mime_type="application/pdf", pk=2, checksum="2")
        self.assertEqual(generate_filename(doc), "2020-05-21.pdf")


def run():
    doc = Document.objects.create(checksum=str(uuid.uuid4()), title=str(uuid.uuid4()), content="wow")
    doc.filename = generate_unique_filename(doc, settings.ORIGINALS_DIR)
    Path(doc.thumbnail_path).touch()
    with open(doc.source_path, "w") as f:
        f.write(str(uuid.uuid4()))
    with open(doc.source_path, "rb") as f:
        doc.checksum = hashlib.md5(f.read()).hexdigest()

    with open(doc.archive_path, "w") as f:
        f.write(str(uuid.uuid4()))
    with open(doc.archive_path, "rb") as f:
        doc.archive_checksum = hashlib.md5(f.read()).hexdigest()

    doc.save()

    for i in range(30):
        doc.title = str(random.randrange(1, 5))
        doc.save()

@@ -1,6 +1,9 @@
from django.test import TestCase

from documents import index
from documents.index import JsonFormatter
from documents.models import Document
from documents.tests.utils import DirectoriesMixin


class JsonFormatterTest(TestCase):
@@ -12,3 +15,21 @@ class JsonFormatterTest(TestCase):
        self.assertListEqual(self.formatter.format([]), [])


class TestAutoComplete(DirectoriesMixin, TestCase):

    def test_auto_complete(self):

        doc1 = Document.objects.create(title="doc1", checksum="A", content="test test2 test3")
        doc2 = Document.objects.create(title="doc2", checksum="B", content="test test2")
        doc3 = Document.objects.create(title="doc3", checksum="C", content="test2")

        index.add_or_update_document(doc1)
        index.add_or_update_document(doc2)
        index.add_or_update_document(doc3)

        ix = index.open_index()

        self.assertListEqual(index.autocomplete(ix, "tes"), [b"test3", b"test", b"test2"])
        self.assertListEqual(index.autocomplete(ix, "tes", limit=3), [b"test3", b"test", b"test2"])
        self.assertListEqual(index.autocomplete(ix, "tes", limit=1), [b"test3"])
        self.assertListEqual(index.autocomplete(ix, "tes", limit=0), [])

src/documents/tests/test_management.py (new file, 135 lines)
@@ -0,0 +1,135 @@
import hashlib
|
||||
import tempfile
|
||||
import filecmp
|
||||
import os
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
from unittest import mock
|
||||
|
||||
from django.test import TestCase, override_settings
|
||||
|
||||
|
||||
from django.core.management import call_command
|
||||
|
||||
from documents.file_handling import generate_filename
|
||||
from documents.management.commands.document_archiver import handle_document
|
||||
from documents.models import Document
|
||||
from documents.tests.utils import DirectoriesMixin
|
||||
|
||||
|
||||
sample_file = os.path.join(os.path.dirname(__file__), "samples", "simple.pdf")
|
||||
|
||||
|
||||
class TestArchiver(DirectoriesMixin, TestCase):
|
||||
|
||||
def make_models(self):
|
||||
return Document.objects.create(checksum="A", title="A", content="first document", mime_type="application/pdf")
|
||||
|
||||
def test_archiver(self):
|
||||
|
||||
doc = self.make_models()
|
||||
shutil.copy(sample_file, os.path.join(self.dirs.originals_dir, f"{doc.id:07}.pdf"))
|
||||
|
||||
call_command('document_archiver')
|
||||
|
||||
def test_handle_document(self):
|
||||
|
||||
doc = self.make_models()
|
||||
shutil.copy(sample_file, os.path.join(self.dirs.originals_dir, f"{doc.id:07}.pdf"))
|
||||
|
||||
handle_document(doc.pk)
|
||||
|
||||
doc = Document.objects.get(id=doc.id)
|
||||
|
||||
self.assertIsNotNone(doc.checksum)
|
||||
self.assertTrue(os.path.isfile(doc.archive_path))
|
||||
self.assertTrue(os.path.isfile(doc.source_path))
|
||||
self.assertTrue(filecmp.cmp(sample_file, doc.source_path))
|
||||
|
||||
|
||||
class TestDecryptDocuments(TestCase):
|
||||
|
||||
@override_settings(
|
||||
ORIGINALS_DIR=os.path.join(os.path.dirname(__file__), "samples", "originals"),
|
||||
THUMBNAIL_DIR=os.path.join(os.path.dirname(__file__), "samples", "thumb"),
|
||||
PASSPHRASE="test",
|
||||
PAPERLESS_FILENAME_FORMAT=None
|
||||
)
|
||||
@mock.patch("documents.management.commands.decrypt_documents.input")
|
||||
def test_decrypt(self, m):
|
||||
|
||||
media_dir = tempfile.mkdtemp()
|
||||
originals_dir = os.path.join(media_dir, "documents", "originals")
|
||||
thumb_dir = os.path.join(media_dir, "documents", "thumbnails")
|
||||
os.makedirs(originals_dir, exist_ok=True)
|
||||
os.makedirs(thumb_dir, exist_ok=True)
|
||||
|
||||
override_settings(
|
||||
ORIGINALS_DIR=originals_dir,
|
||||
THUMBNAIL_DIR=thumb_dir,
|
||||
PASSPHRASE="test"
|
||||
).enable()
|
||||
|
||||
doc = Document.objects.create(checksum="9c9691e51741c1f4f41a20896af31770", title="wow", filename="0000002.pdf.gpg", mime_type="application/pdf", storage_type=Document.STORAGE_TYPE_GPG)
|
||||
|
||||
shutil.copy(os.path.join(os.path.dirname(__file__), "samples", "documents", "originals", "0000002.pdf.gpg"), os.path.join(originals_dir, "0000002.pdf.gpg"))
|
||||
shutil.copy(os.path.join(os.path.dirname(__file__), "samples", "documents", "thumbnails", f"0000002.png.gpg"), os.path.join(thumb_dir, f"{doc.id:07}.png.gpg"))
|
||||
|
||||
call_command('decrypt_documents')
|
||||
|
||||
doc.refresh_from_db()
|
||||
|
||||
self.assertEqual(doc.storage_type, Document.STORAGE_TYPE_UNENCRYPTED)
|
||||
self.assertEqual(doc.filename, "0000002.pdf")
|
||||
self.assertTrue(os.path.isfile(os.path.join(originals_dir, "0000002.pdf")))
|
||||
self.assertTrue(os.path.isfile(doc.source_path))
|
||||
self.assertTrue(os.path.isfile(os.path.join(thumb_dir, f"{doc.id:07}.png")))
|
||||
self.assertTrue(os.path.isfile(doc.thumbnail_path))
|
||||
|
||||
with doc.source_file as f:
|
||||
checksum = hashlib.md5(f.read()).hexdigest()
|
||||
self.assertEqual(checksum, doc.checksum)
|
||||


class TestMakeIndex(TestCase):

@mock.patch("documents.management.commands.document_index.index_reindex")
def test_reindex(self, m):
call_command("document_index", "reindex")
m.assert_called_once()

@mock.patch("documents.management.commands.document_index.index_optimize")
def test_optimize(self, m):
call_command("document_index", "optimize")
m.assert_called_once()


class TestRenamer(DirectoriesMixin, TestCase):

def test_rename(self):
doc = Document.objects.create(title="test", mime_type="application/pdf")
doc.filename = generate_filename(doc)
doc.save()

Path(doc.source_path).touch()

old_source_path = doc.source_path

with override_settings(PAPERLESS_FILENAME_FORMAT="{title}"):
call_command("document_renamer")

doc2 = Document.objects.get(id=doc.id)

self.assertEqual(doc2.filename, "test.pdf")
self.assertFalse(os.path.isfile(old_source_path))
self.assertFalse(os.path.isfile(doc.source_path))
self.assertTrue(os.path.isfile(doc2.source_path))
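# The renamer derived "test.pdf" from PAPERLESS_FILENAME_FORMAT="{title}" and
# moved the file on disk, so only the new source_path exists afterwards.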


class TestCreateClassifier(TestCase):

@mock.patch("documents.management.commands.document_create_classifier.train_classifier")
def test_create_classifier(self, m):
call_command("document_create_classifier")

m.assert_called_once()
@@ -1,42 +0,0 @@
import filecmp
import os
import shutil

from django.core.management import call_command
from django.test import TestCase

from documents.management.commands.document_archiver import handle_document
from documents.models import Document
from documents.tests.utils import DirectoriesMixin


sample_file = os.path.join(os.path.dirname(__file__), "samples", "simple.pdf")


class TestArchiver(DirectoriesMixin, TestCase):

def make_models(self):
self.d1 = Document.objects.create(checksum="A", title="A", content="first document", pk=1, mime_type="application/pdf")
#self.d2 = Document.objects.create(checksum="B", title="B", content="second document")
#self.d3 = Document.objects.create(checksum="C", title="C", content="unrelated document")

def test_archiver(self):

shutil.copy(sample_file, os.path.join(self.dirs.originals_dir, "0000001.pdf"))
self.make_models()

call_command('document_archiver')

def test_handle_document(self):

shutil.copy(sample_file, os.path.join(self.dirs.originals_dir, "0000001.pdf"))
self.make_models()

handle_document(self.d1.pk)

doc = Document.objects.get(id=self.d1.id)

self.assertIsNotNone(doc.checksum)
self.assertTrue(os.path.isfile(doc.archive_path))
self.assertTrue(os.path.isfile(doc.source_path))
self.assertTrue(filecmp.cmp(sample_file, doc.source_path))
@@ -230,7 +230,7 @@ class TestConsumerTags(DirectoriesMixin, ConsumerMixin, TransactionTestCase):

tag_names = ("existingTag", "Space Tag")
# Create a Tag prior to consuming a file using it in path
tag_ids = [Tag.objects.create(name=tag_names[0]).pk,]
tag_ids = [Tag.objects.create(name="existingtag").pk,]

self.t_start()

@@ -1,57 +0,0 @@
import hashlib
import json
import os
import shutil
import tempfile
from unittest import mock

from django.core.management import call_command
from django.test import TestCase, override_settings

from documents.management.commands import document_exporter
from documents.models import Document, Tag, DocumentType, Correspondent


class TestDecryptDocuments(TestCase):

@override_settings(
ORIGINALS_DIR=os.path.join(os.path.dirname(__file__), "samples", "originals"),
THUMBNAIL_DIR=os.path.join(os.path.dirname(__file__), "samples", "thumb"),
PASSPHRASE="test",
PAPERLESS_FILENAME_FORMAT=None
)
@mock.patch("documents.management.commands.decrypt_documents.input")
def test_decrypt(self, m):

media_dir = tempfile.mkdtemp()
originals_dir = os.path.join(media_dir, "documents", "originals")
thumb_dir = os.path.join(media_dir, "documents", "thumbnails")
os.makedirs(originals_dir, exist_ok=True)
os.makedirs(thumb_dir, exist_ok=True)

override_settings(
ORIGINALS_DIR=originals_dir,
THUMBNAIL_DIR=thumb_dir,
PASSPHRASE="test"
).enable()

shutil.copy(os.path.join(os.path.dirname(__file__), "samples", "documents", "originals", "0000002.pdf.gpg"), os.path.join(originals_dir, "0000002.pdf.gpg"))
shutil.copy(os.path.join(os.path.dirname(__file__), "samples", "documents", "thumbnails", "0000002.png.gpg"), os.path.join(thumb_dir, "0000002.png.gpg"))

Document.objects.create(checksum="9c9691e51741c1f4f41a20896af31770", title="wow", filename="0000002.pdf.gpg", id=2, mime_type="application/pdf", storage_type=Document.STORAGE_TYPE_GPG)

call_command('decrypt_documents')

doc = Document.objects.get(id=2)

self.assertEqual(doc.storage_type, Document.STORAGE_TYPE_UNENCRYPTED)
self.assertEqual(doc.filename, "0000002.pdf")
self.assertTrue(os.path.isfile(os.path.join(originals_dir, "0000002.pdf")))
self.assertTrue(os.path.isfile(doc.source_path))
self.assertTrue(os.path.isfile(os.path.join(thumb_dir, "0000002.png")))
self.assertTrue(os.path.isfile(doc.thumbnail_path))

with doc.source_file as f:
checksum = hashlib.md5(f.read()).hexdigest()
self.assertEqual(checksum, doc.checksum)

@@ -24,13 +24,20 @@ class TestExportImport(DirectoriesMixin, TestCase):

file = os.path.join(self.dirs.originals_dir, "0000001.pdf")

Document.objects.create(content="Content", checksum="42995833e01aea9b3edee44bbfdd7ce1", archive_checksum="62acb0bcbfbcaa62ca6ad3668e4e404b", title="wow", filename="0000001.pdf", id=1, mime_type="application/pdf")
Document.objects.create(content="Content", checksum="9c9691e51741c1f4f41a20896af31770", title="wow", filename="0000002.pdf.gpg", id=2, mime_type="application/pdf", storage_type=Document.STORAGE_TYPE_GPG)
Tag.objects.create(name="t")
DocumentType.objects.create(name="dt")
Correspondent.objects.create(name="c")
d1 = Document.objects.create(content="Content", checksum="42995833e01aea9b3edee44bbfdd7ce1", archive_checksum="62acb0bcbfbcaa62ca6ad3668e4e404b", title="wow", filename="0000001.pdf", mime_type="application/pdf")
d2 = Document.objects.create(content="Content", checksum="9c9691e51741c1f4f41a20896af31770", title="wow", filename="0000002.pdf.gpg", mime_type="application/pdf", storage_type=Document.STORAGE_TYPE_GPG)
t1 = Tag.objects.create(name="t")
dt1 = DocumentType.objects.create(name="dt")
c1 = Correspondent.objects.create(name="c")

d1.tags.add(t1)
d1.correspondent = c1
d1.document_type = dt1
d1.save()
d2.save()

target = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, target)

call_command('document_exporter', target)

@@ -58,17 +65,28 @@
self.assertEqual(checksum, element['fields']['archive_checksum'])

with paperless_environment() as dirs:
self.assertEqual(Document.objects.count(), 2)
Document.objects.all().delete()
Correspondent.objects.all().delete()
DocumentType.objects.all().delete()
Tag.objects.all().delete()
self.assertEqual(Document.objects.count(), 0)

call_command('document_importer', target)
self.assertEqual(Document.objects.count(), 2)
messages = check_sanity()
# everything is alright after the test
self.assertEqual(len(messages), 0, str([str(m) for m in messages]))

@override_settings(
PAPERLESS_FILENAME_FORMAT="{title}"
)
def test_exporter_with_filename_format(self):
self.test_exporter()

def test_export_missing_files(self):

target = tempfile.mkdtemp()
Document.objects.create(checksum="AAAAAAAAAAAAAAAAA", title="wow", filename="0000004.pdf", id=3, mime_type="application/pdf")
self.addCleanup(shutil.rmtree, target)
Document.objects.create(checksum="AAAAAAAAAAAAAAAAA", title="wow", filename="0000004.pdf", mime_type="application/pdf")
self.assertRaises(FileNotFoundError, call_command, 'document_exporter', target)

def test_duplicate_titles(self):
# TODO
pass

@@ -14,6 +14,12 @@ class TestRetagger(DirectoriesMixin, TestCase):

self.tag_first = Tag.objects.create(name="tag1", match="first", matching_algorithm=Tag.MATCH_ANY)
self.tag_second = Tag.objects.create(name="tag2", match="second", matching_algorithm=Tag.MATCH_ANY)
self.tag_inbox = Tag.objects.create(name="test", is_inbox_tag=True)
self.tag_no_match = Tag.objects.create(name="test2")

self.d3.tags.add(self.tag_inbox)
self.d3.tags.add(self.tag_no_match)


self.correspondent_first = Correspondent.objects.create(
name="c1", match="first", matching_algorithm=Correspondent.MATCH_ANY)
@@ -38,7 +44,7 @@ class TestRetagger(DirectoriesMixin, TestCase):

self.assertEqual(d_first.tags.count(), 1)
self.assertEqual(d_second.tags.count(), 1)
self.assertEqual(d_unrelated.tags.count(), 0)
self.assertEqual(d_unrelated.tags.count(), 2)

self.assertEqual(d_first.tags.first(), self.tag_first)
self.assertEqual(d_second.tags.first(), self.tag_second)
@@ -56,3 +62,17 @@ class TestRetagger(DirectoriesMixin, TestCase):

self.assertEqual(d_first.correspondent, self.correspondent_first)
self.assertEqual(d_second.correspondent, self.correspondent_second)

def test_overwrite_preserve_inbox(self):
self.d1.tags.add(self.tag_second)

call_command('document_retagger', '--tags', '--overwrite')

d_first, d_second, d_unrelated = self.get_updated_docs()

self.assertIsNotNone(Tag.objects.get(id=self.tag_second.id))

self.assertCountEqual([tag.id for tag in d_first.tags.all()], [self.tag_first.id])
self.assertCountEqual([tag.id for tag in d_second.tags.all()], [self.tag_second.id])
self.assertCountEqual([tag.id for tag in d_unrelated.tags.all()], [self.tag_inbox.id, self.tag_no_match.id])

src/documents/tests/test_migrations.py (new file, 129 lines)
@@ -0,0 +1,129 @@
import os
import shutil
from pathlib import Path

from django.apps import apps
from django.conf import settings
from django.db import connection
from django.db.migrations.executor import MigrationExecutor
from django.test import TestCase, TransactionTestCase, override_settings

from documents.models import Document
from documents.parsers import get_default_file_extension
from documents.tests.utils import DirectoriesMixin


class TestMigrations(TransactionTestCase):

@property
def app(self):
return apps.get_containing_app_config(type(self).__module__).name

migrate_from = None
migrate_to = None

def setUp(self):
super(TestMigrations, self).setUp()

assert self.migrate_from and self.migrate_to, \
"TestCase '{}' must define migrate_from and migrate_to properties".format(type(self).__name__)
self.migrate_from = [(self.app, self.migrate_from)]
self.migrate_to = [(self.app, self.migrate_to)]
executor = MigrationExecutor(connection)
old_apps = executor.loader.project_state(self.migrate_from).apps

# Reverse to the original migration
executor.migrate(self.migrate_from)

self.setUpBeforeMigration(old_apps)

# Run the migration to test
executor = MigrationExecutor(connection)
executor.loader.build_graph()  # reload.
executor.migrate(self.migrate_to)

self.apps = executor.loader.project_state(self.migrate_to).apps

def setUpBeforeMigration(self, apps):
pass
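# Usage sketch (mirroring the subclasses below): set migrate_from/migrate_to,
# seed data in setUpBeforeMigration() against the historical model state, and
# assert on self.apps once the migration under test has run.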


STORAGE_TYPE_UNENCRYPTED = "unencrypted"
STORAGE_TYPE_GPG = "gpg"


def source_path_before(self):
if self.filename:
fname = str(self.filename)
else:
fname = "{:07}.{}".format(self.pk, self.file_type)
if self.storage_type == STORAGE_TYPE_GPG:
fname += ".gpg"

return os.path.join(
settings.ORIGINALS_DIR,
fname
)


def file_type_after(self):
return get_default_file_extension(self.mime_type)


def source_path_after(doc):
if doc.filename:
fname = str(doc.filename)
else:
fname = "{:07}{}".format(doc.pk, file_type_after(doc))
if doc.storage_type == STORAGE_TYPE_GPG:
fname += ".gpg"  # pragma: no cover

return os.path.join(
settings.ORIGINALS_DIR,
fname
)
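# Example: a GPG-stored document with pk=2 and no filename maps to
# ORIGINALS_DIR/0000002.pdf.gpg in both helpers; only the source of the
# extension changes, from file_type before to mime_type afterwards.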


@override_settings(PASSPHRASE="test")
class TestMigrateMimeType(DirectoriesMixin, TestMigrations):

migrate_from = '1002_auto_20201111_1105'
migrate_to = '1003_mime_types'

def setUpBeforeMigration(self, apps):
Document = apps.get_model("documents", "Document")
doc = Document.objects.create(title="test", file_type="pdf", filename="file1.pdf")
self.doc_id = doc.id
shutil.copy(os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"), source_path_before(doc))

doc2 = Document.objects.create(checksum="B", file_type="pdf", storage_type=STORAGE_TYPE_GPG)
self.doc2_id = doc2.id
shutil.copy(os.path.join(os.path.dirname(__file__), "samples", "documents", "originals", "0000002.pdf.gpg"), source_path_before(doc2))

def testMimeTypesMigrated(self):
Document = self.apps.get_model('documents', 'Document')

doc = Document.objects.get(id=self.doc_id)
self.assertEqual(doc.mime_type, "application/pdf")

doc2 = Document.objects.get(id=self.doc2_id)
self.assertEqual(doc2.mime_type, "application/pdf")


@override_settings(PASSPHRASE="test")
class TestMigrateMimeTypeBackwards(DirectoriesMixin, TestMigrations):

migrate_from = '1003_mime_types'
migrate_to = '1002_auto_20201111_1105'

def setUpBeforeMigration(self, apps):
Document = apps.get_model("documents", "Document")
doc = Document.objects.create(title="test", mime_type="application/pdf", filename="file1.pdf")
self.doc_id = doc.id
shutil.copy(os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"), source_path_after(doc))

def testMimeTypesReverted(self):
Document = self.apps.get_model('documents', 'Document')

doc = Document.objects.get(id=self.doc_id)
self.assertEqual(doc.file_type, "pdf")
@@ -1,56 +0,0 @@
from unittest import mock

from django.test import TestCase, override_settings

from documents.models import Document, Tag, Correspondent
from documents.signals.handlers import run_post_consume_script


class PostConsumeTestCase(TestCase):

@mock.patch("documents.signals.handlers.Popen")
@override_settings(POST_CONSUME_SCRIPT=None)
def test_no_post_consume_script(self, m):
doc = Document.objects.create(title="Test", mime_type="application/pdf")
tag1 = Tag.objects.create(name="a")
tag2 = Tag.objects.create(name="b")
doc.tags.add(tag1)
doc.tags.add(tag2)

run_post_consume_script(None, doc)

m.assert_not_called()

@mock.patch("documents.signals.handlers.Popen")
@override_settings(POST_CONSUME_SCRIPT="script")
def test_post_consume_script_simple(self, m):
doc = Document.objects.create(title="Test", mime_type="application/pdf")

run_post_consume_script(None, doc)

m.assert_called_once()

@mock.patch("documents.signals.handlers.Popen")
@override_settings(POST_CONSUME_SCRIPT="script")
def test_post_consume_script_with_correspondent(self, m):
c = Correspondent.objects.create(name="my_bank")
doc = Document.objects.create(title="Test", mime_type="application/pdf", correspondent=c)
tag1 = Tag.objects.create(name="a")
tag2 = Tag.objects.create(name="b")
doc.tags.add(tag1)
doc.tags.add(tag2)

run_post_consume_script(None, doc)

m.assert_called_once()

args, kwargs = m.call_args

command = args[0]

self.assertEqual(command[0], "script")
self.assertEqual(command[1], str(doc.pk))
self.assertEqual(command[5], f"/api/documents/{doc.pk}/download/")
self.assertEqual(command[6], f"/api/documents/{doc.pk}/thumb/")
self.assertEqual(command[7], "my_bank")
self.assertCountEqual(command[8].split(","), ["a", "b"])
@@ -2,6 +2,8 @@ import os
import shutil
from pathlib import Path

import filelock
from django.conf import settings
from django.test import TestCase

from documents.models import Document
@@ -13,9 +15,11 @@ class TestSanityCheck(DirectoriesMixin, TestCase):

def make_test_data(self):

shutil.copy(os.path.join(os.path.dirname(__file__), "samples", "documents", "originals", "0000001.pdf"), os.path.join(self.dirs.originals_dir, "0000001.pdf"))
shutil.copy(os.path.join(os.path.dirname(__file__), "samples", "documents", "archive", "0000001.pdf"), os.path.join(self.dirs.archive_dir, "0000001.pdf"))
shutil.copy(os.path.join(os.path.dirname(__file__), "samples", "documents", "thumbnails", "0000001.png"), os.path.join(self.dirs.thumbnail_dir, "0000001.png"))
with filelock.FileLock(settings.MEDIA_LOCK):
# just make sure that the lockfile is present.
shutil.copy(os.path.join(os.path.dirname(__file__), "samples", "documents", "originals", "0000001.pdf"), os.path.join(self.dirs.originals_dir, "0000001.pdf"))
shutil.copy(os.path.join(os.path.dirname(__file__), "samples", "documents", "archive", "0000001.pdf"), os.path.join(self.dirs.archive_dir, "0000001.pdf"))
shutil.copy(os.path.join(os.path.dirname(__file__), "samples", "documents", "thumbnails", "0000001.png"), os.path.join(self.dirs.thumbnail_dir, "0000001.png"))

return Document.objects.create(title="test", checksum="42995833e01aea9b3edee44bbfdd7ce1", archive_checksum="62acb0bcbfbcaa62ca6ad3668e4e404b", content="test", pk=1, filename="0000001.pdf", mime_type="application/pdf")

@@ -1,10 +1,12 @@
from datetime import datetime
from unittest import mock

from django.test import TestCase
from django.utils import timezone

from documents import tasks
from documents.models import Document
from documents.sanity_checker import SanityError, SanityFailedError
from documents.tests.utils import DirectoriesMixin


@@ -22,3 +24,19 @@ class TestTasks(DirectoriesMixin, TestCase):

def test_train_classifier(self):
tasks.train_classifier()

@mock.patch("documents.tasks.sanity_checker.check_sanity")
def test_sanity_check(self, m):
m.return_value = []
tasks.sanity_check()
m.assert_called_once()
m.reset_mock()
m.return_value = [SanityError("")]
self.assertRaises(SanityFailedError, tasks.sanity_check)
m.assert_called_once()
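# sanity_check() returns quietly while the checker reports no messages and
# raises SanityFailedError as soon as a single SanityError is present.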

def test_bulk_update_documents(self):
doc1 = Document.objects.create(title="test", content="my document", checksum="wow", added=timezone.now(),
created=timezone.now(), modified=timezone.now())

tasks.bulk_update_documents([doc1.pk])

@@ -34,7 +34,8 @@ def setup_directories():
ARCHIVE_DIR=dirs.archive_dir,
CONSUMPTION_DIR=dirs.consumption_dir,
INDEX_DIR=dirs.index_dir,
MODEL_FILE=os.path.join(dirs.data_dir, "classification_model.pickle")
MODEL_FILE=os.path.join(dirs.data_dir, "classification_model.pickle"),
MEDIA_LOCK=os.path.join(dirs.media_dir, "media.lock")

)
dirs.settings_override.enable()

@@ -4,8 +4,10 @@ from datetime import datetime
from time import mktime

from django.conf import settings
from django.db.models import Count, Max
from django.db.models import Count, Max, Case, When, IntegerField
from django.db.models.functions import Lower
from django.http import HttpResponse, HttpResponseBadRequest, Http404
from django.utils.translation import get_language
from django.views.decorators.cache import cache_control
from django.views.generic import TemplateView
from django_filters.rest_framework import DjangoFilterBackend
@@ -38,27 +40,55 @@ from .filters import (
DocumentTypeFilterSet,
LogFilterSet
)
from .models import Correspondent, Document, Log, Tag, DocumentType
from .models import Correspondent, Document, Log, Tag, DocumentType, SavedView
from .parsers import get_parser_class_for_mime_type
from .serialisers import (
CorrespondentSerializer,
DocumentSerializer,
LogSerializer,
TagSerializer,
DocumentTypeSerializer,
PostDocumentSerializer
PostDocumentSerializer,
SavedViewSerializer,
BulkEditSerializer, SelectionDataSerializer
)


class IndexView(TemplateView):
template_name = "index.html"

def get_language(self):
# This is here for the following reason:
# Django identifies languages in the form "en-us"
# However, angular generates locales as "en-US".
# this translates between these two forms.
lang = get_language()
if "-" in lang:
first = lang[:lang.index("-")]
second = lang[lang.index("-")+1:]
return f"{first}-{second.upper()}"
else:
return lang
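# Example: Django reports "en-us" while the compiled frontend lives under
# "en-US", so this returns "en-US"; region-less codes such as "de" pass
# through unchanged.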

def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['cookie_prefix'] = settings.COOKIE_PREFIX
context['username'] = self.request.user.username
context['full_name'] = self.request.user.get_full_name()
context['styles_css'] = f"frontend/{self.get_language()}/styles.css"
context['runtime_js'] = f"frontend/{self.get_language()}/runtime.js"
context['polyfills_js'] = f"frontend/{self.get_language()}/polyfills.js"  # NOQA: E501
context['main_js'] = f"frontend/{self.get_language()}/main.js"
context['manifest'] = f"frontend/{self.get_language()}/manifest.webmanifest"  # NOQA: E501
return context


class CorrespondentViewSet(ModelViewSet):
model = Correspondent

queryset = Correspondent.objects.annotate(
document_count=Count('documents'),
last_correspondence=Max('documents__created')).order_by('name')
last_correspondence=Max('documents__created')).order_by(Lower('name'))
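# Ordering by Lower('name') makes the listing case-insensitive, so "amazon"
# and "Amazon" sort together instead of being split by case.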

serializer_class = CorrespondentSerializer
pagination_class = StandardPagination
@@ -77,7 +107,7 @@ class TagViewSet(ModelViewSet):
model = Tag

queryset = Tag.objects.annotate(
document_count=Count('documents')).order_by('name')
document_count=Count('documents')).order_by(Lower('name'))

serializer_class = TagSerializer
pagination_class = StandardPagination
@@ -91,7 +121,7 @@ class DocumentTypeViewSet(ModelViewSet):
model = DocumentType

queryset = DocumentType.objects.annotate(
document_count=Count('documents')).order_by('name')
document_count=Count('documents')).order_by(Lower('name'))

serializer_class = DocumentTypeSerializer
pagination_class = StandardPagination
@@ -101,6 +131,10 @@ class DocumentTypeViewSet(ModelViewSet):
ordering_fields = ("name", "matching_algorithm", "match", "document_count")


class BulkEditForm(object):
pass


class DocumentViewSet(RetrieveModelMixin,
UpdateModelMixin,
DestroyModelMixin,
@@ -124,6 +158,17 @@ class DocumentViewSet(RetrieveModelMixin,
"added",
"archive_serial_number")

def get_serializer(self, *args, **kwargs):
fields_param = self.request.query_params.get('fields', None)
if fields_param:
fields = fields_param.split(",")
else:
fields = None
serializer_class = self.get_serializer_class()
kwargs.setdefault('context', self.get_serializer_context())
kwargs.setdefault('fields', fields)
return serializer_class(*args, **kwargs)
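# Sketch (assumed request shape): a query string such as ?fields=id,title
# restricts the serializer to those fields; without it, fields stays None and
# the full representation is returned.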

def update(self, request, *args, **kwargs):
response = super(DocumentViewSet, self).update(
request, *args, **kwargs)
@@ -160,17 +205,48 @@ class DocumentViewSet(RetrieveModelMixin,
disposition, filename)
return response

def get_metadata(self, file, mime_type):
if not os.path.isfile(file):
return None

parser_class = get_parser_class_for_mime_type(mime_type)
if parser_class:
parser = parser_class(logging_group=None)

try:
return parser.extract_metadata(file, mime_type)
except Exception as e:
# TODO: cover GPG errors, remove later.
return []
else:
return []
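# get_metadata degrades gracefully: a missing file yields None, while an
# unknown mime type or a parser failure yields an empty list rather than an
# error response.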

@action(methods=['get'], detail=True)
def metadata(self, request, pk=None):
try:
doc = Document.objects.get(pk=pk)
return Response({
"paperless__checksum": doc.checksum,
"paperless__mime_type": doc.mime_type,
"paperless__filename": doc.filename,
"paperless__has_archive_version":
os.path.isfile(doc.archive_path)
})

meta = {
"original_checksum": doc.checksum,
"original_size": os.stat(doc.source_path).st_size,
"original_mime_type": doc.mime_type,
"media_filename": doc.filename,
"has_archive_version": os.path.isfile(doc.archive_path),
"original_metadata": self.get_metadata(
doc.source_path, doc.mime_type)
}

if doc.archive_checksum and os.path.isfile(doc.archive_path):
meta['archive_checksum'] = doc.archive_checksum
meta['archive_size'] = os.stat(doc.archive_path).st_size
meta['archive_metadata'] = self.get_metadata(
doc.archive_path, "application/pdf")
else:
meta['archive_checksum'] = None
meta['archive_size'] = None
meta['archive_metadata'] = None

return Response(meta)
except Document.DoesNotExist:
raise Http404()

@@ -187,7 +263,12 @@
@cache_control(public=False, max_age=315360000)
def thumb(self, request, pk=None):
try:
return HttpResponse(Document.objects.get(id=pk).thumbnail_file,
doc = Document.objects.get(id=pk)
if doc.storage_type == Document.STORAGE_TYPE_GPG:
handle = GnuPG.decrypted(doc.thumbnail_file)
else:
handle = doc.thumbnail_file
return HttpResponse(handle,
content_type='image/png')
except (FileNotFoundError, Document.DoesNotExist):
raise Http404()
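# Thumbnails of legacy GPG-encrypted documents are now decrypted on the fly,
# while unencrypted documents stream their file handle directly.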
@@ -213,6 +294,55 @@ class LogViewSet(ReadOnlyModelViewSet):
ordering_fields = ("created",)


class SavedViewViewSet(ModelViewSet):
model = SavedView

queryset = SavedView.objects.all()
serializer_class = SavedViewSerializer
pagination_class = StandardPagination
permission_classes = (IsAuthenticated,)

def get_queryset(self):
user = self.request.user
return SavedView.objects.filter(user=user)

def perform_create(self, serializer):
serializer.save(user=self.request.user)
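# Saved views are strictly per user: the queryset filters on request.user and
# perform_create stamps the owner server-side, overriding any user supplied
# in the payload.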


class BulkEditView(APIView):

permission_classes = (IsAuthenticated,)
serializer_class = BulkEditSerializer
parser_classes = (parsers.JSONParser,)

def get_serializer_context(self):
return {
'request': self.request,
'format': self.format_kwarg,
'view': self
}

def get_serializer(self, *args, **kwargs):
kwargs['context'] = self.get_serializer_context()
return self.serializer_class(*args, **kwargs)

def post(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)

method = serializer.validated_data.get("method")
parameters = serializer.validated_data.get("parameters")
documents = serializer.validated_data.get("documents")

try:
# TODO: parameter validation
result = method(documents, **parameters)
return Response({"result": result})
except Exception as e:
return HttpResponseBadRequest(str(e))
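# Note: "method" arrives from BulkEditSerializer as an already-resolved
# callable, so the view only forwards the document ids and parameters to it.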


class PostDocumentView(APIView):

permission_classes = (IsAuthenticated,)
@@ -262,6 +392,63 @@ class PostDocumentView(APIView):
return Response("OK")


class SelectionDataView(APIView):

permission_classes = (IsAuthenticated,)
serializer_class = SelectionDataSerializer
parser_classes = (parsers.MultiPartParser, parsers.JSONParser)

def get_serializer_context(self):
return {
'request': self.request,
'format': self.format_kwarg,
'view': self
}

def get_serializer(self, *args, **kwargs):
kwargs['context'] = self.get_serializer_context()
return self.serializer_class(*args, **kwargs)

def post(self, request, format=None):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)

ids = serializer.validated_data.get('documents')

correspondents = Correspondent.objects.annotate(
document_count=Count(Case(
When(documents__id__in=ids, then=1),
output_field=IntegerField()
)))

tags = Tag.objects.annotate(document_count=Count(Case(
When(documents__id__in=ids, then=1),
output_field=IntegerField()
)))

types = DocumentType.objects.annotate(document_count=Count(Case(
When(documents__id__in=ids, then=1),
output_field=IntegerField()
)))
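# Each Count(Case(When(...))) tallies only documents whose ids are in the
# posted selection, so objects unrelated to the selection report a count of 0.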

r = Response({
"selected_correspondents": [{
"id": t.id,
"document_count": t.document_count
} for t in correspondents],
"selected_tags": [{
"id": t.id,
"document_count": t.document_count
} for t in tags],
"selected_document_types": [{
"id": t.id,
"document_count": t.document_count
} for t in types]
})

return r


class SearchView(APIView):

permission_classes = (IsAuthenticated,)
@@ -281,14 +468,27 @@
}

def get(self, request, format=None):
if 'query' not in request.query_params:

if 'query' in request.query_params:
query = request.query_params['query']
else:
query = None

if 'more_like' in request.query_params:
more_like_id = request.query_params['more_like']
more_like_content = Document.objects.get(id=more_like_id).content
else:
more_like_id = None
more_like_content = None
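# A "more like this" request seeds the search with the referenced document's
# full content instead of a plain query string.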

if not query and not more_like_id:
return Response({
'count': 0,
'page': 0,
'page_count': 0,
'corrected_query': None,
'results': []})

query = request.query_params['query']
try:
page = int(request.query_params.get('page', 1))
except (ValueError, TypeError):
@@ -298,8 +498,7 @@
page = 1

try:
with index.query_page(self.ix, query, page) as (result_page,
corrected_query):
with index.query_page(self.ix, page, query, more_like_id, more_like_content) as (result_page, corrected_query):  # NOQA: E501
return Response(
{'count': len(result_page),
'page': result_page.pagenum,