Mirror of https://github.com/paperless-ngx/paperless-ngx.git
Merge branch 'feature-websockets-status' into dev

src/documents/consumer.py

@@ -1,9 +1,12 @@
 import datetime
 import hashlib
 import os
+import uuid
 from subprocess import Popen
 
 import magic
+from asgiref.sync import async_to_sync
+from channels.layers import get_channel_layer
 from django.conf import settings
 from django.db import transaction
 from django.db.models import Q
@@ -27,8 +30,43 @@ class ConsumerError(Exception):
     pass
 
 
+MESSAGE_DOCUMENT_ALREADY_EXISTS = "document_already_exists"
+MESSAGE_FILE_NOT_FOUND = "file_not_found"
+MESSAGE_PRE_CONSUME_SCRIPT_NOT_FOUND = "pre_consume_script_not_found"
+MESSAGE_PRE_CONSUME_SCRIPT_ERROR = "pre_consume_script_error"
+MESSAGE_POST_CONSUME_SCRIPT_NOT_FOUND = "post_consume_script_not_found"
+MESSAGE_POST_CONSUME_SCRIPT_ERROR = "post_consume_script_error"
+MESSAGE_NEW_FILE = "new_file"
+MESSAGE_UNSUPPORTED_TYPE = "unsupported_type"
+MESSAGE_PARSING_DOCUMENT = "parsing_document"
+MESSAGE_GENERATING_THUMBNAIL = "generating_thumbnail"
+MESSAGE_PARSE_DATE = "parse_date"
+MESSAGE_SAVE_DOCUMENT = "save_document"
+MESSAGE_FINISHED = "finished"
+
+
 class Consumer(LoggingMixin):
 
+    def _send_progress(self, current_progress, max_progress, status,
+                       message=None, document_id=None):
+        payload = {
+            'filename': os.path.basename(self.filename) if self.filename else None,  # NOQA: E501
+            'task_id': self.task_id,
+            'current_progress': current_progress,
+            'max_progress': max_progress,
+            'status': status,
+            'message': message,
+            'document_id': document_id
+        }
+        async_to_sync(self.channel_layer.group_send)("status_updates",
+                                                     {'type': 'status_update',
+                                                      'data': payload})
+
+    def _fail(self, message, log_message=None):
+        self._send_progress(100, 100, 'FAILED', message)
+        self.log("error", log_message or message)
+        raise ConsumerError(f"{self.filename}: {log_message or message}")
+
     def __init__(self):
         super().__init__()
         self.path = None
@@ -37,15 +75,16 @@ class Consumer(LoggingMixin):
         self.override_correspondent_id = None
         self.override_tag_ids = None
         self.override_document_type_id = None
+        self.task_id = None
+
+        self.channel_layer = get_channel_layer()
 
     def pre_check_file_exists(self):
         if not os.path.isfile(self.path):
-            self.log(
-                "error",
-                "Cannot consume {}: It is not a file.".format(self.path)
+            self._fail(
+                MESSAGE_FILE_NOT_FOUND,
+                f"Cannot consume {self.path}: File not found."
             )
-            raise ConsumerError("Cannot consume {}: It is not a file".format(
-                self.path))
 
     def pre_check_duplicate(self):
         with open(self.path, "rb") as f:
@@ -53,12 +92,9 @@ class Consumer(LoggingMixin):
         if Document.objects.filter(Q(checksum=checksum) | Q(archive_checksum=checksum)).exists():  # NOQA: E501
             if settings.CONSUMER_DELETE_DUPLICATES:
                 os.unlink(self.path)
-            self.log(
-                "error",
-                "Not consuming {}: It is a duplicate.".format(self.filename)
-            )
-            raise ConsumerError(
-                "Not consuming {}: It is a duplicate.".format(self.filename)
+            self._fail(
+                MESSAGE_DOCUMENT_ALREADY_EXISTS,
+                f"Not consuming {self.filename}: It is a duplicate."
             )
 
     def pre_check_directories(self):
@@ -72,14 +108,16 @@ class Consumer(LoggingMixin):
             return
 
         if not os.path.isfile(settings.PRE_CONSUME_SCRIPT):
-            raise ConsumerError(
+            self._fail(
+                MESSAGE_PRE_CONSUME_SCRIPT_NOT_FOUND,
                 f"Configured pre-consume script "
                 f"{settings.PRE_CONSUME_SCRIPT} does not exist.")
 
         try:
             Popen((settings.PRE_CONSUME_SCRIPT, self.path)).wait()
         except Exception as e:
-            raise ConsumerError(
+            self._fail(
+                MESSAGE_PRE_CONSUME_SCRIPT_ERROR,
                 f"Error while executing pre-consume script: {e}"
             )
@@ -88,9 +126,11 @@ class Consumer(LoggingMixin):
             return
 
         if not os.path.isfile(settings.POST_CONSUME_SCRIPT):
-            raise ConsumerError(
+            self._fail(
+                MESSAGE_POST_CONSUME_SCRIPT_NOT_FOUND,
                 f"Configured post-consume script "
-                f"{settings.POST_CONSUME_SCRIPT} does not exist.")
+                f"{settings.POST_CONSUME_SCRIPT} does not exist."
+            )
 
         try:
             Popen((
@@ -106,8 +146,9 @@ class Consumer(LoggingMixin):
                 "name", flat=True)))
             )).wait()
         except Exception as e:
-            raise ConsumerError(
-                f"Error while executing pre-consume script: {e}"
+            self._fail(
+                MESSAGE_POST_CONSUME_SCRIPT_ERROR,
+                f"Error while executing post-consume script: {e}"
             )
 
     def try_consume_file(self,
@@ -116,7 +157,8 @@ class Consumer(LoggingMixin):
                          override_title=None,
                          override_correspondent_id=None,
                          override_document_type_id=None,
-                         override_tag_ids=None):
+                         override_tag_ids=None,
+                         task_id=None):
         """
         Return the document object if it was successfully created.
         """
@@ -127,6 +169,9 @@ class Consumer(LoggingMixin):
         self.override_correspondent_id = override_correspondent_id
         self.override_document_type_id = override_document_type_id
         self.override_tag_ids = override_tag_ids
+        self.task_id = task_id or str(uuid.uuid4())
+
+        self._send_progress(0, 100, 'STARTING', MESSAGE_NEW_FILE)
 
         # this is for grouping logging entries for this particular file
         # together.
@@ -149,11 +194,12 @@ class Consumer(LoggingMixin):
 
         parser_class = get_parser_class_for_mime_type(mime_type)
         if not parser_class:
-            raise ConsumerError(
-                f"Unsupported mime type {mime_type} of file {self.filename}")
+            self._fail(
+                MESSAGE_UNSUPPORTED_TYPE,
+                f"Unsupported mime type {mime_type}"
+            )
         else:
-            self.log("debug",
-                     f"Parser: {parser_class.__name__}")
+            self.log("debug", f"Parser: {parser_class.__name__}")
 
         # Notify all listeners that we're going to do some work.
@@ -165,35 +211,50 @@ class Consumer(LoggingMixin):
 
         self.run_pre_consume_script()
 
+        def progress_callback(current_progress, max_progress):
+            # recalculate progress to be within 20 and 80
+            p = int((current_progress / max_progress) * 50 + 20)
+            self._send_progress(p, 100, "WORKING")
+
         # This doesn't parse the document yet, but gives us a parser.
 
-        document_parser = parser_class(self.logging_group)
+        document_parser = parser_class(self.logging_group, progress_callback)
 
         # However, this already created working directories which we have to
         # clean up.
 
         # Parse the document. This may take some time.
 
         text = None
         date = None
         thumbnail = None
         archive_path = None
 
         try:
+            self._send_progress(20, 100, 'WORKING', MESSAGE_PARSING_DOCUMENT)
             self.log("debug", "Parsing {}...".format(self.filename))
             document_parser.parse(self.path, mime_type, self.filename)
 
             self.log("debug", f"Generating thumbnail for {self.filename}...")
+            self._send_progress(70, 100, 'WORKING',
+                                MESSAGE_GENERATING_THUMBNAIL)
             thumbnail = document_parser.get_optimised_thumbnail(
                 self.path, mime_type)
 
             text = document_parser.get_text()
             date = document_parser.get_date()
             if not date:
+                self._send_progress(90, 100, 'WORKING',
+                                    MESSAGE_PARSE_DATE)
                 date = parse_date(self.filename, text)
             archive_path = document_parser.get_archive_path()
 
         except ParseError as e:
             document_parser.cleanup()
-            self.log(
-                "error",
-                f"Error while consuming document {self.filename}: {e}")
-            raise ConsumerError(e)
+            self._fail(
+                str(e),
+                f"Error while consuming document {self.filename}: {e}"
+            )
 
         # Prepare the document classifier.
@@ -203,6 +264,7 @@ class Consumer(LoggingMixin):
 
         classifier = load_classifier()
 
+        self._send_progress(95, 100, 'WORKING', MESSAGE_SAVE_DOCUMENT)
         # now that everything is done, we can start to store the document
         # in the system. This will be a transaction and reasonably fast.
         try:
@@ -256,12 +318,11 @@ class Consumer(LoggingMixin):
                 os.unlink(self.path)
 
         except Exception as e:
-            self.log(
-                "error",
+            self._fail(
+                str(e),
                 f"The following error occured while consuming "
                 f"{self.filename}: {e}"
             )
-            raise ConsumerError(e)
         finally:
             document_parser.cleanup()
 
@@ -272,6 +333,8 @@ class Consumer(LoggingMixin):
             "Document {} consumption finished".format(document)
         )
 
+        self._send_progress(100, 100, 'SUCCESS', MESSAGE_FINISHED, document.id)
+
         return document
 
     def _store(self, text, date, mime_type):
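
Note on the mechanics: _send_progress publishes to the channel-layer group "status_updates", and Channels routes any message whose 'type' is 'status_update' to the status_update() method of every StatusConsumer currently subscribed (see src/paperless/consumers.py below). A minimal sketch of publishing such a message by hand, e.g. from a Django shell; the payload values here are made-up placeholders, not output of this commit:

import uuid

from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer

# Publish to the same group Consumer._send_progress uses. Requires the
# Redis channel layer configured in settings.py to be reachable.
layer = get_channel_layer()
async_to_sync(layer.group_send)("status_updates", {
    'type': 'status_update',  # mapped to StatusConsumer.status_update()
    'data': {
        'filename': 'sample.pdf',  # placeholder values
        'task_id': str(uuid.uuid4()),
        'current_progress': 50,
        'max_progress': 100,
        'status': 'WORKING',
        'message': 'parsing_document',
        'document_id': None,
    },
})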

src/documents/parsers.py

@@ -261,7 +261,7 @@ class DocumentParser(LoggingMixin):
     `paperless_tesseract.parsers` for inspiration.
     """
 
-    def __init__(self, logging_group):
+    def __init__(self, logging_group, progress_callback=None):
         super().__init__()
         self.logging_group = logging_group
         os.makedirs(settings.SCRATCH_DIR, exist_ok=True)
@@ -271,6 +271,12 @@ class DocumentParser(LoggingMixin):
         self.archive_path = None
         self.text = None
         self.date = None
+        self.progress_callback = progress_callback
+
+    def progress(self, current, max):
+        print(self.progress_callback)
+        if self.progress_callback:
+            self.progress_callback(current, max)
 
     def extract_metadata(self, document_path, mime_type):
         return []
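
Note: DocumentParser.progress() is the hook parser implementations call while working; the consumer's progress_callback (defined in try_consume_file above) rescales the value into the overall progress bar and forwards it over the websocket. A sketch of a hypothetical subclass using it — split_into_pages and ocr_page are invented helpers, not part of the codebase:

from documents.parsers import DocumentParser


class MyParser(DocumentParser):

    def parse(self, document_path, mime_type, file_name=None):
        pages = self.split_into_pages(document_path)  # hypothetical helper
        for i, page in enumerate(pages):
            self.ocr_page(page)                       # hypothetical helper
            # Forwarded to the consumer's progress_callback, which turns it
            # into a 'WORKING' status update on the websocket.
            self.progress(i + 1, len(pages))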

src/documents/serialisers.py

@@ -8,6 +8,8 @@ from .models import Correspondent, Tag, Document, Log, DocumentType, \
     SavedView, SavedViewFilterRule
 from .parsers import is_mime_type_supported
 
+from django.utils.translation import gettext as _
+
 
 # https://www.django-rest-framework.org/api-guide/serializers/#example
 class DynamicFieldsModelSerializer(serializers.ModelSerializer):
@@ -378,7 +380,9 @@ class PostDocumentSerializer(serializers.Serializer):
 
         if not is_mime_type_supported(mime_type):
             raise serializers.ValidationError(
-                "This file type is not supported.")
+                _("File type %(type)s not supported") %
+                {'type': mime_type}
+            )
 
         return document.name, document_data

src/documents/tasks.py

@@ -66,7 +66,8 @@ def consume_file(path,
                  override_title=None,
                  override_correspondent_id=None,
                  override_document_type_id=None,
-                 override_tag_ids=None):
+                 override_tag_ids=None,
+                 task_id=None):
 
     document = Consumer().try_consume_file(
         path,
@@ -74,7 +75,9 @@ def consume_file(path,
         override_title=override_title,
         override_correspondent_id=override_correspondent_id,
         override_document_type_id=override_document_type_id,
-        override_tag_ids=override_tag_ids)
+        override_tag_ids=override_tag_ids,
+        task_id=task_id
+    )
 
     if document:
         return "Success. New document id {} created".format(
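
Note: task_id now travels from the upload view (src/paperless/views.py changes below) through django-q into the Consumer, so a client can correlate websocket updates with its upload. A sketch of queueing the task by hand with an explicit id; the file path is a made-up example:

import uuid

from django_q.tasks import async_task

task_id = str(uuid.uuid4())
async_task("documents.tasks.consume_file",
           "/tmp/incoming/sample.pdf",  # hypothetical path
           override_filename="sample.pdf",
           task_id=task_id)
# status updates published for this consumption will carry this task_id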

src/documents/tests/test_consumer.py

@@ -170,7 +170,7 @@ class DummyParser(DocumentParser):
         raise NotImplementedError()
 
     def __init__(self, logging_group, scratch_dir, archive_path):
-        super(DummyParser, self).__init__(logging_group)
+        super(DummyParser, self).__init__(logging_group, None)
         _, self.fake_thumb = tempfile.mkstemp(suffix=".png", dir=scratch_dir)
         self.archive_path = archive_path
 
@@ -212,10 +212,24 @@ def fake_magic_from_file(file, mime=False):
 @mock.patch("documents.consumer.magic.from_file", fake_magic_from_file)
 class TestConsumer(DirectoriesMixin, TestCase):
 
-    def make_dummy_parser(self, logging_group):
+    def _assert_first_last_send_progress(self, first_status="STARTING", last_status="SUCCESS", first_progress=0, first_progress_max=100, last_progress=100, last_progress_max=100):
+
+        self._send_progress.assert_called()
+
+        args, kwargs = self._send_progress.call_args_list[0]
+        self.assertEqual(args[0], first_progress)
+        self.assertEqual(args[1], first_progress_max)
+        self.assertEqual(args[2], first_status)
+
+        args, kwargs = self._send_progress.call_args_list[len(self._send_progress.call_args_list) - 1]
+        self.assertEqual(args[0], last_progress)
+        self.assertEqual(args[1], last_progress_max)
+        self.assertEqual(args[2], last_status)
+
+    def make_dummy_parser(self, logging_group, progress_callback=None):
         return DummyParser(logging_group, self.dirs.scratch_dir, self.get_test_archive_file())
 
-    def make_faulty_parser(self, logging_group):
+    def make_faulty_parser(self, logging_group, progress_callback=None):
         return FaultyParser(logging_group, self.dirs.scratch_dir)
 
     def setUp(self):
@@ -228,7 +242,11 @@ class TestConsumer(DirectoriesMixin, TestCase):
             "mime_types": {"application/pdf": ".pdf"},
             "weight": 0
         })]
+        self.addCleanup(patcher.stop)
+
+        # this prevents websocket message reports during testing.
+        patcher = mock.patch("documents.consumer.Consumer._send_progress")
+        self._send_progress = patcher.start()
         self.addCleanup(patcher.stop)
 
         self.consumer = Consumer()
@@ -274,6 +292,8 @@ class TestConsumer(DirectoriesMixin, TestCase):
 
         self.assertFalse(os.path.isfile(filename))
 
+        self._assert_first_last_send_progress()
+
     def testOverrideFilename(self):
         filename = self.get_test_file()
         override_filename = "Statement for November.pdf"
@@ -282,21 +302,26 @@ class TestConsumer(DirectoriesMixin, TestCase):
 
         self.assertEqual(document.title, "Statement for November")
 
+        self._assert_first_last_send_progress()
+
     def testOverrideTitle(self):
         document = self.consumer.try_consume_file(self.get_test_file(), override_title="Override Title")
         self.assertEqual(document.title, "Override Title")
+        self._assert_first_last_send_progress()
 
     def testOverrideCorrespondent(self):
         c = Correspondent.objects.create(name="test")
 
         document = self.consumer.try_consume_file(self.get_test_file(), override_correspondent_id=c.pk)
         self.assertEqual(document.correspondent.id, c.id)
+        self._assert_first_last_send_progress()
 
     def testOverrideDocumentType(self):
         dt = DocumentType.objects.create(name="test")
 
         document = self.consumer.try_consume_file(self.get_test_file(), override_document_type_id=dt.pk)
         self.assertEqual(document.document_type.id, dt.id)
+        self._assert_first_last_send_progress()
 
     def testOverrideTags(self):
         t1 = Tag.objects.create(name="t1")
@@ -307,37 +332,42 @@ class TestConsumer(DirectoriesMixin, TestCase):
         self.assertIn(t1, document.tags.all())
         self.assertNotIn(t2, document.tags.all())
         self.assertIn(t3, document.tags.all())
+        self._assert_first_last_send_progress()
 
     def testNotAFile(self):
-        try:
-            self.consumer.try_consume_file("non-existing-file")
-        except ConsumerError as e:
-            self.assertTrue(str(e).endswith('It is not a file'))
-            return
-
-        self.fail("Should throw exception")
+        self.assertRaisesMessage(
+            ConsumerError,
+            "File not found",
+            self.consumer.try_consume_file,
+            "non-existing-file"
+        )
+
+        self._assert_first_last_send_progress(last_status="FAILED")
 
     def testDuplicates1(self):
         self.consumer.try_consume_file(self.get_test_file())
 
-        try:
-            self.consumer.try_consume_file(self.get_test_file())
-        except ConsumerError as e:
-            self.assertTrue(str(e).endswith("It is a duplicate."))
-            return
-
-        self.fail("Should throw exception")
+        self.assertRaisesMessage(
+            ConsumerError,
+            "It is a duplicate",
+            self.consumer.try_consume_file,
+            self.get_test_file()
+        )
+
+        self._assert_first_last_send_progress(last_status="FAILED")
 
     def testDuplicates2(self):
         self.consumer.try_consume_file(self.get_test_file())
 
-        try:
-            self.consumer.try_consume_file(self.get_test_archive_file())
-        except ConsumerError as e:
-            self.assertTrue(str(e).endswith("It is a duplicate."))
-            return
-
-        self.fail("Should throw exception")
+        self.assertRaisesMessage(
+            ConsumerError,
+            "It is a duplicate",
+            self.consumer.try_consume_file,
+            self.get_test_archive_file()
+        )
+
+        self._assert_first_last_send_progress(last_status="FAILED")
 
     def testDuplicates3(self):
         self.consumer.try_consume_file(self.get_test_archive_file())
@@ -347,13 +377,15 @@ class TestConsumer(DirectoriesMixin, TestCase):
     def testNoParsers(self, m):
         m.return_value = []
 
-        try:
-            self.consumer.try_consume_file(self.get_test_file())
-        except ConsumerError as e:
-            self.assertEqual("Unsupported mime type application/pdf of file sample.pdf", str(e))
-            return
-
-        self.fail("Should throw exception")
+        self.assertRaisesMessage(
+            ConsumerError,
+            "sample.pdf: Unsupported mime type application/pdf",
+            self.consumer.try_consume_file,
+            self.get_test_file()
+        )
+
+        self._assert_first_last_send_progress(last_status="FAILED")
 
     @mock.patch("documents.parsers.document_consumer_declaration.send")
     def testFaultyParser(self, m):
@@ -363,24 +395,28 @@ class TestConsumer(DirectoriesMixin, TestCase):
             "weight": 0
         })]
 
-        try:
-            self.consumer.try_consume_file(self.get_test_file())
-        except ConsumerError as e:
-            self.assertEqual(str(e), "Does not compute.")
-            return
-
-        self.fail("Should throw exception.")
+        self.assertRaisesMessage(
+            ConsumerError,
+            "sample.pdf: Error while consuming document sample.pdf: Does not compute.",
+            self.consumer.try_consume_file,
+            self.get_test_file()
+        )
+
+        self._assert_first_last_send_progress(last_status="FAILED")
 
     @mock.patch("documents.consumer.Consumer._write")
     def testPostSaveError(self, m):
         filename = self.get_test_file()
         m.side_effect = OSError("NO.")
-        try:
-            self.consumer.try_consume_file(filename)
-        except ConsumerError as e:
-            self.assertEqual(str(e), "NO.")
-        else:
-            self.fail("Should raise exception")
+
+        self.assertRaisesMessage(
+            ConsumerError,
+            "sample.pdf: The following error occured while consuming sample.pdf: NO.",
+            self.consumer.try_consume_file,
+            filename
+        )
+
+        self._assert_first_last_send_progress(last_status="FAILED")
 
         # file not deleted
         self.assertTrue(os.path.isfile(filename))
@@ -397,6 +433,8 @@ class TestConsumer(DirectoriesMixin, TestCase):
         self.assertEqual(document.title, "new docs")
         self.assertEqual(document.filename, "none/new docs.pdf")
 
+        self._assert_first_last_send_progress()
+
     @override_settings(PAPERLESS_FILENAME_FORMAT="{correspondent}/{title}")
     @mock.patch("documents.signals.handlers.generate_unique_filename")
     def testFilenameHandlingUnstableFormat(self, m):
@@ -420,6 +458,8 @@ class TestConsumer(DirectoriesMixin, TestCase):
         self.assertIsNotNone(os.path.isfile(document.title))
         self.assertTrue(os.path.isfile(document.source_path))
 
+        self._assert_first_last_send_progress()
+
     @mock.patch("documents.consumer.load_classifier")
     def testClassifyDocument(self, m):
         correspondent = Correspondent.objects.create(name="test")
@@ -439,19 +479,26 @@ class TestConsumer(DirectoriesMixin, TestCase):
         self.assertIn(t1, document.tags.all())
         self.assertNotIn(t2, document.tags.all())
 
+        self._assert_first_last_send_progress()
+
     @override_settings(CONSUMER_DELETE_DUPLICATES=True)
     def test_delete_duplicate(self):
         dst = self.get_test_file()
         self.assertTrue(os.path.isfile(dst))
         doc = self.consumer.try_consume_file(dst)
 
+        self._assert_first_last_send_progress()
+
         self.assertFalse(os.path.isfile(dst))
         self.assertIsNotNone(doc)
 
+        self._send_progress.reset_mock()
+
         dst = self.get_test_file()
         self.assertTrue(os.path.isfile(dst))
         self.assertRaises(ConsumerError, self.consumer.try_consume_file, dst)
         self.assertFalse(os.path.isfile(dst))
+        self._assert_first_last_send_progress(last_status="FAILED")
 
     @override_settings(CONSUMER_DELETE_DUPLICATES=False)
     def test_no_delete_duplicate(self):
@@ -467,6 +514,8 @@ class TestConsumer(DirectoriesMixin, TestCase):
         self.assertRaises(ConsumerError, self.consumer.try_consume_file, dst)
         self.assertTrue(os.path.isfile(dst))
 
+        self._assert_first_last_send_progress(last_status="FAILED")
+
 
 class PreConsumeTestCase(TestCase):
 
@@ -479,9 +528,11 @@ class PreConsumeTestCase(TestCase):
         m.assert_not_called()
 
     @mock.patch("documents.consumer.Popen")
+    @mock.patch("documents.consumer.Consumer._send_progress")
     @override_settings(PRE_CONSUME_SCRIPT="does-not-exist")
-    def test_pre_consume_script_not_found(self, m):
+    def test_pre_consume_script_not_found(self, m, m2):
         c = Consumer()
+        c.filename = "somefile.pdf"
         c.path = "path-to-file"
         self.assertRaises(ConsumerError, c.run_pre_consume_script)
 
@@ -503,7 +554,6 @@ class PreConsumeTestCase(TestCase):
         self.assertEqual(command[1], "path-to-file")
 
 
-
 class PostConsumeTestCase(TestCase):
 
     @mock.patch("documents.consumer.Popen")
@@ -519,12 +569,13 @@ class PostConsumeTestCase(TestCase):
 
         m.assert_not_called()
 
     @override_settings(POST_CONSUME_SCRIPT="does-not-exist")
-    def test_post_consume_script_not_found(self):
+    @mock.patch("documents.consumer.Consumer._send_progress")
+    def test_post_consume_script_not_found(self, m):
         doc = Document.objects.create(title="Test", mime_type="application/pdf")
 
-        self.assertRaises(ConsumerError, Consumer().run_post_consume_script, doc)
+        c = Consumer()
+        c.filename = "somefile.pdf"
+        self.assertRaises(ConsumerError, c.run_post_consume_script, doc)
 
     @mock.patch("documents.consumer.Popen")
     def test_post_consume_script_simple(self, m):

src/documents/views.py

@@ -1,6 +1,7 @@
 import logging
 import os
 import tempfile
+import uuid
 from datetime import datetime
 from time import mktime
 
@@ -213,7 +214,7 @@ class DocumentViewSet(RetrieveModelMixin,
 
         parser_class = get_parser_class_for_mime_type(mime_type)
         if parser_class:
-            parser = parser_class(logging_group=None)
+            parser = parser_class(progress_callback=None, logging_group=None)
 
             try:
                 return parser.extract_metadata(file, mime_type)
@@ -403,6 +404,8 @@ class PostDocumentView(APIView):
             os.utime(f.name, times=(t, t))
             temp_filename = f.name
 
+        task_id = str(uuid.uuid4())
+
         async_task("documents.tasks.consume_file",
                    temp_filename,
                    override_filename=doc_name,
@@ -410,6 +413,7 @@ class PostDocumentView(APIView):
                    override_correspondent_id=correspondent_id,
                    override_document_type_id=document_type_id,
                    override_tag_ids=tag_ids,
+                   task_id=task_id,
                    task_name=os.path.basename(doc_name)[:100])
 
         return Response("OK")

django.po

@@ -8,7 +8,7 @@ msgid ""
 msgstr ""
 "Project-Id-Version: PACKAGE VERSION\n"
 "Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2021-01-10 21:41+0000\n"
+"POT-Creation-Date: 2021-01-28 22:02+0100\n"
 "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
 "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
 "Language-Team: LANGUAGE <LL@li.org>\n"
@@ -21,323 +21,328 @@ msgstr ""
 msgid "Documents"
 msgstr ""
 
-#: documents/models.py:32
+#: documents/models.py:33
 msgid "Any word"
 msgstr ""
 
-#: documents/models.py:33
+#: documents/models.py:34
 msgid "All words"
 msgstr ""
 
-#: documents/models.py:34
+#: documents/models.py:35
 msgid "Exact match"
 msgstr ""
 
-#: documents/models.py:35
+#: documents/models.py:36
 msgid "Regular expression"
 msgstr ""
 
-#: documents/models.py:36
+#: documents/models.py:37
 msgid "Fuzzy word"
 msgstr ""
 
-#: documents/models.py:37
+#: documents/models.py:38
 msgid "Automatic"
 msgstr ""
 
-#: documents/models.py:41 documents/models.py:354 paperless_mail/models.py:25
+#: documents/models.py:42 documents/models.py:352 paperless_mail/models.py:25
 #: paperless_mail/models.py:109
 msgid "name"
 msgstr ""
 
-#: documents/models.py:45
+#: documents/models.py:46
 msgid "match"
 msgstr ""
 
-#: documents/models.py:49
+#: documents/models.py:50
 msgid "matching algorithm"
 msgstr ""
 
-#: documents/models.py:55
+#: documents/models.py:56
 msgid "is insensitive"
 msgstr ""
 
-#: documents/models.py:80 documents/models.py:140
+#: documents/models.py:75 documents/models.py:135
 msgid "correspondent"
 msgstr ""
 
-#: documents/models.py:81
+#: documents/models.py:76
 msgid "correspondents"
 msgstr ""
 
-#: documents/models.py:103
+#: documents/models.py:98
 msgid "color"
 msgstr ""
 
-#: documents/models.py:107
+#: documents/models.py:102
 msgid "is inbox tag"
 msgstr ""
 
-#: documents/models.py:109
+#: documents/models.py:104
 msgid ""
 "Marks this tag as an inbox tag: All newly consumed documents will be tagged "
 "with inbox tags."
 msgstr ""
 
-#: documents/models.py:114
+#: documents/models.py:109
 msgid "tag"
 msgstr ""
 
-#: documents/models.py:115 documents/models.py:171
+#: documents/models.py:110 documents/models.py:166
 msgid "tags"
 msgstr ""
 
-#: documents/models.py:121 documents/models.py:153
+#: documents/models.py:116 documents/models.py:148
 msgid "document type"
 msgstr ""
 
-#: documents/models.py:122
+#: documents/models.py:117
 msgid "document types"
 msgstr ""
 
-#: documents/models.py:130
+#: documents/models.py:125
 msgid "Unencrypted"
 msgstr ""
 
-#: documents/models.py:131
+#: documents/models.py:126
 msgid "Encrypted with GNU Privacy Guard"
 msgstr ""
 
-#: documents/models.py:144
+#: documents/models.py:139
 msgid "title"
 msgstr ""
 
-#: documents/models.py:157
+#: documents/models.py:152
 msgid "content"
 msgstr ""
 
-#: documents/models.py:159
+#: documents/models.py:154
 msgid ""
 "The raw, text-only data of the document. This field is primarily used for "
 "searching."
 msgstr ""
 
-#: documents/models.py:164
+#: documents/models.py:159
 msgid "mime type"
 msgstr ""
 
-#: documents/models.py:175
+#: documents/models.py:170
 msgid "checksum"
 msgstr ""
 
-#: documents/models.py:179
+#: documents/models.py:174
 msgid "The checksum of the original document."
 msgstr ""
 
-#: documents/models.py:183
+#: documents/models.py:178
 msgid "archive checksum"
 msgstr ""
 
-#: documents/models.py:188
+#: documents/models.py:183
 msgid "The checksum of the archived document."
 msgstr ""
 
-#: documents/models.py:192 documents/models.py:332
+#: documents/models.py:187 documents/models.py:330
 msgid "created"
 msgstr ""
 
-#: documents/models.py:196
+#: documents/models.py:191
 msgid "modified"
 msgstr ""
 
-#: documents/models.py:200
+#: documents/models.py:195
 msgid "storage type"
 msgstr ""
 
-#: documents/models.py:208
+#: documents/models.py:203
 msgid "added"
 msgstr ""
 
-#: documents/models.py:212
+#: documents/models.py:207
 msgid "filename"
 msgstr ""
 
-#: documents/models.py:217
+#: documents/models.py:212
 msgid "Current filename in storage"
 msgstr ""
 
-#: documents/models.py:221
+#: documents/models.py:216
 msgid "archive serial number"
 msgstr ""
 
-#: documents/models.py:226
+#: documents/models.py:221
 msgid "The position of this document in your physical document archive."
 msgstr ""
 
-#: documents/models.py:232
+#: documents/models.py:227
 msgid "document"
 msgstr ""
 
-#: documents/models.py:233
+#: documents/models.py:228
 msgid "documents"
 msgstr ""
 
-#: documents/models.py:315
+#: documents/models.py:313
 msgid "debug"
 msgstr ""
 
-#: documents/models.py:316
+#: documents/models.py:314
 msgid "information"
 msgstr ""
 
-#: documents/models.py:317
+#: documents/models.py:315
 msgid "warning"
 msgstr ""
 
-#: documents/models.py:318
+#: documents/models.py:316
 msgid "error"
 msgstr ""
 
-#: documents/models.py:319
+#: documents/models.py:317
 msgid "critical"
 msgstr ""
 
-#: documents/models.py:323
+#: documents/models.py:321
 msgid "group"
 msgstr ""
 
-#: documents/models.py:326
+#: documents/models.py:324
 msgid "message"
 msgstr ""
 
-#: documents/models.py:329
+#: documents/models.py:327
 msgid "level"
 msgstr ""
 
-#: documents/models.py:336
+#: documents/models.py:334
 msgid "log"
 msgstr ""
 
-#: documents/models.py:337
+#: documents/models.py:335
 msgid "logs"
 msgstr ""
 
-#: documents/models.py:348 documents/models.py:398
+#: documents/models.py:346 documents/models.py:396
 msgid "saved view"
 msgstr ""
 
-#: documents/models.py:349
+#: documents/models.py:347
 msgid "saved views"
 msgstr ""
 
-#: documents/models.py:352
+#: documents/models.py:350
 msgid "user"
 msgstr ""
 
-#: documents/models.py:358
+#: documents/models.py:356
 msgid "show on dashboard"
 msgstr ""
 
-#: documents/models.py:361
+#: documents/models.py:359
 msgid "show in sidebar"
 msgstr ""
 
-#: documents/models.py:365
+#: documents/models.py:363
 msgid "sort field"
 msgstr ""
 
-#: documents/models.py:368
+#: documents/models.py:366
 msgid "sort reverse"
 msgstr ""
 
-#: documents/models.py:374
+#: documents/models.py:372
 msgid "title contains"
 msgstr ""
 
-#: documents/models.py:375
+#: documents/models.py:373
 msgid "content contains"
 msgstr ""
 
-#: documents/models.py:376
+#: documents/models.py:374
 msgid "ASN is"
 msgstr ""
 
-#: documents/models.py:377
+#: documents/models.py:375
 msgid "correspondent is"
 msgstr ""
 
-#: documents/models.py:378
+#: documents/models.py:376
 msgid "document type is"
 msgstr ""
 
-#: documents/models.py:379
+#: documents/models.py:377
 msgid "is in inbox"
 msgstr ""
 
-#: documents/models.py:380
+#: documents/models.py:378
 msgid "has tag"
 msgstr ""
 
-#: documents/models.py:381
+#: documents/models.py:379
 msgid "has any tag"
 msgstr ""
 
-#: documents/models.py:382
+#: documents/models.py:380
 msgid "created before"
 msgstr ""
 
-#: documents/models.py:383
+#: documents/models.py:381
 msgid "created after"
 msgstr ""
 
-#: documents/models.py:384
+#: documents/models.py:382
 msgid "created year is"
 msgstr ""
 
-#: documents/models.py:385
+#: documents/models.py:383
 msgid "created month is"
 msgstr ""
 
-#: documents/models.py:386
+#: documents/models.py:384
 msgid "created day is"
 msgstr ""
 
-#: documents/models.py:387
+#: documents/models.py:385
 msgid "added before"
 msgstr ""
 
-#: documents/models.py:388
+#: documents/models.py:386
 msgid "added after"
 msgstr ""
 
-#: documents/models.py:389
+#: documents/models.py:387
 msgid "modified before"
 msgstr ""
 
-#: documents/models.py:390
+#: documents/models.py:388
 msgid "modified after"
 msgstr ""
 
-#: documents/models.py:391
+#: documents/models.py:389
 msgid "does not have tag"
 msgstr ""
 
-#: documents/models.py:402
+#: documents/models.py:400
 msgid "rule type"
 msgstr ""
 
-#: documents/models.py:406
+#: documents/models.py:404
 msgid "value"
 msgstr ""
 
-#: documents/models.py:412
+#: documents/models.py:410
 msgid "filter rule"
 msgstr ""
 
-#: documents/models.py:413
+#: documents/models.py:411
 msgid "filter rules"
 msgstr ""
 
+#: documents/serialisers.py:383
+#, python-format
+msgid "File type %(type)s not supported"
+msgstr ""
+
 #: documents/templates/index.html:20
 msgid "Paperless-ng is loading..."
 msgstr ""
@@ -378,23 +383,23 @@ msgstr ""
 msgid "Sign in"
 msgstr ""
 
-#: paperless/settings.py:268
+#: paperless/settings.py:286
 msgid "English"
 msgstr ""
 
-#: paperless/settings.py:269
+#: paperless/settings.py:287
 msgid "German"
 msgstr ""
 
-#: paperless/settings.py:270
+#: paperless/settings.py:288
 msgid "Dutch"
 msgstr ""
 
-#: paperless/settings.py:271
+#: paperless/settings.py:289
 msgid "French"
 msgstr ""
 
-#: paperless/urls.py:108
+#: paperless/urls.py:114
 msgid "Paperless-ng administration"
 msgstr ""

src/paperless/asgi.py (new file, 23 lines)

@@ -0,0 +1,23 @@
+import os
+
+from django.core.asgi import get_asgi_application
+
+# Fetch Django ASGI application early to ensure AppRegistry is populated
+# before importing consumers and AuthMiddlewareStack that may import ORM
+# models.
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless.settings")
+django_asgi_app = get_asgi_application()
+
+from channels.auth import AuthMiddlewareStack  # NOQA: E402
+from channels.routing import ProtocolTypeRouter, URLRouter  # NOQA: E402
+
+from paperless.urls import websocket_urlpatterns  # NOQA: E402
+
+application = ProtocolTypeRouter({
+    "http": get_asgi_application(),
+    "websocket": AuthMiddlewareStack(
+        URLRouter(
+            websocket_urlpatterns
+        )
+    ),
+})
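
Note: with this router, HTTP continues to be served by the regular Django application while websocket connections go through AuthMiddlewareStack, so the process has to run under an ASGI server rather than plain WSGI. A quick connectivity check is possible with channels' test helper — this test is a sketch, not part of the commit:

from channels.testing import WebsocketCommunicator
from django.test import TestCase

from paperless.asgi import application


class TestStatusWebsocket(TestCase):

    async def test_connect(self):
        # routes through ProtocolTypeRouter -> AuthMiddlewareStack -> URLRouter
        communicator = WebsocketCommunicator(application, "/ws/status/")
        connected, _ = await communicator.connect()
        self.assertTrue(connected)
        await communicator.disconnect()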

src/paperless/consumers.py (new file, 18 lines)

@@ -0,0 +1,18 @@
+import json
+
+from asgiref.sync import async_to_sync
+from channels.generic.websocket import WebsocketConsumer
+
+
+class StatusConsumer(WebsocketConsumer):
+    def connect(self):
+        self.accept()
+        async_to_sync(self.channel_layer.group_add)(
+            'status_updates', self.channel_name)
+
+    def disconnect(self, close_code):
+        async_to_sync(self.channel_layer.group_discard)(
+            'status_updates', self.channel_name)
+
+    def status_update(self, event):
+        self.send(json.dumps(event['data']))
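
Note: StatusConsumer joins every connection to the "status_updates" group and relays each status_update event as a JSON text frame. As written it accepts any connection, so a plain websocket client can listen in; a sketch using the third-party `websockets` package, with host and port assumed from a local development setup:

import asyncio
import json

import websockets


async def listen():
    async with websockets.connect("ws://localhost:8000/ws/status/") as ws:
        async for message in ws:
            data = json.loads(message)
            print(data["status"], data["current_progress"], data["filename"])


asyncio.run(listen())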

src/paperless/settings.py

@@ -100,6 +100,8 @@ INSTALLED_APPS = [
 
     "django_q",
 
+    "channels",
+
 ] + env_apps
 
 REST_FRAMEWORK = {
@@ -133,6 +135,7 @@ ROOT_URLCONF = 'paperless.urls'
 FORCE_SCRIPT_NAME = os.getenv("PAPERLESS_FORCE_SCRIPT_NAME")
 
 WSGI_APPLICATION = 'paperless.wsgi.application'
+ASGI_APPLICATION = "paperless.asgi.application"
 
 STATIC_URL = os.getenv("PAPERLESS_STATIC_URL", "/static/")
 
@@ -153,6 +156,15 @@ TEMPLATES = [
     },
 ]
 
+CHANNEL_LAYERS = {
+    "default": {
+        "BACKEND": "channels_redis.core.RedisChannelLayer",
+        "CONFIG": {
+            "hosts": [os.getenv("PAPERLESS_REDIS", "redis://localhost:6379")],
+        },
+    },
+}
+
 ###############################################################################
 # Security                                                                    #
 ###############################################################################
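
Note: the channel layer reuses the same Redis instance that django-q already requires (PAPERLESS_REDIS). For unit tests or a single-process development setup without Redis, channels ships an in-memory backend that could be swapped in — a sketch, not part of the commit:

CHANNEL_LAYERS = {
    "default": {
        # single-process only; messages are not shared across workers
        "BACKEND": "channels.layers.InMemoryChannelLayer",
    },
}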

src/paperless/urls.py

@@ -9,6 +9,7 @@ from rest_framework.routers import DefaultRouter
 
 from django.utils.translation import gettext_lazy as _
 
+from paperless.consumers import StatusConsumer
 from documents.views import (
     CorrespondentViewSet,
     DocumentViewSet,
@@ -100,6 +101,11 @@ urlpatterns = [
     re_path(r".*", login_required(IndexView.as_view())),
 ]
 
+
+websocket_urlpatterns = [
+    re_path(r'ws/status/$', StatusConsumer.as_asgi()),
+]
+
 # Text in each page's <h1> (and above login form).
 admin.site.site_header = 'Paperless-ng'
 # Text at the end of each page's <title>.

src/paperless_tesseract/parsers.py

@@ -190,11 +190,11 @@ class RasterisedDocumentParser(DocumentParser):
                 # Also, no archived file.
                 if not self.text:
                     # However, if we don't have anything, fail:
-                    raise ParseError(e)
+                    raise ParseError(e.__class__.__name__ + ": " + str(e))
 
         except Exception as e:
             # Anything else is probably serious.
-            raise ParseError(e)
+            raise ParseError(e.__class__.__name__ + ": " + str(e))
 
         if not self.text:
             # This may happen for files that don't have any text.
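
Note: the point of prepending e.__class__.__name__ is that str() of an exception drops its type, so the ParseError message shown to the user (and now pushed over the websocket via _fail) would otherwise be a bare message with no hint of what went wrong. A small illustration:

try:
    raise FileNotFoundError("gs not found")
except Exception as e:
    assert str(e) == "gs not found"            # type information lost
    assert f"{e.__class__.__name__}: {str(e)}" == \
        "FileNotFoundError: gs not found"      # type preserved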