Merge pull request #1049 from paperless-ngx/bugfix-better-sanity-msgs

Bugfix: Better sanity check messages
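For orientation, a hedged before/after sketch of the message API this PR reworks (illustrative only; the actual call sites are in the diff below):

    # before: one flat list, the document id baked into every message string
    messages.error(f"Original of document {doc.pk} does not exist.")

    # after: messages are keyed by the document's primary key and the text stays
    # generic; None is the key for issues not tied to a document (orphaned files)
    messages.error(doc.pk, "Original of document does not exist.")
    messages.warning(None, f"Orphaned file in media dir: {extra_file}")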
@@ -1,6 +1,8 @@
 import hashlib
 import logging
-import os
+from collections import defaultdict
+from pathlib import Path
+from typing import Final
 
 from django.conf import settings
 from documents.models import Document
@@ -9,16 +11,20 @@ from tqdm import tqdm
 
 class SanityCheckMessages:
     def __init__(self):
-        self._messages = []
+        self._messages = defaultdict(list)
+        self.has_error = False
+        self.has_warning = False
 
-    def error(self, message):
-        self._messages.append({"level": logging.ERROR, "message": message})
+    def error(self, doc_pk, message):
+        self._messages[doc_pk].append({"level": logging.ERROR, "message": message})
+        self.has_error = True
 
-    def warning(self, message):
-        self._messages.append({"level": logging.WARNING, "message": message})
+    def warning(self, doc_pk, message):
+        self._messages[doc_pk].append({"level": logging.WARNING, "message": message})
+        self.has_warning = True
 
-    def info(self, message):
-        self._messages.append({"level": logging.INFO, "message": message})
+    def info(self, doc_pk, message):
+        self._messages[doc_pk].append({"level": logging.INFO, "message": message})
 
     def log_messages(self):
         logger = logging.getLogger("paperless.sanity_checker")
@@ -26,8 +32,19 @@ class SanityCheckMessages:
         if len(self._messages) == 0:
             logger.info("Sanity checker detected no issues.")
         else:
-            for msg in self._messages:
-                logger.log(msg["level"], msg["message"])
+            # Query once
+            all_docs = Document.objects.all()
+
+            for doc_pk in self._messages:
+                if doc_pk is not None:
+                    doc = all_docs.get(pk=doc_pk)
+                    logger.info(
+                        f"Detected following issue(s) with document #{doc.pk},"
+                        f" titled {doc.title}",
+                    )
+                for msg in self._messages[doc_pk]:
+                    logger.log(msg["level"], msg["message"])
 
     def __len__(self):
         return len(self._messages)
@@ -35,99 +52,94 @@ class SanityCheckMessages:
     def __getitem__(self, item):
         return self._messages[item]
 
-    def has_error(self):
-        return any([msg["level"] == logging.ERROR for msg in self._messages])
-
-    def has_warning(self):
-        return any([msg["level"] == logging.WARNING for msg in self._messages])
-
 
 class SanityCheckFailedException(Exception):
     pass
 
 
-def check_sanity(progress=False):
+def check_sanity(progress=False) -> SanityCheckMessages:
     messages = SanityCheckMessages()
 
-    present_files = []
-    for root, subdirs, files in os.walk(settings.MEDIA_ROOT):
-        for f in files:
-            present_files.append(os.path.normpath(os.path.join(root, f)))
+    present_files = {
+        x.resolve() for x in Path(settings.MEDIA_ROOT).glob("**/*") if not x.is_dir()
+    }
 
-    lockfile = os.path.normpath(settings.MEDIA_LOCK)
+    lockfile = Path(settings.MEDIA_LOCK).resolve()
     if lockfile in present_files:
         present_files.remove(lockfile)
 
     for doc in tqdm(Document.objects.all(), disable=not progress):
         # Check sanity of the thumbnail
-        if not os.path.isfile(doc.thumbnail_path):
-            messages.error(f"Thumbnail of document {doc.pk} does not exist.")
+        thumbnail_path: Final[Path] = Path(doc.thumbnail_path).resolve()
+        if not thumbnail_path.exists() or not thumbnail_path.is_file():
+            messages.error(doc.pk, "Thumbnail of document does not exist.")
         else:
-            if os.path.normpath(doc.thumbnail_path) in present_files:
-                present_files.remove(os.path.normpath(doc.thumbnail_path))
+            if thumbnail_path in present_files:
+                present_files.remove(thumbnail_path)
             try:
-                with doc.thumbnail_file as f:
-                    f.read()
+                _ = thumbnail_path.read_bytes()
            except OSError as e:
-                messages.error(f"Cannot read thumbnail file of document {doc.pk}: {e}")
+                messages.error(doc.pk, f"Cannot read thumbnail file of document: {e}")
 
         # Check sanity of the original file
         # TODO: extract method
-        if not os.path.isfile(doc.source_path):
-            messages.error(f"Original of document {doc.pk} does not exist.")
+        source_path: Final[Path] = Path(doc.source_path).resolve()
+        if not source_path.exists() or not source_path.is_file():
+            messages.error(doc.pk, "Original of document does not exist.")
        else:
-            if os.path.normpath(doc.source_path) in present_files:
-                present_files.remove(os.path.normpath(doc.source_path))
+            if source_path in present_files:
+                present_files.remove(source_path)
            try:
-                with doc.source_file as f:
-                    checksum = hashlib.md5(f.read()).hexdigest()
+                checksum = hashlib.md5(source_path.read_bytes()).hexdigest()
            except OSError as e:
-                messages.error(f"Cannot read original file of document {doc.pk}: {e}")
+                messages.error(doc.pk, f"Cannot read original file of document: {e}")
            else:
                if not checksum == doc.checksum:
                    messages.error(
-                        f"Checksum mismatch of document {doc.pk}. "
+                        doc.pk,
+                        "Checksum mismatch. "
                        f"Stored: {doc.checksum}, actual: {checksum}.",
                    )
 
         # Check sanity of the archive file.
-        if doc.archive_checksum and not doc.archive_filename:
+        if doc.archive_checksum is not None and doc.archive_filename is None:
            messages.error(
-                f"Document {doc.pk} has an archive file checksum, but no "
-                f"archive filename.",
+                doc.pk,
+                "Document has an archive file checksum, but no archive filename.",
            )
-        elif not doc.archive_checksum and doc.archive_filename:
+        elif doc.archive_checksum is None and doc.archive_filename is not None:
            messages.error(
-                f"Document {doc.pk} has an archive file, but its checksum is "
-                f"missing.",
+                doc.pk,
+                "Document has an archive file, but its checksum is missing.",
            )
        elif doc.has_archive_version:
-            if not os.path.isfile(doc.archive_path):
-                messages.error(f"Archived version of document {doc.pk} does not exist.")
+            archive_path: Final[Path] = Path(doc.archive_path).resolve()
+            if not archive_path.exists() or not archive_path.is_file():
+                messages.error(doc.pk, "Archived version of document does not exist.")
            else:
-                if os.path.normpath(doc.archive_path) in present_files:
-                    present_files.remove(os.path.normpath(doc.archive_path))
+                if archive_path in present_files:
+                    present_files.remove(archive_path)
                try:
-                    with doc.archive_file as f:
-                        checksum = hashlib.md5(f.read()).hexdigest()
+                    checksum = hashlib.md5(archive_path.read_bytes()).hexdigest()
                except OSError as e:
                    messages.error(
-                        f"Cannot read archive file of document {doc.pk}: {e}",
+                        doc.pk,
+                        f"Cannot read archive file of document : {e}",
                    )
                else:
                    if not checksum == doc.archive_checksum:
                        messages.error(
-                            f"Checksum mismatch of archived document "
-                            f"{doc.pk}. "
+                            doc.pk,
+                            "Checksum mismatch of archived document. "
                            f"Stored: {doc.archive_checksum}, "
                            f"actual: {checksum}.",
                        )
 
         # other document checks
        if not doc.content:
-            messages.info(f"Document {doc.pk} has no content.")
+            messages.info(doc.pk, "Document contains no OCR data")
 
     for extra_file in present_files:
-        messages.warning(f"Orphaned file in media dir: {extra_file}")
+        messages.warning(None, f"Orphaned file in media dir: {extra_file}")
 
     return messages

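To summarize the reworked container (a minimal sketch, not part of the PR; the pk 42 and the orphaned path are made up): messages now live in a defaultdict keyed by document pk, len() counts keys rather than individual messages, indexing returns the list of issues for one key, and has_error/has_warning are plain boolean attributes set as messages are added.

    from documents.sanity_checker import SanityCheckMessages

    messages = SanityCheckMessages()
    messages.error(42, "Original of document does not exist.")
    messages.info(42, "Document contains no OCR data")
    messages.warning(None, "Orphaned file in media dir: /media/stray.pdf")

    assert messages.has_error and messages.has_warning
    assert len(messages) == 2      # two keys: 42 and None
    assert len(messages[42]) == 2  # both issues recorded for document 42
    # log_messages() looks up each referenced document and emits a
    # "Detected following issue(s) with document #42, titled ..." header
    # before logging the individual issues at their own levels.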
@@ -338,9 +338,9 @@ def sanity_check():
 
     messages.log_messages()
 
-    if messages.has_error():
+    if messages.has_error:
         raise SanityCheckFailedException("Sanity check failed with errors. See log.")
-    elif messages.has_warning():
+    elif messages.has_warning:
         return "Sanity check exited with warnings. See log."
     elif len(messages) > 0:
         return "Sanity check exited with infos. See log."

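A side note for any other callers of check_sanity() (sketch only, not from the PR): has_error and has_warning are now booleans maintained while messages accumulate, so they are read as attributes; keeping the old call syntax would raise TypeError because a bool is not callable.

    messages = check_sanity()
    if messages.has_error:       # was: messages.has_error()
        handle_errors(messages)  # hypothetical handler, purely for illustration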
@@ -238,5 +238,5 @@ class TestSanityChecker(DirectoriesMixin, TestCase):
         with self.assertLogs() as capture:
             call_command("document_sanity_checker")
 
-        self.assertEqual(len(capture.output), 1)
-        self.assertIn("Checksum mismatch of document", capture.output[0])
+        self.assertEqual(len(capture.output), 2)
+        self.assertIn("Checksum mismatch. Stored: abc, actual:", capture.output[1])

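Roughly why the expected record count changes from 1 to 2 here (a sketch, assuming the fixture still produces a single checksum-mismatch error; the pk and title shown are placeholders): log_messages() now emits a per-document header before the issue itself, so the mismatch text moves to the second captured record.

    with self.assertLogs() as capture:
        call_command("document_sanity_checker")

    # capture.output[0] carries the header, e.g.
    #   "Detected following issue(s) with document #1, titled <title>"
    # capture.output[1] carries the issue, e.g.
    #   "Checksum mismatch. Stored: abc, actual: <md5 of the file on disk>."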
@@ -190,7 +190,7 @@ class TestExportImport(DirectoriesMixin, TestCase):
             self.assertEqual(Document.objects.get(id=self.d4.id).title, "wow_dec")
             messages = check_sanity()
             # everything is alright after the test
-            self.assertEqual(len(messages), 0, str([str(m) for m in messages]))
+            self.assertEqual(len(messages), 0)
 
     def test_exporter_with_filename_format(self):
         shutil.rmtree(os.path.join(self.dirs.media_dir, "documents"))

@@ -8,62 +8,9 @@ from django.conf import settings
 from django.test import TestCase
 from documents.models import Document
 from documents.sanity_checker import check_sanity
-from documents.sanity_checker import SanityCheckMessages
 from documents.tests.utils import DirectoriesMixin
 
 
-class TestSanityCheckMessages(TestCase):
-    def test_no_messages(self):
-        messages = SanityCheckMessages()
-        self.assertEqual(len(messages), 0)
-        self.assertFalse(messages.has_error())
-        self.assertFalse(messages.has_warning())
-        with self.assertLogs() as capture:
-            messages.log_messages()
-            self.assertEqual(len(capture.output), 1)
-            self.assertEqual(capture.records[0].levelno, logging.INFO)
-            self.assertEqual(
-                capture.records[0].message,
-                "Sanity checker detected no issues.",
-            )
-
-    def test_info(self):
-        messages = SanityCheckMessages()
-        messages.info("Something might be wrong")
-        self.assertEqual(len(messages), 1)
-        self.assertFalse(messages.has_error())
-        self.assertFalse(messages.has_warning())
-        with self.assertLogs() as capture:
-            messages.log_messages()
-            self.assertEqual(len(capture.output), 1)
-            self.assertEqual(capture.records[0].levelno, logging.INFO)
-            self.assertEqual(capture.records[0].message, "Something might be wrong")
-
-    def test_warning(self):
-        messages = SanityCheckMessages()
-        messages.warning("Something is wrong")
-        self.assertEqual(len(messages), 1)
-        self.assertFalse(messages.has_error())
-        self.assertTrue(messages.has_warning())
-        with self.assertLogs() as capture:
-            messages.log_messages()
-            self.assertEqual(len(capture.output), 1)
-            self.assertEqual(capture.records[0].levelno, logging.WARNING)
-            self.assertEqual(capture.records[0].message, "Something is wrong")
-
-    def test_error(self):
-        messages = SanityCheckMessages()
-        messages.error("Something is seriously wrong")
-        self.assertEqual(len(messages), 1)
-        self.assertTrue(messages.has_error())
-        self.assertFalse(messages.has_warning())
-        with self.assertLogs() as capture:
-            messages.log_messages()
-            self.assertEqual(len(capture.output), 1)
-            self.assertEqual(capture.records[0].levelno, logging.ERROR)
-            self.assertEqual(capture.records[0].message, "Something is seriously wrong")
-
-
 class TestSanityCheck(DirectoriesMixin, TestCase):
     def make_test_data(self):
 
@@ -111,10 +58,30 @@ class TestSanityCheck(DirectoriesMixin, TestCase):
             archive_filename="0000001.pdf",
         )
 
-    def assertSanityError(self, messageRegex):
+    def assertSanityError(self, doc: Document, messageRegex):
         messages = check_sanity()
-        self.assertTrue(messages.has_error())
-        self.assertRegex(messages[0]["message"], messageRegex)
+        self.assertTrue(messages.has_error)
+        with self.assertLogs() as capture:
+            messages.log_messages()
+            self.assertEqual(
+                capture.records[0].message,
+                f"Detected following issue(s) with document #{doc.pk}, titled {doc.title}",
+            )
+            self.assertRegex(capture.records[1].message, messageRegex)
+
+    def test_no_issues(self):
+        self.make_test_data()
+        messages = check_sanity()
+        self.assertFalse(messages.has_error)
+        self.assertFalse(messages.has_warning)
+        with self.assertLogs() as capture:
+            messages.log_messages()
+            self.assertEqual(len(capture.output), 1)
+            self.assertEqual(capture.records[0].levelno, logging.INFO)
+            self.assertEqual(
+                capture.records[0].message,
+                "Sanity checker detected no issues.",
+            )
 
     def test_no_docs(self):
         self.assertEqual(len(check_sanity()), 0)
@@ -126,75 +93,82 @@ class TestSanityCheck(DirectoriesMixin, TestCase):
     def test_no_thumbnail(self):
         doc = self.make_test_data()
         os.remove(doc.thumbnail_path)
-        self.assertSanityError("Thumbnail of document .* does not exist")
+        self.assertSanityError(doc, "Thumbnail of document does not exist")
 
     def test_thumbnail_no_access(self):
         doc = self.make_test_data()
         os.chmod(doc.thumbnail_path, 0o000)
-        self.assertSanityError("Cannot read thumbnail file of document")
+        self.assertSanityError(doc, "Cannot read thumbnail file of document")
         os.chmod(doc.thumbnail_path, 0o777)
 
     def test_no_original(self):
         doc = self.make_test_data()
         os.remove(doc.source_path)
-        self.assertSanityError("Original of document .* does not exist.")
+        self.assertSanityError(doc, "Original of document does not exist.")
 
     def test_original_no_access(self):
         doc = self.make_test_data()
         os.chmod(doc.source_path, 0o000)
-        self.assertSanityError("Cannot read original file of document")
+        self.assertSanityError(doc, "Cannot read original file of document")
         os.chmod(doc.source_path, 0o777)
 
     def test_original_checksum_mismatch(self):
         doc = self.make_test_data()
         doc.checksum = "WOW"
         doc.save()
-        self.assertSanityError("Checksum mismatch of document")
+        self.assertSanityError(doc, "Checksum mismatch. Stored: WOW, actual: ")
 
     def test_no_archive(self):
         doc = self.make_test_data()
         os.remove(doc.archive_path)
-        self.assertSanityError("Archived version of document .* does not exist.")
+        self.assertSanityError(doc, "Archived version of document does not exist.")
 
     def test_archive_no_access(self):
         doc = self.make_test_data()
         os.chmod(doc.archive_path, 0o000)
-        self.assertSanityError("Cannot read archive file of document")
+        self.assertSanityError(doc, "Cannot read archive file of document")
         os.chmod(doc.archive_path, 0o777)
 
     def test_archive_checksum_mismatch(self):
         doc = self.make_test_data()
         doc.archive_checksum = "WOW"
         doc.save()
-        self.assertSanityError("Checksum mismatch of archived document")
+        self.assertSanityError(doc, "Checksum mismatch of archived document")
 
     def test_empty_content(self):
         doc = self.make_test_data()
         doc.content = ""
         doc.save()
         messages = check_sanity()
-        self.assertFalse(messages.has_error())
-        self.assertFalse(messages.has_warning())
+        self.assertFalse(messages.has_error)
+        self.assertFalse(messages.has_warning)
         self.assertEqual(len(messages), 1)
-        self.assertRegex(messages[0]["message"], "Document .* has no content.")
+        self.assertRegex(
+            messages[doc.pk][0]["message"],
+            "Document contains no OCR data",
+        )
 
     def test_orphaned_file(self):
         doc = self.make_test_data()
         Path(self.dirs.originals_dir, "orphaned").touch()
         messages = check_sanity()
-        self.assertFalse(messages.has_error())
-        self.assertTrue(messages.has_warning())
-        self.assertEqual(len(messages), 1)
-        self.assertRegex(messages[0]["message"], "Orphaned file in media dir")
+        self.assertTrue(messages.has_warning)
+        self.assertRegex(
+            messages._messages[None][0]["message"],
+            "Orphaned file in media dir",
+        )
 
     def test_archive_filename_no_checksum(self):
         doc = self.make_test_data()
         doc.archive_checksum = None
         doc.save()
-        self.assertSanityError("has an archive file, but its checksum is missing.")
+        self.assertSanityError(doc, "has an archive file, but its checksum is missing.")
 
     def test_archive_checksum_no_filename(self):
         doc = self.make_test_data()
         doc.archive_filename = None
         doc.save()
-        self.assertSanityError("has an archive file checksum, but no archive filename.")
+        self.assertSanityError(
+            doc,
+            "has an archive file checksum, but no archive filename.",
        )

@@ -538,7 +538,7 @@ class TestTasks(DirectoriesMixin, TestCase):
     @mock.patch("documents.tasks.sanity_checker.check_sanity")
     def test_sanity_check_error(self, m):
         messages = SanityCheckMessages()
-        messages.error("Some error")
+        messages.error(None, "Some error")
         m.return_value = messages
         self.assertRaises(SanityCheckFailedException, tasks.sanity_check)
         m.assert_called_once()
@@ -546,7 +546,7 @@ class TestTasks(DirectoriesMixin, TestCase):
     @mock.patch("documents.tasks.sanity_checker.check_sanity")
     def test_sanity_check_warning(self, m):
         messages = SanityCheckMessages()
-        messages.warning("Some warning")
+        messages.warning(None, "Some warning")
         m.return_value = messages
         self.assertEqual(
             tasks.sanity_check(),
@@ -557,7 +557,7 @@ class TestTasks(DirectoriesMixin, TestCase):
     @mock.patch("documents.tasks.sanity_checker.check_sanity")
     def test_sanity_check_info(self, m):
         messages = SanityCheckMessages()
-        messages.info("Some info")
+        messages.info(None, "Some info")
         m.return_value = messages
         self.assertEqual(
             tasks.sanity_check(),