Mirror of https://github.com/paperless-ngx/paperless-ngx.git, synced 2025-11-03 03:16:10 -06:00
	Runs pyupgrade to Python 3.8+ and adds a hook for it
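The change set is mechanical: pyupgrade rewrites pre-Python-3.8 idioms into their modern equivalents, and a new pre-commit hook keeps future commits in line. As a quick orientation, here is a standalone sketch (illustrative only, not code from this repository) of the rewrite patterns that recur throughout the diff below, written as old and new spellings that behave identically:

# Illustrative only: representative pyupgrade rewrites, not paperless-ngx code.
import threading


class Base:
    def __init__(self, name):
        self.name = name


class OldStyle(Base):
    def __init__(self, name):
        # Python 2 style super() with explicit class and instance
        super(OldStyle, self).__init__(name)
        # dict()/set() wrapped around a list comprehension
        self.threads = dict([(t.ident, t.name) for t in threading.enumerate()])
        self.letters = set([c for c in name])
        # str.format() and an explicit utf-8 argument to encode()
        self.label = "document {}".format(name).encode("utf-8")


class NewStyle(Base):
    def __init__(self, name):
        # argument-free super(), valid on Python 3
        super().__init__(name)
        # dict/set comprehensions avoid the intermediate list
        self.threads = {t.ident: t.name for t in threading.enumerate()}
        self.letters = {c for c in name}
        # f-string, and encode() already defaults to utf-8
        self.label = f"document {name}".encode()


if __name__ == "__main__":
    old, new = OldStyle("invoice"), NewStyle("invoice")
    assert old.label == new.label and old.letters == new.letters
    print("old and new spellings produce the same values")

The diff below applies these kinds of rewrites across the code base and registers pyupgrade (run with "--py38-plus") in the pre-commit configuration.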
@@ -37,7 +37,7 @@ repos:
         exclude: "(^Pipfile\\.lock$)"
   # Python hooks
   - repo: https://github.com/asottile/reorder_python_imports
-    rev: v3.0.1
+    rev: v3.1.0
     hooks:
       - id: reorder-python-imports
         exclude: "(migrations)"
@@ -62,6 +62,13 @@ repos:
     rev: 22.3.0
     hooks:
       - id: black
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v2.32.1
+    hooks:
+      - id: pyupgrade
+        exclude: "(migrations)"
+        args:
+          - "--py38-plus"
   # Dockerfile hooks
   - repo: https://github.com/AleksaC/hadolint-py
     rev: v2.10.0
@@ -24,7 +24,7 @@ def worker_int(worker):
     ## get traceback info
     import threading, sys, traceback

-    id2name = dict([(th.ident, th.name) for th in threading.enumerate()])
+    id2name = {th.ident: th.name for th in threading.enumerate()}
     code = []
     for threadId, stack in sys._current_frames().items():
         code.append("\n# Thread: %s(%d)" % (id2name.get(threadId, ""), threadId))
@@ -74,19 +74,19 @@ class DocumentAdmin(admin.ModelAdmin):
             for o in queryset:
                 index.remove_document(writer, o)

-        super(DocumentAdmin, self).delete_queryset(request, queryset)
+        super().delete_queryset(request, queryset)

     def delete_model(self, request, obj):
         from documents import index

         index.remove_document_from_index(obj)
-        super(DocumentAdmin, self).delete_model(request, obj)
+        super().delete_model(request, obj)

     def save_model(self, request, obj, form, change):
         from documents import index

         index.add_or_update_document(obj)
-        super(DocumentAdmin, self).save_model(request, obj, form, change)
+        super().save_model(request, obj, form, change)


 class RuleInline(admin.TabularInline):
@@ -32,7 +32,7 @@ class OriginalsOnlyStrategy(BulkArchiveStrategy):

 class ArchiveOnlyStrategy(BulkArchiveStrategy):
     def __init__(self, zipf):
-        super(ArchiveOnlyStrategy, self).__init__(zipf)
+        super().__init__(zipf)

     def add_document(self, doc: Document):
         if doc.has_archive_version:
@@ -57,7 +57,7 @@ def load_classifier():
     return classifier


-class DocumentClassifier(object):
+class DocumentClassifier:

     # v7 - Updated scikit-learn package version
     FORMAT_VERSION = 7
@@ -144,12 +144,10 @@ class DocumentClassifier(object):
             labels_correspondent.append(y)

             tags = sorted(
-                [
                 tag.pk
                 for tag in doc.tags.filter(
                     matching_algorithm=MatchingModel.MATCH_AUTO,
                 )
-                ],
             )
             for tag in tags:
                 m.update(tag.to_bytes(4, "little", signed=True))
@@ -163,7 +161,7 @@ class DocumentClassifier(object):
         if self.data_hash and new_data_hash == self.data_hash:
             return False

-        labels_tags_unique = set([tag for tags in labels_tags for tag in tags])
+        labels_tags_unique = {tag for tags in labels_tags for tag in tags}

         num_tags = len(labels_tags_unique)

@@ -257,7 +257,7 @@ class Consumer(LoggingMixin):

         try:
             self._send_progress(20, 100, "WORKING", MESSAGE_PARSING_DOCUMENT)
-            self.log("debug", "Parsing {}...".format(self.filename))
+            self.log("debug", f"Parsing {self.filename}...")
             document_parser.parse(self.path, mime_type, self.filename)

             self.log("debug", f"Generating thumbnail for {self.filename}...")
@@ -346,7 +346,7 @@ class Consumer(LoggingMixin):
                 document.save()

                 # Delete the file only if it was successfully consumed
-                self.log("debug", "Deleting file {}".format(self.path))
+                self.log("debug", f"Deleting file {self.path}")
                 os.unlink(self.path)

                 # https://github.com/jonaswinkler/paperless-ng/discussions/1037
@@ -356,7 +356,7 @@ class Consumer(LoggingMixin):
                 )

                 if os.path.isfile(shadow_file):
-                    self.log("debug", "Deleting file {}".format(shadow_file))
+                    self.log("debug", f"Deleting file {shadow_file}")
                     os.unlink(shadow_file)

         except Exception as e:
@@ -370,7 +370,7 @@ class Consumer(LoggingMixin):

         self.run_post_consume_script(document)

-        self.log("info", "Document {} consumption finished".format(document))
+        self.log("info", f"Document {document} consumption finished")

         self._send_progress(100, 100, "SUCCESS", MESSAGE_FINISHED, document.id)

@@ -133,7 +133,7 @@ def generate_filename(doc, counter=0, append_gpg=True, archive_filename=False):
             tags = defaultdictNoStr(lambda: slugify(None), many_to_dictionary(doc.tags))

             tag_list = pathvalidate.sanitize_filename(
-                ",".join(sorted([tag.name for tag in doc.tags.all()])),
+                ",".join(sorted(tag.name for tag in doc.tags.all())),
                 replacement_text="-",
             )

@@ -35,7 +35,7 @@ class DocumentTypeFilterSet(FilterSet):

 class TagsFilter(Filter):
     def __init__(self, exclude=False, in_list=False):
-        super(TagsFilter, self).__init__()
+        super().__init__()
         self.exclude = exclude
         self.in_list = in_list

@@ -55,7 +55,7 @@ class Command(BaseCommand):

         for document in encrypted_files:

-            print("Decrypting {}".format(document).encode("utf-8"))
+            print(f"Decrypting {document}".encode())

             old_paths = [document.source_path, document.thumbnail_path]

@@ -17,4 +17,4 @@ class Command(LoadDataCommand):
     def find_fixtures(self, fixture_label):
         if fixture_label == "-":
             return [("-", None, "-")]
-        return super(Command, self).find_fixtures(fixture_label)
+        return super().find_fixtures(fixture_label)
@@ -1,4 +1,3 @@
-# coding=utf-8
 import datetime
 import logging
 import os
@@ -221,7 +220,7 @@ class Document(models.Model):
         if self.filename:
             fname = str(self.filename)
         else:
-            fname = "{:07}{}".format(self.pk, self.file_type)
+            fname = f"{self.pk:07}{self.file_type}"
             if self.storage_type == self.STORAGE_TYPE_GPG:
                 fname += ".gpg"  # pragma: no cover

@@ -268,7 +267,7 @@ class Document(models.Model):

     @property
     def thumbnail_path(self):
-        file_name = "{:07}.png".format(self.pk)
+        file_name = f"{self.pk:07}.png"
         if self.storage_type == self.STORAGE_TYPE_GPG:
             file_name += ".gpg"

@@ -414,7 +413,7 @@ class FileInfo:
     @classmethod
     def _get_created(cls, created):
         try:
-            return dateutil.parser.parse("{:0<14}Z".format(created[:-1]))
+            return dateutil.parser.parse(f"{created[:-1]:0<14}Z")
         except ValueError:
             return None

@@ -425,7 +424,7 @@ class FileInfo:
     @classmethod
     def _mangle_property(cls, properties, name):
         if name in properties:
-            properties[name] = getattr(cls, "_get_{}".format(name))(properties[name])
+            properties[name] = getattr(cls, f"_get_{name}")(properties[name])

     @classmethod
     def from_filename(cls, filename):
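A recurring detail in these f-string rewrites is that a format spec such as {:07} simply moves inside the braces next to the expression, e.g. f"{self.pk:07}". A tiny standalone check (illustrative values, not repository code) showing the two spellings are equivalent:

# Illustrative only: the format spec is unchanged by the rewrite.
pk = 42
file_type = ".pdf"

old_name = "{:07}{}".format(pk, file_type)  # pre-pyupgrade spelling
new_name = f"{pk:07}{file_type}"            # post-pyupgrade spelling

assert old_name == new_name == "0000042.pdf"
print(new_name)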
@@ -143,7 +143,7 @@ def run_convert(
     logger.debug("Execute: " + " ".join(args), extra={"group": logging_group})

     if not subprocess.Popen(args, env=environment).wait() == 0:
-        raise ParseError("Convert failed at {}".format(args))
+        raise ParseError(f"Convert failed at {args}")


 def get_default_thumbnail():
@@ -164,7 +164,7 @@ def make_thumbnail_from_pdf_gs_fallback(in_path, temp_dir, logging_group=None):
     cmd = [settings.GS_BINARY, "-q", "-sDEVICE=pngalpha", "-o", gs_out_path, in_path]
     try:
         if not subprocess.Popen(cmd).wait() == 0:
-            raise ParseError("Thumbnail (gs) failed at {}".format(cmd))
+            raise ParseError(f"Thumbnail (gs) failed at {cmd}")
         # then run convert on the output from gs
         run_convert(
             density=300,
@@ -199,7 +199,7 @@ def make_thumbnail_from_pdf(in_path, temp_dir, logging_group=None):
             strip=True,
             trim=False,
             auto_orient=True,
-            input_file="{}[0]".format(in_path),
+            input_file=f"{in_path}[0]",
             output_file=out_path,
             logging_group=logging_group,
         )
@@ -333,7 +333,7 @@ class DocumentParser(LoggingMixin):
             self.log("debug", f"Execute: {' '.join(args)}")

             if not subprocess.Popen(args).wait() == 0:
-                raise ParseError("Optipng failed at {}".format(args))
+                raise ParseError(f"Optipng failed at {args}")

             return out_path
         else:
@@ -30,7 +30,7 @@ class DynamicFieldsModelSerializer(serializers.ModelSerializer):
         fields = kwargs.pop("fields", None)

         # Instantiate the superclass normally
-        super(DynamicFieldsModelSerializer, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

         if fields is not None:
             # Drop any fields that are not specified in the `fields` argument.
@@ -263,7 +263,7 @@ class SavedViewSerializer(serializers.ModelSerializer):
             rules_data = validated_data.pop("filter_rules")
         else:
             rules_data = None
-        super(SavedViewSerializer, self).update(instance, validated_data)
+        super().update(instance, validated_data)
         if rules_data is not None:
             SavedViewFilterRule.objects.filter(saved_view=instance).delete()
             for rule_data in rules_data:
@@ -64,7 +64,7 @@ def train_classifier():
     try:
         if classifier.train():
             logger.info(
-                "Saving updated classifier model to {}...".format(settings.MODEL_FILE),
+                f"Saving updated classifier model to {settings.MODEL_FILE}...",
             )
             classifier.save()
         else:
@@ -165,7 +165,7 @@ def separate_pages(filepath: str, pages_to_split_on: List[int]) -> List[str]:
         for n, page in enumerate(pdf.pages):
             if n < pages_to_split_on[0]:
                 dst.pages.append(page)
-        output_filename = "{}_document_0.pdf".format(fname)
+        output_filename = f"{fname}_document_0.pdf"
         savepath = os.path.join(tempdir, output_filename)
         with open(savepath, "wb") as out:
             dst.save(out)
@@ -185,7 +185,7 @@ def separate_pages(filepath: str, pages_to_split_on: List[int]) -> List[str]:
                     f"page_number: {str(page_number)} next_page: {str(next_page)}",
                 )
                 dst.pages.append(pdf.pages[page])
-            output_filename = "{}_document_{}.pdf".format(fname, str(count + 1))
+            output_filename = f"{fname}_document_{str(count + 1)}.pdf"
             logger.debug(f"pdf no:{str(count)} has {str(len(dst.pages))} pages")
             savepath = os.path.join(tempdir, output_filename)
             with open(savepath, "wb") as out:
@@ -266,9 +266,9 @@ def consume_file(
                 # if we got here, the document was successfully split
                 # and can safely be deleted
                 if converted_tiff:
-                    logger.debug("Deleting file {}".format(file_to_process))
+                    logger.debug(f"Deleting file {file_to_process}")
                     os.unlink(file_to_process)
-                logger.debug("Deleting file {}".format(path))
+                logger.debug(f"Deleting file {path}")
                 os.unlink(path)
                 # notify the sender, otherwise the progress bar
                 # in the UI stays stuck
@@ -306,7 +306,7 @@ def consume_file(
     )

     if document:
-        return "Success. New document id {} created".format(document.pk)
+        return f"Success. New document id {document.pk} created"
     else:
         raise ConsumerError(
             "Unknown error: Returned document was null, but "
@@ -16,7 +16,7 @@ class TestDocumentAdmin(DirectoriesMixin, TestCase):
             return searcher.document(id=doc.id)

     def setUp(self) -> None:
-        super(TestDocumentAdmin, self).setUp()
+        super().setUp()
         self.doc_admin = DocumentAdmin(model=Document, admin_site=AdminSite())

     def test_save_model(self):
@@ -27,7 +27,7 @@ from whoosh.writing import AsyncWriter

 class TestDocumentApi(DirectoriesMixin, APITestCase):
     def setUp(self):
-        super(TestDocumentApi, self).setUp()
+        super().setUp()

         self.user = User.objects.create_superuser(username="temp_admin")
         self.client.force_login(user=self.user)
@@ -70,7 +70,7 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):
         returned_doc["title"] = "the new title"

         response = self.client.put(
-            "/api/documents/{}/".format(doc.pk),
+            f"/api/documents/{doc.pk}/",
             returned_doc,
             format="json",
         )
@@ -82,7 +82,7 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):
         self.assertEqual(doc_after_save.correspondent, c2)
         self.assertEqual(doc_after_save.title, "the new title")

-        self.client.delete("/api/documents/{}/".format(doc_after_save.pk))
+        self.client.delete(f"/api/documents/{doc_after_save.pk}/")

         self.assertEqual(len(Document.objects.all()), 0)

@@ -163,22 +163,22 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):
         )

         with open(
-            os.path.join(self.dirs.thumbnail_dir, "{:07d}.png".format(doc.pk)),
+            os.path.join(self.dirs.thumbnail_dir, f"{doc.pk:07d}.png"),
             "wb",
         ) as f:
             f.write(content_thumbnail)

-        response = self.client.get("/api/documents/{}/download/".format(doc.pk))
+        response = self.client.get(f"/api/documents/{doc.pk}/download/")

         self.assertEqual(response.status_code, 200)
         self.assertEqual(response.content, content)

-        response = self.client.get("/api/documents/{}/preview/".format(doc.pk))
+        response = self.client.get(f"/api/documents/{doc.pk}/preview/")

         self.assertEqual(response.status_code, 200)
         self.assertEqual(response.content, content)

-        response = self.client.get("/api/documents/{}/thumb/".format(doc.pk))
+        response = self.client.get(f"/api/documents/{doc.pk}/thumb/")

         self.assertEqual(response.status_code, 200)
         self.assertEqual(response.content, content_thumbnail)
@@ -202,25 +202,25 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):
         with open(doc.archive_path, "wb") as f:
             f.write(content_archive)

-        response = self.client.get("/api/documents/{}/download/".format(doc.pk))
+        response = self.client.get(f"/api/documents/{doc.pk}/download/")

         self.assertEqual(response.status_code, 200)
         self.assertEqual(response.content, content_archive)

         response = self.client.get(
-            "/api/documents/{}/download/?original=true".format(doc.pk),
+            f"/api/documents/{doc.pk}/download/?original=true",
         )

         self.assertEqual(response.status_code, 200)
         self.assertEqual(response.content, content)

-        response = self.client.get("/api/documents/{}/preview/".format(doc.pk))
+        response = self.client.get(f"/api/documents/{doc.pk}/preview/")

         self.assertEqual(response.status_code, 200)
         self.assertEqual(response.content, content_archive)

         response = self.client.get(
-            "/api/documents/{}/preview/?original=true".format(doc.pk),
+            f"/api/documents/{doc.pk}/preview/?original=true",
         )

         self.assertEqual(response.status_code, 200)
@@ -234,13 +234,13 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):
             mime_type="application/pdf",
         )

-        response = self.client.get("/api/documents/{}/download/".format(doc.pk))
+        response = self.client.get(f"/api/documents/{doc.pk}/download/")
         self.assertEqual(response.status_code, 404)

-        response = self.client.get("/api/documents/{}/preview/".format(doc.pk))
+        response = self.client.get(f"/api/documents/{doc.pk}/preview/")
         self.assertEqual(response.status_code, 404)

-        response = self.client.get("/api/documents/{}/thumb/".format(doc.pk))
+        response = self.client.get(f"/api/documents/{doc.pk}/thumb/")
         self.assertEqual(response.status_code, 404)

     def test_document_filters(self):
@@ -283,7 +283,7 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):
         self.assertCountEqual([results[0]["id"], results[1]["id"]], [doc2.id, doc3.id])

         response = self.client.get(
-            "/api/documents/?tags__id__in={},{}".format(tag_inbox.id, tag_3.id),
+            f"/api/documents/?tags__id__in={tag_inbox.id},{tag_3.id}",
         )
         self.assertEqual(response.status_code, 200)
         results = response.data["results"]
@@ -291,7 +291,7 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):
         self.assertCountEqual([results[0]["id"], results[1]["id"]], [doc1.id, doc3.id])

         response = self.client.get(
-            "/api/documents/?tags__id__in={},{}".format(tag_2.id, tag_3.id),
+            f"/api/documents/?tags__id__in={tag_2.id},{tag_3.id}",
         )
         self.assertEqual(response.status_code, 200)
         results = response.data["results"]
@@ -299,7 +299,7 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):
         self.assertCountEqual([results[0]["id"], results[1]["id"]], [doc2.id, doc3.id])

         response = self.client.get(
-            "/api/documents/?tags__id__all={},{}".format(tag_2.id, tag_3.id),
+            f"/api/documents/?tags__id__all={tag_2.id},{tag_3.id}",
         )
         self.assertEqual(response.status_code, 200)
         results = response.data["results"]
@@ -307,27 +307,27 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):
         self.assertEqual(results[0]["id"], doc3.id)

         response = self.client.get(
-            "/api/documents/?tags__id__all={},{}".format(tag_inbox.id, tag_3.id),
+            f"/api/documents/?tags__id__all={tag_inbox.id},{tag_3.id}",
         )
         self.assertEqual(response.status_code, 200)
         results = response.data["results"]
         self.assertEqual(len(results), 0)

         response = self.client.get(
-            "/api/documents/?tags__id__all={}a{}".format(tag_inbox.id, tag_3.id),
+            f"/api/documents/?tags__id__all={tag_inbox.id}a{tag_3.id}",
         )
         self.assertEqual(response.status_code, 200)
         results = response.data["results"]
         self.assertEqual(len(results), 3)

-        response = self.client.get("/api/documents/?tags__id__none={}".format(tag_3.id))
+        response = self.client.get(f"/api/documents/?tags__id__none={tag_3.id}")
         self.assertEqual(response.status_code, 200)
         results = response.data["results"]
         self.assertEqual(len(results), 2)
         self.assertCountEqual([results[0]["id"], results[1]["id"]], [doc1.id, doc2.id])

         response = self.client.get(
-            "/api/documents/?tags__id__none={},{}".format(tag_3.id, tag_2.id),
+            f"/api/documents/?tags__id__none={tag_3.id},{tag_2.id}",
         )
         self.assertEqual(response.status_code, 200)
         results = response.data["results"]
@@ -335,7 +335,7 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):
         self.assertEqual(results[0]["id"], doc1.id)

         response = self.client.get(
-            "/api/documents/?tags__id__none={},{}".format(tag_2.id, tag_inbox.id),
+            f"/api/documents/?tags__id__none={tag_2.id},{tag_inbox.id}",
         )
         self.assertEqual(response.status_code, 200)
         results = response.data["results"]
@@ -1284,7 +1284,7 @@ class TestDocumentApi(DirectoriesMixin, APITestCase):

 class TestDocumentApiV2(DirectoriesMixin, APITestCase):
     def setUp(self):
-        super(TestDocumentApiV2, self).setUp()
+        super().setUp()

         self.user = User.objects.create_superuser(username="temp_admin")

@@ -1365,7 +1365,7 @@ class TestDocumentApiV2(DirectoriesMixin, APITestCase):

 class TestBulkEdit(DirectoriesMixin, APITestCase):
     def setUp(self):
-        super(TestBulkEdit, self).setUp()
+        super().setUp()

         user = User.objects.create_superuser(username="temp_admin")
         self.client.force_login(user=user)
@@ -1886,7 +1886,7 @@ class TestBulkEdit(DirectoriesMixin, APITestCase):

 class TestBulkDownload(DirectoriesMixin, APITestCase):
     def setUp(self):
-        super(TestBulkDownload, self).setUp()
+        super().setUp()

         user = User.objects.create_superuser(username="temp_admin")
         self.client.force_login(user=user)
@@ -19,7 +19,7 @@ from documents.tests.utils import DirectoriesMixin

 class TestClassifier(DirectoriesMixin, TestCase):
     def setUp(self):
-        super(TestClassifier, self).setUp()
+        super().setUp()
         self.classifier = DocumentClassifier()

     def generate_test_data(self):
@@ -41,7 +41,7 @@ class TestAttributes(TestCase):

         self.assertEqual(file_info.title, title, filename)

-        self.assertEqual(tuple([t.name for t in file_info.tags]), tags, filename)
+        self.assertEqual(tuple(t.name for t in file_info.tags), tags, filename)

     def test_guess_attributes_from_name_when_title_starts_with_dash(self):
         self._test_guess_attributes_from_name(
@@ -176,7 +176,7 @@ class DummyParser(DocumentParser):
         raise NotImplementedError()

     def __init__(self, logging_group, scratch_dir, archive_path):
-        super(DummyParser, self).__init__(logging_group, None)
+        super().__init__(logging_group, None)
         _, self.fake_thumb = tempfile.mkstemp(suffix=".png", dir=scratch_dir)
         self.archive_path = archive_path

@@ -195,7 +195,7 @@ class CopyParser(DocumentParser):
         return self.fake_thumb

     def __init__(self, logging_group, progress_callback=None):
-        super(CopyParser, self).__init__(logging_group, progress_callback)
+        super().__init__(logging_group, progress_callback)
         _, self.fake_thumb = tempfile.mkstemp(suffix=".png", dir=self.tempdir)

     def parse(self, document_path, mime_type, file_name=None):
@@ -210,7 +210,7 @@ class FaultyParser(DocumentParser):
         raise NotImplementedError()

     def __init__(self, logging_group, scratch_dir):
-        super(FaultyParser, self).__init__(logging_group)
+        super().__init__(logging_group)
         _, self.fake_thumb = tempfile.mkstemp(suffix=".png", dir=scratch_dir)

     def get_optimised_thumbnail(self, document_path, mime_type, file_name=None):
@@ -270,7 +270,7 @@ class TestConsumer(DirectoriesMixin, TestCase):
         return FaultyParser(logging_group, self.dirs.scratch_dir)

     def setUp(self):
-        super(TestConsumer, self).setUp()
+        super().setUp()

         patcher = mock.patch("documents.parsers.document_consumer_declaration.send")
         m = patcher.start()
@@ -16,7 +16,7 @@ class TestDate(TestCase):
         os.path.dirname(__file__),
         "../../paperless_tesseract/tests/samples",
     )
-    SCRATCH = "/tmp/paperless-tests-{}".format(str(uuid4())[:8])
+    SCRATCH = f"/tmp/paperless-tests-{str(uuid4())[:8]}"

     def setUp(self):
         os.makedirs(self.SCRATCH, exist_ok=True)
@@ -32,12 +32,12 @@ class TestFileHandling(DirectoriesMixin, TestCase):
         document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()

-        self.assertEqual(generate_filename(document), "{:07d}.pdf".format(document.pk))
+        self.assertEqual(generate_filename(document), f"{document.pk:07d}.pdf")

         document.storage_type = Document.STORAGE_TYPE_GPG
         self.assertEqual(
             generate_filename(document),
-            "{:07d}.pdf.gpg".format(document.pk),
+            f"{document.pk:07d}.pdf.gpg",
         )

     @override_settings(PAPERLESS_FILENAME_FORMAT="{correspondent}/{correspondent}")
@@ -50,7 +50,7 @@ class TestFileHandling(DirectoriesMixin, TestCase):
         # Test default source_path
         self.assertEqual(
             document.source_path,
-            settings.ORIGINALS_DIR + "/{:07d}.pdf".format(document.pk),
+            settings.ORIGINALS_DIR + f"/{document.pk:07d}.pdf",
         )

         document.filename = generate_filename(document)
@@ -39,7 +39,7 @@ class ConsumerMixin:
     sample_file = os.path.join(os.path.dirname(__file__), "samples", "simple.pdf")

     def setUp(self) -> None:
-        super(ConsumerMixin, self).setUp()
+        super().setUp()
         self.t = None
         patcher = mock.patch(
             "documents.management.commands.document_consumer.async_task",
@@ -60,7 +60,7 @@ class ConsumerMixin:
             # wait for the consumer to exit.
             self.t.join()

-        super(ConsumerMixin, self).tearDown()
+        super().tearDown()

     def wait_for_task_mock_call(self, excpeted_call_count=1):
         n = 0
@@ -65,7 +65,7 @@ class TestExportImport(DirectoriesMixin, TestCase):
         self.d1.correspondent = self.c1
         self.d1.document_type = self.dt1
         self.d1.save()
-        super(TestExportImport, self).setUp()
+        super().setUp()

     def _get_document_from_manifest(self, manifest, id):
         f = list(
@@ -82,7 +82,7 @@ class TestRetagger(DirectoriesMixin, TestCase):
         )

     def setUp(self) -> None:
-        super(TestRetagger, self).setUp()
+        super().setUp()
         self.make_models()

     def test_add_tags(self):
@@ -39,7 +39,7 @@ class TestMakeThumbnails(DirectoriesMixin, TestCase):
         )

     def setUp(self) -> None:
-        super(TestMakeThumbnails, self).setUp()
+        super().setUp()
         self.make_models()

     def test_process_document(self):
@@ -36,13 +36,13 @@ class _TestMatchingBase(TestCase):
                 doc = Document(content=string)
                 self.assertTrue(
                     matching.matches(instance, doc),
-                    '"%s" should match "%s" but it does not' % (match_text, string),
+                    f'"{match_text}" should match "{string}" but it does not',
                 )
             for string in no_match:
                 doc = Document(content=string)
                 self.assertFalse(
                     matching.matches(instance, doc),
-                    '"%s" should not match "%s" but it does' % (match_text, string),
+                    f'"{match_text}" should not match "{string}" but it does',
                 )

@@ -22,7 +22,7 @@ def archive_path_old(self):
 | 
				
			|||||||
    if self.filename:
 | 
					    if self.filename:
 | 
				
			||||||
        fname = archive_name_from_filename(self.filename)
 | 
					        fname = archive_name_from_filename(self.filename)
 | 
				
			||||||
    else:
 | 
					    else:
 | 
				
			||||||
        fname = "{:07}.pdf".format(self.pk)
 | 
					        fname = f"{self.pk:07}.pdf"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    return os.path.join(settings.ARCHIVE_DIR, fname)
 | 
					    return os.path.join(settings.ARCHIVE_DIR, fname)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -38,7 +38,7 @@ def source_path(doc):
     if doc.filename:
         fname = str(doc.filename)
     else:
-        fname = "{:07}{}".format(doc.pk, doc.file_type)
+        fname = f"{doc.pk:07}{doc.file_type}"
         if doc.storage_type == STORAGE_TYPE_GPG:
             fname += ".gpg"  # pragma: no cover

@@ -46,7 +46,7 @@ def source_path(doc):


 def thumbnail_path(doc):
-    file_name = "{:07}.png".format(doc.pk)
+    file_name = f"{doc.pk:07}.png"
     if doc.storage_type == STORAGE_TYPE_GPG:
         file_name += ".gpg"

@@ -15,7 +15,7 @@ def source_path_before(self):
     if self.filename:
         fname = str(self.filename)
     else:
-        fname = "{:07}.{}".format(self.pk, self.file_type)
+        fname = f"{self.pk:07}.{self.file_type}"
         if self.storage_type == STORAGE_TYPE_GPG:
             fname += ".gpg"

@@ -30,7 +30,7 @@ def source_path_after(doc):
     if doc.filename:
         fname = str(doc.filename)
     else:
-        fname = "{:07}{}".format(doc.pk, file_type_after(doc))
+        fname = f"{doc.pk:07}{file_type_after(doc)}"
         if doc.storage_type == STORAGE_TYPE_GPG:
             fname += ".gpg"  # pragma: no cover

@@ -31,7 +31,7 @@ def fake_magic_from_file(file, mime=False):
 class TestParserDiscovery(TestCase):
     @mock.patch("documents.parsers.document_consumer_declaration.send")
     def test__get_parser_class_1_parser(self, m, *args):
-        class DummyParser(object):
+        class DummyParser:
             pass

         m.return_value = (

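As a standalone note (illustrative classes, not from this project), dropping the explicit (object) base is a no-op on Python 3, where every class is new-style:

# Both declarations produce the same method resolution order on Python 3.
class WithObject(object):
    pass

class WithoutObject:
    pass

assert WithObject.__mro__[-1] is object
assert WithoutObject.__mro__[-1] is object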
@@ -49,10 +49,10 @@ class TestParserDiscovery(TestCase):

     @mock.patch("documents.parsers.document_consumer_declaration.send")
     def test__get_parser_class_n_parsers(self, m, *args):
-        class DummyParser1(object):
+        class DummyParser1:
             pass

-        class DummyParser2(object):
+        class DummyParser2:
             pass

         m.return_value = (

@@ -76,10 +76,10 @@ class DirectoriesMixin:

     def setUp(self) -> None:
         self.dirs = setup_directories()
-        super(DirectoriesMixin, self).setUp()
+        super().setUp()

     def tearDown(self) -> None:
-        super(DirectoriesMixin, self).tearDown()
+        super().tearDown()
         remove_dirs(self.dirs)

@@ -93,7 +93,7 @@ class TestMigrations(TransactionTestCase):
     auto_migrate = True

     def setUp(self):
-        super(TestMigrations, self).setUp()
+        super().setUp()

         assert (
             self.migrate_from and self.migrate_to

@@ -210,7 +210,7 @@ class DocumentViewSet(
         return serializer_class(*args, **kwargs)

     def update(self, request, *args, **kwargs):
-        response = super(DocumentViewSet, self).update(request, *args, **kwargs)
+        response = super().update(request, *args, **kwargs)
         from documents import index

         index.add_or_update_document(self.get_object())
@@ -220,7 +220,7 @@ class DocumentViewSet(
         from documents import index

         index.remove_document_from_index(self.get_object())
-        return super(DocumentViewSet, self).destroy(request, *args, **kwargs)
+        return super().destroy(request, *args, **kwargs)

     @staticmethod
     def original_requested(request):
@@ -362,7 +362,7 @@ class DocumentViewSet(
 class SearchResultSerializer(DocumentSerializer):
     def to_representation(self, instance):
         doc = Document.objects.get(id=instance["id"])
-        r = super(SearchResultSerializer, self).to_representation(doc)
+        r = super().to_representation(doc)
         r["__search_hit__"] = {
             "score": instance.score,
             "highlights": instance.highlights("content", text=doc.content)
@@ -376,7 +376,7 @@ class SearchResultSerializer(DocumentSerializer):

 class UnifiedSearchViewSet(DocumentViewSet):
     def __init__(self, *args, **kwargs):
-        super(UnifiedSearchViewSet, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.searcher = None

     def get_serializer_class(self):
@@ -408,7 +408,7 @@ class UnifiedSearchViewSet(DocumentViewSet):
                 self.paginator.get_page_size(self.request),
             )
         else:
-            return super(UnifiedSearchViewSet, self).filter_queryset(queryset)
+            return super().filter_queryset(queryset)

     def list(self, request, *args, **kwargs):
         if self._is_search_request():
@@ -417,13 +417,13 @@ class UnifiedSearchViewSet(DocumentViewSet):
             try:
                 with index.open_index_searcher() as s:
                     self.searcher = s
-                    return super(UnifiedSearchViewSet, self).list(request)
+                    return super().list(request)
             except NotFound:
                 raise
             except Exception as e:
                 return HttpResponseBadRequest(str(e))
         else:
-            return super(UnifiedSearchViewSet, self).list(request)
+            return super().list(request)


 class LogViewSet(ViewSet):
@@ -441,7 +441,7 @@ class LogViewSet(ViewSet):
         if not os.path.isfile(filename):
             raise Http404()

-        with open(filename, "r") as f:
+        with open(filename) as f:
             lines = [line.rstrip() for line in f.readlines()]

         return Response(lines)

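A minimal sketch (using a throwaway temporary file, not a path from this project) showing that dropping the explicit "r" does not change behavior, since text read mode is already the default for open():

import os
import tempfile

# Write a throwaway file, then read it back both ways.
with tempfile.NamedTemporaryFile("w", suffix=".log", delete=False) as tmp:
    tmp.write("line one\nline two\n")
    path = tmp.name

with open(path, "r") as f:
    explicit = f.read()
with open(path) as f:  # "r" (text read) is the default mode
    implicit = f.read()

assert explicit == implicit
os.remove(path)  # clean up the temporary file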
@@ -27,7 +27,7 @@ class AngularApiAuthenticationOverride(authentication.BaseAuthentication):
             and request.headers["Referer"].startswith("http://localhost:4200/")
         ):
             user = User.objects.filter(is_staff=True).first()
-            print("Auto-Login with user {}".format(user))
+            print(f"Auto-Login with user {user}")
             return (user, None)
         else:
             return None

@@ -28,7 +28,7 @@ from paperless_mail.models import MailRule


 @dataclasses.dataclass
-class _AttachmentDef(object):
+class _AttachmentDef:
     filename: str = "a_file.pdf"
     maintype: str = "application/pdf"
     subtype: str = "pdf"
@@ -45,7 +45,7 @@ class BogusFolderManager:
         self.current_folder = new_folder


-class BogusClient(object):
+class BogusClient:
     def authenticate(self, mechanism, authobject):
         # authobject must be a callable object
         auth_bytes = authobject(None)
@@ -205,7 +205,7 @@ class TestMail(DirectoriesMixin, TestCase):
         self.reset_bogus_mailbox()

         self.mail_account_handler = MailAccountHandler()
-        super(TestMail, self).setUp()
+        super().setUp()

     def reset_bogus_mailbox(self):
         self.bogus_mailbox.messages = []
@@ -473,7 +473,7 @@ class TestMail(DirectoriesMixin, TestCase):

             self.assertEqual(result, len(matches), f"Error with pattern: {pattern}")
             filenames = sorted(
-                [a[1]["override_filename"] for a in self.async_task.call_args_list],
+                a[1]["override_filename"] for a in self.async_task.call_args_list
             )
             self.assertListEqual(filenames, matches)

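A standalone sketch (made-up data, hypothetical stand-in for mock's call_args_list) of the sorted() change above: the wrapping list comprehension is unnecessary because sorted() accepts any iterable, including a generator expression:

# (args, kwargs) pairs, shaped like mock records them in call_args_list.
call_args_list = [
    ((), {"override_filename": "b.pdf"}),
    ((), {"override_filename": "a.pdf"}),
]

with_list = sorted([a[1]["override_filename"] for a in call_args_list])
with_genexp = sorted(a[1]["override_filename"] for a in call_args_list)

assert with_list == with_genexp == ["a.pdf", "b.pdf"]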
@@ -98,7 +98,7 @@ class RasterisedDocumentParser(DocumentParser):

     def extract_text(self, sidecar_file, pdf_file):
         if sidecar_file and os.path.isfile(sidecar_file):
-            with open(sidecar_file, "r") as f:
+            with open(sidecar_file) as f:
                 text = f.read()

             if "[OCR skipped on page" not in text:
@@ -18,7 +18,7 @@ class TextDocumentParser(DocumentParser):

     def get_thumbnail(self, document_path, mime_type, file_name=None):
         def read_text():
-            with open(document_path, "r") as src:
+            with open(document_path) as src:
                 lines = [line.strip() for line in src.readlines()]
                 text = "\n".join(lines[:50])
                 return text
@@ -38,5 +38,5 @@ class TextDocumentParser(DocumentParser):
         return out_path

     def parse(self, document_path, mime_type, file_name=None):
-        with open(document_path, "r") as f:
+        with open(document_path) as f:
             self.text = f.read()