diff --git a/src/documents/apps.py b/src/documents/apps.py index 32e49b160..d8200edac 100644 --- a/src/documents/apps.py +++ b/src/documents/apps.py @@ -7,7 +7,7 @@ class DocumentsConfig(AppConfig): verbose_name = _("Documents") - def ready(self): + def ready(self) -> None: from documents.signals import document_consumption_finished from documents.signals import document_updated from documents.signals.handlers import add_inbox_tags diff --git a/src/documents/bulk_edit.py b/src/documents/bulk_edit.py index 43cb13261..ec6217a0a 100644 --- a/src/documents/bulk_edit.py +++ b/src/documents/bulk_edit.py @@ -721,7 +721,7 @@ def reflect_doclinks( document: Document, field: CustomField, target_doc_ids: list[int], -): +) -> None: """ Add or remove 'symmetrical' links to `document` on all `target_doc_ids` """ @@ -784,7 +784,7 @@ def remove_doclink( document: Document, field: CustomField, target_doc_id: int, -): +) -> None: """ Removes a 'symmetrical' link to `document` from the target document's existing custom field instance """ diff --git a/src/documents/classifier.py b/src/documents/classifier.py index 613c1d5ad..1e9da7ce6 100644 --- a/src/documents/classifier.py +++ b/src/documents/classifier.py @@ -122,7 +122,7 @@ class DocumentClassifier: ) self._stop_words = None - def _update_data_vectorizer_hash(self): + def _update_data_vectorizer_hash(self) -> None: self.data_vectorizer_hash = sha256( pickle.dumps(self.data_vectorizer), ).hexdigest() diff --git a/src/documents/consumer.py b/src/documents/consumer.py index 1ff60220b..76aa293d0 100644 --- a/src/documents/consumer.py +++ b/src/documents/consumer.py @@ -120,7 +120,7 @@ class ConsumerPluginMixin: status: ProgressStatusOptions, message: ConsumerStatusShortMessage | str | None = None, document_id=None, - ): # pragma: no cover + ) -> None: # pragma: no cover self.status_mgr.send_progress( status, message, @@ -158,7 +158,7 @@ class ConsumerPlugin( ): logging_name = "paperless.consumer" - def run_pre_consume_script(self): + 
def run_pre_consume_script(self) -> None: """ If one is configured and exists, run the pre-consume script and handle its output and/or errors @@ -201,7 +201,7 @@ class ConsumerPlugin( exception=e, ) - def run_post_consume_script(self, document: Document): + def run_post_consume_script(self, document: Document) -> None: """ If one is configured and exists, run the pre-consume script and handle its output and/or errors @@ -361,7 +361,10 @@ class ConsumerPlugin( tempdir.cleanup() raise - def progress_callback(current_progress, max_progress): # pragma: no cover + def progress_callback( + current_progress, + max_progress, + ) -> None: # pragma: no cover # recalculate progress to be within 20 and 80 p = int((current_progress / max_progress) * 50 + 20) self._send_progress(p, 100, ProgressStatusOptions.WORKING) @@ -670,7 +673,7 @@ class ConsumerPlugin( return document - def apply_overrides(self, document): + def apply_overrides(self, document) -> None: if self.metadata.correspondent_id: document.correspondent = Correspondent.objects.get( pk=self.metadata.correspondent_id, @@ -730,7 +733,7 @@ class ConsumerPlugin( } CustomFieldInstance.objects.create(**args) # adds to document - def _write(self, source, target): + def _write(self, source, target) -> None: with ( Path(source).open("rb") as read_file, Path(target).open("wb") as write_file, @@ -755,7 +758,7 @@ class ConsumerPreflightPlugin( NAME: str = "ConsumerPreflightPlugin" logging_name = "paperless.consumer" - def pre_check_file_exists(self): + def pre_check_file_exists(self) -> None: """ Confirm the input file still exists where it should """ @@ -769,7 +772,7 @@ class ConsumerPreflightPlugin( f"Cannot consume {self.input_doc.original_file}: File not found.", ) - def pre_check_duplicate(self): + def pre_check_duplicate(self) -> None: """ Using the MD5 of the file, check this exact file doesn't already exist """ @@ -819,7 +822,7 @@ class ConsumerPreflightPlugin( failure_msg, ) - def pre_check_directories(self): + def 
pre_check_directories(self) -> None: """ Ensure all required directories exist before attempting to use them """ @@ -828,7 +831,7 @@ class ConsumerPreflightPlugin( settings.ORIGINALS_DIR.mkdir(parents=True, exist_ok=True) settings.ARCHIVE_DIR.mkdir(parents=True, exist_ok=True) - def pre_check_asn_value(self): + def pre_check_asn_value(self) -> None: """ Check that if override_asn is given, it is unique and within a valid range """ diff --git a/src/documents/data_models.py b/src/documents/data_models.py index a4b1150dd..7c023dc13 100644 --- a/src/documents/data_models.py +++ b/src/documents/data_models.py @@ -164,7 +164,7 @@ class ConsumableDocument: mailrule_id: int | None = None mime_type: str = dataclasses.field(init=False, default=None) - def __post_init__(self): + def __post_init__(self) -> None: """ After a dataclass is initialized, this is called to finalize some data 1. Make sure the original path is an absolute, fully qualified path diff --git a/src/documents/filters.py b/src/documents/filters.py index 9e53d01af..f1713882c 100644 --- a/src/documents/filters.py +++ b/src/documents/filters.py @@ -120,7 +120,7 @@ class StoragePathFilterSet(FilterSet): class ObjectFilter(Filter): - def __init__(self, *, exclude=False, in_list=False, field_name=""): + def __init__(self, *, exclude=False, in_list=False, field_name="") -> None: super().__init__() self.exclude = exclude self.in_list = in_list @@ -255,7 +255,7 @@ class MimeTypeFilter(Filter): class SelectField(serializers.CharField): - def __init__(self, custom_field: CustomField): + def __init__(self, custom_field: CustomField) -> None: self._options = custom_field.extra_data["select_options"] super().__init__(max_length=16) @@ -676,7 +676,7 @@ class CustomFieldQueryParser: @extend_schema_field(serializers.CharField) class CustomFieldQueryFilter(Filter): - def __init__(self, validation_prefix): + def __init__(self, validation_prefix) -> None: """ A filter that filters documents based on custom field name and value. 
diff --git a/src/documents/index.py b/src/documents/index.py index 8afc31fe9..be944b48b 100644 --- a/src/documents/index.py +++ b/src/documents/index.py @@ -414,13 +414,13 @@ class DelayedQuery: class ManualResultsPage(list): - def __init__(self, hits): + def __init__(self, hits) -> None: super().__init__(hits) self.results = ManualResults(hits) class ManualResults: - def __init__(self, hits): + def __init__(self, hits) -> None: self._docnums = [hit.docnum for hit in hits] def docs(self): diff --git a/src/documents/loggers.py b/src/documents/loggers.py index 87ee58868..f30c823f1 100644 --- a/src/documents/loggers.py +++ b/src/documents/loggers.py @@ -3,7 +3,7 @@ import uuid class LoggingMixin: - def renew_logging_group(self): + def renew_logging_group(self) -> None: """ Creates a new UUID to group subsequent log calls together with the extra data named group diff --git a/src/documents/management/commands/convert_mariadb_uuid.py b/src/documents/management/commands/convert_mariadb_uuid.py index 76ccf9e76..3533d03f3 100644 --- a/src/documents/management/commands/convert_mariadb_uuid.py +++ b/src/documents/management/commands/convert_mariadb_uuid.py @@ -9,7 +9,7 @@ class Command(BaseCommand): # This code is taken almost entirely from https://github.com/wagtail/wagtail/pull/11912 with all credit to the original author. help = "Converts UUID columns from char type to the native UUID type used in MariaDB 10.7+ and Django 5.0+." 
- def convert_field(self, model, field_name, *, null=False): + def convert_field(self, model, field_name, *, null=False) -> None: if model._meta.get_field(field_name).model != model: # pragma: no cover # Field is inherited from a parent model return diff --git a/src/documents/management/commands/document_exporter.py b/src/documents/management/commands/document_exporter.py index 77b3b6416..bd962efc4 100644 --- a/src/documents/management/commands/document_exporter.py +++ b/src/documents/management/commands/document_exporter.py @@ -67,7 +67,7 @@ class Command(CryptMixin, BaseCommand): "easy import." ) - def add_arguments(self, parser): + def add_arguments(self, parser) -> None: parser.add_argument("target") parser.add_argument( @@ -186,7 +186,7 @@ class Command(CryptMixin, BaseCommand): help="If provided, is used to encrypt sensitive data in the export", ) - def handle(self, *args, **options): + def handle(self, *args, **options) -> None: self.target = Path(options["target"]).resolve() self.split_manifest: bool = options["split_manifest"] self.compare_checksums: bool = options["compare_checksums"] @@ -244,7 +244,7 @@ class Command(CryptMixin, BaseCommand): if self.zip_export and temp_dir is not None: temp_dir.cleanup() - def dump(self): + def dump(self) -> None: # 1. Take a snapshot of what files exist in the current export folder for x in self.target.glob("**/*"): if x.is_file(): @@ -498,7 +498,7 @@ class Command(CryptMixin, BaseCommand): self, content: list[dict] | dict, target: Path, - ): + ) -> None: """ Writes the source content to the target json file. 
If --compare-json arg was used, don't write to target file if @@ -528,7 +528,7 @@ class Command(CryptMixin, BaseCommand): source: Path, source_checksum: str | None, target: Path, - ): + ) -> None: """ Copies the source to the target, if target doesn't exist or the target doesn't seem to match the source attributes diff --git a/src/documents/management/commands/document_importer.py b/src/documents/management/commands/document_importer.py index ba3d793b3..5cd743590 100644 --- a/src/documents/management/commands/document_importer.py +++ b/src/documents/management/commands/document_importer.py @@ -246,7 +246,7 @@ class Command(CryptMixin, BaseCommand): self.source = Path(tmp_dir) self._run_import() - def _run_import(self): + def _run_import(self) -> None: self.pre_check() self.load_metadata() self.load_manifest_files() diff --git a/src/documents/management/commands/document_thumbnails.py b/src/documents/management/commands/document_thumbnails.py index d4653f0b3..e50c837d3 100644 --- a/src/documents/management/commands/document_thumbnails.py +++ b/src/documents/management/commands/document_thumbnails.py @@ -12,7 +12,7 @@ from documents.models import Document from documents.parsers import get_parser_class_for_mime_type -def _process_document(doc_id): +def _process_document(doc_id) -> None: document: Document = Document.objects.get(id=doc_id) parser_class = get_parser_class_for_mime_type(document.mime_type) @@ -37,7 +37,7 @@ def _process_document(doc_id): class Command(MultiProcessMixin, ProgressBarMixin, BaseCommand): help = "This will regenerate the thumbnails for all documents." 
- def add_arguments(self, parser): + def add_arguments(self, parser) -> None: parser.add_argument( "-d", "--document", diff --git a/src/documents/management/commands/manage_superuser.py b/src/documents/management/commands/manage_superuser.py index e0d238438..3a81a47c5 100644 --- a/src/documents/management/commands/manage_superuser.py +++ b/src/documents/management/commands/manage_superuser.py @@ -25,7 +25,7 @@ class Command(BaseCommand): parser.formatter_class = RawTextHelpFormatter return parser - def handle(self, *args, **options): + def handle(self, *args, **options) -> None: username = os.getenv("PAPERLESS_ADMIN_USER", "admin") mail = os.getenv("PAPERLESS_ADMIN_MAIL", "root@localhost") password = os.getenv("PAPERLESS_ADMIN_PASSWORD") diff --git a/src/documents/management/commands/mixins.py b/src/documents/management/commands/mixins.py index a2ad326e4..109f3aea7 100644 --- a/src/documents/management/commands/mixins.py +++ b/src/documents/management/commands/mixins.py @@ -27,7 +27,7 @@ class MultiProcessMixin: for the use of multiple processes """ - def add_argument_processes_mixin(self, parser: ArgumentParser): + def add_argument_processes_mixin(self, parser: ArgumentParser) -> None: parser.add_argument( "--processes", default=max(1, os.cpu_count() // 4), @@ -35,7 +35,7 @@ class MultiProcessMixin: help="Number of processes to distribute work amongst", ) - def handle_processes_mixin(self, *args, **options): + def handle_processes_mixin(self, *args, **options) -> None: self.process_count = options["processes"] if self.process_count < 1: raise CommandError("There must be at least 1 process") @@ -47,7 +47,7 @@ class ProgressBarMixin: via this class """ - def add_argument_progress_bar_mixin(self, parser: ArgumentParser): + def add_argument_progress_bar_mixin(self, parser: ArgumentParser) -> None: parser.add_argument( "--no-progress-bar", default=False, @@ -55,7 +55,7 @@ class ProgressBarMixin: help="If set, the progress bar will not be shown", ) - def 
handle_progress_bar_mixin(self, *args, **options): + def handle_progress_bar_mixin(self, *args, **options) -> None: self.no_progress_bar = options["no_progress_bar"] self.use_progress_bar = not self.no_progress_bar @@ -120,7 +120,7 @@ class CryptMixin: }, } - def load_crypt_params(self, metadata: dict): + def load_crypt_params(self, metadata: dict) -> None: # Load up the values for setting up decryption self.kdf_algorithm: str = metadata[EXPORTER_CRYPTO_SETTINGS_NAME][ EXPORTER_CRYPTO_ALGO_NAME @@ -135,7 +135,7 @@ class CryptMixin: EXPORTER_CRYPTO_SALT_NAME ] - def setup_crypto(self, *, passphrase: str, salt: str | None = None): + def setup_crypto(self, *, passphrase: str, salt: str | None = None) -> None: """ Constructs a class for encryption or decryption using the specified passphrase and salt diff --git a/src/documents/matching.py b/src/documents/matching.py index 9276ad583..a8cc79811 100644 --- a/src/documents/matching.py +++ b/src/documents/matching.py @@ -34,7 +34,7 @@ def log_reason( matching_model: MatchingModel | WorkflowTrigger, document: Document, reason: str, -): +) -> None: class_name = type(matching_model).__name__ name = ( matching_model.name if hasattr(matching_model, "name") else str(matching_model) diff --git a/src/documents/models.py b/src/documents/models.py index 72470ef6e..2e187e98c 100644 --- a/src/documents/models.py +++ b/src/documents/models.py @@ -118,7 +118,7 @@ class Tag(MatchingModel, TreeNodeModel): verbose_name = _("tag") verbose_name_plural = _("tags") - def clean(self): + def clean(self) -> None: # Prevent self-parenting and assigning a descendant as parent parent = self.get_parent() if parent == self: @@ -410,7 +410,7 @@ class Document(SoftDeleteModel, ModelWithOwner): def created_date(self): return self.created - def add_nested_tags(self, tags): + def add_nested_tags(self, tags) -> None: tag_ids = set() for tag in tags: tag_ids.add(tag.id) @@ -862,7 +862,7 @@ class ShareLinkBundle(models.Model): return None return 
(settings.SHARE_LINK_BUNDLE_DIR / Path(self.file_path)).resolve() - def remove_file(self): + def remove_file(self) -> None: if self.absolute_file_path is not None and self.absolute_file_path.exists(): try: self.absolute_file_path.unlink() diff --git a/src/documents/parsers.py b/src/documents/parsers.py index f6417e285..211fb61fe 100644 --- a/src/documents/parsers.py +++ b/src/documents/parsers.py @@ -340,7 +340,7 @@ class DocumentParser(LoggingMixin): logging_name = "paperless.parsing" - def __init__(self, logging_group, progress_callback=None): + def __init__(self, logging_group, progress_callback=None) -> None: super().__init__() self.renew_logging_group() self.logging_group = logging_group @@ -355,7 +355,7 @@ class DocumentParser(LoggingMixin): self.date: datetime.datetime | None = None self.progress_callback = progress_callback - def progress(self, current_progress, max_progress): + def progress(self, current_progress, max_progress) -> None: if self.progress_callback: self.progress_callback(current_progress, max_progress) @@ -380,7 +380,7 @@ class DocumentParser(LoggingMixin): def extract_metadata(self, document_path, mime_type): return [] - def get_page_count(self, document_path, mime_type): + def get_page_count(self, document_path, mime_type) -> int | None: return None def parse(self, document_path, mime_type, file_name=None): @@ -401,6 +401,6 @@ class DocumentParser(LoggingMixin): def get_date(self) -> datetime.datetime | None: return self.date - def cleanup(self): + def cleanup(self) -> None: self.log.debug(f"Deleting directory {self.tempdir}") shutil.rmtree(self.tempdir) diff --git a/src/documents/permissions.py b/src/documents/permissions.py index 9d5c9eb68..a47762c46 100644 --- a/src/documents/permissions.py +++ b/src/documents/permissions.py @@ -61,7 +61,12 @@ def get_groups_with_only_permission(obj, codename): return Group.objects.filter(id__in=group_object_perm_group_ids).distinct() -def set_permissions_for_object(permissions: dict, object, *, merge: bool = 
False): +def set_permissions_for_object( + permissions: dict, + object, + *, + merge: bool = False, +) -> None: """ Set permissions for an object. The permissions are given as a mapping of actions to a dict of user / group id lists, e.g. diff --git a/src/documents/sanity_checker.py b/src/documents/sanity_checker.py index 5e5510971..08763d937 100644 --- a/src/documents/sanity_checker.py +++ b/src/documents/sanity_checker.py @@ -16,23 +16,23 @@ from paperless.config import GeneralConfig class SanityCheckMessages: - def __init__(self): + def __init__(self) -> None: self._messages: dict[int, list[dict]] = defaultdict(list) self.has_error = False self.has_warning = False - def error(self, doc_pk, message): + def error(self, doc_pk, message) -> None: self._messages[doc_pk].append({"level": logging.ERROR, "message": message}) self.has_error = True - def warning(self, doc_pk, message): + def warning(self, doc_pk, message) -> None: self._messages[doc_pk].append({"level": logging.WARNING, "message": message}) self.has_warning = True - def info(self, doc_pk, message): + def info(self, doc_pk, message) -> None: self._messages[doc_pk].append({"level": logging.INFO, "message": message}) - def log_messages(self): + def log_messages(self) -> None: logger = logging.getLogger("paperless.sanity_checker") if len(self._messages) == 0: diff --git a/src/documents/serialisers.py b/src/documents/serialisers.py index f7ed197da..cfd2ad3cf 100644 --- a/src/documents/serialisers.py +++ b/src/documents/serialisers.py @@ -101,7 +101,7 @@ class DynamicFieldsModelSerializer(serializers.ModelSerializer): controls which fields should be displayed. 
""" - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: # Don't pass the 'fields' arg up to the superclass fields = kwargs.pop("fields", None) @@ -205,12 +205,12 @@ class SetPermissionsMixin: del permissions_dict[action] return permissions_dict - def _set_permissions(self, permissions, object): + def _set_permissions(self, permissions, object) -> None: set_permissions_for_object(permissions, object) class SerializerWithPerms(serializers.Serializer): - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: self.user = kwargs.pop("user", None) self.full_perms = kwargs.pop("full_perms", False) self.all_fields = kwargs.pop("all_fields", False) @@ -259,7 +259,7 @@ class OwnedObjectSerializer( serializers.ModelSerializer, SetPermissionsMixin, ): - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) if not self.all_fields: @@ -409,7 +409,7 @@ class OwnedObjectSerializer( ) # other methods in mixin - def validate_unique_together(self, validated_data, instance=None): + def validate_unique_together(self, validated_data, instance=None) -> None: # workaround for https://github.com/encode/django-rest-framework/issues/9358 if "owner" in validated_data and "name" in self.Meta.fields: name = validated_data.get("name", instance.name if instance else None) @@ -720,7 +720,7 @@ class StoragePathField(serializers.PrimaryKeyRelatedField): class CustomFieldSerializer(serializers.ModelSerializer): - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: context = kwargs.get("context") self.api_version = int( context.get("request").version @@ -846,7 +846,7 @@ class ReadWriteSerializerMethodField(serializers.SerializerMethodField): Based on https://stackoverflow.com/a/62579804 """ - def __init__(self, method_name=None, *args, **kwargs): + def __init__(self, method_name=None, *args, **kwargs) -> None: self.method_name = method_name 
kwargs["source"] = "*" super(serializers.SerializerMethodField, self).__init__(*args, **kwargs) @@ -1261,7 +1261,7 @@ class DocumentSerializer( CustomFieldInstance.deleted_objects.filter(document=instance).delete() return instance - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: self.truncate_content = kwargs.pop("truncate_content", False) # return full permissions if we're doing a PATCH or PUT @@ -1456,7 +1456,7 @@ class DocumentListSerializer(serializers.Serializer): child=serializers.IntegerField(), ) - def _validate_document_id_list(self, documents, name="documents"): + def _validate_document_id_list(self, documents, name="documents") -> None: if not isinstance(documents, list): raise serializers.ValidationError(f"{name} must be a list") if not all(isinstance(i, int) for i in documents): @@ -1502,7 +1502,7 @@ class BulkEditSerializer( parameters = serializers.DictField(allow_empty=True, default={}, write_only=True) - def _validate_tag_id_list(self, tags, name="tags"): + def _validate_tag_id_list(self, tags, name="tags") -> None: if not isinstance(tags, list): raise serializers.ValidationError(f"{name} must be a list") if not all(isinstance(i, int) for i in tags): @@ -1517,7 +1517,7 @@ class BulkEditSerializer( self, custom_fields, name="custom_fields", - ): + ) -> None: ids = custom_fields if isinstance(custom_fields, dict): try: @@ -1576,7 +1576,7 @@ class BulkEditSerializer( # This will never happen as it is handled by the ChoiceField raise serializers.ValidationError("Unsupported method.") - def _validate_parameters_tags(self, parameters): + def _validate_parameters_tags(self, parameters) -> None: if "tag" in parameters: tag_id = parameters["tag"] try: @@ -1586,7 +1586,7 @@ class BulkEditSerializer( else: raise serializers.ValidationError("tag not specified") - def _validate_parameters_document_type(self, parameters): + def _validate_parameters_document_type(self, parameters) -> None: if "document_type" in parameters: 
document_type_id = parameters["document_type"] if document_type_id is None: @@ -1599,7 +1599,7 @@ class BulkEditSerializer( else: raise serializers.ValidationError("document_type not specified") - def _validate_parameters_correspondent(self, parameters): + def _validate_parameters_correspondent(self, parameters) -> None: if "correspondent" in parameters: correspondent_id = parameters["correspondent"] if correspondent_id is None: @@ -1611,7 +1611,7 @@ class BulkEditSerializer( else: raise serializers.ValidationError("correspondent not specified") - def _validate_storage_path(self, parameters): + def _validate_storage_path(self, parameters) -> None: if "storage_path" in parameters: storage_path_id = parameters["storage_path"] if storage_path_id is None: @@ -1625,7 +1625,7 @@ class BulkEditSerializer( else: raise serializers.ValidationError("storage path not specified") - def _validate_parameters_modify_tags(self, parameters): + def _validate_parameters_modify_tags(self, parameters) -> None: if "add_tags" in parameters: self._validate_tag_id_list(parameters["add_tags"], "add_tags") else: @@ -1636,7 +1636,7 @@ class BulkEditSerializer( else: raise serializers.ValidationError("remove_tags not specified") - def _validate_parameters_modify_custom_fields(self, parameters): + def _validate_parameters_modify_custom_fields(self, parameters) -> None: if "add_custom_fields" in parameters: self._validate_custom_field_id_list_or_dict( parameters["add_custom_fields"], @@ -1659,7 +1659,7 @@ class BulkEditSerializer( raise serializers.ValidationError("Specified owner cannot be found") return ownerUser - def _validate_parameters_set_permissions(self, parameters): + def _validate_parameters_set_permissions(self, parameters) -> None: parameters["set_permissions"] = self.validate_set_permissions( parameters["set_permissions"], ) @@ -1668,7 +1668,7 @@ class BulkEditSerializer( if "merge" not in parameters: parameters["merge"] = False - def _validate_parameters_rotate(self, parameters): + 
def _validate_parameters_rotate(self, parameters) -> None: try: if ( "degrees" not in parameters @@ -1678,7 +1678,7 @@ class BulkEditSerializer( except ValueError: raise serializers.ValidationError("invalid rotation degrees") - def _validate_parameters_split(self, parameters): + def _validate_parameters_split(self, parameters) -> None: if "pages" not in parameters: raise serializers.ValidationError("pages not specified") try: @@ -1707,7 +1707,7 @@ class BulkEditSerializer( else: parameters["delete_originals"] = False - def _validate_parameters_delete_pages(self, parameters): + def _validate_parameters_delete_pages(self, parameters) -> None: if "pages" not in parameters: raise serializers.ValidationError("pages not specified") if not isinstance(parameters["pages"], list): @@ -1715,7 +1715,7 @@ class BulkEditSerializer( if not all(isinstance(i, int) for i in parameters["pages"]): raise serializers.ValidationError("pages must be a list of integers") - def _validate_parameters_merge(self, parameters): + def _validate_parameters_merge(self, parameters) -> None: if "delete_originals" in parameters: if not isinstance(parameters["delete_originals"], bool): raise serializers.ValidationError("delete_originals must be a boolean") @@ -1727,7 +1727,7 @@ class BulkEditSerializer( else: parameters["archive_fallback"] = False - def _validate_parameters_edit_pdf(self, parameters, document_id): + def _validate_parameters_edit_pdf(self, parameters, document_id) -> None: if "operations" not in parameters: raise serializers.ValidationError("operations not specified") if not isinstance(parameters["operations"], list): @@ -2212,7 +2212,7 @@ class AcknowledgeTasksViewSerializer(serializers.Serializer): child=serializers.IntegerField(), ) - def _validate_task_id_list(self, tasks, name="tasks"): + def _validate_task_id_list(self, tasks, name="tasks") -> None: if not isinstance(tasks, list): raise serializers.ValidationError(f"{name} must be a list") if not all(isinstance(i, int) for i in 
tasks): @@ -2417,7 +2417,7 @@ class BulkEditObjectsSerializer(SerializerWithPerms, SetPermissionsMixin): ) return objects - def _validate_permissions(self, permissions): + def _validate_permissions(self, permissions) -> None: self.validate_set_permissions( permissions, ) @@ -2529,7 +2529,7 @@ class WorkflowTriggerSerializer(serializers.ModelSerializer): return attrs @staticmethod - def normalize_workflow_trigger_sources(trigger): + def normalize_workflow_trigger_sources(trigger) -> None: """ Convert sources to strings to handle django-multiselectfield v1.0 changes """ @@ -2703,7 +2703,12 @@ class WorkflowSerializer(serializers.ModelSerializer): "actions", ] - def update_triggers_and_actions(self, instance: Workflow, triggers, actions): + def update_triggers_and_actions( + self, + instance: Workflow, + triggers, + actions, + ) -> None: set_triggers = [] set_actions = [] @@ -2863,7 +2868,7 @@ class WorkflowSerializer(serializers.ModelSerializer): instance.actions.set(set_actions) instance.save() - def prune_triggers_and_actions(self): + def prune_triggers_and_actions(self) -> None: """ ManyToMany fields dont support e.g. 
on_delete so we need to discard unattached triggers and actions manually diff --git a/src/documents/signals/handlers.py b/src/documents/signals/handlers.py index cfd2f185b..8ef5cad04 100644 --- a/src/documents/signals/handlers.py +++ b/src/documents/signals/handlers.py @@ -64,7 +64,7 @@ if TYPE_CHECKING: logger = logging.getLogger("paperless.handlers") -def add_inbox_tags(sender, document: Document, logging_group=None, **kwargs): +def add_inbox_tags(sender, document: Document, logging_group=None, **kwargs) -> None: if document.owner is not None: tags = get_objects_for_user_owner_aware( document.owner, @@ -84,7 +84,7 @@ def _suggestion_printer( document: Document, selected: MatchingModel, base_url: str | None = None, -): +) -> None: """ Smaller helper to reduce duplication when just outputting suggestions to the console """ @@ -110,7 +110,7 @@ def set_correspondent( stdout=None, style_func=None, **kwargs, -): +) -> None: if document.correspondent and not replace: return @@ -166,7 +166,7 @@ def set_document_type( stdout=None, style_func=None, **kwargs, -): +) -> None: if document.document_type and not replace: return @@ -222,7 +222,7 @@ def set_tags( stdout=None, style_func=None, **kwargs, -): +) -> None: if replace: Document.tags.through.objects.filter(document=document).exclude( Q(tag__is_inbox_tag=True), @@ -279,7 +279,7 @@ def set_storage_path( stdout=None, style_func=None, **kwargs, -): +) -> None: if document.storage_path and not replace: return @@ -327,7 +327,7 @@ def set_storage_path( # see empty_trash in documents/tasks.py for signal handling -def cleanup_document_deletion(sender, instance, **kwargs): +def cleanup_document_deletion(sender, instance, **kwargs) -> None: with FileLock(settings.MEDIA_LOCK): if settings.EMPTY_TRASH_DIR: # Find a non-conflicting filename in case a document with the same @@ -415,13 +415,13 @@ def update_filename_and_move_files( sender, instance: Document | CustomFieldInstance, **kwargs, -): +) -> None: if isinstance(instance, 
CustomFieldInstance): if not _filename_template_uses_custom_fields(instance.document): return instance = instance.document - def validate_move(instance, old_path: Path, new_path: Path, root: Path): + def validate_move(instance, old_path: Path, new_path: Path, root: Path) -> None: if not new_path.is_relative_to(root): msg = ( f"Document {instance!s}: Refusing to move file outside root {root}: " @@ -594,7 +594,7 @@ def update_filename_and_move_files( @shared_task -def process_cf_select_update(custom_field: CustomField): +def process_cf_select_update(custom_field: CustomField) -> None: """ Update documents tied to a select custom field: @@ -620,7 +620,11 @@ def process_cf_select_update(custom_field: CustomField): # should be disabled in /src/documents/management/commands/document_importer.py handle @receiver(models.signals.post_save, sender=CustomField) -def check_paths_and_prune_custom_fields(sender, instance: CustomField, **kwargs): +def check_paths_and_prune_custom_fields( + sender, + instance: CustomField, + **kwargs, +) -> None: """ When a custom field is updated, check if we need to update any documents. Done async to avoid slowing down the save operation. """ @@ -633,7 +637,7 @@ def check_paths_and_prune_custom_fields(sender, instance: CustomField, **kwargs) @receiver(models.signals.post_delete, sender=CustomField) -def cleanup_custom_field_deletion(sender, instance: CustomField, **kwargs): +def cleanup_custom_field_deletion(sender, instance: CustomField, **kwargs) -> None: """ When a custom field is deleted, ensure no saved views reference it. """ @@ -670,7 +674,7 @@ def update_llm_suggestions_cache(sender, instance, **kwargs): @receiver(models.signals.post_delete, sender=User) @receiver(models.signals.post_delete, sender=Group) -def cleanup_user_deletion(sender, instance: User | Group, **kwargs): +def cleanup_user_deletion(sender, instance: User | Group, **kwargs) -> None: """ When a user or group is deleted, remove non-cascading references. 
At the moment, just the default permission settings in UiSettings. @@ -713,7 +717,7 @@ def cleanup_user_deletion(sender, instance: User | Group, **kwargs): ) -def add_to_index(sender, document, **kwargs): +def add_to_index(sender, document, **kwargs) -> None: from documents import index index.add_or_update_document(document) @@ -725,7 +729,7 @@ def run_workflows_added( logging_group=None, original_file=None, **kwargs, -): +) -> None: run_workflows( trigger_type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED, document=document, @@ -735,7 +739,12 @@ def run_workflows_added( ) -def run_workflows_updated(sender, document: Document, logging_group=None, **kwargs): +def run_workflows_updated( + sender, + document: Document, + logging_group=None, + **kwargs, +) -> None: run_workflows( trigger_type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, document=document, @@ -841,7 +850,7 @@ def run_workflows( @before_task_publish.connect -def before_task_publish_handler(sender=None, headers=None, body=None, **kwargs): +def before_task_publish_handler(sender=None, headers=None, body=None, **kwargs) -> None: """ Creates the PaperlessTask object in a pending state. This is sent before the task reaches the broker, but before it begins executing on a worker. @@ -883,7 +892,7 @@ def before_task_publish_handler(sender=None, headers=None, body=None, **kwargs): @task_prerun.connect -def task_prerun_handler(sender=None, task_id=None, task=None, **kwargs): +def task_prerun_handler(sender=None, task_id=None, task=None, **kwargs) -> None: """ Updates the PaperlessTask to be started. Sent before the task begins execution @@ -913,7 +922,7 @@ def task_postrun_handler( retval=None, state=None, **kwargs, -): +) -> None: """ Updates the result of the PaperlessTask. @@ -942,7 +951,7 @@ def task_failure_handler( args=None, traceback=None, **kwargs, -): +) -> None: """ Updates the result of a failed PaperlessTask. 
@@ -962,7 +971,7 @@ def task_failure_handler( @worker_process_init.connect -def close_connection_pool_on_worker_init(**kwargs): +def close_connection_pool_on_worker_init(**kwargs) -> None: """ Close the DB connection pool for each Celery child process after it starts. diff --git a/src/documents/tasks.py b/src/documents/tasks.py index fc8911705..91a266856 100644 --- a/src/documents/tasks.py +++ b/src/documents/tasks.py @@ -71,13 +71,13 @@ logger = logging.getLogger("paperless.tasks") @shared_task -def index_optimize(): +def index_optimize() -> None: ix = index.open_index() writer = AsyncWriter(ix) writer.commit(optimize=True) -def index_reindex(*, progress_bar_disable=False): +def index_reindex(*, progress_bar_disable=False) -> None: documents = Document.objects.all() ix = index.open_index(recreate=True) @@ -88,7 +88,7 @@ def index_reindex(*, progress_bar_disable=False): @shared_task -def train_classifier(*, scheduled=True): +def train_classifier(*, scheduled=True) -> None: task = PaperlessTask.objects.create( type=PaperlessTask.TaskType.SCHEDULED_TASK if scheduled @@ -234,7 +234,7 @@ def sanity_check(*, scheduled=True, raise_on_error=True): @shared_task -def bulk_update_documents(document_ids): +def bulk_update_documents(document_ids) -> None: documents = Document.objects.filter(id__in=document_ids) ix = index.open_index() @@ -261,7 +261,7 @@ def bulk_update_documents(document_ids): @shared_task -def update_document_content_maybe_archive_file(document_id): +def update_document_content_maybe_archive_file(document_id) -> None: """ Re-creates OCR content and thumbnail for a document, and archive file if it exists. 
@@ -373,7 +373,7 @@ def update_document_content_maybe_archive_file(document_id): @shared_task -def empty_trash(doc_ids=None): +def empty_trash(doc_ids=None) -> None: if doc_ids is None: logger.info("Emptying trash of all expired documents") documents = ( @@ -410,7 +410,7 @@ def empty_trash(doc_ids=None): @shared_task -def check_scheduled_workflows(): +def check_scheduled_workflows() -> None: """ Check and run all enabled scheduled workflows. @@ -588,7 +588,7 @@ def llmindex_index( rebuild=False, scheduled=True, auto=False, -): +) -> None: ai_config = AIConfig() if ai_config.llm_index_enabled: task = PaperlessTask.objects.create( @@ -624,17 +624,17 @@ def llmindex_index( @shared_task -def update_document_in_llm_index(document): +def update_document_in_llm_index(document) -> None: llm_index_add_or_update_document(document) @shared_task -def remove_document_from_llm_index(document): +def remove_document_from_llm_index(document) -> None: llm_index_remove_document(document) @shared_task -def build_share_link_bundle(bundle_id: int): +def build_share_link_bundle(bundle_id: int) -> None: try: bundle = ( ShareLinkBundle.objects.filter(pk=bundle_id) @@ -726,7 +726,7 @@ def build_share_link_bundle(bundle_id: int): @shared_task -def cleanup_expired_share_link_bundles(): +def cleanup_expired_share_link_bundles() -> None: now = timezone.now() expired_qs = ShareLinkBundle.objects.filter( expiration__isnull=False, diff --git a/src/documents/tests/test_admin.py b/src/documents/tests/test_admin.py index 61a579dc7..de2f07df5 100644 --- a/src/documents/tests/test_admin.py +++ b/src/documents/tests/test_admin.py @@ -27,7 +27,7 @@ class TestDocumentAdmin(DirectoriesMixin, TestCase): super().setUp() self.doc_admin = DocumentAdmin(model=Document, admin_site=AdminSite()) - def test_save_model(self): + def test_save_model(self) -> None: doc = Document.objects.create(title="test") doc.title = "new title" @@ -35,7 +35,7 @@ class TestDocumentAdmin(DirectoriesMixin, TestCase): 
self.assertEqual(Document.objects.get(id=doc.id).title, "new title") self.assertEqual(self.get_document_from_index(doc)["id"], doc.id) - def test_delete_model(self): + def test_delete_model(self) -> None: doc = Document.objects.create(title="test") index.add_or_update_document(doc) self.assertIsNotNone(self.get_document_from_index(doc)) @@ -45,7 +45,7 @@ class TestDocumentAdmin(DirectoriesMixin, TestCase): self.assertRaises(Document.DoesNotExist, Document.objects.get, id=doc.id) self.assertIsNone(self.get_document_from_index(doc)) - def test_delete_queryset(self): + def test_delete_queryset(self) -> None: docs = [] for i in range(42): doc = Document.objects.create( @@ -67,7 +67,7 @@ class TestDocumentAdmin(DirectoriesMixin, TestCase): for doc in docs: self.assertIsNone(self.get_document_from_index(doc)) - def test_created(self): + def test_created(self) -> None: doc = Document.objects.create( title="test", created=timezone.make_aware(timezone.datetime(2020, 4, 12)), @@ -98,7 +98,7 @@ class TestPaperlessAdmin(DirectoriesMixin, TestCase): super().setUp() self.user_admin = PaperlessUserAdmin(model=User, admin_site=AdminSite()) - def test_request_is_passed_to_form(self): + def test_request_is_passed_to_form(self) -> None: user = User.objects.create(username="test", is_superuser=False) non_superuser = User.objects.create(username="requestuser") request = types.SimpleNamespace(user=non_superuser) @@ -106,7 +106,7 @@ class TestPaperlessAdmin(DirectoriesMixin, TestCase): form = formType(data={}, instance=user) self.assertEqual(form.request, request) - def test_only_superuser_can_change_superuser(self): + def test_only_superuser_can_change_superuser(self) -> None: superuser = User.objects.create_superuser(username="superuser", password="test") non_superuser = User.objects.create(username="requestuser") user = User.objects.create(username="test", is_superuser=False) @@ -128,7 +128,7 @@ class TestPaperlessAdmin(DirectoriesMixin, TestCase): self.assertTrue(form.is_valid()) 
self.assertEqual({}, form.errors) - def test_superuser_can_only_be_modified_by_superuser(self): + def test_superuser_can_only_be_modified_by_superuser(self) -> None: superuser = User.objects.create_superuser(username="superuser", password="test") user = User.objects.create( username="test", diff --git a/src/documents/tests/test_api_app_config.py b/src/documents/tests/test_api_app_config.py index f2ed902f4..7717c3488 100644 --- a/src/documents/tests/test_api_app_config.py +++ b/src/documents/tests/test_api_app_config.py @@ -22,7 +22,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase): user = User.objects.create_superuser(username="temp_admin") self.client.force_authenticate(user=user) - def test_api_get_config(self): + def test_api_get_config(self) -> None: """ GIVEN: - API request to get app config @@ -78,7 +78,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase): }, ) - def test_api_get_ui_settings_with_config(self): + def test_api_get_ui_settings_with_config(self) -> None: """ GIVEN: - Existing config with app_title, app_logo specified @@ -101,7 +101,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase): | response.data["settings"], ) - def test_api_update_config(self): + def test_api_update_config(self) -> None: """ GIVEN: - API request to update app config @@ -124,7 +124,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase): config = ApplicationConfiguration.objects.first() self.assertEqual(config.color_conversion_strategy, ColorConvertChoices.RGB) - def test_api_update_config_empty_fields(self): + def test_api_update_config_empty_fields(self) -> None: """ GIVEN: - API request to update app config with empty string for user_args JSONField and language field @@ -151,7 +151,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase): self.assertEqual(config.language, None) self.assertEqual(config.barcode_tag_mapping, None) - def test_api_replace_app_logo(self): + def test_api_replace_app_logo(self) -> None: """ GIVEN: - Existing config with 
app_logo specified @@ -200,7 +200,7 @@ class TestApiAppConfig(DirectoriesMixin, APITestCase): ) self.assertFalse(Path(old_logo.path).exists()) - def test_api_rejects_malicious_svg_logo(self): + def test_api_rejects_malicious_svg_logo(self) -> None: """ GIVEN: - An SVG logo containing a