Configures ruff as the one-stop linter and resolves the warnings it raised

Trenton H 2023-03-28 09:39:30 -07:00
parent 5869467db3
commit ce41ac9158
110 changed files with 507 additions and 491 deletions


@@ -1,4 +1,3 @@
-#!/usr/bin/env python3
 import json
 import logging
 import os
@@ -390,8 +389,6 @@ class LibraryTagsCleaner(RegistryTagsCleaner):
         will need their own logic
         """
-        pass
 def _main():
     parser = ArgumentParser(


@@ -1,4 +1,3 @@
-#!/usr/bin/env python3
 import logging

.github/scripts/get-build-json.py vendored Executable file → Normal file

@@ -1,4 +1,3 @@
-#!/usr/bin/env python3
 """
 This is a helper script for the mutli-stage Docker image builder.
 It provides a single point of configuration for package version control.


@@ -1,4 +1,3 @@
-#!/usr/bin/env python3
 """
 This module contains some useful classes for interacting with the Github API.
 The full documentation for the API can be found here: https://docs.github.com/en/rest
@@ -162,10 +161,7 @@ class ContainerPackage(_EndpointResponse):
         Returns True if the image has at least one tag which matches the given regex,
         False otherwise
         """
-        for tag in self.tags:
-            if re.match(pattern, tag) is not None:
-                return True
-        return False
+        return any(re.match(pattern, tag) is not None for tag in self.tags)
     def __repr__(self):
         return f"Package {self.name}"

.gitignore vendored

@@ -73,6 +73,7 @@ virtualenv
 .venv/
 /docker-compose.env
 /docker-compose.yml
+.ruff_cache/
 # Used for development
 scripts/import-for-development


@@ -36,39 +36,14 @@ repos:
           - markdown
         exclude: "(^Pipfile\\.lock$)"
   # Python hooks
-  - repo: https://github.com/asottile/reorder_python_imports
-    rev: v3.9.0
-    hooks:
-      - id: reorder-python-imports
-        exclude: "(migrations)"
-  - repo: https://github.com/asottile/yesqa
-    rev: "v1.4.0"
-    hooks:
-      - id: yesqa
-        exclude: "(migrations)"
-  - repo: https://github.com/asottile/add-trailing-comma
-    rev: "v2.4.0"
-    hooks:
-      - id: add-trailing-comma
-        exclude: "(migrations)"
-  - repo: https://github.com/PyCQA/flake8
-    rev: 6.0.0
-    hooks:
-      - id: flake8
-        files: ^src/
-        args:
-          - "--config=./src/setup.cfg"
+  - repo: https://github.com/charliermarsh/ruff-pre-commit
+    rev: 'v0.0.259'
+    hooks:
+      - id: ruff
   - repo: https://github.com/psf/black
     rev: 22.12.0
     hooks:
       - id: black
-  - repo: https://github.com/asottile/pyupgrade
-    rev: v3.3.1
-    hooks:
-      - id: pyupgrade
-        exclude: "(migrations)"
-        args:
-          - "--py38-plus"
   # Dockerfile hooks
   - repo: https://github.com/AleksaC/hadolint-py
     rev: v2.10.0
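With reorder_python_imports, yesqa, add-trailing-comma, flake8 and pyupgrade all collapsed into the single ruff hook above, the whole suite can still be exercised in one go — a minimal sketch, assuming pre-commit is already installed in the development environment:

    pre-commit run --all-files   # runs every configured hook (ruff, black, hadolint, ...) against the whole tree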

.ruff.toml Normal file

@@ -0,0 +1,23 @@
+# https://beta.ruff.rs/docs/settings/
+# https://beta.ruff.rs/docs/rules/
+select = ["F", "E", "W", "UP", "COM", "DJ", "EXE", "ISC", "ICN", "G201", "INP", "PIE", "RSE", "SIM", "TID", "PLC", "PLE", "RUF"]
+# TODO PTH
+ignore = ["DJ001", "SIM105"]
+fix = true
+line-length = 88
+respect-gitignore = true
+src = ["src"]
+target-version = "py38"
+format = "grouped"
+show-fixes = true
+[per-file-ignores]
+".github/scripts/*.py" = ["E501", "INP001", "SIM117"]
+"docker/wait-for-redis.py" = ["INP001"]
+"*/tests/*.py" = ["E501", "SIM117"]
+"*/migrations/*.py" = ["E501", "SIM"]
+"src/paperless_tesseract/tests/test_parser.py" = ["RUF001"]
+"src/documents/models.py" = ["SIM115"]
+[isort]
+force-single-line = true
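With this file at the repository root, ruff picks the settings up automatically, so a bare invocation is enough — a sketch, assuming the pinned ruff v0.0.259 from the lockfile is on the PATH (later releases spell the same invocation `ruff check .`):

    ruff .   # fix = true applies safe autofixes in place; show-fixes = true enumerates what was changed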


@@ -78,6 +78,7 @@ black = "*"
 pre-commit = "*"
 imagehash = "*"
 mkdocs-material = "*"
+ruff = "*"
 [typing-dev]
 mypy = "*"

Pipfile.lock generated

@@ -3069,6 +3069,29 @@
         "markers": "python_version >= '3.7' and python_version < '4'",
         "version": "==2.28.2"
     },
+    "ruff": {
+        "hashes": [
+            "sha256:22e1e35bf5f12072cd644d22afd9203641ccf258bc14ff91aa1c43dc14f6047d",
+            "sha256:29e2b77b7d5da6a7dd5cf9b738b511355c5734ece56f78e500d4b5bffd58c1a0",
+            "sha256:38704f151323aa5858370a2f792e122cc25e5d1aabe7d42ceeab83da18f0b456",
+            "sha256:40ae87f2638484b7e8a7567b04a7af719f1c484c5bf132038b702bb32e1f6577",
+            "sha256:428507fb321b386dda70d66cd1a8aa0abf51d7c197983d83bb9e4fa5ee60300b",
+            "sha256:49e903bcda19f6bb0725a962c058eb5d61f40d84ef52ed53b61939b69402ab4e",
+            "sha256:5b3c1beacf6037e7f0781d4699d9a2dd4ba2462f475be5b1f45cf84c4ba3c69d",
+            "sha256:71f0ef1985e9a6696fa97da8459917fa34bdaa2c16bd33bd5edead585b7d44f7",
+            "sha256:79b02fa17ec1fd8d306ae302cb47fb614b71e1f539997858243769bcbe78c6d9",
+            "sha256:7cfef26619cba184d59aa7fa17b48af5891d51fc0b755a9bc533478a10d4d066",
+            "sha256:8b56496063ab3bfdf72339a5fbebb8bd46e5c5fee25ef11a9f03b208fa0562ec",
+            "sha256:aa9449b898287e621942cc71b9327eceb8f0c357e4065fecefb707ef2d978df8",
+            "sha256:c5fbaea9167f1852757f02133e5daacdb8c75b3431343205395da5b10499927a",
+            "sha256:d2fb20e89e85d147c85caa807707a1488bccc1f3854dc3d53533e89b52a0c5ff",
+            "sha256:daaea322e7e85f4c13d82be9536309e1c4b8b9851bb0cbc7eeb15d490fd46bf9",
+            "sha256:e4f39e18702de69faaaee3969934b92d7467285627f99a5b6ecd55a7d9f5d086",
+            "sha256:f3938dc45e2a3f818e9cbd53007265c22246fbfded8837b2c563bf0ebde1a226"
+        ],
+        "index": "pypi",
+        "version": "==0.0.259"
+    },
     "scipy": {
         "hashes": [
             "sha256:02b567e722d62bddd4ac253dafb01ce7ed8742cf8031aea030a41414b86c1125",


@@ -18,7 +18,7 @@ if __name__ == "__main__":
     REDIS_URL: Final[str] = os.getenv("PAPERLESS_REDIS", "redis://localhost:6379")
-    print(f"Waiting for Redis...", flush=True)
+    print("Waiting for Redis...", flush=True)
     attempt = 0
     with Redis.from_url(url=REDIS_URL) as client:
@@ -37,8 +37,8 @@ if __name__ == "__main__":
             attempt += 1
         if attempt >= MAX_RETRY_COUNT:
-            print(f"Failed to connect to redis using environment variable PAPERLESS_REDIS.")
+            print("Failed to connect to redis using environment variable PAPERLESS_REDIS.")
             sys.exit(os.EX_UNAVAILABLE)
         else:
-            print(f"Connected to Redis broker.")
+            print("Connected to Redis broker.")
             sys.exit(os.EX_OK)


@@ -30,7 +30,9 @@ def worker_int(worker):
     worker.log.info("worker received INT or QUIT signal")
     ## get traceback info
-    import threading, sys, traceback
+    import sys
+    import threading
+    import traceback
     id2name = {th.ident: th.name for th in threading.enumerate()}
     code = []


@@ -136,9 +136,8 @@ def convert_from_tiff_to_pdf(filepath: Path) -> Path:
             filepath,
         ],
     )
-    with filepath.open("rb") as img_file:
-        with newpath.open("wb") as pdf_file:
-            pdf_file.write(img2pdf.convert(img_file))
+    with filepath.open("rb") as img_file, newpath.open("wb") as pdf_file:
+        pdf_file.write(img2pdf.convert(img_file))
     return newpath


@@ -52,7 +52,7 @@ class BulkArchiveStrategy:
         return in_archive_path
     def add_document(self, doc: Document):
-        raise NotImplementedError()  # pragma: no cover
+        raise NotImplementedError  # pragma: no cover
 class OriginalsOnlyStrategy(BulkArchiveStrategy):


@@ -104,7 +104,7 @@ class DocumentClassifier:
                 self.document_type_classifier = pickle.load(f)
                 self.storage_path_classifier = pickle.load(f)
         except Exception as err:
-            raise ClassifierModelCorruptError() from err
+            raise ClassifierModelCorruptError from err
         # Check for the warning about unpickling from differing versions
         # and consider it incompatible
@@ -117,7 +117,7 @@ class DocumentClassifier:
             if issubclass(warning.category, UserWarning):
                 w_msg = str(warning.message)
                 if sk_learn_warning_url in w_msg:
-                    raise IncompatibleClassifierVersionError()
+                    raise IncompatibleClassifierVersionError
     def save(self):
         target_file = settings.MODEL_FILE


@@ -590,9 +590,8 @@ class Consumer(LoggingMixin):
         )
     def _write(self, storage_type, source, target):
-        with open(source, "rb") as read_file:
-            with open(target, "wb") as write_file:
-                write_file.write(read_file.read())
+        with open(source, "rb") as read_file, open(target, "wb") as write_file:
+            write_file.write(read_file.read())
     def _log_script_outputs(self, completed_process: CompletedProcess):
         """


@@ -164,7 +164,7 @@ def remove_document_from_index(document):
 class DelayedQuery:
     def _get_query(self):
-        raise NotImplementedError()
+        raise NotImplementedError
     def _get_query_filter(self):
         criterias = []


@@ -159,7 +159,7 @@ def _consume_wait_unmodified(file: str) -> None:
         new_size = stat_data.st_size
     except FileNotFoundError:
         logger.debug(
-            f"File {file} moved while waiting for it to remain " f"unmodified.",
+            f"File {file} moved while waiting for it to remain unmodified.",
         )
         return
     if new_mtime == mtime and new_size == size:
@@ -293,10 +293,7 @@ class Command(BaseCommand):
         while not finished:
             try:
                 for event in inotify.read(timeout=timeout):
-                    if recursive:
-                        path = inotify.get_path(event.wd)
-                    else:
-                        path = directory
+                    path = inotify.get_path(event.wd) if recursive else directory
                     filepath = os.path.join(path, event.name)
                     notified_files[filepath] = monotonic()


@@ -1,6 +1,6 @@
 from django.core.management.base import BaseCommand
-from ...tasks import train_classifier
+from documents.tasks import train_classifier
 class Command(BaseCommand):


@@ -35,8 +35,8 @@ from paperless.db import GnuPG
 from paperless_mail.models import MailAccount
 from paperless_mail.models import MailRule
-from ...file_handling import delete_empty_directories
-from ...file_handling import generate_filename
+from documents.file_handling import delete_empty_directories
+from documents.file_handling import generate_filename
 class Command(BaseCommand):
@@ -403,9 +403,10 @@ class Command(BaseCommand):
         if self.compare_checksums and source_checksum:
             target_checksum = hashlib.md5(target.read_bytes()).hexdigest()
             perform_copy = target_checksum != source_checksum
-        elif source_stat.st_mtime != target_stat.st_mtime:
-            perform_copy = True
-        elif source_stat.st_size != target_stat.st_size:
+        elif (
+            source_stat.st_mtime != target_stat.st_mtime
+            or source_stat.st_size != target_stat.st_size
+        ):
             perform_copy = True
         else:
             # Copy if it does not exist


@@ -22,8 +22,8 @@ from documents.settings import EXPORTER_THUMBNAIL_NAME
 from filelock import FileLock
 from paperless import version
-from ...file_handling import create_source_path_directory
-from ...signals.handlers import update_filename_and_move_files
+from documents.file_handling import create_source_path_directory
+from documents.signals.handlers import update_filename_and_move_files
 @contextmanager
@@ -111,37 +111,36 @@ class Command(BaseCommand):
             post_save,
             receiver=update_filename_and_move_files,
             sender=Document,
-        ):
-            with disable_signal(
-                m2m_changed,
-                receiver=update_filename_and_move_files,
-                sender=Document.tags.through,
-            ):
-                # Fill up the database with whatever is in the manifest
-                try:
-                    for manifest_path in manifest_paths:
-                        call_command("loaddata", manifest_path)
-                except (FieldDoesNotExist, DeserializationError) as e:
-                    self.stdout.write(self.style.ERROR("Database import failed"))
-                    if (
-                        self.version is not None
-                        and self.version != version.__full_version_str__
-                    ):
-                        self.stdout.write(
-                            self.style.ERROR(
-                                "Version mismatch: "
-                                f"Currently {version.__full_version_str__},"
-                                f" importing {self.version}",
-                            ),
-                        )
-                        raise e
-                    else:
-                        self.stdout.write(
-                            self.style.ERROR("No version information present"),
-                        )
-                        raise e
+        ), disable_signal(
+            m2m_changed,
+            receiver=update_filename_and_move_files,
+            sender=Document.tags.through,
+        ):
+            # Fill up the database with whatever is in the manifest
+            try:
+                for manifest_path in manifest_paths:
+                    call_command("loaddata", manifest_path)
+            except (FieldDoesNotExist, DeserializationError) as e:
+                self.stdout.write(self.style.ERROR("Database import failed"))
+                if (
+                    self.version is not None
+                    and self.version != version.__full_version_str__
+                ):
+                    self.stdout.write(
+                        self.style.ERROR(
+                            "Version mismatch: "
+                            f"Currently {version.__full_version_str__},"
+                            f" importing {self.version}",
+                        ),
+                    )
+                    raise e
+                else:
+                    self.stdout.write(
+                        self.style.ERROR("No version information present"),
+                    )
+                    raise e
             self._import_files_from_manifest(options["no_progress_bar"])
         self.stdout.write("Updating search index...")
         call_command(
@@ -154,14 +153,14 @@ class Command(BaseCommand):
     def _check_manifest_exists(path):
         if not os.path.exists(path):
             raise CommandError(
-                "That directory doesn't appear to contain a manifest.json " "file.",
+                "That directory doesn't appear to contain a manifest.json file.",
             )
     def _check_manifest(self):
         for record in self.manifest:
-            if not record["model"] == "documents.document":
+            if record["model"] != "documents.document":
                 continue
             if EXPORTER_FILE_NAME not in record:


@@ -5,10 +5,10 @@ from django.core.management.base import BaseCommand
 from documents.classifier import load_classifier
 from documents.models import Document
-from ...signals.handlers import set_correspondent
-from ...signals.handlers import set_document_type
-from ...signals.handlers import set_storage_path
-from ...signals.handlers import set_tags
+from documents.signals.handlers import set_correspondent
+from documents.signals.handlers import set_document_type
+from documents.signals.handlers import set_storage_path
+from documents.signals.handlers import set_tags
 logger = logging.getLogger("paperless.management.retagger")


@@ -7,7 +7,7 @@ from django import db
 from django.core.management.base import BaseCommand
 from documents.models import Document
-from ...parsers import get_parser_class_for_mime_type
+from documents.parsers import get_parser_class_for_mime_type
 def _process_document(doc_in):


@@ -20,10 +20,7 @@ def log_reason(matching_model, document, reason):
 def match_correspondents(document, classifier):
-    if classifier:
-        pred_id = classifier.predict_correspondent(document.content)
-    else:
-        pred_id = None
+    pred_id = classifier.predict_correspondent(document.content) if classifier else None
     correspondents = Correspondent.objects.all()
@@ -33,10 +30,7 @@ def match_correspondents(document, classifier):
 def match_document_types(document, classifier):
-    if classifier:
-        pred_id = classifier.predict_document_type(document.content)
-    else:
-        pred_id = None
+    pred_id = classifier.predict_document_type(document.content) if classifier else None
     document_types = DocumentType.objects.all()
@@ -46,10 +40,7 @@ def match_document_types(document, classifier):
 def match_tags(document, classifier):
-    if classifier:
-        predicted_tag_ids = classifier.predict_tags(document.content)
-    else:
-        predicted_tag_ids = []
+    predicted_tag_ids = classifier.predict_tags(document.content) if classifier else []
     tags = Tag.objects.all()
@@ -59,10 +50,7 @@ def match_tags(document, classifier):
 def match_storage_paths(document, classifier):
-    if classifier:
-        pred_id = classifier.predict_storage_path(document.content)
-    else:
-        pred_id = None
+    pred_id = classifier.predict_storage_path(document.content) if classifier else None
     storage_paths = StoragePath.objects.all()
@@ -80,7 +68,7 @@ def matches(matching_model, document):
     document_content = document.content
     # Check that match is not empty
-    if matching_model.match.strip() == "":
+    if not matching_model.match.strip():
         return False
     if matching_model.is_insensitive:
@@ -132,7 +120,7 @@ def matches(matching_model, document):
         )
     except re.error:
         logger.error(
-            f"Error while processing regular expression " f"{matching_model.match}",
+            f"Error while processing regular expression {matching_model.match}",
         )
         return False
     if match:


@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9 on 2015-12-20 19:10
-from __future__ import unicode_literals
 from django.db import migrations, models
 from django.conf import settings
@@ -32,7 +30,7 @@ class Migration(migrations.Migration):
                 models.TextField(
                     db_index=(
                         "mysql" not in settings.DATABASES["default"]["ENGINE"]
-                    )
+                    ),
                 ),
             ),
             ("created", models.DateTimeField(auto_now_add=True)),


@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9 on 2015-12-26 13:16
-from __future__ import unicode_literals
 from django.db import migrations, models
 import django.utils.timezone
@@ -21,7 +19,8 @@ class Migration(migrations.Migration):
             model_name="document",
             name="created",
             field=models.DateTimeField(
-                default=django.utils.timezone.now, editable=False
+                default=django.utils.timezone.now,
+                editable=False,
             ),
         ),
     ]


@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9 on 2016-01-11 12:21
-from __future__ import unicode_literals
 from django.db import migrations, models
 from django.template.defaultfilters import slugify
@@ -23,7 +21,8 @@ def move_sender_strings_to_sender_model(apps, schema_editor):
             DOCUMENT_SENDER_MAP[document.pk],
             created,
         ) = sender_model.objects.get_or_create(
-            name=document.sender, defaults={"slug": slugify(document.sender)}
+            name=document.sender,
+            defaults={"slug": slugify(document.sender)},
         )


@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9 on 2016-01-14 18:44
-from __future__ import unicode_literals
 from django.db import migrations, models
 import django.db.models.deletion


@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9 on 2016-01-23 03:13
-from __future__ import unicode_literals
 from django.db import migrations


@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9 on 2016-01-23 04:30
-from __future__ import unicode_literals
 from django.db import migrations, models


@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9 on 2016-01-26 21:14
-from __future__ import unicode_literals
 from django.db import migrations, models


@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9 on 2016-01-29 22:58
-from __future__ import unicode_literals
 from django.db import migrations, models
@@ -33,7 +31,9 @@ class Migration(migrations.Migration):
             model_name="document",
             name="tags",
             field=models.ManyToManyField(
-                blank=True, related_name="documents", to="documents.Tag"
+                blank=True,
+                related_name="documents",
+                to="documents.Tag",
             ),
         ),
     ]


@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9 on 2016-02-14 00:40
-from __future__ import unicode_literals
 from django.db import migrations, models


@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9 on 2016-02-27 17:54
-from __future__ import unicode_literals
 from django.db import migrations, models
@@ -42,7 +40,7 @@ class Migration(migrations.Migration):
             (
                 "component",
                 models.PositiveIntegerField(
-                    choices=[(1, "Consumer"), (2, "Mail Fetcher")]
+                    choices=[(1, "Consumer"), (2, "Mail Fetcher")],
                 ),
             ),
             ("created", models.DateTimeField(auto_now_add=True)),


@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9.2 on 2016-03-03 19:29
-from __future__ import unicode_literals
 from django.db import migrations


@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9.2 on 2016-03-05 00:40
-from __future__ import unicode_literals
 import gnupg
 import os
@@ -14,7 +12,7 @@ from django.db import migrations
 from django.utils.termcolors import colorize as colourise  # Spelling hurts me
-class GnuPG(object):
+class GnuPG:
     """
     A handy singleton to use when handling encrypted files.
     """
@@ -28,17 +26,22 @@ class GnuPG(object):
     @classmethod
     def encrypted(cls, file_handle):
         return cls.gpg.encrypt_file(
-            file_handle, recipients=None, passphrase=settings.PASSPHRASE, symmetric=True
+            file_handle,
+            recipients=None,
+            passphrase=settings.PASSPHRASE,
+            symmetric=True,
         ).data
 def move_documents_and_create_thumbnails(apps, schema_editor):
     os.makedirs(
-        os.path.join(settings.MEDIA_ROOT, "documents", "originals"), exist_ok=True
+        os.path.join(settings.MEDIA_ROOT, "documents", "originals"),
+        exist_ok=True,
     )
     os.makedirs(
-        os.path.join(settings.MEDIA_ROOT, "documents", "thumbnails"), exist_ok=True
+        os.path.join(settings.MEDIA_ROOT, "documents", "thumbnails"),
+        exist_ok=True,
     )
     documents = os.listdir(os.path.join(settings.MEDIA_ROOT, "documents"))
@@ -55,7 +58,7 @@ def move_documents_and_create_thumbnails(apps, schema_editor):
             " in order."
             "\n",
             opts=("bold",),
-        )
+        ),
     )
     try:
@@ -73,7 +76,7 @@ def move_documents_and_create_thumbnails(apps, schema_editor):
             colourise("*", fg="green"),
             colourise("Generating a thumbnail for", fg="white"),
             colourise(f, fg="cyan"),
-        )
+        ),
     )
     thumb_temp = tempfile.mkdtemp(prefix="paperless", dir=settings.SCRATCH_DIR)
@@ -95,7 +98,7 @@ def move_documents_and_create_thumbnails(apps, schema_editor):
                 "remove",
                 orig_target,
                 os.path.join(thumb_temp, "convert-%04d.png"),
-            )
+            ),
     ).wait()
     thumb_source = os.path.join(thumb_temp, "convert-0000.png")


@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9.4 on 2016-03-25 21:11
-from __future__ import unicode_literals
 from django.db import migrations, models
 import django.utils.timezone


@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.9.4 on 2016-03-28 19:09
-from __future__ import unicode_literals
 import gnupg
 import hashlib
@@ -13,7 +11,7 @@ from django.template.defaultfilters import slugify
 from django.utils.termcolors import colorize as colourise  # Spelling hurts me
-class GnuPG(object):
+class GnuPG:
     """
     A handy singleton to use when handling encrypted files.
     """
@@ -27,11 +25,14 @@ class GnuPG(object):
     @classmethod
     def encrypted(cls, file_handle):
         return cls.gpg.encrypt_file(
-            file_handle, recipients=None, passphrase=settings.PASSPHRASE, symmetric=True
+            file_handle,
+            recipients=None,
+            passphrase=settings.PASSPHRASE,
+            symmetric=True,
         ).data
-class Document(object):
+class Document:
     """
     Django's migrations restrict access to model methods, so this is a snapshot
     of the methods that existed at the time this migration was written, since
@@ -49,9 +50,9 @@ class Document(object):
     def __str__(self):
         created = self.created.strftime("%Y%m%d%H%M%S")
         if self.correspondent and self.title:
-            return "{}: {} - {}".format(created, self.correspondent, self.title)
+            return f"{created}: {self.correspondent} - {self.title}"
         if self.correspondent or self.title:
-            return "{}: {}".format(created, self.correspondent or self.title)
+            return f"{created}: {self.correspondent or self.title}"
         return str(created)
     @property
@@ -60,7 +61,7 @@ class Document(object):
             settings.MEDIA_ROOT,
             "documents",
             "originals",
-            "{:07}.{}.gpg".format(self.pk, self.file_type),
+            f"{self.pk:07}.{self.file_type}.gpg",
         )
     @property
@@ -88,7 +89,7 @@ def set_checksums(apps, schema_editor):
             " order."
            "\n",
             opts=("bold",),
-        )
+        ),
     )
     sums = {}
@@ -101,7 +102,7 @@ def set_checksums(apps, schema_editor):
             colourise("*", fg="green"),
             colourise("Generating a checksum for", fg="white"),
             colourise(document.file_name, fg="cyan"),
-        )
+        ),
     )
     with document.source_file as encrypted:
@@ -122,15 +123,16 @@ def set_checksums(apps, schema_editor):
                 fg="yellow",
             ),
             doc1=colourise(
-                " * {} (id: {})".format(sums[checksum][1], sums[checksum][0]),
+                f" * {sums[checksum][1]} (id: {sums[checksum][0]})",
                 fg="red",
             ),
             doc2=colourise(
-                " * {} (id: {})".format(document.file_name, document.pk), fg="red"
+                f" * {document.file_name} (id: {document.pk})",
+                fg="red",
             ),
             code=colourise(
                 " $ echo 'DELETE FROM documents_document WHERE id = {pk};' | ./manage.py dbshell".format(
-                    pk=document.pk
+                    pk=document.pk,
                 ),
                 fg="green",
             ),
@@ -171,7 +173,8 @@ class Migration(migrations.Migration):
             model_name="document",
             name="created",
             field=models.DateTimeField(
-                db_index=True, default=django.utils.timezone.now
+                db_index=True,
+                default=django.utils.timezone.now,
             ),
         ),
         migrations.AlterField(


@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.10.2 on 2016-10-05 21:38
-from __future__ import unicode_literals
 from django.db import migrations, models


@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.10.5 on 2017-03-25 15:58
-from __future__ import unicode_literals
 from django.db import migrations, models
 from django.conf import settings


@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.10.5 on 2017-05-12 05:07
-from __future__ import unicode_literals
 from django.db import migrations, models


@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.10.5 on 2017-07-15 17:12
-from __future__ import unicode_literals
 from django.db import migrations, models
 import django.db.models.deletion


@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.10.5 on 2017-07-15 17:12
-from __future__ import unicode_literals
 from django.contrib.auth.models import User
 from django.db import migrations


@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
 from django.db import migrations, models
 import django.utils.timezone
@@ -22,7 +19,9 @@ class Migration(migrations.Migration):
             model_name="document",
             name="added",
             field=models.DateTimeField(
-                db_index=True, default=django.utils.timezone.now, editable=False
+                db_index=True,
+                default=django.utils.timezone.now,
+                editable=False,
             ),
         ),
         migrations.RunPython(set_added_time_to_created_time),


@@ -1,6 +1,4 @@
-# -*- coding: utf-8 -*-
 # Generated by Django 1.11.10 on 2018-02-04 13:07
-from __future__ import unicode_literals
 from django.db import migrations, models


@@ -6,7 +6,7 @@ from django.db import migrations, models
 def set_filename(apps, schema_editor):
     Document = apps.get_model("documents", "Document")
     for doc in Document.objects.all():
-        file_name = "{:07}.{}".format(doc.pk, doc.file_type)
+        file_name = f"{doc.pk:07}.{doc.file_type}"
         if doc.storage_type == "gpg":
             file_name += ".gpg"


@@ -10,5 +10,5 @@ class Migration(migrations.Migration):
     ]
     operations = [
-        migrations.RunPython(migrations.RunPython.noop, migrations.RunPython.noop)
+        migrations.RunPython(migrations.RunPython.noop, migrations.RunPython.noop),
     ]


@@ -1,5 +1,4 @@
 # Generated by Django 3.1.3 on 2020-11-20 11:21
-import mimetypes
 import os
 import magic
@@ -16,7 +15,7 @@ def source_path(self):
     if self.filename:
         fname = str(self.filename)
     else:
-        fname = "{:07}.{}".format(self.pk, self.file_type)
+        fname = f"{self.pk:07}.{self.file_type}"
     if self.storage_type == STORAGE_TYPE_GPG:
         fname += ".gpg"


@@ -73,7 +73,7 @@ class Migration(migrations.Migration):
                         (15, "Modified before"),
                         (16, "Modified after"),
                         (17, "Does not have tag"),
-                    ]
+                    ],
                 ),
             ),
             ("value", models.CharField(max_length=128)),


@@ -165,7 +165,9 @@ class Migration(migrations.Migration):
             model_name="document",
             name="created",
             field=models.DateTimeField(
-                db_index=True, default=django.utils.timezone.now, verbose_name="created"
+                db_index=True,
+                default=django.utils.timezone.now,
+                verbose_name="created",
             ),
         ),
         migrations.AlterField(
@@ -196,14 +198,18 @@ class Migration(migrations.Migration):
             model_name="document",
             name="mime_type",
             field=models.CharField(
-                editable=False, max_length=256, verbose_name="mime type"
+                editable=False,
+                max_length=256,
+                verbose_name="mime type",
             ),
         ),
         migrations.AlterField(
             model_name="document",
             name="modified",
             field=models.DateTimeField(
-                auto_now=True, db_index=True, verbose_name="modified"
+                auto_now=True,
+                db_index=True,
+                verbose_name="modified",
             ),
         ),
         migrations.AlterField(
@@ -234,7 +240,10 @@ class Migration(migrations.Migration):
             model_name="document",
             name="title",
             field=models.CharField(
-                blank=True, db_index=True, max_length=128, verbose_name="title"
+                blank=True,
+                db_index=True,
+                max_length=128,
+                verbose_name="title",
             ),
         ),
         migrations.AlterField(
@@ -373,7 +382,10 @@ class Migration(migrations.Migration):
             model_name="savedviewfilterrule",
             name="value",
             field=models.CharField(
-                blank=True, max_length=128, null=True, verbose_name="value"
+                blank=True,
+                max_length=128,
+                null=True,
+                verbose_name="value",
             ),
         ),
         migrations.AlterField(


@@ -29,7 +29,7 @@ def archive_path_old(doc):
     if doc.filename:
         fname = archive_name_from_filename(doc.filename)
     else:
-        fname = "{:07}.pdf".format(doc.pk)
+        fname = f"{doc.pk:07}.pdf"
     return os.path.join(settings.ARCHIVE_DIR, fname)
@@ -48,7 +48,7 @@ def source_path(doc):
     if doc.filename:
         fname = str(doc.filename)
     else:
-        fname = "{:07}{}".format(doc.pk, doc.file_type)
+        fname = f"{doc.pk:07}{doc.file_type}"
         if doc.storage_type == STORAGE_TYPE_GPG:
             fname += ".gpg"  # pragma: no cover
@@ -67,7 +67,9 @@ def generate_unique_filename(doc, archive_filename=False):
     while True:
         new_filename = generate_filename(
-            doc, counter, archive_filename=archive_filename
+            doc,
+            counter,
+            archive_filename=archive_filename,
         )
         if new_filename == old_filename:
             # still the same as before.
@@ -93,14 +95,16 @@ def generate_filename(doc, counter=0, append_gpg=True, archive_filename=False):
         if doc.correspondent:
             correspondent = pathvalidate.sanitize_filename(
-                doc.correspondent.name, replacement_text="-"
+                doc.correspondent.name,
+                replacement_text="-",
             )
         else:
             correspondent = "none"
         if doc.document_type:
             document_type = pathvalidate.sanitize_filename(
-                doc.document_type.name, replacement_text="-"
+                doc.document_type.name,
+                replacement_text="-",
             )
         else:
             document_type = "none"
@@ -111,9 +115,7 @@ def generate_filename(doc, counter=0, append_gpg=True, archive_filename=False):
             document_type=document_type,
             created=datetime.date.isoformat(doc.created),
             created_year=doc.created.year if doc.created else "none",
-            created_month=f"{doc.created.month:02}"
-            if doc.created
-            else "none",  # NOQA: E501
+            created_month=f"{doc.created.month:02}" if doc.created else "none",
             created_day=f"{doc.created.day:02}" if doc.created else "none",
             added=datetime.date.isoformat(doc.added),
             added_year=doc.added.year if doc.added else "none",
@@ -128,7 +130,7 @@ def generate_filename(doc, counter=0, append_gpg=True, archive_filename=False):
     except (ValueError, KeyError, IndexError):
         logger.warning(
             f"Invalid PAPERLESS_FILENAME_FORMAT: "
-            f"{settings.FILENAME_FORMAT}, falling back to default"
+            f"{settings.FILENAME_FORMAT}, falling back to default",
         )
     counter_str = f"_{counter:02}" if counter else ""
@@ -170,13 +172,17 @@ def create_archive_version(doc, retry_count=3):
         parser: DocumentParser = parser_class(None, None)
         try:
             parse_wrapper(
-                parser, source_path(doc), doc.mime_type, os.path.basename(doc.filename)
+                parser,
+                source_path(doc),
+                doc.mime_type,
+                os.path.basename(doc.filename),
             )
             doc.content = parser.get_text()
             if parser.get_archive_path() and os.path.isfile(parser.get_archive_path()):
                 doc.archive_filename = generate_unique_filename(
-                    doc, archive_filename=True
+                    doc,
+                    archive_filename=True,
                 )
                 with open(parser.get_archive_path(), "rb") as f:
                     doc.archive_checksum = hashlib.md5(f.read()).hexdigest()
@@ -186,7 +192,7 @@ def create_archive_version(doc, retry_count=3):
             doc.archive_checksum = None
             logger.error(
                 f"Parser did not return an archive document for document "
-                f"ID:{doc.id}. Removing archive document."
+                f"ID:{doc.id}. Removing archive document.",
            )
             doc.save()
             return
@@ -195,7 +201,7 @@ def create_archive_version(doc, retry_count=3):
         logger.exception(
             f"Unable to regenerate archive document for ID:{doc.id}. You "
             f"need to invoke the document_archiver management command "
-            f"manually for that document."
+            f"manually for that document.",
        )
         doc.archive_checksum = None
         doc.save()
@@ -233,7 +239,7 @@ def move_old_to_new_locations(apps, schema_editor):
         old_path = archive_path_old(doc)
         if doc.id not in affected_document_ids and not os.path.isfile(old_path):
             raise ValueError(
-                f"Archived document ID:{doc.id} does not exist at: " f"{old_path}"
+                f"Archived document ID:{doc.id} does not exist at: {old_path}",
             )
     # check that we can regenerate affected archive versions
@@ -245,7 +251,7 @@ def move_old_to_new_locations(apps, schema_editor):
         if not parser_class:
             raise ValueError(
                 f"Document ID:{doc.id} has an invalid archived document, "
-                f"but no parsers are available. Cannot migrate."
+                f"but no parsers are available. Cannot migrate.",
             )
     for doc in Document.objects.filter(archive_checksum__isnull=False):
@@ -260,7 +266,7 @@ def move_old_to_new_locations(apps, schema_editor):
             # Set archive path for unaffected files
             doc.archive_filename = archive_name_from_filename(doc.filename)
             Document.objects.filter(id=doc.id).update(
-                archive_filename=doc.archive_filename
+                archive_filename=doc.archive_filename,
             )
     # regenerate archive documents
@@ -281,13 +287,13 @@ def move_new_to_old_locations(apps, schema_editor):
             raise ValueError(
                 f"Cannot migrate: Archive file name {old_archive_path} of "
                 f"document {doc.filename} would clash with another archive "
-                f"filename."
+                f"filename.",
             )
         old_archive_paths.add(old_archive_path)
         if new_archive_path != old_archive_path and os.path.isfile(old_archive_path):
             raise ValueError(
                 f"Cannot migrate: Cannot move {new_archive_path} to "
-                f"{old_archive_path}: file already exists."
+                f"{old_archive_path}: file already exists.",
             )
     for doc in Document.objects.filter(archive_checksum__isnull=False):


@@ -61,7 +61,9 @@ class Migration(migrations.Migration):
             model_name="tag",
             name="color",
             field=models.CharField(
-                default="#a6cee3", max_length=7, verbose_name="color"
+                default="#a6cee3",
+                max_length=7,
+                verbose_name="color",
             ),
         ),
         migrations.RunPython(forward, reverse),


@@ -25,5 +25,5 @@ class Migration(migrations.Migration):
     ]
     operations = [
-        migrations.RunPython(remove_null_characters, migrations.RunPython.noop)
+        migrations.RunPython(remove_null_characters, migrations.RunPython.noop),
     ]


@@ -14,7 +14,10 @@ class Migration(migrations.Migration):
             model_name="savedview",
             name="sort_field",
             field=models.CharField(
-                blank=True, max_length=128, null=True, verbose_name="sort field"
+                blank=True,
+                max_length=128,
+                null=True,
+                verbose_name="sort field",
             ),
         ),
         migrations.AlterField(


@@ -14,7 +14,10 @@ class Migration(migrations.Migration):
             model_name="savedviewfilterrule",
             name="value",
             field=models.CharField(
-                blank=True, max_length=255, null=True, verbose_name="value"
+                blank=True,
+                max_length=255,
+                null=True,
+                verbose_name="value",
             ),
         ),
     ]


@@ -48,5 +48,5 @@ class Migration(migrations.Migration):
                 ),
             ),
         ],
-    )
+    ),
 ]


@@ -46,12 +46,15 @@ class Migration(migrations.Migration):
         # Drop the django-q tables entirely
         # Must be done last or there could be references here
         migrations.RunSQL(
-            "DROP TABLE IF EXISTS django_q_ormq", reverse_sql=migrations.RunSQL.noop
+            "DROP TABLE IF EXISTS django_q_ormq",
+            reverse_sql=migrations.RunSQL.noop,
         ),
         migrations.RunSQL(
-            "DROP TABLE IF EXISTS django_q_schedule", reverse_sql=migrations.RunSQL.noop
+            "DROP TABLE IF EXISTS django_q_schedule",
+            reverse_sql=migrations.RunSQL.noop,
         ),
         migrations.RunSQL(
-            "DROP TABLE IF EXISTS django_q_task", reverse_sql=migrations.RunSQL.noop
+            "DROP TABLE IF EXISTS django_q_task",
+            reverse_sql=migrations.RunSQL.noop,
         ),
     ]


@@ -79,7 +79,8 @@ class Migration(migrations.Migration):
         migrations.AddConstraint(
             model_name="storagepath",
             constraint=models.UniqueConstraint(
-                fields=("name", "owner"), name="documents_storagepath_unique_name_owner"
+                fields=("name", "owner"),
+                name="documents_storagepath_unique_name_owner",
             ),
         ),
         migrations.AddConstraint(
@@ -93,7 +94,8 @@ class Migration(migrations.Migration):
         migrations.AddConstraint(
             model_name="tag",
             constraint=models.UniqueConstraint(
-                fields=("name", "owner"), name="documents_tag_unique_name_owner"
+                fields=("name", "owner"),
+                name="documents_tag_unique_name_owner",
             ),
         ),
         migrations.AddConstraint(


@@ -43,7 +43,9 @@ class Migration(migrations.Migration):
             model_name="note",
             name="note",
             field=models.TextField(
-                blank=True, help_text="Note for the document", verbose_name="content"
+                blank=True,
+                help_text="Note for the document",
+                verbose_name="content",
             ),
         ),
         migrations.AlterField(


@@ -269,7 +269,7 @@ class Document(ModelWithOwner):
             MinValueValidator(ARCHIVE_SERIAL_NUMBER_MIN),
         ],
         help_text=_(
-            "The position of this document in your physical document " "archive.",
+            "The position of this document in your physical document archive.",
         ),
     )
@@ -470,6 +470,9 @@ class SavedViewFilterRule(models.Model):
         verbose_name = _("filter rule")
         verbose_name_plural = _("filter rules")
+    def __str__(self) -> str:
+        return f"SavedViewFilterRule: {self.rule_type} : {self.value}"
 # TODO: why is this in the models file?
 # TODO: how about, what is this and where is it documented?
@@ -483,7 +486,7 @@ class FileInfo:
         (
             "created-title",
             re.compile(
-                r"^(?P<created>\d{8}(\d{6})?Z) - " r"(?P<title>.*)$",
+                r"^(?P<created>\d{8}(\d{6})?Z) - (?P<title>.*)$",
                 flags=re.IGNORECASE,
             ),
         ),
@@ -634,6 +637,9 @@ class PaperlessTask(models.Model):
         ),
     )
+    def __str__(self) -> str:
+        return f"Task {self.task_id}"
 class Note(models.Model):
     note = models.TextField(


@@ -323,7 +323,7 @@ class DocumentParser(LoggingMixin):
         return []
     def parse(self, document_path, mime_type, file_name=None):
-        raise NotImplementedError()
+        raise NotImplementedError
     def get_archive_path(self):
         return self.archive_path
@@ -332,7 +332,7 @@ class DocumentParser(LoggingMixin):
         """
         Returns the path to a file we can use as a thumbnail for this document.
         """
-        raise NotImplementedError()
+        raise NotImplementedError
     def get_text(self):
         return self.text


@@ -94,7 +94,7 @@ def check_sanity(progress=False) -> SanityCheckMessages:
         except OSError as e:
             messages.error(doc.pk, f"Cannot read original file of document: {e}")
         else:
-            if not checksum == doc.checksum:
+            if checksum != doc.checksum:
                 messages.error(
                     doc.pk,
                     "Checksum mismatch. "
@@ -127,7 +127,7 @@ def check_sanity(progress=False) -> SanityCheckMessages:
                 f"Cannot read archive file of document : {e}",
             )
         else:
-            if not checksum == doc.archive_checksum:
+            if checksum != doc.archive_checksum:
                 messages.error(
                     doc.pk,
                     "Checksum mismatch of archived document. "


@@ -7,7 +7,7 @@ from celery import states
 try:
     import zoneinfo
 except ImportError:
-    import backports.zoneinfo as zoneinfo
+    from backports import zoneinfo
 import magic
 from django.conf import settings
 from django.utils.text import slugify
@@ -152,7 +152,7 @@ class SetPermissionsMixin:
 class OwnedObjectSerializer(serializers.ModelSerializer, SetPermissionsMixin):
     def __init__(self, *args, **kwargs):
         self.user = kwargs.pop("user", None)
-        return super().__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
     def get_permissions(self, obj):
         view_codename = f"view_{obj.__class__.__name__.lower()}"
@@ -282,7 +282,7 @@ class ColorField(serializers.Field):
         for id, color in self.COLOURS:
             if id == data:
                 return color
-        raise serializers.ValidationError()
+        raise serializers.ValidationError
     def to_representation(self, value):
         for id, color in self.COLOURS:
@@ -513,12 +513,12 @@ class DocumentListSerializer(serializers.Serializer):
     def _validate_document_id_list(self, documents, name="documents"):
         if not type(documents) == list:
             raise serializers.ValidationError(f"{name} must be a list")
-        if not all([type(i) == int for i in documents]):
+        if not all(type(i) == int for i in documents):
             raise serializers.ValidationError(f"{name} must be a list of integers")
         count = Document.objects.filter(id__in=documents).count()
         if not count == len(documents):
             raise serializers.ValidationError(
-                f"Some documents in {name} don't exist or were " f"specified twice.",
+                f"Some documents in {name} don't exist or were specified twice.",
             )
     def validate_documents(self, documents):
@@ -549,7 +549,7 @@ class BulkEditSerializer(DocumentListSerializer, SetPermissionsMixin):
     def _validate_tag_id_list(self, tags, name="tags"):
         if not type(tags) == list:
             raise serializers.ValidationError(f"{name} must be a list")
-        if not all([type(i) == int for i in tags]):
+        if not all(type(i) == int for i in tags):
             raise serializers.ValidationError(f"{name} must be a list of integers")
         count = Tag.objects.filter(id__in=tags).count()
         if not count == len(tags):
@@ -826,8 +826,8 @@ class StoragePathSerializer(MatchingModelSerializer, OwnedObjectSerializer):
                 original_name="testfile",
             )
-        except (KeyError):
-            raise serializers.ValidationError(_("Invalid variable detected."))
+        except KeyError as err:
+            raise serializers.ValidationError(_("Invalid variable detected.")) from err
         return path
@@ -919,7 +919,7 @@ class AcknowledgeTasksViewSerializer(serializers.Serializer):
             pass
         if not type(tasks) == list:
             raise serializers.ValidationError(f"{name} must be a list")
-        if not all([type(i) == int for i in tasks]):
+        if not all(type(i) == int for i in tasks):
             raise serializers.ValidationError(f"{name} must be a list of integers")
         count = PaperlessTask.objects.filter(id__in=tasks).count()
         if not count == len(tasks):


@ -19,14 +19,14 @@ from django.utils import termcolors
from django.utils import timezone from django.utils import timezone
from filelock import FileLock from filelock import FileLock
from .. import matching from documents import matching
from ..file_handling import create_source_path_directory from documents.file_handling import create_source_path_directory
from ..file_handling import delete_empty_directories from documents.file_handling import delete_empty_directories
from ..file_handling import generate_unique_filename from documents.file_handling import generate_unique_filename
from ..models import Document from documents.models import Document
from ..models import MatchingModel from documents.models import MatchingModel
from ..models import PaperlessTask from documents.models import PaperlessTask
from ..models import Tag from documents.models import Tag
logger = logging.getLogger("paperless.handlers") logger = logging.getLogger("paperless.handlers")
@ -54,10 +54,7 @@ def set_correspondent(
potential_correspondents = matching.match_correspondents(document, classifier) potential_correspondents = matching.match_correspondents(document, classifier)
potential_count = len(potential_correspondents) potential_count = len(potential_correspondents)
if potential_correspondents: selected = potential_correspondents[0] if potential_correspondents else None
selected = potential_correspondents[0]
else:
selected = None
if potential_count > 1: if potential_count > 1:
if use_first: if use_first:
logger.debug( logger.debug(
@ -120,10 +117,7 @@ def set_document_type(
potential_document_type = matching.match_document_types(document, classifier) potential_document_type = matching.match_document_types(document, classifier)
potential_count = len(potential_document_type) potential_count = len(potential_document_type)
if potential_document_type: selected = potential_document_type[0] if potential_document_type else None
selected = potential_document_type[0]
else:
selected = None
if potential_count > 1: if potential_count > 1:
if use_first: if use_first:
@ -255,10 +249,7 @@ def set_storage_path(
) )
potential_count = len(potential_storage_path) potential_count = len(potential_storage_path)
if potential_storage_path: selected = potential_storage_path[0] if potential_storage_path else None
selected = potential_storage_path[0]
else:
selected = None
if potential_count > 1: if potential_count > 1:
if use_first: if use_first:
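These hunks are one mechanical rewrite repeated three times: an if/else whose only job is assigning a single variable collapses to a conditional expression, which the selected SIM rules report as SIM108. The shape, with an illustrative list:

    candidates = []

    # Before (flagged as SIM108):
    if candidates:
        selected = candidates[0]
    else:
        selected = None

    # After (the autofix):
    selected = candidates[0] if candidates else None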
@ -370,7 +361,7 @@ def validate_move(instance, old_path, new_path):
if not os.path.isfile(old_path): if not os.path.isfile(old_path):
# Can't do anything if the old file does not exist anymore. # Can't do anything if the old file does not exist anymore.
logger.fatal(f"Document {str(instance)}: File {old_path} has gone.") logger.fatal(f"Document {str(instance)}: File {old_path} has gone.")
raise CannotMoveFilesException() raise CannotMoveFilesException
if os.path.isfile(new_path): if os.path.isfile(new_path):
# Can't do anything if the new file already exists. Skip updating file. # Can't do anything if the new file already exists. Skip updating file.
@ -378,7 +369,7 @@ def validate_move(instance, old_path, new_path):
f"Document {str(instance)}: Cannot rename file " f"Document {str(instance)}: Cannot rename file "
f"since target path {new_path} already exists.", f"since target path {new_path} already exists.",
) )
raise CannotMoveFilesException() raise CannotMoveFilesException
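Dropping the call parentheses on the raised exceptions is the RSE102 fix (unnecessary parentheses on raise); `raise` instantiates the class either way when no arguments are passed:

    class CannotMoveFilesException(Exception):
        pass

    def refuse_move():
        # Equivalent to `raise CannotMoveFilesException()`; RSE102 prefers
        # the bare class name when there are no constructor arguments.
        raise CannotMoveFilesException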
@receiver(models.signals.m2m_changed, sender=Document.tags.through) @receiver(models.signals.m2m_changed, sender=Document.tags.through)
@ -546,10 +537,10 @@ def before_task_publish_handler(sender=None, headers=None, body=None, **kwargs):
date_started=None, date_started=None,
date_done=None, date_done=None,
) )
except Exception as e: # pragma: no cover except Exception: # pragma: no cover
# Don't let an exception in the signal handlers prevent # Don't let an exception in the signal handlers prevent
# a document from being consumed. # a document from being consumed.
logger.error(f"Creating PaperlessTask failed: {e}", exc_info=True) logger.exception("Creating PaperlessTask failed")
@task_prerun.connect @task_prerun.connect
@ -568,15 +559,20 @@ def task_prerun_handler(sender=None, task_id=None, task=None, **kwargs):
task_instance.status = states.STARTED task_instance.status = states.STARTED
task_instance.date_started = timezone.now() task_instance.date_started = timezone.now()
task_instance.save() task_instance.save()
except Exception as e: # pragma: no cover except Exception: # pragma: no cover
# Don't let an exception in the signal handlers prevent # Don't let an exception in the signal handlers prevent
# a document from being consumed. # a document from being consumed.
logger.error(f"Setting PaperlessTask started failed: {e}", exc_info=True) logger.exception("Setting PaperlessTask started failed")
@task_postrun.connect @task_postrun.connect
def task_postrun_handler( def task_postrun_handler(
sender=None, task_id=None, task=None, retval=None, state=None, **kwargs sender=None,
task_id=None,
task=None,
retval=None,
state=None,
**kwargs,
): ):
""" """
Updates the result of the PaperlessTask. Updates the result of the PaperlessTask.
@ -591,7 +587,7 @@ def task_postrun_handler(
task_instance.result = retval task_instance.result = retval
task_instance.date_done = timezone.now() task_instance.date_done = timezone.now()
task_instance.save() task_instance.save()
except Exception as e: # pragma: no cover except Exception: # pragma: no cover
# Don't let an exception in the signal handlers prevent # Don't let an exception in the signal handlers prevent
# a document from being consumed. # a document from being consumed.
logger.error(f"Updating PaperlessTask failed: {e}", exc_info=True) logger.exception("Updating PaperlessTask failed")
@ -297,7 +297,7 @@ def update_document_archive_file(document_id):
except Exception: except Exception:
logger.exception( logger.exception(
f"Error while parsing document {document} " f"(ID: {document_id})", f"Error while parsing document {document} (ID: {document_id})",
) )
finally: finally:
parser.cleanup() parser.cleanup()
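The two adjacent f-strings above were implicit string concatenation left over from line wrapping; the ISC rules flag that, and here they are folded into one literal. Accidental adjacency is the pitfall the rules guard against, e.g. a missing comma in a list:

    # Four items were intended, but the missing comma silently merges two:
    names = [
        "alpha",
        "beta"      # <- no comma here
        "gamma",
        "delta",
    ]
    assert names == ["alpha", "betagamma", "delta"]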
@ -1,8 +1,8 @@
from factory import Faker from factory import Faker
from factory.django import DjangoModelFactory from factory.django import DjangoModelFactory
from ..models import Correspondent from documents.models import Correspondent
from ..models import Document from documents.models import Document
class CorrespondentFactory(DjangoModelFactory): class CorrespondentFactory(DjangoModelFactory):
@ -17,7 +17,7 @@ import celery
try: try:
import zoneinfo import zoneinfo
except ImportError: except ImportError:
import backports.zoneinfo as zoneinfo from backports import zoneinfo
import pytest import pytest
from django.conf import settings from django.conf import settings
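`from backports import zoneinfo` binds the same module object as the old `import backports.zoneinfo as zoneinfo`, but as a plain `from` import that mirrors the `import zoneinfo` branch above it. The fallback pattern on its own:

    # Python 3.9+ ships zoneinfo; older interpreters fall back to the
    # backports.zoneinfo package, bound under the same name either way.
    try:
        import zoneinfo
    except ImportError:
        from backports import zoneinfo

    tz = zoneinfo.ZoneInfo("UTC")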
@ -110,9 +110,9 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
def test_document_fields(self): def test_document_fields(self):
c = Correspondent.objects.create(name="c", pk=41) c = Correspondent.objects.create(name="c", pk=41)
dt = DocumentType.objects.create(name="dt", pk=63) dt = DocumentType.objects.create(name="dt", pk=63)
tag = Tag.objects.create(name="t", pk=85) Tag.objects.create(name="t", pk=85)
storage_path = StoragePath.objects.create(name="sp", pk=77, path="p") storage_path = StoragePath.objects.create(name="sp", pk=77, path="p")
doc = Document.objects.create( Document.objects.create(
title="WOW", title="WOW",
content="the content", content="the content",
correspondent=c, correspondent=c,
@ -877,7 +877,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
added=timezone.make_aware(datetime.datetime(2020, 7, 13)), added=timezone.make_aware(datetime.datetime(2020, 7, 13)),
content="test", content="test",
) )
d6 = Document.objects.create(checksum="6", content="test2") Document.objects.create(checksum="6", content="test2")
d7 = Document.objects.create(checksum="7", storage_path=sp, content="test") d7 = Document.objects.create(checksum="7", storage_path=sp, content="test")
with AsyncWriter(index.open_index()) as writer: with AsyncWriter(index.open_index()) as writer:
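Most of the test-suite churn in this commit is F841 (local variable assigned but never used): names like `tag`, `doc` and `d6` were bound purely for the side effect of `objects.create(...)`, so the binding goes and the call stays. A stand-in sketch (`make_row` is invented for illustration):

    def make_row(checksum):
        print("created", checksum)   # stands in for Document.objects.create(...)

    def before():
        d6 = make_row("6")           # F841: 'd6' is never read again

    def after():
        make_row("6")                # same side effect, no dead binding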
@ -1046,13 +1046,13 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
mime_type="application/pdf", mime_type="application/pdf",
content="abc", content="abc",
) )
doc2 = Document.objects.create( Document.objects.create(
title="none2", title="none2",
checksum="B", checksum="B",
mime_type="application/pdf", mime_type="application/pdf",
content="123", content="123",
) )
doc3 = Document.objects.create( Document.objects.create(
title="none3", title="none3",
checksum="C", checksum="C",
mime_type="text/plain", mime_type="text/plain",
@ -1546,14 +1546,14 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
show_on_dashboard=False, show_on_dashboard=False,
show_in_sidebar=False, show_in_sidebar=False,
) )
v2 = SavedView.objects.create( SavedView.objects.create(
owner=u2, owner=u2,
name="test2", name="test2",
sort_field="", sort_field="",
show_on_dashboard=False, show_on_dashboard=False,
show_in_sidebar=False, show_in_sidebar=False,
) )
v3 = SavedView.objects.create( SavedView.objects.create(
owner=u2, owner=u2,
name="test3", name="test3",
sort_field="", sort_field="",
@ -1594,7 +1594,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
def test_create_update_patch(self): def test_create_update_patch(self):
u1 = User.objects.create_user("user1") User.objects.create_user("user1")
view = { view = {
"name": "test", "name": "test",
@ -3020,7 +3020,7 @@ class TestBulkDownload(DirectoriesMixin, APITestCase):
self.assertEqual(f.read(), zipf.read("2021-01-01 document A_01.pdf")) self.assertEqual(f.read(), zipf.read("2021-01-01 document A_01.pdf"))
def test_compression(self): def test_compression(self):
response = self.client.post( self.client.post(
self.ENDPOINT, self.ENDPOINT,
json.dumps( json.dumps(
{"documents": [self.doc2.id, self.doc2b.id], "compression": "lzma"}, {"documents": [self.doc2.id, self.doc2b.id], "compression": "lzma"},
@ -3271,7 +3271,7 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
user = User.objects.create_user(username="test") user = User.objects.create_user(username="test")
self.client.force_authenticate(user) self.client.force_authenticate(user)
d = Document.objects.create(title="Test") Document.objects.create(title="Test")
self.assertEqual( self.assertEqual(
self.client.get("/api/documents/").status_code, self.client.get("/api/documents/").status_code,
@ -3305,7 +3305,7 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
user.user_permissions.add(*Permission.objects.all()) user.user_permissions.add(*Permission.objects.all())
self.client.force_authenticate(user) self.client.force_authenticate(user)
d = Document.objects.create(title="Test") Document.objects.create(title="Test")
self.assertEqual( self.assertEqual(
self.client.get("/api/documents/").status_code, self.client.get("/api/documents/").status_code,
@ -3696,7 +3696,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
THEN: THEN:
- No task data is returned - No task data is returned
""" """
task1 = PaperlessTask.objects.create( PaperlessTask.objects.create(
task_id=str(uuid.uuid4()), task_id=str(uuid.uuid4()),
task_file_name="task_one.pdf", task_file_name="task_one.pdf",
) )
@ -3746,7 +3746,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
THEN: THEN:
- The returned data includes the task result - The returned data includes the task result
""" """
task = PaperlessTask.objects.create( PaperlessTask.objects.create(
task_id=str(uuid.uuid4()), task_id=str(uuid.uuid4()),
task_file_name="task_one.pdf", task_file_name="task_one.pdf",
status=celery.states.SUCCESS, status=celery.states.SUCCESS,
@ -3772,7 +3772,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
THEN: THEN:
- The returned result is the exception info - The returned result is the exception info
""" """
task = PaperlessTask.objects.create( PaperlessTask.objects.create(
task_id=str(uuid.uuid4()), task_id=str(uuid.uuid4()),
task_file_name="task_one.pdf", task_file_name="task_one.pdf",
status=celery.states.FAILURE, status=celery.states.FAILURE,
@ -3801,7 +3801,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
THEN: THEN:
- Returned data include the filename - Returned data include the filename
""" """
task = PaperlessTask.objects.create( PaperlessTask.objects.create(
task_id=str(uuid.uuid4()), task_id=str(uuid.uuid4()),
task_file_name="test.pdf", task_file_name="test.pdf",
task_name="documents.tasks.some_task", task_name="documents.tasks.some_task",
@ -3827,7 +3827,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
THEN: THEN:
- Returned data include the filename - Returned data include the filename
""" """
task = PaperlessTask.objects.create( PaperlessTask.objects.create(
task_id=str(uuid.uuid4()), task_id=str(uuid.uuid4()),
task_file_name="anothertest.pdf", task_file_name="anothertest.pdf",
task_name="documents.tasks.some_task", task_name="documents.tasks.some_task",
@ -1,7 +1,7 @@
import os
import shutil import shutil
from pathlib import Path from pathlib import Path
from unittest import mock from unittest import mock
import platform
import pytest import pytest
from django.conf import settings from django.conf import settings
@ -11,19 +11,11 @@ from documents import barcodes
from documents import tasks from documents import tasks
from documents.consumer import ConsumerError from documents.consumer import ConsumerError
from documents.data_models import ConsumableDocument from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
from documents.data_models import DocumentSource from documents.data_models import DocumentSource
from documents.tests.utils import DirectoriesMixin from documents.tests.utils import DirectoriesMixin
from documents.tests.utils import FileSystemAssertsMixin from documents.tests.utils import FileSystemAssertsMixin
from PIL import Image from PIL import Image
try:
import zxingcpp
ZXING_AVAILIBLE = True
except ImportError:
ZXING_AVAILIBLE = False
@override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR") @override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR")
class TestBarcode(DirectoriesMixin, FileSystemAssertsMixin, TestCase): class TestBarcode(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
@ -459,7 +451,7 @@ class TestBarcode(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
self.assertDictEqual(separator_page_numbers, {}) self.assertDictEqual(separator_page_numbers, {})
@override_settings(CONSUMER_BARCODE_STRING="ADAR-NEXTDOC") @override_settings(CONSUMER_BARCODE_STRING="ADAR-NEXTDOC")
def test_scan_file_for_separating_qr_barcodes(self): def test_scan_file_qr_barcodes_was_problem(self):
""" """
GIVEN: GIVEN:
- Input PDF with certain QR codes that aren't detected at current size - Input PDF with certain QR codes that aren't detected at current size
@ -1068,7 +1060,7 @@ class TestAsnBarcode(DirectoriesMixin, TestCase):
@pytest.mark.skipif( @pytest.mark.skipif(
not ZXING_AVAILIBLE, platform.machine().upper() not in {"AMD64"},
reason="No zxingcpp", reason="No zxingcpp",
) )
@override_settings(CONSUMER_BARCODE_SCANNER="ZXING") @override_settings(CONSUMER_BARCODE_SCANNER="ZXING")
@ -1077,7 +1069,7 @@ class TestBarcodeZxing(TestBarcode):
@pytest.mark.skipif( @pytest.mark.skipif(
not ZXING_AVAILIBLE, platform.machine().upper() not in {"AMD64"},
reason="No zxingcpp", reason="No zxingcpp",
) )
@override_settings(CONSUMER_BARCODE_SCANNER="ZXING") @override_settings(CONSUMER_BARCODE_SCANNER="ZXING")
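The skip condition changes meaning in these two hunks: instead of probing whether `zxingcpp` imports, the tests now skip on any machine that does not report AMD64, while the `reason` string still reads "No zxingcpp". `platform.machine()` returns "AMD64" on Windows but typically "x86_64" on Linux, so a portable guard usually checks both; a sketch under that assumption (not what the commit does):

    import platform

    import pytest

    @pytest.mark.skipif(
        platform.machine().upper() not in {"AMD64", "X86_64"},
        reason="zxingcpp wheels assumed unavailable on this architecture",
    )
    def test_zxing_backend():
        ...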
@ -386,7 +386,6 @@ class TestClassifier(DirectoriesMixin, TestCase):
# rebuilding the file and committing that. Not developer friendly # rebuilding the file and committing that. Not developer friendly
# Need to rethink how to pass the load through to a file with a single # Need to rethink how to pass the load through to a file with a single
# old model? # old model?
pass
def test_one_correspondent_predict(self): def test_one_correspondent_predict(self):
c1 = Correspondent.objects.create( c1 = Correspondent.objects.create(
@ -516,7 +515,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
self.assertListEqual(self.classifier.predict_tags(doc1.content), [t1.pk]) self.assertListEqual(self.classifier.predict_tags(doc1.content), [t1.pk])
def test_one_tag_predict_unassigned(self): def test_one_tag_predict_unassigned(self):
t1 = Tag.objects.create(name="t1", matching_algorithm=Tag.MATCH_AUTO, pk=12) Tag.objects.create(name="t1", matching_algorithm=Tag.MATCH_AUTO, pk=12)
doc1 = Document.objects.create( doc1 = Document.objects.create(
title="doc1", title="doc1",
@ -643,7 +642,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
self.assertIsNotNone(classifier) self.assertIsNotNone(classifier)
with mock.patch("documents.classifier.DocumentClassifier.load") as load: with mock.patch("documents.classifier.DocumentClassifier.load") as load:
classifier2 = load_classifier() load_classifier()
load.assert_not_called() load.assert_not_called()
@mock.patch("documents.classifier.DocumentClassifier.load") @mock.patch("documents.classifier.DocumentClassifier.load")
@ -12,23 +12,23 @@ from dateutil import tz
try: try:
import zoneinfo import zoneinfo
except ImportError: except ImportError:
import backports.zoneinfo as zoneinfo from backports import zoneinfo
from django.conf import settings from django.conf import settings
from django.utils import timezone from django.utils import timezone
from django.test import override_settings from django.test import override_settings
from django.test import TestCase from django.test import TestCase
from ..consumer import Consumer from documents.consumer import Consumer
from ..consumer import ConsumerError from documents.consumer import ConsumerError
from ..models import Correspondent from documents.models import Correspondent
from ..models import Document from documents.models import Document
from ..models import DocumentType from documents.models import DocumentType
from ..models import FileInfo from documents.models import FileInfo
from ..models import Tag from documents.models import Tag
from ..parsers import DocumentParser from documents.parsers import DocumentParser
from ..parsers import ParseError from documents.parsers import ParseError
from ..tasks import sanity_check from documents.tasks import sanity_check
from .utils import DirectoriesMixin from .utils import DirectoriesMixin
from documents.tests.utils import FileSystemAssertsMixin from documents.tests.utils import FileSystemAssertsMixin
@ -72,8 +72,8 @@ class TestFieldPermutations(TestCase):
"20150102030405Z", "20150102030405Z",
"20150102Z", "20150102Z",
) )
valid_correspondents = ["timmy", "Dr. McWheelie", "Dash Gor-don", "ο Θερμαστής", ""] valid_correspondents = ["timmy", "Dr. McWheelie", "Dash Gor-don", "o Θεpμaoτής", ""]
valid_titles = ["title", "Title w Spaces", "Title a-dash", "Τίτλος", ""] valid_titles = ["title", "Title w Spaces", "Title a-dash", "Tίτλoς", ""]
valid_tags = ["tag", "tig,tag", "tag1,tag2,tag-3"] valid_tags = ["tag", "tig,tag", "tag1,tag2,tag-3"]
def _test_guessed_attributes( def _test_guessed_attributes(
@ -135,9 +135,7 @@ class TestFieldPermutations(TestCase):
filename = "tag1,tag2_20190908_180610_0001.pdf" filename = "tag1,tag2_20190908_180610_0001.pdf"
all_patt = re.compile("^.*$") all_patt = re.compile("^.*$")
none_patt = re.compile("$a") none_patt = re.compile("$a")
exact_patt = re.compile("^([a-z0-9,]+)_(\\d{8})_(\\d{6})_([0-9]+)\\.") re.compile("^([a-z0-9,]+)_(\\d{8})_(\\d{6})_([0-9]+)\\.")
repl1 = " - \\4 - \\1." # (empty) corrspondent, title and tags
repl2 = "\\2Z - " + repl1 # creation date + repl1
# No transformations configured (= default) # No transformations configured (= default)
info = FileInfo.from_filename(filename) info = FileInfo.from_filename(filename)
@ -177,10 +175,6 @@ class TestFieldPermutations(TestCase):
class DummyParser(DocumentParser): class DummyParser(DocumentParser):
def get_thumbnail(self, document_path, mime_type, file_name=None):
# not important during tests
raise NotImplementedError()
def __init__(self, logging_group, scratch_dir, archive_path): def __init__(self, logging_group, scratch_dir, archive_path):
super().__init__(logging_group, None) super().__init__(logging_group, None)
_, self.fake_thumb = tempfile.mkstemp(suffix=".webp", dir=scratch_dir) _, self.fake_thumb = tempfile.mkstemp(suffix=".webp", dir=scratch_dir)
@ -197,9 +191,6 @@ class CopyParser(DocumentParser):
def get_thumbnail(self, document_path, mime_type, file_name=None): def get_thumbnail(self, document_path, mime_type, file_name=None):
return self.fake_thumb return self.fake_thumb
def get_thumbnail(self, document_path, mime_type, file_name=None):
return self.fake_thumb
def __init__(self, logging_group, progress_callback=None): def __init__(self, logging_group, progress_callback=None):
super().__init__(logging_group, progress_callback) super().__init__(logging_group, progress_callback)
_, self.fake_thumb = tempfile.mkstemp(suffix=".webp", dir=self.tempdir) _, self.fake_thumb = tempfile.mkstemp(suffix=".webp", dir=self.tempdir)
@ -211,10 +202,6 @@ class CopyParser(DocumentParser):
class FaultyParser(DocumentParser): class FaultyParser(DocumentParser):
def get_thumbnail(self, document_path, mime_type, file_name=None):
# not important during tests
raise NotImplementedError()
def __init__(self, logging_group, scratch_dir): def __init__(self, logging_group, scratch_dir):
super().__init__(logging_group) super().__init__(logging_group)
_, self.fake_thumb = tempfile.mkstemp(suffix=".webp", dir=scratch_dir) _, self.fake_thumb = tempfile.mkstemp(suffix=".webp", dir=scratch_dir)
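The deleted `get_thumbnail` bodies were duplicate definitions within their classes (CopyParser visibly defines it twice); Python keeps only the last definition, and ruff reports the shadowed one as F811. In miniature:

    class CopyParser:
        def get_thumbnail(self):
            return "thumb-a"

        def get_thumbnail(self):     # F811: redefinition of unused 'get_thumbnail'
            return "thumb-b"         # only this second definition survives

    assert CopyParser().get_thumbnail() == "thumb-b"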
@ -46,7 +46,7 @@ class TestDate(TestCase):
) )
def test_date_format_5(self): def test_date_format_5(self):
text = "lorem ipsum 130218, 2018, 20180213 and lorem 13.02.2018 lorem " "ipsum" text = "lorem ipsum 130218, 2018, 20180213 and lorem 13.02.2018 lorem ipsum"
date = parse_date("", text) date = parse_date("", text)
self.assertEqual( self.assertEqual(
date, date,
@ -68,7 +68,7 @@ class TestDate(TestCase):
self.assertEqual(parse_date("", text), None) self.assertEqual(parse_date("", text), None)
def test_date_format_7(self): def test_date_format_7(self):
text = "lorem ipsum\n" "März 2019\n" "lorem ipsum" text = "lorem ipsum\nMärz 2019\nlorem ipsum"
date = parse_date("", text) date = parse_date("", text)
self.assertEqual( self.assertEqual(
date, date,
@ -95,7 +95,7 @@ class TestDate(TestCase):
@override_settings(SCRATCH_DIR=SCRATCH) @override_settings(SCRATCH_DIR=SCRATCH)
def test_date_format_9(self): def test_date_format_9(self):
text = "lorem ipsum\n" "27. Nullmonth 2020\n" "März 2020\n" "lorem ipsum" text = "lorem ipsum\n27. Nullmonth 2020\nMärz 2020\nlorem ipsum"
self.assertEqual( self.assertEqual(
parse_date("", text), parse_date("", text),
datetime.datetime(2020, 3, 1, 0, 0, tzinfo=tz.gettz(settings.TIME_ZONE)), datetime.datetime(2020, 3, 1, 0, 0, tzinfo=tz.gettz(settings.TIME_ZONE)),
@ -262,7 +262,7 @@ class TestDate(TestCase):
THEN: THEN:
- Should parse the date non-ignored date from content - Should parse the date non-ignored date from content
""" """
text = "lorem ipsum 110319, 20200117 and lorem 13.02.2018 lorem " "ipsum" text = "lorem ipsum 110319, 20200117 and lorem 13.02.2018 lorem ipsum"
self.assertEqual( self.assertEqual(
parse_date("", text), parse_date("", text),
datetime.datetime(2018, 2, 13, 0, 0, tzinfo=tz.gettz(settings.TIME_ZONE)), datetime.datetime(2018, 2, 13, 0, 0, tzinfo=tz.gettz(settings.TIME_ZONE)),
@ -283,7 +283,7 @@ class TestDate(TestCase):
THEN: THEN:
- Should parse the date non-ignored date from content - Should parse the date non-ignored date from content
""" """
text = "lorem ipsum 190311, 20200117 and lorem 13.02.2018 lorem " "ipsum" text = "lorem ipsum 190311, 20200117 and lorem 13.02.2018 lorem ipsum"
self.assertEqual( self.assertEqual(
parse_date("", text), parse_date("", text),
@ -6,14 +6,14 @@ from unittest import mock
try: try:
import zoneinfo import zoneinfo
except ImportError: except ImportError:
import backports.zoneinfo as zoneinfo from backports import zoneinfo
from django.test import override_settings from django.test import override_settings
from django.test import TestCase from django.test import TestCase
from django.utils import timezone from django.utils import timezone
from ..models import Correspondent from documents.models import Correspondent
from ..models import Document from documents.models import Document
class TestDocument(TestCase): class TestDocument(TestCase):
@ -10,17 +10,16 @@ from django.db import DatabaseError
from django.test import override_settings from django.test import override_settings
from django.test import TestCase from django.test import TestCase
from django.utils import timezone from django.utils import timezone
from documents.tests.utils import FileSystemAssertsMixin
from ..file_handling import create_source_path_directory from documents.file_handling import create_source_path_directory
from ..file_handling import delete_empty_directories from documents.file_handling import delete_empty_directories
from ..file_handling import generate_filename from documents.file_handling import generate_filename
from ..models import Correspondent from documents.models import Correspondent
from ..models import Document from documents.models import Document
from ..models import DocumentType from documents.models import DocumentType
from ..models import StoragePath from documents.models import StoragePath
from .utils import DirectoriesMixin from documents.tests.utils import DirectoriesMixin
from .utils import FileSystemAssertsMixin from documents.tests.utils import FileSystemAssertsMixin
class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase): class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
@ -121,7 +120,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
@override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}") @override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
def test_file_renaming_database_error(self): def test_file_renaming_database_error(self):
document1 = Document.objects.create( Document.objects.create(
mime_type="application/pdf", mime_type="application/pdf",
storage_type=Document.STORAGE_TYPE_UNENCRYPTED, storage_type=Document.STORAGE_TYPE_UNENCRYPTED,
checksum="AAAAA", checksum="AAAAA",
@ -171,7 +170,6 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
Path(document.source_path).touch() Path(document.source_path).touch()
# Ensure file deletion after delete # Ensure file deletion after delete
pk = document.pk
document.delete() document.delete()
self.assertIsNotFile( self.assertIsNotFile(
os.path.join(settings.ORIGINALS_DIR, "none", "none.pdf"), os.path.join(settings.ORIGINALS_DIR, "none", "none.pdf"),
@ -440,7 +438,6 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
# Check proper handling of files # Check proper handling of files
self.assertIsDir(os.path.join(settings.ORIGINALS_DIR, "none/none")) self.assertIsDir(os.path.join(settings.ORIGINALS_DIR, "none/none"))
pk = document.pk
document.delete() document.delete()
self.assertIsNotFile( self.assertIsNotFile(
@ -705,7 +702,7 @@ class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, Test
def test_move_archive_error(self, m): def test_move_archive_error(self, m):
def fake_rename(src, dst): def fake_rename(src, dst):
if "archive" in str(src): if "archive" in str(src):
raise OSError() raise OSError
else: else:
os.remove(src) os.remove(src)
Path(dst).touch() Path(dst).touch()
@ -756,7 +753,7 @@ class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, Test
def test_move_file_error(self, m): def test_move_file_error(self, m):
def fake_rename(src, dst): def fake_rename(src, dst):
if "original" in str(src): if "original" in str(src):
raise OSError() raise OSError
else: else:
os.remove(src) os.remove(src)
Path(dst).touch() Path(dst).touch()
@ -2,7 +2,7 @@ from django.core.management.base import CommandError
from django.test import TestCase from django.test import TestCase
from documents.settings import EXPORTER_FILE_NAME from documents.settings import EXPORTER_FILE_NAME
from ..management.commands.document_importer import Command from documents.management.commands.document_importer import Command
class TestImporter(TestCase): class TestImporter(TestCase):
@ -13,7 +13,6 @@ from django.test import override_settings
from django.test import TransactionTestCase from django.test import TransactionTestCase
from documents.consumer import ConsumerError from documents.consumer import ConsumerError
from documents.data_models import ConsumableDocument from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
from documents.management.commands import document_consumer from documents.management.commands import document_consumer
from documents.models import Tag from documents.models import Tag
from documents.tests.utils import DirectoriesMixin from documents.tests.utils import DirectoriesMixin
@ -204,7 +204,7 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
self.assertEqual(element["fields"]["document"], self.d1.id) self.assertEqual(element["fields"]["document"], self.d1.id)
self.assertEqual(element["fields"]["user"], self.user.id) self.assertEqual(element["fields"]["user"], self.user.id)
with paperless_environment() as dirs: with paperless_environment():
self.assertEqual(Document.objects.count(), 4) self.assertEqual(Document.objects.count(), 4)
Document.objects.all().delete() Document.objects.all().delete()
Correspondent.objects.all().delete() Correspondent.objects.all().delete()
@ -345,7 +345,7 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
os.path.join(self.dirs.media_dir, "documents"), os.path.join(self.dirs.media_dir, "documents"),
) )
m = self._do_export(use_filename_format=True) self._do_export(use_filename_format=True)
self.assertIsFile(os.path.join(self.target, "wow1", "c.pdf")) self.assertIsFile(os.path.join(self.target, "wow1", "c.pdf"))
self.assertIsFile(os.path.join(self.target, "manifest.json")) self.assertIsFile(os.path.join(self.target, "manifest.json"))
@ -537,7 +537,7 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
) )
self.assertFalse(has_archive) self.assertFalse(has_archive)
with paperless_environment() as dirs: with paperless_environment():
self.assertEqual(Document.objects.count(), 4) self.assertEqual(Document.objects.count(), 4)
Document.objects.all().delete() Document.objects.all().delete()
self.assertEqual(Document.objects.count(), 0) self.assertEqual(Document.objects.count(), 0)
@ -580,7 +580,7 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
) )
self.assertFalse(has_thumbnail) self.assertFalse(has_thumbnail)
with paperless_environment() as dirs: with paperless_environment():
self.assertEqual(Document.objects.count(), 4) self.assertEqual(Document.objects.count(), 4)
Document.objects.all().delete() Document.objects.all().delete()
self.assertEqual(Document.objects.count(), 0) self.assertEqual(Document.objects.count(), 0)
@ -609,7 +609,7 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
has_document = has_document or element["model"] == "documents.document" has_document = has_document or element["model"] == "documents.document"
self.assertFalse(has_document) self.assertFalse(has_document)
with paperless_environment() as dirs: with paperless_environment():
self.assertEqual(Document.objects.count(), 4) self.assertEqual(Document.objects.count(), 4)
Document.objects.all().delete() Document.objects.all().delete()
self.assertEqual(Document.objects.count(), 0) self.assertEqual(Document.objects.count(), 0)
@ -631,9 +631,9 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
os.path.join(self.dirs.media_dir, "documents"), os.path.join(self.dirs.media_dir, "documents"),
) )
manifest = self._do_export(use_folder_prefix=True) self._do_export(use_folder_prefix=True)
with paperless_environment() as dirs: with paperless_environment():
self.assertEqual(Document.objects.count(), 4) self.assertEqual(Document.objects.count(), 4)
Document.objects.all().delete() Document.objects.all().delete()
self.assertEqual(Document.objects.count(), 0) self.assertEqual(Document.objects.count(), 0)
@ -8,12 +8,12 @@ from django.contrib.auth.models import User
from django.test import override_settings from django.test import override_settings
from django.test import TestCase from django.test import TestCase
from .. import matching from documents import matching
from ..models import Correspondent from documents.models import Correspondent
from ..models import Document from documents.models import Document
from ..models import DocumentType from documents.models import DocumentType
from ..models import Tag from documents.models import Tag
from ..signals import document_consumption_finished from documents.signals import document_consumption_finished
class _TestMatchingBase(TestCase): class _TestMatchingBase(TestCase):
@ -310,7 +310,7 @@ class TestMigrateArchiveFilesErrors(DirectoriesMixin, TestMigrations):
def test_parser_missing(self): def test_parser_missing(self):
Document = self.apps.get_model("documents", "Document") Document = self.apps.get_model("documents", "Document")
doc1 = make_test_document( make_test_document(
Document, Document,
"document", "document",
"invalid/typesss768", "invalid/typesss768",
@ -318,7 +318,7 @@ class TestMigrateArchiveFilesErrors(DirectoriesMixin, TestMigrations):
"document.png", "document.png",
simple_pdf, simple_pdf,
) )
doc2 = make_test_document( make_test_document(
Document, Document,
"document", "document",
"invalid/typesss768", "invalid/typesss768",
@ -462,7 +462,7 @@ class TestMigrateArchiveFilesBackwards(
Document = apps.get_model("documents", "Document") Document = apps.get_model("documents", "Document")
doc_unrelated = make_test_document( make_test_document(
Document, Document,
"unrelated", "unrelated",
"application/pdf", "application/pdf",
@ -471,14 +471,14 @@ class TestMigrateArchiveFilesBackwards(
simple_pdf2, simple_pdf2,
"unrelated.pdf", "unrelated.pdf",
) )
doc_no_archive = make_test_document( make_test_document(
Document, Document,
"no_archive", "no_archive",
"text/plain", "text/plain",
simple_txt, simple_txt,
"no_archive.txt", "no_archive.txt",
) )
clashB = make_test_document( make_test_document(
Document, Document,
"clash", "clash",
"image/jpeg", "image/jpeg",
@ -1,14 +1,14 @@
from django.test import TestCase from django.test import TestCase
from ..models import Correspondent from documents.models import Correspondent
from ..models import Document from documents.models import Document
from .factories import CorrespondentFactory from .factories import CorrespondentFactory
from .factories import DocumentFactory from .factories import DocumentFactory
class CorrespondentTestCase(TestCase): class CorrespondentTestCase(TestCase):
def test___str__(self): def test___str__(self):
for s in ("test", "οχι", "test with fun_charÅc'\"terß"): for s in ("test", "oχi", "test with fun_charÅc'\"terß"):
correspondent = CorrespondentFactory.create(name=s) correspondent = CorrespondentFactory.create(name=s)
self.assertEqual(str(correspondent), s) self.assertEqual(str(correspondent), s)
@ -94,7 +94,7 @@ class TestParserDiscovery(TestCase):
- No parser class is returned - No parser class is returned
""" """
m.return_value = [] m.return_value = []
with TemporaryDirectory() as tmpdir: with TemporaryDirectory():
self.assertIsNone(get_parser_class_for_mime_type("application/pdf")) self.assertIsNone(get_parser_class_for_mime_type("application/pdf"))
@mock.patch("documents.parsers.document_consumer_declaration.send") @mock.patch("documents.parsers.document_consumer_declaration.send")
@ -149,7 +149,7 @@ class TestSanityCheck(DirectoriesMixin, TestCase):
) )
def test_orphaned_file(self): def test_orphaned_file(self):
doc = self.make_test_data() self.make_test_data()
Path(self.dirs.originals_dir, "orphaned").touch() Path(self.dirs.originals_dir, "orphaned").touch()
messages = check_sanity() messages = check_sanity()
self.assertTrue(messages.has_warning) self.assertTrue(messages.has_warning)
@ -4,7 +4,6 @@ from unittest import mock
import celery import celery
from django.test import TestCase from django.test import TestCase
from documents.data_models import ConsumableDocument from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
from documents.data_models import DocumentSource from documents.data_models import DocumentSource
from documents.models import PaperlessTask from documents.models import PaperlessTask
from documents.signals.handlers import before_task_publish_handler from documents.signals.handlers import before_task_publish_handler
@ -47,7 +47,7 @@ class TestViews(TestCase):
self.client.cookies.load( self.client.cookies.load(
{settings.LANGUAGE_COOKIE_NAME: language_given}, {settings.LANGUAGE_COOKIE_NAME: language_given},
) )
elif settings.LANGUAGE_COOKIE_NAME in self.client.cookies.keys(): elif settings.LANGUAGE_COOKIE_NAME in self.client.cookies:
self.client.cookies.pop(settings.LANGUAGE_COOKIE_NAME) self.client.cookies.pop(settings.LANGUAGE_COOKIE_NAME)
response = self.client.get( response = self.client.get(
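Membership tests on a mapping already consult its keys, so `in self.client.cookies` is equivalent to the `.keys()` form; the redundant call is SIM118. Briefly:

    cookies = {"django_language": "en"}

    assert "django_language" in cookies          # preferred (the SIM118 fix)
    assert "django_language" in cookies.keys()   # same result, extra call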
@ -265,10 +265,7 @@ class DocumentViewSet(
def get_serializer(self, *args, **kwargs): def get_serializer(self, *args, **kwargs):
super().get_serializer(*args, **kwargs) super().get_serializer(*args, **kwargs)
fields_param = self.request.query_params.get("fields", None) fields_param = self.request.query_params.get("fields", None)
if fields_param: fields = fields_param.split(",") if fields_param else None
fields = fields_param.split(",")
else:
fields = None
truncate_content = self.request.query_params.get("truncate_content", "False") truncate_content = self.request.query_params.get("truncate_content", "False")
serializer_class = self.get_serializer_class() serializer_class = self.get_serializer_class()
kwargs.setdefault("context", self.get_serializer_context()) kwargs.setdefault("context", self.get_serializer_context())
@ -358,7 +355,7 @@ class DocumentViewSet(
try: try:
doc = Document.objects.get(pk=pk) doc = Document.objects.get(pk=pk)
except Document.DoesNotExist: except Document.DoesNotExist:
raise Http404() raise Http404
meta = { meta = {
"original_checksum": doc.checksum, "original_checksum": doc.checksum,
@ -422,7 +419,7 @@ class DocumentViewSet(
response = self.file_response(pk, request, "inline") response = self.file_response(pk, request, "inline")
return response return response
except (FileNotFoundError, Document.DoesNotExist): except (FileNotFoundError, Document.DoesNotExist):
raise Http404() raise Http404
@action(methods=["get"], detail=True) @action(methods=["get"], detail=True)
@method_decorator(cache_control(public=False, max_age=315360000)) @method_decorator(cache_control(public=False, max_age=315360000))
@ -438,14 +435,14 @@ class DocumentViewSet(
return HttpResponse(handle, content_type="image/webp") return HttpResponse(handle, content_type="image/webp")
except (FileNotFoundError, Document.DoesNotExist): except (FileNotFoundError, Document.DoesNotExist):
raise Http404() raise Http404
@action(methods=["get"], detail=True) @action(methods=["get"], detail=True)
def download(self, request, pk=None): def download(self, request, pk=None):
try: try:
return self.file_response(pk, request, "attachment") return self.file_response(pk, request, "attachment")
except (FileNotFoundError, Document.DoesNotExist): except (FileNotFoundError, Document.DoesNotExist):
raise Http404() raise Http404
def getNotes(self, doc): def getNotes(self, doc):
return [ return [
@ -468,7 +465,7 @@ class DocumentViewSet(
try: try:
doc = Document.objects.get(pk=pk) doc = Document.objects.get(pk=pk)
except Document.DoesNotExist: except Document.DoesNotExist:
raise Http404() raise Http404
currentUser = request.user currentUser = request.user
@ -569,7 +566,7 @@ class UnifiedSearchViewSet(DocumentViewSet):
elif "more_like_id" in self.request.query_params: elif "more_like_id" in self.request.query_params:
query_class = index.DelayedMoreLikeThisQuery query_class = index.DelayedMoreLikeThisQuery
else: else:
raise ValueError() raise ValueError
return query_class( return query_class(
self.searcher, self.searcher,
@ -606,12 +603,12 @@ class LogViewSet(ViewSet):
def retrieve(self, request, pk=None, *args, **kwargs): def retrieve(self, request, pk=None, *args, **kwargs):
if pk not in self.log_files: if pk not in self.log_files:
raise Http404() raise Http404
filename = self.get_log_filename(pk) filename = self.get_log_filename(pk)
if not os.path.isfile(filename): if not os.path.isfile(filename):
raise Http404() raise Http404
with open(filename) as f: with open(filename) as f:
lines = [line.rstrip() for line in f.readlines()] lines = [line.rstrip() for line in f.readlines()]
src/manage.py Normal file → Executable file
@ -42,7 +42,7 @@ def path_check(var, directory):
Error( Error(
writeable_message.format(var), writeable_message.format(var),
writeable_hint.format( writeable_hint.format(
f"\n{dir_mode} {dir_owner} {dir_group} " f"{directory}\n", f"\n{dir_mode} {dir_owner} {dir_group} {directory}\n",
), ),
), ),
) )
@ -158,7 +158,7 @@ def settings_values_check(app_configs, **kwargs):
try: try:
import zoneinfo import zoneinfo
except ImportError: # pragma: nocover except ImportError: # pragma: nocover
import backports.zoneinfo as zoneinfo from backports import zoneinfo
msgs = [] msgs = []
if settings.TIME_ZONE not in zoneinfo.available_timezones(): if settings.TIME_ZONE not in zoneinfo.available_timezones():
msgs.append( msgs.append(
@ -12,13 +12,13 @@ class StatusConsumer(WebsocketConsumer):
def connect(self): def connect(self):
if not self._authenticated(): if not self._authenticated():
raise DenyConnection() raise DenyConnection
else: else:
async_to_sync(self.channel_layer.group_add)( async_to_sync(self.channel_layer.group_add)(
"status_updates", "status_updates",
self.channel_name, self.channel_name,
) )
raise AcceptConnection() raise AcceptConnection
def disconnect(self, close_code): def disconnect(self, close_code):
async_to_sync(self.channel_layer.group_discard)( async_to_sync(self.channel_layer.group_discard)(
@ -65,9 +65,11 @@ class UserSerializer(serializers.ModelSerializer):
if "user_permissions" in validated_data: if "user_permissions" in validated_data:
user_permissions = validated_data.pop("user_permissions") user_permissions = validated_data.pop("user_permissions")
password = None password = None
if "password" in validated_data: if (
if len(validated_data.get("password").replace("*", "")) > 0: "password" in validated_data
password = validated_data.pop("password") and len(validated_data.get("password").replace("*", "")) > 0
):
password = validated_data.pop("password")
user = User.objects.create(**validated_data) user = User.objects.create(**validated_data)
# set groups # set groups
if groups: if groups:
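Merging the nested `if` pair into one condition joined by `and` matches ruff's SIM102 (collapsible-if); short-circuit evaluation keeps the `.get()` safe because the membership test runs first. The shape in isolation:

    validated_data = {"password": "s3cret***"}
    password = None

    # One condition instead of two nested ifs; the length check only runs
    # when the key is present.
    if (
        "password" in validated_data
        and len(validated_data.get("password").replace("*", "")) > 0
    ):
        password = validated_data.pop("password")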
@ -282,7 +282,8 @@ INSTALLED_APPS = [
"django_filters", "django_filters",
"django_celery_results", "django_celery_results",
"guardian", "guardian",
] + env_apps *env_apps,
]
if DEBUG: if DEBUG:
INSTALLED_APPS.append("channels") INSTALLED_APPS.append("channels")
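Replacing `[...] + env_apps` with `*env_apps` inside the literal matches ruff's RUF005 (collection-literal concatenation); the splat also accepts any iterable, not just another list. In isolation, with an invented value:

    env_apps = ["allauth"]   # illustrative

    INSTALLED_APPS = [
        "django_filters",
        "django_celery_results",
        "guardian",
        *env_apps,
    ]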
@ -398,10 +399,7 @@ if ENABLE_HTTP_REMOTE_USER:
) )
# X-Frame options for embedded PDF display: # X-Frame options for embedded PDF display:
if DEBUG: X_FRAME_OPTIONS = "ANY" if DEBUG else "SAMEORIGIN"
X_FRAME_OPTIONS = "ANY"
else:
X_FRAME_OPTIONS = "SAMEORIGIN"
# The next 3 settings can also be set using just PAPERLESS_URL # The next 3 settings can also be set using just PAPERLESS_URL
@ -424,7 +422,7 @@ if _paperless_url:
_paperless_uri = urlparse(_paperless_url) _paperless_uri = urlparse(_paperless_url)
CSRF_TRUSTED_ORIGINS.append(_paperless_url) CSRF_TRUSTED_ORIGINS.append(_paperless_url)
CORS_ALLOWED_ORIGINS.append(_paperless_url) CORS_ALLOWED_ORIGINS.append(_paperless_url)
if ALLOWED_HOSTS != ["*"]: if ["*"] != ALLOWED_HOSTS:
ALLOWED_HOSTS.append(_paperless_uri.hostname) ALLOWED_HOSTS.append(_paperless_uri.hostname)
else: else:
# always allow localhost. Necessary e.g. for healthcheck in docker. # always allow localhost. Necessary e.g. for healthcheck in docker.
@ -15,18 +15,18 @@ def handle_failed_login(sender, credentials, request, **kwargs):
if client_ip is None: if client_ip is None:
logger.info( logger.info(
f"Login failed for user `{credentials['username']}`." f"Login failed for user `{credentials['username']}`."
+ " Unable to determine IP address.", " Unable to determine IP address.",
) )
else: else:
if is_routable: if is_routable:
# We got the client's IP address # We got the client's IP address
logger.info( logger.info(
f"Login failed for user `{credentials['username']}`" f"Login failed for user `{credentials['username']}`"
+ f" from IP `{client_ip}.`", f" from IP `{client_ip}.`",
) )
else: else:
# The client's IP address is private # The client's IP address is private
logger.info( logger.info(
f"Login failed for user `{credentials['username']}`" f"Login failed for user `{credentials['username']}`"
+ f" from private IP `{client_ip}.`", f" from private IP `{client_ip}.`",
) )
@ -56,61 +56,57 @@ urlpatterns = [
include( include(
[ [
re_path( re_path(
r"^auth/", "^auth/",
include( include(
("rest_framework.urls", "rest_framework"), ("rest_framework.urls", "rest_framework"),
namespace="rest_framework", namespace="rest_framework",
), ),
), ),
re_path( re_path(
r"^search/autocomplete/", "^search/autocomplete/",
SearchAutoCompleteView.as_view(), SearchAutoCompleteView.as_view(),
name="autocomplete", name="autocomplete",
), ),
re_path(r"^statistics/", StatisticsView.as_view(), name="statistics"), re_path("^statistics/", StatisticsView.as_view(), name="statistics"),
re_path( re_path(
r"^documents/post_document/", "^documents/post_document/",
PostDocumentView.as_view(), PostDocumentView.as_view(),
name="post_document", name="post_document",
), ),
re_path( re_path(
r"^documents/bulk_edit/", "^documents/bulk_edit/",
BulkEditView.as_view(), BulkEditView.as_view(),
name="bulk_edit", name="bulk_edit",
), ),
re_path( re_path(
r"^documents/selection_data/", "^documents/selection_data/",
SelectionDataView.as_view(), SelectionDataView.as_view(),
name="selection_data", name="selection_data",
), ),
re_path( re_path(
r"^documents/bulk_download/", "^documents/bulk_download/",
BulkDownloadView.as_view(), BulkDownloadView.as_view(),
name="bulk_download", name="bulk_download",
), ),
re_path( re_path(
r"^remote_version/", "^remote_version/",
RemoteVersionView.as_view(), RemoteVersionView.as_view(),
name="remoteversion", name="remoteversion",
), ),
re_path("^ui_settings/", UiSettingsView.as_view(), name="ui_settings"),
re_path( re_path(
r"^ui_settings/", "^acknowledge_tasks/",
UiSettingsView.as_view(),
name="ui_settings",
),
re_path(
r"^acknowledge_tasks/",
AcknowledgeTasksView.as_view(), AcknowledgeTasksView.as_view(),
name="acknowledge_tasks", name="acknowledge_tasks",
), ),
re_path( re_path(
r"^mail_accounts/test/", "^mail_accounts/test/",
MailAccountTestView.as_view(), MailAccountTestView.as_view(),
name="mail_accounts_test", name="mail_accounts_test",
), ),
path("token/", views.obtain_auth_token), path("token/", views.obtain_auth_token),
] *api_router.urls,
+ api_router.urls, ],
), ),
), ),
re_path(r"^favicon.ico$", FaviconView.as_view(), name="favicon"), re_path(r"^favicon.ico$", FaviconView.as_view(), name="favicon"),
@ -18,7 +18,16 @@ class MailAccountAdminForm(forms.ModelForm):
widgets = { widgets = {
"password": forms.PasswordInput(), "password": forms.PasswordInput(),
} }
fields = "__all__" fields = [
"name",
"imap_server",
"username",
"imap_security",
"username",
"password",
"is_token",
"character_set",
]
class MailAccountAdmin(admin.ModelAdmin): class MailAccountAdmin(admin.ModelAdmin):
@ -27,7 +36,10 @@ class MailAccountAdmin(admin.ModelAdmin):
fieldsets = [ fieldsets = [
(None, {"fields": ["name", "imap_server", "imap_port"]}), (None, {"fields": ["name", "imap_server", "imap_port"]}),
(_("Authentication"), {"fields": ["imap_security", "username", "password"]}), (
_("Authentication"),
{"fields": ["imap_security", "username", "password", "is_token"]},
),
(_("Advanced settings"), {"fields": ["character_set"]}), (_("Advanced settings"), {"fields": ["character_set"]}),
] ]
form = MailAccountAdminForm form = MailAccountAdminForm
@ -94,7 +94,7 @@ class BaseMailAction:
""" """
Perform mail action on the given mail uid in the mailbox. Perform mail action on the given mail uid in the mailbox.
""" """
raise NotImplementedError() raise NotImplementedError
class DeleteMailAction(BaseMailAction): class DeleteMailAction(BaseMailAction):
@ -152,7 +152,7 @@ class TagMailAction(BaseMailAction):
_, self.color = parameter.split(":") _, self.color = parameter.split(":")
self.color = self.color.strip() self.color = self.color.strip()
if not self.color.lower() in APPLE_MAIL_TAG_COLORS.keys(): if self.color.lower() not in APPLE_MAIL_TAG_COLORS.keys():
raise MailError("Not a valid AppleMail tag color.") raise MailError("Not a valid AppleMail tag color.")
self.keyword = None self.keyword = None
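`not x in y` becomes `x not in y` (pycodestyle's E713, part of the selected E rules); note the redundant `.keys()` survived this particular fix, which SIM118 would also flag. The rewrite on its own:

    APPLE_MAIL_TAG_COLORS = {"red": 1, "blue": 5}   # illustrative mapping

    def check_color(color: str) -> None:
        # was: if not color.lower() in APPLE_MAIL_TAG_COLORS.keys():
        if color.lower() not in APPLE_MAIL_TAG_COLORS:
            raise ValueError("Not a valid AppleMail tag color.")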
@ -274,7 +274,7 @@ def apply_mail_action(
status="SUCCESS", status="SUCCESS",
) )
except Exception as e: except Exception:
ProcessedMail.objects.create( ProcessedMail.objects.create(
owner=rule.owner, owner=rule.owner,
rule=rule, rule=rule,
@ -285,7 +285,7 @@ def apply_mail_action(
status="FAILED", status="FAILED",
error=traceback.format_exc(), error=traceback.format_exc(),
) )
raise e raise
@shared_task @shared_task
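A bare `raise` re-raises the active exception with its traceback intact, whereas `raise e` appends an extra traceback entry pointing at the re-raise line; it also lets the handler drop the now-unused `as e` binding. A stand-in sketch (function names invented):

    def apply_action():
        try:
            run_action()         # stands in for the mail action body
        except Exception:
            record_failure()     # stands in for ProcessedMail.objects.create(...)
            raise                # re-raise the active exception unchanged

    def run_action():
        raise RuntimeError("boom")

    def record_failure():
        pass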
@ -548,7 +548,7 @@ class MailAccountHandler(LoggingMixin):
self.log( self.log(
"debug", "debug",
f"Rule {rule}: Searching folder with criteria " f"{str(criterias)}", f"Rule {rule}: Searching folder with criteria {str(criterias)}",
) )
try: try:
@ -582,7 +582,7 @@ class MailAccountHandler(LoggingMixin):
except Exception as e: except Exception as e:
self.log( self.log(
"error", "error",
f"Rule {rule}: Error while processing mail " f"{message.uid}: {e}", f"Rule {rule}: Error while processing mail {message.uid}: {e}",
exc_info=True, exc_info=True,
) )
@ -653,7 +653,7 @@ class MailAccountHandler(LoggingMixin):
for att in message.attachments: for att in message.attachments:
if ( if (
not att.content_disposition == "attachment" att.content_disposition != "attachment"
and rule.attachment_type and rule.attachment_type
== MailRule.AttachmentProcessing.ATTACHMENTS_ONLY == MailRule.AttachmentProcessing.ATTACHMENTS_ONLY
): ):
@ -665,14 +665,13 @@ class MailAccountHandler(LoggingMixin):
) )
continue continue
if rule.filter_attachment_filename: if rule.filter_attachment_filename and not fnmatch(
att.filename.lower(),
rule.filter_attachment_filename.lower(),
):
# Force the filename and pattern to the lowercase # Force the filename and pattern to the lowercase
# as this is system dependent otherwise # as this is system dependent otherwise
if not fnmatch( continue
att.filename.lower(),
rule.filter_attachment_filename.lower(),
):
continue
title = self._get_title(message, att, rule) title = self._get_title(message, att, rule)
@ -27,7 +27,8 @@ class Migration(migrations.Migration):
model_name="mailrule", model_name="mailrule",
name="maximum_age", name="maximum_age",
field=models.PositiveIntegerField( field=models.PositiveIntegerField(
default=30, help_text="Specified in days." default=30,
help_text="Specified in days.",
), ),
), ),
] ]
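This and the following migration hunks are all the same COM-rule reflow: once a call spans multiple lines, COM812 (missing trailing comma) wants a comma after the last argument, and black's magic-trailing-comma handling then keeps one argument per line. The shape, with a stand-in function:

    def char_field(*, blank, max_length, null, verbose_name):
        return (blank, max_length, null, verbose_name)

    field = char_field(
        blank=True,
        max_length=256,
        null=True,
        verbose_name="filter body",   # trailing comma keeps the call exploded
    )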
@ -160,35 +160,48 @@ class Migration(migrations.Migration):
model_name="mailrule", model_name="mailrule",
name="filter_body", name="filter_body",
field=models.CharField( field=models.CharField(
blank=True, max_length=256, null=True, verbose_name="filter body" blank=True,
max_length=256,
null=True,
verbose_name="filter body",
), ),
), ),
migrations.AlterField( migrations.AlterField(
model_name="mailrule", model_name="mailrule",
name="filter_from", name="filter_from",
field=models.CharField( field=models.CharField(
blank=True, max_length=256, null=True, verbose_name="filter from" blank=True,
max_length=256,
null=True,
verbose_name="filter from",
), ),
), ),
migrations.AlterField( migrations.AlterField(
model_name="mailrule", model_name="mailrule",
name="filter_subject", name="filter_subject",
field=models.CharField( field=models.CharField(
blank=True, max_length=256, null=True, verbose_name="filter subject" blank=True,
max_length=256,
null=True,
verbose_name="filter subject",
), ),
), ),
migrations.AlterField( migrations.AlterField(
model_name="mailrule", model_name="mailrule",
name="folder", name="folder",
field=models.CharField( field=models.CharField(
default="INBOX", max_length=256, verbose_name="folder" default="INBOX",
max_length=256,
verbose_name="folder",
), ),
), ),
migrations.AlterField( migrations.AlterField(
model_name="mailrule", model_name="mailrule",
name="maximum_age", name="maximum_age",
field=models.PositiveIntegerField( field=models.PositiveIntegerField(
default=30, help_text="Specified in days.", verbose_name="maximum age" default=30,
help_text="Specified in days.",
verbose_name="maximum age",
), ),
), ),
migrations.AlterField( migrations.AlterField(
@ -14,7 +14,9 @@ class Migration(migrations.Migration):
model_name="mailrule", model_name="mailrule",
name="assign_tags", name="assign_tags",
field=models.ManyToManyField( field=models.ManyToManyField(
blank=True, to="documents.Tag", verbose_name="assign this tag" blank=True,
to="documents.Tag",
verbose_name="assign this tag",
), ),
), ),
] ]
@ -29,19 +29,25 @@ class Migration(migrations.Migration):
( (
"folder", "folder",
models.CharField( models.CharField(
editable=False, max_length=256, verbose_name="folder" editable=False,
max_length=256,
verbose_name="folder",
), ),
), ),
( (
"uid", "uid",
models.CharField( models.CharField(
editable=False, max_length=256, verbose_name="uid" editable=False,
max_length=256,
verbose_name="uid",
), ),
), ),
( (
"subject", "subject",
models.CharField( models.CharField(
editable=False, max_length=256, verbose_name="subject" editable=False,
max_length=256,
verbose_name="subject",
), ),
), ),
( (
@ -59,13 +65,18 @@ class Migration(migrations.Migration):
( (
"status", "status",
models.CharField( models.CharField(
editable=False, max_length=256, verbose_name="status" editable=False,
max_length=256,
verbose_name="status",
), ),
), ),
( (
"error", "error",
models.TextField( models.TextField(
blank=True, editable=False, null=True, verbose_name="error" blank=True,
editable=False,
null=True,
verbose_name="error",
), ),
), ),
( (
@ -13,7 +13,10 @@ class Migration(migrations.Migration):
model_name="mailrule", model_name="mailrule",
name="filter_to", name="filter_to",
field=models.CharField( field=models.CharField(
blank=True, max_length=256, null=True, verbose_name="filter to" blank=True,
max_length=256,
null=True,
verbose_name="filter to",
), ),
), ),
] ]
@ -13,7 +13,8 @@ class Migration(migrations.Migration):
model_name="mailaccount", model_name="mailaccount",
name="is_token", name="is_token",
field=models.BooleanField( field=models.BooleanField(
default=False, verbose_name="Is token authentication" default=False,
verbose_name="Is token authentication",
), ),
), ),
] ]
@ -69,7 +69,7 @@ class MailRule(document_models.ModelWithOwner):
class AttachmentProcessing(models.IntegerChoices): class AttachmentProcessing(models.IntegerChoices):
ATTACHMENTS_ONLY = 1, _("Only process attachments.") ATTACHMENTS_ONLY = 1, _("Only process attachments.")
EVERYTHING = 2, _("Process all files, including 'inline' " "attachments.") EVERYTHING = 2, _("Process all files, including 'inline' attachments.")
class MailAction(models.IntegerChoices): class MailAction(models.IntegerChoices):
DELETE = 1, _("Delete") DELETE = 1, _("Delete")
Some files were not shown because too many files have changed in this diff.