Configures ruff as the one-stop linter and resolves the warnings it raised

Trenton H 2023-03-28 09:39:30 -07:00
parent 5869467db3
commit ce41ac9158
110 changed files with 507 additions and 491 deletions

@ -1,4 +1,3 @@
#!/usr/bin/env python3
import json
import logging
import os
@ -390,8 +389,6 @@ class LibraryTagsCleaner(RegistryTagsCleaner):
will need their own logic
"""
pass
def _main():
parser = ArgumentParser(

@ -1,4 +1,3 @@
#!/usr/bin/env python3
import logging

1 .github/scripts/get-build-json.py vendored Executable file → Normal file
@ -1,4 +1,3 @@
#!/usr/bin/env python3
"""
This is a helper script for the multi-stage Docker image builder.
It provides a single point of configuration for package version control.

@ -1,4 +1,3 @@
#!/usr/bin/env python3
"""
This module contains some useful classes for interacting with the Github API.
The full documentation for the API can be found here: https://docs.github.com/en/rest
@ -162,10 +161,7 @@ class ContainerPackage(_EndpointResponse):
Returns True if the image has at least one tag which matches the given regex,
False otherwise
"""
for tag in self.tags:
if re.match(pattern, tag) is not None:
return True
return False
return any(re.match(pattern, tag) is not None for tag in self.tags)
def __repr__(self):
return f"Package {self.name}"

1 .gitignore vendored
@ -73,6 +73,7 @@ virtualenv
.venv/
/docker-compose.env
/docker-compose.yml
.ruff_cache/
# Used for development
scripts/import-for-development

@ -36,39 +36,14 @@ repos:
- markdown
exclude: "(^Pipfile\\.lock$)"
# Python hooks
- repo: https://github.com/asottile/reorder_python_imports
rev: v3.9.0
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: 'v0.0.259'
hooks:
- id: reorder-python-imports
exclude: "(migrations)"
- repo: https://github.com/asottile/yesqa
rev: "v1.4.0"
hooks:
- id: yesqa
exclude: "(migrations)"
- repo: https://github.com/asottile/add-trailing-comma
rev: "v2.4.0"
hooks:
- id: add-trailing-comma
exclude: "(migrations)"
- repo: https://github.com/PyCQA/flake8
rev: 6.0.0
hooks:
- id: flake8
files: ^src/
args:
- "--config=./src/setup.cfg"
- id: ruff
- repo: https://github.com/psf/black
rev: 22.12.0
hooks:
- id: black
- repo: https://github.com/asottile/pyupgrade
rev: v3.3.1
hooks:
- id: pyupgrade
exclude: "(migrations)"
args:
- "--py38-plus"
# Dockerfile hooks
- repo: https://github.com/AleksaC/hadolint-py
rev: v2.10.0
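As a rough illustration (assumed examples, not taken from this commit), the single ruff hook now covers much of what the removed hooks split between them: flake8-style linting, import tidying, trailing commas, noqa cleanup, and pyupgrade-style modernisation.

# Hypothetical before/after, for illustration only.

# Before: patterns the removed pyupgrade hook used to rewrite.
class Widget(object):
    def describe(self, name, count):
        return "{} x{}".format(name, count)

# After: the same modernisations, now applied by ruff's UP rules
# (UP004 drops the redundant `object` base, UP032 prefers f-strings).
class Widget:
    def describe(self, name, count):
        return f"{name} x{count}"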

23 .ruff.toml Normal file
@ -0,0 +1,23 @@
# https://beta.ruff.rs/docs/settings/
# https://beta.ruff.rs/docs/rules/
select = ["F", "E", "W", "UP", "COM", "DJ", "EXE", "ISC", "ICN", "G201", "INP", "PIE", "RSE", "SIM", "TID", "PLC", "PLE", "RUF"]
# TODO PTH
ignore = ["DJ001", "SIM105"]
fix = true
line-length = 88
respect-gitignore = true
src = ["src"]
target-version = "py38"
format = "grouped"
show-fixes = true
[per-file-ignores]
".github/scripts/*.py" = ["E501", "INP001", "SIM117"]
"docker/wait-for-redis.py" = ["INP001"]
"*/tests/*.py" = ["E501", "SIM117"]
"*/migrations/*.py" = ["E501", "SIM"]
"src/paperless_tesseract/tests/test_parser.py" = ["RUF001"]
"src/documents/models.py" = ["SIM115"]
[isort]
force-single-line = true
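A minimal sketch, assuming ruff 0.0.259's CLI, of driving the linter with this file from a helper script; ruff discovers .ruff.toml on its own, and the `fix = true` setting applies the autofixes seen throughout this commit without extra flags:

# Hypothetical helper script, for illustration only.
import subprocess
import sys


def run_ruff(*paths: str) -> int:
    # `ruff check` resolves .ruff.toml from the working tree; the
    # per-file ignores above apply automatically.
    return subprocess.run(["ruff", "check", *paths]).returncode


if __name__ == "__main__":
    sys.exit(run_ruff("src", ".github/scripts", "docker"))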

@ -78,6 +78,7 @@ black = "*"
pre-commit = "*"
imagehash = "*"
mkdocs-material = "*"
ruff = "*"
[typing-dev]
mypy = "*"

23 Pipfile.lock generated
@ -3069,6 +3069,29 @@
"markers": "python_version >= '3.7' and python_version < '4'",
"version": "==2.28.2"
},
"ruff": {
"hashes": [
"sha256:22e1e35bf5f12072cd644d22afd9203641ccf258bc14ff91aa1c43dc14f6047d",
"sha256:29e2b77b7d5da6a7dd5cf9b738b511355c5734ece56f78e500d4b5bffd58c1a0",
"sha256:38704f151323aa5858370a2f792e122cc25e5d1aabe7d42ceeab83da18f0b456",
"sha256:40ae87f2638484b7e8a7567b04a7af719f1c484c5bf132038b702bb32e1f6577",
"sha256:428507fb321b386dda70d66cd1a8aa0abf51d7c197983d83bb9e4fa5ee60300b",
"sha256:49e903bcda19f6bb0725a962c058eb5d61f40d84ef52ed53b61939b69402ab4e",
"sha256:5b3c1beacf6037e7f0781d4699d9a2dd4ba2462f475be5b1f45cf84c4ba3c69d",
"sha256:71f0ef1985e9a6696fa97da8459917fa34bdaa2c16bd33bd5edead585b7d44f7",
"sha256:79b02fa17ec1fd8d306ae302cb47fb614b71e1f539997858243769bcbe78c6d9",
"sha256:7cfef26619cba184d59aa7fa17b48af5891d51fc0b755a9bc533478a10d4d066",
"sha256:8b56496063ab3bfdf72339a5fbebb8bd46e5c5fee25ef11a9f03b208fa0562ec",
"sha256:aa9449b898287e621942cc71b9327eceb8f0c357e4065fecefb707ef2d978df8",
"sha256:c5fbaea9167f1852757f02133e5daacdb8c75b3431343205395da5b10499927a",
"sha256:d2fb20e89e85d147c85caa807707a1488bccc1f3854dc3d53533e89b52a0c5ff",
"sha256:daaea322e7e85f4c13d82be9536309e1c4b8b9851bb0cbc7eeb15d490fd46bf9",
"sha256:e4f39e18702de69faaaee3969934b92d7467285627f99a5b6ecd55a7d9f5d086",
"sha256:f3938dc45e2a3f818e9cbd53007265c22246fbfded8837b2c563bf0ebde1a226"
],
"index": "pypi",
"version": "==0.0.259"
},
"scipy": {
"hashes": [
"sha256:02b567e722d62bddd4ac253dafb01ce7ed8742cf8031aea030a41414b86c1125",

@ -18,7 +18,7 @@ if __name__ == "__main__":
REDIS_URL: Final[str] = os.getenv("PAPERLESS_REDIS", "redis://localhost:6379")
print(f"Waiting for Redis...", flush=True)
print("Waiting for Redis...", flush=True)
attempt = 0
with Redis.from_url(url=REDIS_URL) as client:
@ -37,8 +37,8 @@ if __name__ == "__main__":
attempt += 1
if attempt >= MAX_RETRY_COUNT:
print(f"Failed to connect to redis using environment variable PAPERLESS_REDIS.")
print("Failed to connect to redis using environment variable PAPERLESS_REDIS.")
sys.exit(os.EX_UNAVAILABLE)
else:
print(f"Connected to Redis broker.")
print("Connected to Redis broker.")
sys.exit(os.EX_OK)

@ -30,7 +30,9 @@ def worker_int(worker):
worker.log.info("worker received INT or QUIT signal")
## get traceback info
import threading, sys, traceback
import sys
import threading
import traceback
id2name = {th.ident: th.name for th in threading.enumerate()}
code = []

@ -136,9 +136,8 @@ def convert_from_tiff_to_pdf(filepath: Path) -> Path:
filepath,
],
)
with filepath.open("rb") as img_file:
with newpath.open("wb") as pdf_file:
pdf_file.write(img2pdf.convert(img_file))
with filepath.open("rb") as img_file, newpath.open("wb") as pdf_file:
pdf_file.write(img2pdf.convert(img_file))
return newpath
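Collapses like this one (and the similar ones below) are ruff's SIM117 fix; the nested and combined forms are equivalent by the definition of the multi-context with statement. A runnable sketch, using temporary files rather than the real paths:

import tempfile

# Nested form, as flagged by SIM117:
with tempfile.TemporaryFile() as src:
    with tempfile.TemporaryFile() as dst:
        dst.write(src.read())

# Combined form, as rewritten; identical enter/exit ordering.
with tempfile.TemporaryFile() as src, tempfile.TemporaryFile() as dst:
    dst.write(src.read())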

@ -52,7 +52,7 @@ class BulkArchiveStrategy:
return in_archive_path
def add_document(self, doc: Document):
raise NotImplementedError() # pragma: no cover
raise NotImplementedError # pragma: no cover
class OriginalsOnlyStrategy(BulkArchiveStrategy):
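The parentheses dropped from the raise above (and from the other bare raises later in this commit) reflect ruff's RSE rule; raising an exception class implicitly instantiates it with no arguments, so the two spellings are interchangeable. A tiny check:

# Raising the class and raising a no-argument instance are equivalent.
try:
    raise NotImplementedError
except NotImplementedError as err:
    assert isinstance(err, NotImplementedError)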

@ -104,7 +104,7 @@ class DocumentClassifier:
self.document_type_classifier = pickle.load(f)
self.storage_path_classifier = pickle.load(f)
except Exception as err:
raise ClassifierModelCorruptError() from err
raise ClassifierModelCorruptError from err
# Check for the warning about unpickling from differing versions
# and consider it incompatible
@ -117,7 +117,7 @@ class DocumentClassifier:
if issubclass(warning.category, UserWarning):
w_msg = str(warning.message)
if sk_learn_warning_url in w_msg:
raise IncompatibleClassifierVersionError()
raise IncompatibleClassifierVersionError
def save(self):
target_file = settings.MODEL_FILE

@ -590,9 +590,8 @@ class Consumer(LoggingMixin):
)
def _write(self, storage_type, source, target):
with open(source, "rb") as read_file:
with open(target, "wb") as write_file:
write_file.write(read_file.read())
with open(source, "rb") as read_file, open(target, "wb") as write_file:
write_file.write(read_file.read())
def _log_script_outputs(self, completed_process: CompletedProcess):
"""

@ -164,7 +164,7 @@ def remove_document_from_index(document):
class DelayedQuery:
def _get_query(self):
raise NotImplementedError()
raise NotImplementedError
def _get_query_filter(self):
criterias = []

@ -159,7 +159,7 @@ def _consume_wait_unmodified(file: str) -> None:
new_size = stat_data.st_size
except FileNotFoundError:
logger.debug(
f"File {file} moved while waiting for it to remain " f"unmodified.",
f"File {file} moved while waiting for it to remain unmodified.",
)
return
if new_mtime == mtime and new_size == size:
@ -293,10 +293,7 @@ class Command(BaseCommand):
while not finished:
try:
for event in inotify.read(timeout=timeout):
if recursive:
path = inotify.get_path(event.wd)
else:
path = directory
path = inotify.get_path(event.wd) if recursive else directory
filepath = os.path.join(path, event.name)
notified_files[filepath] = monotonic()

@ -1,6 +1,6 @@
from django.core.management.base import BaseCommand
from ...tasks import train_classifier
from documents.tasks import train_classifier
class Command(BaseCommand):

@ -35,8 +35,8 @@ from paperless.db import GnuPG
from paperless_mail.models import MailAccount
from paperless_mail.models import MailRule
from ...file_handling import delete_empty_directories
from ...file_handling import generate_filename
from documents.file_handling import delete_empty_directories
from documents.file_handling import generate_filename
class Command(BaseCommand):
@ -403,9 +403,10 @@ class Command(BaseCommand):
if self.compare_checksums and source_checksum:
target_checksum = hashlib.md5(target.read_bytes()).hexdigest()
perform_copy = target_checksum != source_checksum
elif source_stat.st_mtime != target_stat.st_mtime:
perform_copy = True
elif source_stat.st_size != target_stat.st_size:
elif (
source_stat.st_mtime != target_stat.st_mtime
or source_stat.st_size != target_stat.st_size
):
perform_copy = True
else:
# Copy if it does not exist

@ -22,8 +22,8 @@ from documents.settings import EXPORTER_THUMBNAIL_NAME
from filelock import FileLock
from paperless import version
from ...file_handling import create_source_path_directory
from ...signals.handlers import update_filename_and_move_files
from documents.file_handling import create_source_path_directory
from documents.signals.handlers import update_filename_and_move_files
@contextmanager
@ -111,37 +111,36 @@ class Command(BaseCommand):
post_save,
receiver=update_filename_and_move_files,
sender=Document,
), disable_signal(
m2m_changed,
receiver=update_filename_and_move_files,
sender=Document.tags.through,
):
with disable_signal(
m2m_changed,
receiver=update_filename_and_move_files,
sender=Document.tags.through,
):
# Fill up the database with whatever is in the manifest
try:
for manifest_path in manifest_paths:
call_command("loaddata", manifest_path)
except (FieldDoesNotExist, DeserializationError) as e:
self.stdout.write(self.style.ERROR("Database import failed"))
if (
self.version is not None
and self.version != version.__full_version_str__
):
self.stdout.write(
self.style.ERROR(
"Version mismatch: "
f"Currently {version.__full_version_str__},"
f" importing {self.version}",
),
)
raise e
else:
self.stdout.write(
self.style.ERROR("No version information present"),
)
raise e
# Fill up the database with whatever is in the manifest
try:
for manifest_path in manifest_paths:
call_command("loaddata", manifest_path)
except (FieldDoesNotExist, DeserializationError) as e:
self.stdout.write(self.style.ERROR("Database import failed"))
if (
self.version is not None
and self.version != version.__full_version_str__
):
self.stdout.write(
self.style.ERROR(
"Version mismatch: "
f"Currently {version.__full_version_str__},"
f" importing {self.version}",
),
)
raise e
else:
self.stdout.write(
self.style.ERROR("No version information present"),
)
raise e
self._import_files_from_manifest(options["no_progress_bar"])
self._import_files_from_manifest(options["no_progress_bar"])
self.stdout.write("Updating search index...")
call_command(
@ -154,14 +153,14 @@ class Command(BaseCommand):
def _check_manifest_exists(path):
if not os.path.exists(path):
raise CommandError(
"That directory doesn't appear to contain a manifest.json " "file.",
"That directory doesn't appear to contain a manifest.json file.",
)
def _check_manifest(self):
for record in self.manifest:
if not record["model"] == "documents.document":
if record["model"] != "documents.document":
continue
if EXPORTER_FILE_NAME not in record:

@ -5,10 +5,10 @@ from django.core.management.base import BaseCommand
from documents.classifier import load_classifier
from documents.models import Document
from ...signals.handlers import set_correspondent
from ...signals.handlers import set_document_type
from ...signals.handlers import set_storage_path
from ...signals.handlers import set_tags
from documents.signals.handlers import set_correspondent
from documents.signals.handlers import set_document_type
from documents.signals.handlers import set_storage_path
from documents.signals.handlers import set_tags
logger = logging.getLogger("paperless.management.retagger")

@ -7,7 +7,7 @@ from django import db
from django.core.management.base import BaseCommand
from documents.models import Document
from ...parsers import get_parser_class_for_mime_type
from documents.parsers import get_parser_class_for_mime_type
def _process_document(doc_in):

@ -20,10 +20,7 @@ def log_reason(matching_model, document, reason):
def match_correspondents(document, classifier):
if classifier:
pred_id = classifier.predict_correspondent(document.content)
else:
pred_id = None
pred_id = classifier.predict_correspondent(document.content) if classifier else None
correspondents = Correspondent.objects.all()
@ -33,10 +30,7 @@ def match_correspondents(document, classifier):
def match_document_types(document, classifier):
if classifier:
pred_id = classifier.predict_document_type(document.content)
else:
pred_id = None
pred_id = classifier.predict_document_type(document.content) if classifier else None
document_types = DocumentType.objects.all()
@ -46,10 +40,7 @@ def match_document_types(document, classifier):
def match_tags(document, classifier):
if classifier:
predicted_tag_ids = classifier.predict_tags(document.content)
else:
predicted_tag_ids = []
predicted_tag_ids = classifier.predict_tags(document.content) if classifier else []
tags = Tag.objects.all()
@ -59,10 +50,7 @@ def match_tags(document, classifier):
def match_storage_paths(document, classifier):
if classifier:
pred_id = classifier.predict_storage_path(document.content)
else:
pred_id = None
pred_id = classifier.predict_storage_path(document.content) if classifier else None
storage_paths = StoragePath.objects.all()
@ -80,7 +68,7 @@ def matches(matching_model, document):
document_content = document.content
# Check that match is not empty
if matching_model.match.strip() == "":
if not matching_model.match.strip():
return False
if matching_model.is_insensitive:
@ -132,7 +120,7 @@ def matches(matching_model, document):
)
except re.error:
logger.error(
f"Error while processing regular expression " f"{matching_model.match}",
f"Error while processing regular expression {matching_model.match}",
)
return False
if match:

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-20 19:10
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
@ -32,7 +30,7 @@ class Migration(migrations.Migration):
models.TextField(
db_index=(
"mysql" not in settings.DATABASES["default"]["ENGINE"]
)
),
),
),
("created", models.DateTimeField(auto_now_add=True)),

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-26 13:16
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
@ -21,7 +19,8 @@ class Migration(migrations.Migration):
model_name="document",
name="created",
field=models.DateTimeField(
default=django.utils.timezone.now, editable=False
default=django.utils.timezone.now,
editable=False,
),
),
]

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-01-11 12:21
from __future__ import unicode_literals
from django.db import migrations, models
from django.template.defaultfilters import slugify
@ -23,7 +21,8 @@ def move_sender_strings_to_sender_model(apps, schema_editor):
DOCUMENT_SENDER_MAP[document.pk],
created,
) = sender_model.objects.get_or_create(
name=document.sender, defaults={"slug": slugify(document.sender)}
name=document.sender,
defaults={"slug": slugify(document.sender)},
)

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-01-14 18:44
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-01-23 03:13
from __future__ import unicode_literals
from django.db import migrations

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-01-23 04:30
from __future__ import unicode_literals
from django.db import migrations, models

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-01-26 21:14
from __future__ import unicode_literals
from django.db import migrations, models

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-01-29 22:58
from __future__ import unicode_literals
from django.db import migrations, models
@ -33,7 +31,9 @@ class Migration(migrations.Migration):
model_name="document",
name="tags",
field=models.ManyToManyField(
blank=True, related_name="documents", to="documents.Tag"
blank=True,
related_name="documents",
to="documents.Tag",
),
),
]

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-02-14 00:40
from __future__ import unicode_literals
from django.db import migrations, models

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-02-27 17:54
from __future__ import unicode_literals
from django.db import migrations, models
@ -42,7 +40,7 @@ class Migration(migrations.Migration):
(
"component",
models.PositiveIntegerField(
choices=[(1, "Consumer"), (2, "Mail Fetcher")]
choices=[(1, "Consumer"), (2, "Mail Fetcher")],
),
),
("created", models.DateTimeField(auto_now_add=True)),

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-03-03 19:29
from __future__ import unicode_literals
from django.db import migrations

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-03-05 00:40
from __future__ import unicode_literals
import gnupg
import os
@ -14,7 +12,7 @@ from django.db import migrations
from django.utils.termcolors import colorize as colourise # Spelling hurts me
class GnuPG(object):
class GnuPG:
"""
A handy singleton to use when handling encrypted files.
"""
@ -28,17 +26,22 @@ class GnuPG(object):
@classmethod
def encrypted(cls, file_handle):
return cls.gpg.encrypt_file(
file_handle, recipients=None, passphrase=settings.PASSPHRASE, symmetric=True
file_handle,
recipients=None,
passphrase=settings.PASSPHRASE,
symmetric=True,
).data
def move_documents_and_create_thumbnails(apps, schema_editor):
os.makedirs(
os.path.join(settings.MEDIA_ROOT, "documents", "originals"), exist_ok=True
os.path.join(settings.MEDIA_ROOT, "documents", "originals"),
exist_ok=True,
)
os.makedirs(
os.path.join(settings.MEDIA_ROOT, "documents", "thumbnails"), exist_ok=True
os.path.join(settings.MEDIA_ROOT, "documents", "thumbnails"),
exist_ok=True,
)
documents = os.listdir(os.path.join(settings.MEDIA_ROOT, "documents"))
@ -55,7 +58,7 @@ def move_documents_and_create_thumbnails(apps, schema_editor):
" in order."
"\n",
opts=("bold",),
)
),
)
try:
@ -73,7 +76,7 @@ def move_documents_and_create_thumbnails(apps, schema_editor):
colourise("*", fg="green"),
colourise("Generating a thumbnail for", fg="white"),
colourise(f, fg="cyan"),
)
),
)
thumb_temp = tempfile.mkdtemp(prefix="paperless", dir=settings.SCRATCH_DIR)
@ -95,7 +98,7 @@ def move_documents_and_create_thumbnails(apps, schema_editor):
"remove",
orig_target,
os.path.join(thumb_temp, "convert-%04d.png"),
)
),
).wait()
thumb_source = os.path.join(thumb_temp, "convert-0000.png")

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-03-25 21:11
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-03-28 19:09
from __future__ import unicode_literals
import gnupg
import hashlib
@ -13,7 +11,7 @@ from django.template.defaultfilters import slugify
from django.utils.termcolors import colorize as colourise # Spelling hurts me
class GnuPG(object):
class GnuPG:
"""
A handy singleton to use when handling encrypted files.
"""
@ -27,11 +25,14 @@ class GnuPG(object):
@classmethod
def encrypted(cls, file_handle):
return cls.gpg.encrypt_file(
file_handle, recipients=None, passphrase=settings.PASSPHRASE, symmetric=True
file_handle,
recipients=None,
passphrase=settings.PASSPHRASE,
symmetric=True,
).data
class Document(object):
class Document:
"""
Django's migrations restrict access to model methods, so this is a snapshot
of the methods that existed at the time this migration was written, since
@ -49,9 +50,9 @@ class Document(object):
def __str__(self):
created = self.created.strftime("%Y%m%d%H%M%S")
if self.correspondent and self.title:
return "{}: {} - {}".format(created, self.correspondent, self.title)
return f"{created}: {self.correspondent} - {self.title}"
if self.correspondent or self.title:
return "{}: {}".format(created, self.correspondent or self.title)
return f"{created}: {self.correspondent or self.title}"
return str(created)
@property
@ -60,7 +61,7 @@ class Document(object):
settings.MEDIA_ROOT,
"documents",
"originals",
"{:07}.{}.gpg".format(self.pk, self.file_type),
f"{self.pk:07}.{self.file_type}.gpg",
)
@property
@ -88,7 +89,7 @@ def set_checksums(apps, schema_editor):
" order."
"\n",
opts=("bold",),
)
),
)
sums = {}
@ -101,7 +102,7 @@ def set_checksums(apps, schema_editor):
colourise("*", fg="green"),
colourise("Generating a checksum for", fg="white"),
colourise(document.file_name, fg="cyan"),
)
),
)
with document.source_file as encrypted:
@ -122,15 +123,16 @@ def set_checksums(apps, schema_editor):
fg="yellow",
),
doc1=colourise(
" * {} (id: {})".format(sums[checksum][1], sums[checksum][0]),
f" * {sums[checksum][1]} (id: {sums[checksum][0]})",
fg="red",
),
doc2=colourise(
" * {} (id: {})".format(document.file_name, document.pk), fg="red"
f" * {document.file_name} (id: {document.pk})",
fg="red",
),
code=colourise(
" $ echo 'DELETE FROM documents_document WHERE id = {pk};' | ./manage.py dbshell".format(
pk=document.pk
pk=document.pk,
),
fg="green",
),
@ -171,7 +173,8 @@ class Migration(migrations.Migration):
model_name="document",
name="created",
field=models.DateTimeField(
db_index=True, default=django.utils.timezone.now
db_index=True,
default=django.utils.timezone.now,
),
),
migrations.AlterField(

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-05 21:38
from __future__ import unicode_literals
from django.db import migrations, models

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-25 15:58
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-05-12 05:07
from __future__ import unicode_literals
from django.db import migrations, models

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-07-15 17:12
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-07-15 17:12
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.db import migrations

@ -1,6 +1,3 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
@ -22,7 +19,9 @@ class Migration(migrations.Migration):
model_name="document",
name="added",
field=models.DateTimeField(
db_index=True, default=django.utils.timezone.now, editable=False
db_index=True,
default=django.utils.timezone.now,
editable=False,
),
),
migrations.RunPython(set_added_time_to_created_time),

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-02-04 13:07
from __future__ import unicode_literals
from django.db import migrations, models

@ -6,7 +6,7 @@ from django.db import migrations, models
def set_filename(apps, schema_editor):
Document = apps.get_model("documents", "Document")
for doc in Document.objects.all():
file_name = "{:07}.{}".format(doc.pk, doc.file_type)
file_name = f"{doc.pk:07}.{doc.file_type}"
if doc.storage_type == "gpg":
file_name += ".gpg"

@ -10,5 +10,5 @@ class Migration(migrations.Migration):
]
operations = [
migrations.RunPython(migrations.RunPython.noop, migrations.RunPython.noop)
migrations.RunPython(migrations.RunPython.noop, migrations.RunPython.noop),
]

@ -1,5 +1,4 @@
# Generated by Django 3.1.3 on 2020-11-20 11:21
import mimetypes
import os
import magic
@ -16,7 +15,7 @@ def source_path(self):
if self.filename:
fname = str(self.filename)
else:
fname = "{:07}.{}".format(self.pk, self.file_type)
fname = f"{self.pk:07}.{self.file_type}"
if self.storage_type == STORAGE_TYPE_GPG:
fname += ".gpg"

@ -73,7 +73,7 @@ class Migration(migrations.Migration):
(15, "Modified before"),
(16, "Modified after"),
(17, "Does not have tag"),
]
],
),
),
("value", models.CharField(max_length=128)),

@ -165,7 +165,9 @@ class Migration(migrations.Migration):
model_name="document",
name="created",
field=models.DateTimeField(
db_index=True, default=django.utils.timezone.now, verbose_name="created"
db_index=True,
default=django.utils.timezone.now,
verbose_name="created",
),
),
migrations.AlterField(
@ -196,14 +198,18 @@ class Migration(migrations.Migration):
model_name="document",
name="mime_type",
field=models.CharField(
editable=False, max_length=256, verbose_name="mime type"
editable=False,
max_length=256,
verbose_name="mime type",
),
),
migrations.AlterField(
model_name="document",
name="modified",
field=models.DateTimeField(
auto_now=True, db_index=True, verbose_name="modified"
auto_now=True,
db_index=True,
verbose_name="modified",
),
),
migrations.AlterField(
@ -234,7 +240,10 @@ class Migration(migrations.Migration):
model_name="document",
name="title",
field=models.CharField(
blank=True, db_index=True, max_length=128, verbose_name="title"
blank=True,
db_index=True,
max_length=128,
verbose_name="title",
),
),
migrations.AlterField(
@ -373,7 +382,10 @@ class Migration(migrations.Migration):
model_name="savedviewfilterrule",
name="value",
field=models.CharField(
blank=True, max_length=128, null=True, verbose_name="value"
blank=True,
max_length=128,
null=True,
verbose_name="value",
),
),
migrations.AlterField(

@ -29,7 +29,7 @@ def archive_path_old(doc):
if doc.filename:
fname = archive_name_from_filename(doc.filename)
else:
fname = "{:07}.pdf".format(doc.pk)
fname = f"{doc.pk:07}.pdf"
return os.path.join(settings.ARCHIVE_DIR, fname)
@ -48,7 +48,7 @@ def source_path(doc):
if doc.filename:
fname = str(doc.filename)
else:
fname = "{:07}{}".format(doc.pk, doc.file_type)
fname = f"{doc.pk:07}{doc.file_type}"
if doc.storage_type == STORAGE_TYPE_GPG:
fname += ".gpg" # pragma: no cover
@ -67,7 +67,9 @@ def generate_unique_filename(doc, archive_filename=False):
while True:
new_filename = generate_filename(
doc, counter, archive_filename=archive_filename
doc,
counter,
archive_filename=archive_filename,
)
if new_filename == old_filename:
# still the same as before.
@ -93,14 +95,16 @@ def generate_filename(doc, counter=0, append_gpg=True, archive_filename=False):
if doc.correspondent:
correspondent = pathvalidate.sanitize_filename(
doc.correspondent.name, replacement_text="-"
doc.correspondent.name,
replacement_text="-",
)
else:
correspondent = "none"
if doc.document_type:
document_type = pathvalidate.sanitize_filename(
doc.document_type.name, replacement_text="-"
doc.document_type.name,
replacement_text="-",
)
else:
document_type = "none"
@ -111,9 +115,7 @@ def generate_filename(doc, counter=0, append_gpg=True, archive_filename=False):
document_type=document_type,
created=datetime.date.isoformat(doc.created),
created_year=doc.created.year if doc.created else "none",
created_month=f"{doc.created.month:02}"
if doc.created
else "none", # NOQA: E501
created_month=f"{doc.created.month:02}" if doc.created else "none",
created_day=f"{doc.created.day:02}" if doc.created else "none",
added=datetime.date.isoformat(doc.added),
added_year=doc.added.year if doc.added else "none",
@ -128,7 +130,7 @@ def generate_filename(doc, counter=0, append_gpg=True, archive_filename=False):
except (ValueError, KeyError, IndexError):
logger.warning(
f"Invalid PAPERLESS_FILENAME_FORMAT: "
f"{settings.FILENAME_FORMAT}, falling back to default"
f"{settings.FILENAME_FORMAT}, falling back to default",
)
counter_str = f"_{counter:02}" if counter else ""
@ -170,13 +172,17 @@ def create_archive_version(doc, retry_count=3):
parser: DocumentParser = parser_class(None, None)
try:
parse_wrapper(
parser, source_path(doc), doc.mime_type, os.path.basename(doc.filename)
parser,
source_path(doc),
doc.mime_type,
os.path.basename(doc.filename),
)
doc.content = parser.get_text()
if parser.get_archive_path() and os.path.isfile(parser.get_archive_path()):
doc.archive_filename = generate_unique_filename(
doc, archive_filename=True
doc,
archive_filename=True,
)
with open(parser.get_archive_path(), "rb") as f:
doc.archive_checksum = hashlib.md5(f.read()).hexdigest()
@ -186,7 +192,7 @@ def create_archive_version(doc, retry_count=3):
doc.archive_checksum = None
logger.error(
f"Parser did not return an archive document for document "
f"ID:{doc.id}. Removing archive document."
f"ID:{doc.id}. Removing archive document.",
)
doc.save()
return
@ -195,7 +201,7 @@ def create_archive_version(doc, retry_count=3):
logger.exception(
f"Unable to regenerate archive document for ID:{doc.id}. You "
f"need to invoke the document_archiver management command "
f"manually for that document."
f"manually for that document.",
)
doc.archive_checksum = None
doc.save()
@ -233,7 +239,7 @@ def move_old_to_new_locations(apps, schema_editor):
old_path = archive_path_old(doc)
if doc.id not in affected_document_ids and not os.path.isfile(old_path):
raise ValueError(
f"Archived document ID:{doc.id} does not exist at: " f"{old_path}"
f"Archived document ID:{doc.id} does not exist at: {old_path}",
)
# check that we can regenerate affected archive versions
@ -245,7 +251,7 @@ def move_old_to_new_locations(apps, schema_editor):
if not parser_class:
raise ValueError(
f"Document ID:{doc.id} has an invalid archived document, "
f"but no parsers are available. Cannot migrate."
f"but no parsers are available. Cannot migrate.",
)
for doc in Document.objects.filter(archive_checksum__isnull=False):
@ -260,7 +266,7 @@ def move_old_to_new_locations(apps, schema_editor):
# Set archive path for unaffected files
doc.archive_filename = archive_name_from_filename(doc.filename)
Document.objects.filter(id=doc.id).update(
archive_filename=doc.archive_filename
archive_filename=doc.archive_filename,
)
# regenerate archive documents
@ -281,13 +287,13 @@ def move_new_to_old_locations(apps, schema_editor):
raise ValueError(
f"Cannot migrate: Archive file name {old_archive_path} of "
f"document {doc.filename} would clash with another archive "
f"filename."
f"filename.",
)
old_archive_paths.add(old_archive_path)
if new_archive_path != old_archive_path and os.path.isfile(old_archive_path):
raise ValueError(
f"Cannot migrate: Cannot move {new_archive_path} to "
f"{old_archive_path}: file already exists."
f"{old_archive_path}: file already exists.",
)
for doc in Document.objects.filter(archive_checksum__isnull=False):

@ -61,7 +61,9 @@ class Migration(migrations.Migration):
model_name="tag",
name="color",
field=models.CharField(
default="#a6cee3", max_length=7, verbose_name="color"
default="#a6cee3",
max_length=7,
verbose_name="color",
),
),
migrations.RunPython(forward, reverse),

@ -25,5 +25,5 @@ class Migration(migrations.Migration):
]
operations = [
migrations.RunPython(remove_null_characters, migrations.RunPython.noop)
migrations.RunPython(remove_null_characters, migrations.RunPython.noop),
]

@ -14,7 +14,10 @@ class Migration(migrations.Migration):
model_name="savedview",
name="sort_field",
field=models.CharField(
blank=True, max_length=128, null=True, verbose_name="sort field"
blank=True,
max_length=128,
null=True,
verbose_name="sort field",
),
),
migrations.AlterField(

@ -14,7 +14,10 @@ class Migration(migrations.Migration):
model_name="savedviewfilterrule",
name="value",
field=models.CharField(
blank=True, max_length=255, null=True, verbose_name="value"
blank=True,
max_length=255,
null=True,
verbose_name="value",
),
),
]

@ -48,5 +48,5 @@ class Migration(migrations.Migration):
),
),
],
)
),
]

@ -46,12 +46,15 @@ class Migration(migrations.Migration):
# Drop the django-q tables entirely
# Must be done last or there could be references here
migrations.RunSQL(
"DROP TABLE IF EXISTS django_q_ormq", reverse_sql=migrations.RunSQL.noop
"DROP TABLE IF EXISTS django_q_ormq",
reverse_sql=migrations.RunSQL.noop,
),
migrations.RunSQL(
"DROP TABLE IF EXISTS django_q_schedule", reverse_sql=migrations.RunSQL.noop
"DROP TABLE IF EXISTS django_q_schedule",
reverse_sql=migrations.RunSQL.noop,
),
migrations.RunSQL(
"DROP TABLE IF EXISTS django_q_task", reverse_sql=migrations.RunSQL.noop
"DROP TABLE IF EXISTS django_q_task",
reverse_sql=migrations.RunSQL.noop,
),
]

@ -79,7 +79,8 @@ class Migration(migrations.Migration):
migrations.AddConstraint(
model_name="storagepath",
constraint=models.UniqueConstraint(
fields=("name", "owner"), name="documents_storagepath_unique_name_owner"
fields=("name", "owner"),
name="documents_storagepath_unique_name_owner",
),
),
migrations.AddConstraint(
@ -93,7 +94,8 @@ class Migration(migrations.Migration):
migrations.AddConstraint(
model_name="tag",
constraint=models.UniqueConstraint(
fields=("name", "owner"), name="documents_tag_unique_name_owner"
fields=("name", "owner"),
name="documents_tag_unique_name_owner",
),
),
migrations.AddConstraint(

@ -43,7 +43,9 @@ class Migration(migrations.Migration):
model_name="note",
name="note",
field=models.TextField(
blank=True, help_text="Note for the document", verbose_name="content"
blank=True,
help_text="Note for the document",
verbose_name="content",
),
),
migrations.AlterField(

@ -269,7 +269,7 @@ class Document(ModelWithOwner):
MinValueValidator(ARCHIVE_SERIAL_NUMBER_MIN),
],
help_text=_(
"The position of this document in your physical document " "archive.",
"The position of this document in your physical document archive.",
),
)
@ -470,6 +470,9 @@ class SavedViewFilterRule(models.Model):
verbose_name = _("filter rule")
verbose_name_plural = _("filter rules")
def __str__(self) -> str:
return f"SavedViewFilterRule: {self.rule_type} : {self.value}"
# TODO: why is this in the models file?
# TODO: how about, what is this and where is it documented?
@ -483,7 +486,7 @@ class FileInfo:
(
"created-title",
re.compile(
r"^(?P<created>\d{8}(\d{6})?Z) - " r"(?P<title>.*)$",
r"^(?P<created>\d{8}(\d{6})?Z) - (?P<title>.*)$",
flags=re.IGNORECASE,
),
),
@ -634,6 +637,9 @@ class PaperlessTask(models.Model):
),
)
def __str__(self) -> str:
return f"Task {self.task_id}"
class Note(models.Model):
note = models.TextField(

@ -323,7 +323,7 @@ class DocumentParser(LoggingMixin):
return []
def parse(self, document_path, mime_type, file_name=None):
raise NotImplementedError()
raise NotImplementedError
def get_archive_path(self):
return self.archive_path
@ -332,7 +332,7 @@ class DocumentParser(LoggingMixin):
"""
Returns the path to a file we can use as a thumbnail for this document.
"""
raise NotImplementedError()
raise NotImplementedError
def get_text(self):
return self.text

@ -94,7 +94,7 @@ def check_sanity(progress=False) -> SanityCheckMessages:
except OSError as e:
messages.error(doc.pk, f"Cannot read original file of document: {e}")
else:
if not checksum == doc.checksum:
if checksum != doc.checksum:
messages.error(
doc.pk,
"Checksum mismatch. "
@ -127,7 +127,7 @@ def check_sanity(progress=False) -> SanityCheckMessages:
f"Cannot read archive file of document : {e}",
)
else:
if not checksum == doc.archive_checksum:
if checksum != doc.archive_checksum:
messages.error(
doc.pk,
"Checksum mismatch of archived document. "

@ -7,7 +7,7 @@ from celery import states
try:
import zoneinfo
except ImportError:
import backports.zoneinfo as zoneinfo
from backports import zoneinfo
import magic
from django.conf import settings
from django.utils.text import slugify
@ -152,7 +152,7 @@ class SetPermissionsMixin:
class OwnedObjectSerializer(serializers.ModelSerializer, SetPermissionsMixin):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop("user", None)
return super().__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
def get_permissions(self, obj):
view_codename = f"view_{obj.__class__.__name__.lower()}"
@ -282,7 +282,7 @@ class ColorField(serializers.Field):
for id, color in self.COLOURS:
if id == data:
return color
raise serializers.ValidationError()
raise serializers.ValidationError
def to_representation(self, value):
for id, color in self.COLOURS:
@ -513,12 +513,12 @@ class DocumentListSerializer(serializers.Serializer):
def _validate_document_id_list(self, documents, name="documents"):
if not type(documents) == list:
raise serializers.ValidationError(f"{name} must be a list")
if not all([type(i) == int for i in documents]):
if not all(type(i) == int for i in documents):
raise serializers.ValidationError(f"{name} must be a list of integers")
count = Document.objects.filter(id__in=documents).count()
if not count == len(documents):
raise serializers.ValidationError(
f"Some documents in {name} don't exist or were " f"specified twice.",
f"Some documents in {name} don't exist or were specified twice.",
)
def validate_documents(self, documents):
@ -549,7 +549,7 @@ class BulkEditSerializer(DocumentListSerializer, SetPermissionsMixin):
def _validate_tag_id_list(self, tags, name="tags"):
if not type(tags) == list:
raise serializers.ValidationError(f"{name} must be a list")
if not all([type(i) == int for i in tags]):
if not all(type(i) == int for i in tags):
raise serializers.ValidationError(f"{name} must be a list of integers")
count = Tag.objects.filter(id__in=tags).count()
if not count == len(tags):
@ -826,8 +826,8 @@ class StoragePathSerializer(MatchingModelSerializer, OwnedObjectSerializer):
original_name="testfile",
)
except (KeyError):
raise serializers.ValidationError(_("Invalid variable detected."))
except KeyError as err:
raise serializers.ValidationError(_("Invalid variable detected.")) from err
return path
@ -919,7 +919,7 @@ class AcknowledgeTasksViewSerializer(serializers.Serializer):
pass
if not type(tasks) == list:
raise serializers.ValidationError(f"{name} must be a list")
if not all([type(i) == int for i in tasks]):
if not all(type(i) == int for i in tasks):
raise serializers.ValidationError(f"{name} must be a list of integers")
count = PaperlessTask.objects.filter(id__in=tasks).count()
if not count == len(tasks):

@ -19,14 +19,14 @@ from django.utils import termcolors
from django.utils import timezone
from filelock import FileLock
from .. import matching
from ..file_handling import create_source_path_directory
from ..file_handling import delete_empty_directories
from ..file_handling import generate_unique_filename
from ..models import Document
from ..models import MatchingModel
from ..models import PaperlessTask
from ..models import Tag
from documents import matching
from documents.file_handling import create_source_path_directory
from documents.file_handling import delete_empty_directories
from documents.file_handling import generate_unique_filename
from documents.models import Document
from documents.models import MatchingModel
from documents.models import PaperlessTask
from documents.models import Tag
logger = logging.getLogger("paperless.handlers")
@ -54,10 +54,7 @@ def set_correspondent(
potential_correspondents = matching.match_correspondents(document, classifier)
potential_count = len(potential_correspondents)
if potential_correspondents:
selected = potential_correspondents[0]
else:
selected = None
selected = potential_correspondents[0] if potential_correspondents else None
if potential_count > 1:
if use_first:
logger.debug(
@ -120,10 +117,7 @@ def set_document_type(
potential_document_type = matching.match_document_types(document, classifier)
potential_count = len(potential_document_type)
if potential_document_type:
selected = potential_document_type[0]
else:
selected = None
selected = potential_document_type[0] if potential_document_type else None
if potential_count > 1:
if use_first:
@ -255,10 +249,7 @@ def set_storage_path(
)
potential_count = len(potential_storage_path)
if potential_storage_path:
selected = potential_storage_path[0]
else:
selected = None
selected = potential_storage_path[0] if potential_storage_path else None
if potential_count > 1:
if use_first:
@ -370,7 +361,7 @@ def validate_move(instance, old_path, new_path):
if not os.path.isfile(old_path):
# Can't do anything if the old file does not exist anymore.
logger.fatal(f"Document {str(instance)}: File {old_path} has gone.")
raise CannotMoveFilesException()
raise CannotMoveFilesException
if os.path.isfile(new_path):
# Can't do anything if the new file already exists. Skip updating file.
@ -378,7 +369,7 @@ def validate_move(instance, old_path, new_path):
f"Document {str(instance)}: Cannot rename file "
f"since target path {new_path} already exists.",
)
raise CannotMoveFilesException()
raise CannotMoveFilesException
@receiver(models.signals.m2m_changed, sender=Document.tags.through)
@ -546,10 +537,10 @@ def before_task_publish_handler(sender=None, headers=None, body=None, **kwargs):
date_started=None,
date_done=None,
)
except Exception as e: # pragma: no cover
except Exception: # pragma: no cover
# Don't let an exception in the signal handlers prevent
# a document from being consumed.
logger.error(f"Creating PaperlessTask failed: {e}", exc_info=True)
logger.exception("Creating PaperlessTask failed")
@task_prerun.connect
@ -568,15 +559,20 @@ def task_prerun_handler(sender=None, task_id=None, task=None, **kwargs):
task_instance.status = states.STARTED
task_instance.date_started = timezone.now()
task_instance.save()
except Exception as e: # pragma: no cover
except Exception: # pragma: no cover
# Don't let an exception in the signal handlers prevent
# a document from being consumed.
logger.error(f"Setting PaperlessTask started failed: {e}", exc_info=True)
logger.exception("Setting PaperlessTask started failed")
@task_postrun.connect
def task_postrun_handler(
sender=None, task_id=None, task=None, retval=None, state=None, **kwargs
sender=None,
task_id=None,
task=None,
retval=None,
state=None,
**kwargs,
):
"""
Updates the result of the PaperlessTask.
@ -591,7 +587,7 @@ def task_postrun_handler(
task_instance.result = retval
task_instance.date_done = timezone.now()
task_instance.save()
except Exception as e: # pragma: no cover
except Exception: # pragma: no cover
# Don't let an exception in the signal handlers prevent
# a document from being consumed.
logger.error(f"Updating PaperlessTask failed: {e}", exc_info=True)
logger.exception("Updating PaperlessTask failed")

@ -297,7 +297,7 @@ def update_document_archive_file(document_id):
except Exception:
logger.exception(
f"Error while parsing document {document} " f"(ID: {document_id})",
f"Error while parsing document {document} (ID: {document_id})",
)
finally:
parser.cleanup()

@ -1,8 +1,8 @@
from factory import Faker
from factory.django import DjangoModelFactory
from ..models import Correspondent
from ..models import Document
from documents.models import Correspondent
from documents.models import Document
class CorrespondentFactory(DjangoModelFactory):

@ -17,7 +17,7 @@ import celery
try:
import zoneinfo
except ImportError:
import backports.zoneinfo as zoneinfo
from backports import zoneinfo
import pytest
from django.conf import settings
@ -110,9 +110,9 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
def test_document_fields(self):
c = Correspondent.objects.create(name="c", pk=41)
dt = DocumentType.objects.create(name="dt", pk=63)
tag = Tag.objects.create(name="t", pk=85)
Tag.objects.create(name="t", pk=85)
storage_path = StoragePath.objects.create(name="sp", pk=77, path="p")
doc = Document.objects.create(
Document.objects.create(
title="WOW",
content="the content",
correspondent=c,
@ -877,7 +877,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
added=timezone.make_aware(datetime.datetime(2020, 7, 13)),
content="test",
)
d6 = Document.objects.create(checksum="6", content="test2")
Document.objects.create(checksum="6", content="test2")
d7 = Document.objects.create(checksum="7", storage_path=sp, content="test")
with AsyncWriter(index.open_index()) as writer:
@ -1046,13 +1046,13 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
mime_type="application/pdf",
content="abc",
)
doc2 = Document.objects.create(
Document.objects.create(
title="none2",
checksum="B",
mime_type="application/pdf",
content="123",
)
doc3 = Document.objects.create(
Document.objects.create(
title="none3",
checksum="C",
mime_type="text/plain",
@ -1546,14 +1546,14 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
show_on_dashboard=False,
show_in_sidebar=False,
)
v2 = SavedView.objects.create(
SavedView.objects.create(
owner=u2,
name="test2",
sort_field="",
show_on_dashboard=False,
show_in_sidebar=False,
)
v3 = SavedView.objects.create(
SavedView.objects.create(
owner=u2,
name="test3",
sort_field="",
@ -1594,7 +1594,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
def test_create_update_patch(self):
u1 = User.objects.create_user("user1")
User.objects.create_user("user1")
view = {
"name": "test",
@ -3020,7 +3020,7 @@ class TestBulkDownload(DirectoriesMixin, APITestCase):
self.assertEqual(f.read(), zipf.read("2021-01-01 document A_01.pdf"))
def test_compression(self):
response = self.client.post(
self.client.post(
self.ENDPOINT,
json.dumps(
{"documents": [self.doc2.id, self.doc2b.id], "compression": "lzma"},
@ -3271,7 +3271,7 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
user = User.objects.create_user(username="test")
self.client.force_authenticate(user)
d = Document.objects.create(title="Test")
Document.objects.create(title="Test")
self.assertEqual(
self.client.get("/api/documents/").status_code,
@ -3305,7 +3305,7 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
user.user_permissions.add(*Permission.objects.all())
self.client.force_authenticate(user)
d = Document.objects.create(title="Test")
Document.objects.create(title="Test")
self.assertEqual(
self.client.get("/api/documents/").status_code,
@ -3696,7 +3696,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
THEN:
- No task data is returned
"""
task1 = PaperlessTask.objects.create(
PaperlessTask.objects.create(
task_id=str(uuid.uuid4()),
task_file_name="task_one.pdf",
)
@ -3746,7 +3746,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
THEN:
- The returned data includes the task result
"""
task = PaperlessTask.objects.create(
PaperlessTask.objects.create(
task_id=str(uuid.uuid4()),
task_file_name="task_one.pdf",
status=celery.states.SUCCESS,
@ -3772,7 +3772,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
THEN:
- The returned result is the exception info
"""
task = PaperlessTask.objects.create(
PaperlessTask.objects.create(
task_id=str(uuid.uuid4()),
task_file_name="task_one.pdf",
status=celery.states.FAILURE,
@ -3801,7 +3801,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
THEN:
- Returned data include the filename
"""
task = PaperlessTask.objects.create(
PaperlessTask.objects.create(
task_id=str(uuid.uuid4()),
task_file_name="test.pdf",
task_name="documents.tasks.some_task",
@ -3827,7 +3827,7 @@ class TestTasks(DirectoriesMixin, APITestCase):
THEN:
- Returned data include the filename
"""
task = PaperlessTask.objects.create(
PaperlessTask.objects.create(
task_id=str(uuid.uuid4()),
task_file_name="anothertest.pdf",
task_name="documents.tasks.some_task",

@ -1,7 +1,7 @@
import os
import shutil
from pathlib import Path
from unittest import mock
import platform
import pytest
from django.conf import settings
@ -11,19 +11,11 @@ from documents import barcodes
from documents import tasks
from documents.consumer import ConsumerError
from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
from documents.data_models import DocumentSource
from documents.tests.utils import DirectoriesMixin
from documents.tests.utils import FileSystemAssertsMixin
from PIL import Image
try:
import zxingcpp
ZXING_AVAILIBLE = True
except ImportError:
ZXING_AVAILIBLE = False
@override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR")
class TestBarcode(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
@ -459,7 +451,7 @@ class TestBarcode(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
self.assertDictEqual(separator_page_numbers, {})
@override_settings(CONSUMER_BARCODE_STRING="ADAR-NEXTDOC")
def test_scan_file_for_separating_qr_barcodes(self):
def test_scan_file_qr_barcodes_was_problem(self):
"""
GIVEN:
- Input PDF with certain QR codes that aren't detected at current size
@ -1068,7 +1060,7 @@ class TestAsnBarcode(DirectoriesMixin, TestCase):
@pytest.mark.skipif(
not ZXING_AVAILIBLE,
platform.machine().upper() not in {"AMD64"},
reason="No zxingcpp",
)
@override_settings(CONSUMER_BARCODE_SCANNER="ZXING")
@ -1077,7 +1069,7 @@ class TestBarcodeZxing(TestBarcode):
@pytest.mark.skipif(
not ZXING_AVAILIBLE,
platform.machine().upper() not in {"AMD64"},
reason="No zxingcpp",
)
@override_settings(CONSUMER_BARCODE_SCANNER="ZXING")

@ -386,7 +386,6 @@ class TestClassifier(DirectoriesMixin, TestCase):
# rebuilding the file and committing that. Not developer friendly
# Need to rethink how to pass the load through to a file with a single
# old model?
pass
def test_one_correspondent_predict(self):
c1 = Correspondent.objects.create(
@ -516,7 +515,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
self.assertListEqual(self.classifier.predict_tags(doc1.content), [t1.pk])
def test_one_tag_predict_unassigned(self):
t1 = Tag.objects.create(name="t1", matching_algorithm=Tag.MATCH_AUTO, pk=12)
Tag.objects.create(name="t1", matching_algorithm=Tag.MATCH_AUTO, pk=12)
doc1 = Document.objects.create(
title="doc1",
@ -643,7 +642,7 @@ class TestClassifier(DirectoriesMixin, TestCase):
self.assertIsNotNone(classifier)
with mock.patch("documents.classifier.DocumentClassifier.load") as load:
classifier2 = load_classifier()
load_classifier()
load.assert_not_called()
@mock.patch("documents.classifier.DocumentClassifier.load")

@ -12,23 +12,23 @@ from dateutil import tz
try:
import zoneinfo
except ImportError:
import backports.zoneinfo as zoneinfo
from backports import zoneinfo
from django.conf import settings
from django.utils import timezone
from django.test import override_settings
from django.test import TestCase
from ..consumer import Consumer
from ..consumer import ConsumerError
from ..models import Correspondent
from ..models import Document
from ..models import DocumentType
from ..models import FileInfo
from ..models import Tag
from ..parsers import DocumentParser
from ..parsers import ParseError
from ..tasks import sanity_check
from documents.consumer import Consumer
from documents.consumer import ConsumerError
from documents.models import Correspondent
from documents.models import Document
from documents.models import DocumentType
from documents.models import FileInfo
from documents.models import Tag
from documents.parsers import DocumentParser
from documents.parsers import ParseError
from documents.tasks import sanity_check
from .utils import DirectoriesMixin
from documents.tests.utils import FileSystemAssertsMixin
@ -72,8 +72,8 @@ class TestFieldPermutations(TestCase):
"20150102030405Z",
"20150102Z",
)
valid_correspondents = ["timmy", "Dr. McWheelie", "Dash Gor-don", "ο Θερμαστής", ""]
valid_titles = ["title", "Title w Spaces", "Title a-dash", "Τίτλος", ""]
valid_correspondents = ["timmy", "Dr. McWheelie", "Dash Gor-don", "o Θεpμaoτής", ""]
valid_titles = ["title", "Title w Spaces", "Title a-dash", "Tίτλoς", ""]
valid_tags = ["tag", "tig,tag", "tag1,tag2,tag-3"]
def _test_guessed_attributes(
@ -135,9 +135,7 @@ class TestFieldPermutations(TestCase):
filename = "tag1,tag2_20190908_180610_0001.pdf"
all_patt = re.compile("^.*$")
none_patt = re.compile("$a")
exact_patt = re.compile("^([a-z0-9,]+)_(\\d{8})_(\\d{6})_([0-9]+)\\.")
repl1 = " - \\4 - \\1." # (empty) corrspondent, title and tags
repl2 = "\\2Z - " + repl1 # creation date + repl1
re.compile("^([a-z0-9,]+)_(\\d{8})_(\\d{6})_([0-9]+)\\.")
# No transformations configured (= default)
info = FileInfo.from_filename(filename)
@ -177,10 +175,6 @@ class TestFieldPermutations(TestCase):
class DummyParser(DocumentParser):
def get_thumbnail(self, document_path, mime_type, file_name=None):
# not important during tests
raise NotImplementedError()
def __init__(self, logging_group, scratch_dir, archive_path):
super().__init__(logging_group, None)
_, self.fake_thumb = tempfile.mkstemp(suffix=".webp", dir=scratch_dir)
@ -197,9 +191,6 @@ class CopyParser(DocumentParser):
def get_thumbnail(self, document_path, mime_type, file_name=None):
return self.fake_thumb
def get_thumbnail(self, document_path, mime_type, file_name=None):
return self.fake_thumb
def __init__(self, logging_group, progress_callback=None):
super().__init__(logging_group, progress_callback)
_, self.fake_thumb = tempfile.mkstemp(suffix=".webp", dir=self.tempdir)
@ -211,10 +202,6 @@ class CopyParser(DocumentParser):
class FaultyParser(DocumentParser):
def get_thumbnail(self, document_path, mime_type, file_name=None):
# not important during tests
raise NotImplementedError()
def __init__(self, logging_group, scratch_dir):
super().__init__(logging_group)
_, self.fake_thumb = tempfile.mkstemp(suffix=".webp", dir=scratch_dir)

@ -46,7 +46,7 @@ class TestDate(TestCase):
)
def test_date_format_5(self):
text = "lorem ipsum 130218, 2018, 20180213 and lorem 13.02.2018 lorem " "ipsum"
text = "lorem ipsum 130218, 2018, 20180213 and lorem 13.02.2018 lorem ipsum"
date = parse_date("", text)
self.assertEqual(
date,
@ -68,7 +68,7 @@ class TestDate(TestCase):
self.assertEqual(parse_date("", text), None)
def test_date_format_7(self):
text = "lorem ipsum\n" "März 2019\n" "lorem ipsum"
text = "lorem ipsum\nMärz 2019\nlorem ipsum"
date = parse_date("", text)
self.assertEqual(
date,
@ -95,7 +95,7 @@ class TestDate(TestCase):
@override_settings(SCRATCH_DIR=SCRATCH)
def test_date_format_9(self):
text = "lorem ipsum\n" "27. Nullmonth 2020\n" "März 2020\n" "lorem ipsum"
text = "lorem ipsum\n27. Nullmonth 2020\nMärz 2020\nlorem ipsum"
self.assertEqual(
parse_date("", text),
datetime.datetime(2020, 3, 1, 0, 0, tzinfo=tz.gettz(settings.TIME_ZONE)),
@ -262,7 +262,7 @@ class TestDate(TestCase):
THEN:
- Should parse the date non-ignored date from content
"""
text = "lorem ipsum 110319, 20200117 and lorem 13.02.2018 lorem " "ipsum"
text = "lorem ipsum 110319, 20200117 and lorem 13.02.2018 lorem ipsum"
self.assertEqual(
parse_date("", text),
datetime.datetime(2018, 2, 13, 0, 0, tzinfo=tz.gettz(settings.TIME_ZONE)),
@ -283,7 +283,7 @@ class TestDate(TestCase):
THEN:
- Should parse the date non-ignored date from content
"""
text = "lorem ipsum 190311, 20200117 and lorem 13.02.2018 lorem " "ipsum"
text = "lorem ipsum 190311, 20200117 and lorem 13.02.2018 lorem ipsum"
self.assertEqual(
parse_date("", text),

@ -6,14 +6,14 @@ from unittest import mock
try:
import zoneinfo
except ImportError:
import backports.zoneinfo as zoneinfo
from backports import zoneinfo
from django.test import override_settings
from django.test import TestCase
from django.utils import timezone
from ..models import Correspondent
from ..models import Document
from documents.models import Correspondent
from documents.models import Document
class TestDocument(TestCase):

@ -10,17 +10,16 @@ from django.db import DatabaseError
from django.test import override_settings
from django.test import TestCase
from django.utils import timezone
from documents.tests.utils import FileSystemAssertsMixin
from ..file_handling import create_source_path_directory
from ..file_handling import delete_empty_directories
from ..file_handling import generate_filename
from ..models import Correspondent
from ..models import Document
from ..models import DocumentType
from ..models import StoragePath
from .utils import DirectoriesMixin
from .utils import FileSystemAssertsMixin
from documents.file_handling import create_source_path_directory
from documents.file_handling import delete_empty_directories
from documents.file_handling import generate_filename
from documents.models import Correspondent
from documents.models import Document
from documents.models import DocumentType
from documents.models import StoragePath
from documents.tests.utils import DirectoriesMixin
from documents.tests.utils import FileSystemAssertsMixin
class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
@ -121,7 +120,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
@override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
def test_file_renaming_database_error(self):
document1 = Document.objects.create(
Document.objects.create(
mime_type="application/pdf",
storage_type=Document.STORAGE_TYPE_UNENCRYPTED,
checksum="AAAAA",
@ -171,7 +170,6 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
Path(document.source_path).touch()
# Ensure file deletion after delete
pk = document.pk
document.delete()
self.assertIsNotFile(
os.path.join(settings.ORIGINALS_DIR, "none", "none.pdf"),
@ -440,7 +438,6 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
# Check proper handling of files
self.assertIsDir(os.path.join(settings.ORIGINALS_DIR, "none/none"))
pk = document.pk
document.delete()
self.assertIsNotFile(
@ -705,7 +702,7 @@ class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, Test
def test_move_archive_error(self, m):
def fake_rename(src, dst):
if "archive" in str(src):
raise OSError()
raise OSError
else:
os.remove(src)
Path(dst).touch()
@ -756,7 +753,7 @@ class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, Test
def test_move_file_error(self, m):
def fake_rename(src, dst):
if "original" in str(src):
raise OSError()
raise OSError
else:
os.remove(src)
Path(dst).touch()
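`raise OSError()` turning into `raise OSError` is ruff's RSE102 (unnecessary parentheses on raised exception): raising a class implicitly instantiates it with no arguments, so the two spellings behave identically. For instance:

    try:
        raise OSError  # instantiated implicitly
    except OSError as exc:
        assert isinstance(exc, OSError)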

View File

@ -2,7 +2,7 @@ from django.core.management.base import CommandError
from django.test import TestCase
from documents.settings import EXPORTER_FILE_NAME
from ..management.commands.document_importer import Command
from documents.management.commands.document_importer import Command
class TestImporter(TestCase):

View File

@ -13,7 +13,6 @@ from django.test import override_settings
from django.test import TransactionTestCase
from documents.consumer import ConsumerError
from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
from documents.management.commands import document_consumer
from documents.models import Tag
from documents.tests.utils import DirectoriesMixin

View File

@ -204,7 +204,7 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
self.assertEqual(element["fields"]["document"], self.d1.id)
self.assertEqual(element["fields"]["user"], self.user.id)
with paperless_environment() as dirs:
with paperless_environment():
self.assertEqual(Document.objects.count(), 4)
Document.objects.all().delete()
Correspondent.objects.all().delete()
@ -345,7 +345,7 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
os.path.join(self.dirs.media_dir, "documents"),
)
m = self._do_export(use_filename_format=True)
self._do_export(use_filename_format=True)
self.assertIsFile(os.path.join(self.target, "wow1", "c.pdf"))
self.assertIsFile(os.path.join(self.target, "manifest.json"))
@ -537,7 +537,7 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
)
self.assertFalse(has_archive)
with paperless_environment() as dirs:
with paperless_environment():
self.assertEqual(Document.objects.count(), 4)
Document.objects.all().delete()
self.assertEqual(Document.objects.count(), 0)
@ -580,7 +580,7 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
)
self.assertFalse(has_thumbnail)
with paperless_environment() as dirs:
with paperless_environment():
self.assertEqual(Document.objects.count(), 4)
Document.objects.all().delete()
self.assertEqual(Document.objects.count(), 0)
@ -609,7 +609,7 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
has_document = has_document or element["model"] == "documents.document"
self.assertFalse(has_document)
with paperless_environment() as dirs:
with paperless_environment():
self.assertEqual(Document.objects.count(), 4)
Document.objects.all().delete()
self.assertEqual(Document.objects.count(), 0)
@ -631,9 +631,9 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
os.path.join(self.dirs.media_dir, "documents"),
)
manifest = self._do_export(use_folder_prefix=True)
self._do_export(use_folder_prefix=True)
with paperless_environment() as dirs:
with paperless_environment():
self.assertEqual(Document.objects.count(), 4)
Document.objects.all().delete()
self.assertEqual(Document.objects.count(), 0)
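The dropped bindings in these exporter tests (`as dirs` on the context managers, `m = ` and `manifest = ` on the export calls) are F841 fixes: locals assigned but never read, where the call or context manager is kept only for its side effects. A toy version of the pattern, with a hypothetical helper standing in for the project's `_do_export`:

    def do_export():
        # stand-in: pretend this writes files and returns a manifest
        return {"documents": []}

    def test_export_before():
        manifest = do_export()  # F841: bound, never read

    def test_export_after():
        do_export()  # side effects only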

View File

@ -8,12 +8,12 @@ from django.contrib.auth.models import User
from django.test import override_settings
from django.test import TestCase
from .. import matching
from ..models import Correspondent
from ..models import Document
from ..models import DocumentType
from ..models import Tag
from ..signals import document_consumption_finished
from documents import matching
from documents.models import Correspondent
from documents.models import Document
from documents.models import DocumentType
from documents.models import Tag
from documents.signals import document_consumption_finished
class _TestMatchingBase(TestCase):

View File

@ -310,7 +310,7 @@ class TestMigrateArchiveFilesErrors(DirectoriesMixin, TestMigrations):
def test_parser_missing(self):
Document = self.apps.get_model("documents", "Document")
doc1 = make_test_document(
make_test_document(
Document,
"document",
"invalid/typesss768",
@ -318,7 +318,7 @@ class TestMigrateArchiveFilesErrors(DirectoriesMixin, TestMigrations):
"document.png",
simple_pdf,
)
doc2 = make_test_document(
make_test_document(
Document,
"document",
"invalid/typesss768",
@ -462,7 +462,7 @@ class TestMigrateArchiveFilesBackwards(
Document = apps.get_model("documents", "Document")
doc_unrelated = make_test_document(
make_test_document(
Document,
"unrelated",
"application/pdf",
@ -471,14 +471,14 @@ class TestMigrateArchiveFilesBackwards(
simple_pdf2,
"unrelated.pdf",
)
doc_no_archive = make_test_document(
make_test_document(
Document,
"no_archive",
"text/plain",
simple_txt,
"no_archive.txt",
)
clashB = make_test_document(
make_test_document(
Document,
"clash",
"image/jpeg",

View File

@ -1,14 +1,14 @@
from django.test import TestCase
from ..models import Correspondent
from ..models import Document
from documents.models import Correspondent
from documents.models import Document
from .factories import CorrespondentFactory
from .factories import DocumentFactory
class CorrespondentTestCase(TestCase):
def test___str__(self):
for s in ("test", "οχι", "test with fun_charÅc'\"terß"):
for s in ("test", "oχi", "test with fun_charÅc'\"terß"):
correspondent = CorrespondentFactory.create(name=s)
self.assertEqual(str(correspondent), s)
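The test string `"οχι"` becoming `"oχi"` is ruff's RUF001, which flags literals containing characters easily confused with ASCII: the Greek omicron and iota are replaced with Latin o and i, while the unambiguous chi stays. The difference is easy to demonstrate:

    latin_o = "o"   # U+006F LATIN SMALL LETTER O
    omicron = "ο"   # U+03BF GREEK SMALL LETTER OMICRON
    assert latin_o != omicron
    assert ord(omicron) == 0x3BF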

View File

@ -94,7 +94,7 @@ class TestParserDiscovery(TestCase):
- No parser class is returned
"""
m.return_value = []
with TemporaryDirectory() as tmpdir:
with TemporaryDirectory():
self.assertIsNone(get_parser_class_for_mime_type("application/pdf"))
@mock.patch("documents.parsers.document_consumer_declaration.send")

View File

@ -149,7 +149,7 @@ class TestSanityCheck(DirectoriesMixin, TestCase):
)
def test_orphaned_file(self):
doc = self.make_test_data()
self.make_test_data()
Path(self.dirs.originals_dir, "orphaned").touch()
messages = check_sanity()
self.assertTrue(messages.has_warning)

View File

@ -4,7 +4,6 @@ from unittest import mock
import celery
from django.test import TestCase
from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
from documents.data_models import DocumentSource
from documents.models import PaperlessTask
from documents.signals.handlers import before_task_publish_handler

View File

@ -47,7 +47,7 @@ class TestViews(TestCase):
self.client.cookies.load(
{settings.LANGUAGE_COOKIE_NAME: language_given},
)
elif settings.LANGUAGE_COOKIE_NAME in self.client.cookies.keys():
elif settings.LANGUAGE_COOKIE_NAME in self.client.cookies:
self.client.cookies.pop(settings.LANGUAGE_COOKIE_NAME)
response = self.client.get(
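`in self.client.cookies.keys()` becoming `in self.client.cookies` is SIM118: membership tests on a mapping already consult its keys, so the `.keys()` call is redundant. Equivalent on any dict-like object:

    cookies = {"django_language": "en"}
    assert ("django_language" in cookies.keys()) == ("django_language" in cookies)
    if "django_language" in cookies:  # idiomatic form
        cookies.pop("django_language")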

View File

@ -265,10 +265,7 @@ class DocumentViewSet(
def get_serializer(self, *args, **kwargs):
super().get_serializer(*args, **kwargs)
fields_param = self.request.query_params.get("fields", None)
if fields_param:
fields = fields_param.split(",")
else:
fields = None
fields = fields_param.split(",") if fields_param else None
truncate_content = self.request.query_params.get("truncate_content", "False")
serializer_class = self.get_serializer_class()
kwargs.setdefault("context", self.get_serializer_context())
@ -358,7 +355,7 @@ class DocumentViewSet(
try:
doc = Document.objects.get(pk=pk)
except Document.DoesNotExist:
raise Http404()
raise Http404
meta = {
"original_checksum": doc.checksum,
@ -422,7 +419,7 @@ class DocumentViewSet(
response = self.file_response(pk, request, "inline")
return response
except (FileNotFoundError, Document.DoesNotExist):
raise Http404()
raise Http404
@action(methods=["get"], detail=True)
@method_decorator(cache_control(public=False, max_age=315360000))
@ -438,14 +435,14 @@ class DocumentViewSet(
return HttpResponse(handle, content_type="image/webp")
except (FileNotFoundError, Document.DoesNotExist):
raise Http404()
raise Http404
@action(methods=["get"], detail=True)
def download(self, request, pk=None):
try:
return self.file_response(pk, request, "attachment")
except (FileNotFoundError, Document.DoesNotExist):
raise Http404()
raise Http404
def getNotes(self, doc):
return [
@ -468,7 +465,7 @@ class DocumentViewSet(
try:
doc = Document.objects.get(pk=pk)
except Document.DoesNotExist:
raise Http404()
raise Http404
currentUser = request.user
@ -569,7 +566,7 @@ class UnifiedSearchViewSet(DocumentViewSet):
elif "more_like_id" in self.request.query_params:
query_class = index.DelayedMoreLikeThisQuery
else:
raise ValueError()
raise ValueError
return query_class(
self.searcher,
@ -606,12 +603,12 @@ class LogViewSet(ViewSet):
def retrieve(self, request, pk=None, *args, **kwargs):
if pk not in self.log_files:
raise Http404()
raise Http404
filename = self.get_log_filename(pk)
if not os.path.isfile(filename):
raise Http404()
raise Http404
with open(filename) as f:
lines = [line.rstrip() for line in f.readlines()]
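Two rules shape the views.py hunks: SIM108 collapses the if/else that assigned `fields` into a conditional expression, and RSE102 strips the parentheses from each bare `raise Http404()`. The ternary on its own, using the same names as the hunk above and py38-compatible typing:

    from typing import List, Optional

    def parse_fields(fields_param: Optional[str]) -> Optional[List[str]]:
        # Replaces: if fields_param: fields = fields_param.split(",")
        #           else:            fields = None
        return fields_param.split(",") if fields_param else None

    assert parse_fields("id,title") == ["id", "title"]
    assert parse_fields(None) is None
    assert parse_fields("") is None  # empty string is falsy, same as before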

0 src/manage.py Normal file → Executable file
View File

View File

@ -42,7 +42,7 @@ def path_check(var, directory):
Error(
writeable_message.format(var),
writeable_hint.format(
f"\n{dir_mode} {dir_owner} {dir_group} " f"{directory}\n",
f"\n{dir_mode} {dir_owner} {dir_group} {directory}\n",
),
),
)
@ -158,7 +158,7 @@ def settings_values_check(app_configs, **kwargs):
try:
import zoneinfo
except ImportError: # pragma: nocover
import backports.zoneinfo as zoneinfo
from backports import zoneinfo
msgs = []
if settings.TIME_ZONE not in zoneinfo.available_timezones():
msgs.append(

View File

@ -12,13 +12,13 @@ class StatusConsumer(WebsocketConsumer):
def connect(self):
if not self._authenticated():
raise DenyConnection()
raise DenyConnection
else:
async_to_sync(self.channel_layer.group_add)(
"status_updates",
self.channel_name,
)
raise AcceptConnection()
raise AcceptConnection
def disconnect(self, close_code):
async_to_sync(self.channel_layer.group_discard)(

View File

@ -65,9 +65,11 @@ class UserSerializer(serializers.ModelSerializer):
if "user_permissions" in validated_data:
user_permissions = validated_data.pop("user_permissions")
password = None
if "password" in validated_data:
if len(validated_data.get("password").replace("*", "")) > 0:
password = validated_data.pop("password")
if (
"password" in validated_data
and len(validated_data.get("password").replace("*", "")) > 0
):
password = validated_data.pop("password")
user = User.objects.create(**validated_data)
# set groups
if groups:
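The serializer hunk is SIM102 (collapsible nested if): the inner length check only ever ran when the outer key check passed, so the two conditions join with `and`, and short-circuit evaluation preserves that ordering. Reproduced with a toy payload:

    validated_data = {"password": "**secret**"}

    password = None
    if (
        "password" in validated_data
        and len(validated_data.get("password").replace("*", "")) > 0
    ):
        password = validated_data.pop("password")

    assert password == "**secret**"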

View File

@ -282,7 +282,8 @@ INSTALLED_APPS = [
"django_filters",
"django_celery_results",
"guardian",
] + env_apps
*env_apps,
]
if DEBUG:
INSTALLED_APPS.append("channels")
@ -398,10 +399,7 @@ if ENABLE_HTTP_REMOTE_USER:
)
# X-Frame options for embedded PDF display:
if DEBUG:
X_FRAME_OPTIONS = "ANY"
else:
X_FRAME_OPTIONS = "SAMEORIGIN"
X_FRAME_OPTIONS = "ANY" if DEBUG else "SAMEORIGIN"
# The next 3 settings can also be set using just PAPERLESS_URL
@ -424,7 +422,7 @@ if _paperless_url:
_paperless_uri = urlparse(_paperless_url)
CSRF_TRUSTED_ORIGINS.append(_paperless_url)
CORS_ALLOWED_ORIGINS.append(_paperless_url)
if ALLOWED_HOSTS != ["*"]:
if ["*"] != ALLOWED_HOSTS:
ALLOWED_HOSTS.append(_paperless_uri.hostname)
else:
# always allow localhost. Necessary e.g. for healthcheck in docker.
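settings.py picks up two more patterns: the trailing `] + env_apps` list concatenation becomes `*env_apps,` inside the literal (RUF005, prefer unpacking over concatenation), and the DEBUG-dependent `X_FRAME_OPTIONS` if/else becomes a one-line conditional (SIM108). The unpacking form with stand-in values:

    env_apps = ["django_extensions"]  # illustrative, normally derived from the env

    installed_apps = [
        "django_filters",
        "guardian",
        *env_apps,  # RUF005: no list concatenation needed
    ]

    assert installed_apps == ["django_filters", "guardian"] + env_apps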

View File

@ -15,18 +15,18 @@ def handle_failed_login(sender, credentials, request, **kwargs):
if client_ip is None:
logger.info(
f"Login failed for user `{credentials['username']}`."
+ " Unable to determine IP address.",
" Unable to determine IP address.",
)
else:
if is_routable:
# We got the client's IP address
logger.info(
f"Login failed for user `{credentials['username']}`"
+ f" from IP `{client_ip}.`",
f" from IP `{client_ip}.`",
)
else:
# The client's IP address is private
logger.info(
f"Login failed for user `{credentials['username']}`"
+ f" from private IP `{client_ip}.`",
f" from private IP `{client_ip}.`",
)
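Dropping the `+` between the adjacent f-strings is ISC003 (explicitly concatenated literals): Python joins neighbouring string literals, f-strings included, during parsing, so the operator adds nothing. With placeholder values:

    username, client_ip = "alice", "203.0.113.7"

    explicit = f"Login failed for user `{username}`" + f" from IP `{client_ip}.`"
    implicit = f"Login failed for user `{username}`" f" from IP `{client_ip}.`"
    assert explicit == implicit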

View File

View File

@ -56,61 +56,57 @@ urlpatterns = [
include(
[
re_path(
r"^auth/",
"^auth/",
include(
("rest_framework.urls", "rest_framework"),
namespace="rest_framework",
),
),
re_path(
r"^search/autocomplete/",
"^search/autocomplete/",
SearchAutoCompleteView.as_view(),
name="autocomplete",
),
re_path(r"^statistics/", StatisticsView.as_view(), name="statistics"),
re_path("^statistics/", StatisticsView.as_view(), name="statistics"),
re_path(
r"^documents/post_document/",
"^documents/post_document/",
PostDocumentView.as_view(),
name="post_document",
),
re_path(
r"^documents/bulk_edit/",
"^documents/bulk_edit/",
BulkEditView.as_view(),
name="bulk_edit",
),
re_path(
r"^documents/selection_data/",
"^documents/selection_data/",
SelectionDataView.as_view(),
name="selection_data",
),
re_path(
r"^documents/bulk_download/",
"^documents/bulk_download/",
BulkDownloadView.as_view(),
name="bulk_download",
),
re_path(
r"^remote_version/",
"^remote_version/",
RemoteVersionView.as_view(),
name="remoteversion",
),
re_path("^ui_settings/", UiSettingsView.as_view(), name="ui_settings"),
re_path(
r"^ui_settings/",
UiSettingsView.as_view(),
name="ui_settings",
),
re_path(
r"^acknowledge_tasks/",
"^acknowledge_tasks/",
AcknowledgeTasksView.as_view(),
name="acknowledge_tasks",
),
re_path(
r"^mail_accounts/test/",
"^mail_accounts/test/",
MailAccountTestView.as_view(),
name="mail_accounts_test",
),
path("token/", views.obtain_auth_token),
]
+ api_router.urls,
*api_router.urls,
],
),
),
re_path(r"^favicon.ico$", FaviconView.as_view(), name="favicon"),

View File

@ -18,7 +18,16 @@ class MailAccountAdminForm(forms.ModelForm):
widgets = {
"password": forms.PasswordInput(),
}
fields = "__all__"
fields = [
"name",
"imap_server",
"username",
"imap_security",
"username",
"password",
"is_token",
"character_set",
]
class MailAccountAdmin(admin.ModelAdmin):
@ -27,7 +36,10 @@ class MailAccountAdmin(admin.ModelAdmin):
fieldsets = [
(None, {"fields": ["name", "imap_server", "imap_port"]}),
(_("Authentication"), {"fields": ["imap_security", "username", "password"]}),
(
_("Authentication"),
{"fields": ["imap_security", "username", "password", "is_token"]},
),
(_("Advanced settings"), {"fields": ["character_set"]}),
]
form = MailAccountAdminForm
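Replacing `fields = "__all__"` with an explicit list follows DJ007 (flake8-django): `"__all__"` on a ModelForm silently exposes every current and future model field, so an allow-list is the safer default. A sketch of the shape, which only runs inside a configured Django project and whose model import path is assumed, not confirmed:

    from django import forms
    from paperless_mail.models import MailAccount  # assumed path

    class MailAccountAdminForm(forms.ModelForm):
        class Meta:
            model = MailAccount
            # DJ007: avoid fields = "__all__"
            fields = ["name", "imap_server", "imap_port", "imap_security",
                      "username", "password", "is_token", "character_set"]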

View File

@ -94,7 +94,7 @@ class BaseMailAction:
"""
Perform mail action on the given mail uid in the mailbox.
"""
raise NotImplementedError()
raise NotImplementedError
class DeleteMailAction(BaseMailAction):
@ -152,7 +152,7 @@ class TagMailAction(BaseMailAction):
_, self.color = parameter.split(":")
self.color = self.color.strip()
if not self.color.lower() in APPLE_MAIL_TAG_COLORS.keys():
if self.color.lower() not in APPLE_MAIL_TAG_COLORS.keys():
raise MailError("Not a valid AppleMail tag color.")
self.keyword = None
@ -274,7 +274,7 @@ def apply_mail_action(
status="SUCCESS",
)
except Exception as e:
except Exception:
ProcessedMail.objects.create(
owner=rule.owner,
rule=rule,
@ -285,7 +285,7 @@ def apply_mail_action(
status="FAILED",
error=traceback.format_exc(),
)
raise e
raise
@shared_task
@ -548,7 +548,7 @@ class MailAccountHandler(LoggingMixin):
self.log(
"debug",
f"Rule {rule}: Searching folder with criteria " f"{str(criterias)}",
f"Rule {rule}: Searching folder with criteria {str(criterias)}",
)
try:
@ -582,7 +582,7 @@ class MailAccountHandler(LoggingMixin):
except Exception as e:
self.log(
"error",
f"Rule {rule}: Error while processing mail " f"{message.uid}: {e}",
f"Rule {rule}: Error while processing mail {message.uid}: {e}",
exc_info=True,
)
@ -653,7 +653,7 @@ class MailAccountHandler(LoggingMixin):
for att in message.attachments:
if (
not att.content_disposition == "attachment"
att.content_disposition != "attachment"
and rule.attachment_type
== MailRule.AttachmentProcessing.ATTACHMENTS_ONLY
):
@ -665,14 +665,13 @@ class MailAccountHandler(LoggingMixin):
)
continue
if rule.filter_attachment_filename:
if rule.filter_attachment_filename and not fnmatch(
att.filename.lower(),
rule.filter_attachment_filename.lower(),
):
# Force the filename and pattern to the lowercase
# as this is system dependent otherwise
if not fnmatch(
att.filename.lower(),
rule.filter_attachment_filename.lower(),
):
continue
continue
title = self._get_title(message, att, rule)
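Several rules land in the mail handler at once: `not x == y` becomes `x != y` (SIM201), `not x in y` becomes `x not in y` (E713), the unused `except Exception as e` binding plus `raise e` become a bare `raise`, and the nested filename filter folds into its parent `if` (SIM102). The bare re-raise keeps the active exception flowing after the bookkeeping runs, with no binding needed:

    def record_failure():
        print("writing ProcessedMail row")  # stand-in for the DB write

    def process():
        try:
            raise RuntimeError("boom")
        except Exception:
            record_failure()
            raise  # re-raise the active exception

    try:
        process()
    except RuntimeError as exc:
        assert str(exc) == "boom"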

View File

@ -27,7 +27,8 @@ class Migration(migrations.Migration):
model_name="mailrule",
name="maximum_age",
field=models.PositiveIntegerField(
default=30, help_text="Specified in days."
default=30,
help_text="Specified in days.",
),
),
]

View File

@ -160,35 +160,48 @@ class Migration(migrations.Migration):
model_name="mailrule",
name="filter_body",
field=models.CharField(
blank=True, max_length=256, null=True, verbose_name="filter body"
blank=True,
max_length=256,
null=True,
verbose_name="filter body",
),
),
migrations.AlterField(
model_name="mailrule",
name="filter_from",
field=models.CharField(
blank=True, max_length=256, null=True, verbose_name="filter from"
blank=True,
max_length=256,
null=True,
verbose_name="filter from",
),
),
migrations.AlterField(
model_name="mailrule",
name="filter_subject",
field=models.CharField(
blank=True, max_length=256, null=True, verbose_name="filter subject"
blank=True,
max_length=256,
null=True,
verbose_name="filter subject",
),
),
migrations.AlterField(
model_name="mailrule",
name="folder",
field=models.CharField(
default="INBOX", max_length=256, verbose_name="folder"
default="INBOX",
max_length=256,
verbose_name="folder",
),
),
migrations.AlterField(
model_name="mailrule",
name="maximum_age",
field=models.PositiveIntegerField(
default=30, help_text="Specified in days.", verbose_name="maximum age"
default=30,
help_text="Specified in days.",
verbose_name="maximum age",
),
),
migrations.AlterField(

View File

@ -14,7 +14,9 @@ class Migration(migrations.Migration):
model_name="mailrule",
name="assign_tags",
field=models.ManyToManyField(
blank=True, to="documents.Tag", verbose_name="assign this tag"
blank=True,
to="documents.Tag",
verbose_name="assign this tag",
),
),
]

View File

@ -29,19 +29,25 @@ class Migration(migrations.Migration):
(
"folder",
models.CharField(
editable=False, max_length=256, verbose_name="folder"
editable=False,
max_length=256,
verbose_name="folder",
),
),
(
"uid",
models.CharField(
editable=False, max_length=256, verbose_name="uid"
editable=False,
max_length=256,
verbose_name="uid",
),
),
(
"subject",
models.CharField(
editable=False, max_length=256, verbose_name="subject"
editable=False,
max_length=256,
verbose_name="subject",
),
),
(
@ -59,13 +65,18 @@ class Migration(migrations.Migration):
(
"status",
models.CharField(
editable=False, max_length=256, verbose_name="status"
editable=False,
max_length=256,
verbose_name="status",
),
),
(
"error",
models.TextField(
blank=True, editable=False, null=True, verbose_name="error"
blank=True,
editable=False,
null=True,
verbose_name="error",
),
),
(

View File

@ -13,7 +13,10 @@ class Migration(migrations.Migration):
model_name="mailrule",
name="filter_to",
field=models.CharField(
blank=True, max_length=256, null=True, verbose_name="filter to"
blank=True,
max_length=256,
null=True,
verbose_name="filter to",
),
),
]

View File

@ -13,7 +13,8 @@ class Migration(migrations.Migration):
model_name="mailaccount",
name="is_token",
field=models.BooleanField(
default=False, verbose_name="Is token authentication"
default=False,
verbose_name="Is token authentication",
),
),
]
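The long run of near-identical migration hunks is the COM (flake8-commas) rules interacting with black: once ruff appends the missing trailing comma after the last argument (COM812), black's magic trailing comma splits the call onto one argument per line. On one representative field (requires Django to import):

    from django.db import models

    # before: no trailing comma, black keeps the call on one line
    field = models.CharField(blank=True, max_length=256, verbose_name="filter body")

    # after COM812 adds the comma, black reformats to one argument per line
    field = models.CharField(
        blank=True,
        max_length=256,
        verbose_name="filter body",
    )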

View File

@ -69,7 +69,7 @@ class MailRule(document_models.ModelWithOwner):
class AttachmentProcessing(models.IntegerChoices):
ATTACHMENTS_ONLY = 1, _("Only process attachments.")
EVERYTHING = 2, _("Process all files, including 'inline' " "attachments.")
EVERYTHING = 2, _("Process all files, including 'inline' attachments.")
class MailAction(models.IntegerChoices):
DELETE = 1, _("Delete")

Some files were not shown because too many files have changed in this diff.