Enable ruff FBT002 check and fix resulting complaints

parent b274665e21
commit 4726be0d51
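Ruff's FBT002 rule (flake8-boolean-trap: "boolean default positional argument") flags function definitions whose boolean parameters can be passed positionally, since a bare True or False at a call site gives no hint of what it toggles. The fix applied throughout this diff is mechanical: insert a bare * before the boolean parameters so they become keyword-only, then update any caller that passed them positionally. A minimal sketch of the pattern, with illustrative names rather than code from this diff:

    # Before: FBT002 fires. Callers write export(doc, True), and the
    # meaning of True is invisible at the call site.
    def export(doc, compress: bool = False):
        ...

    # After: the bare * makes every parameter following it keyword-only,
    # so callers must spell out export(doc, compress=True).
    def export(doc, *, compress: bool = False):
        ...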
@@ -32,6 +32,7 @@ extend-select = [
     "RUF", # https://docs.astral.sh/ruff/rules/#ruff-specific-rules-ruf
     "FLY", # https://docs.astral.sh/ruff/rules/#flynt-fly
     "PTH", # https://docs.astral.sh/ruff/rules/#flake8-use-pathlib-pth
+    "FBT002", # https://docs.astral.sh/ruff/rules/#flake8-boolean-trap-fbt
 ]
 ignore = ["DJ001", "SIM105", "RUF012"]

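Since extend-select adds rules on top of the existing selection rather than replacing it, only the new FBT002 entry changes behavior here; the surrounding entries are untouched context. The same check can be run ad hoc (assuming a reasonably recent ruff) with: ruff check --select FBT002 .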
@@ -10,7 +10,7 @@ if TYPE_CHECKING:


 class BulkArchiveStrategy:
-    def __init__(self, zipf: ZipFile, follow_formatting: bool = False) -> None:
+    def __init__(self, zipf: ZipFile, *, follow_formatting: bool = False) -> None:
         self.zipf: ZipFile = zipf
         if follow_formatting:
             self.make_unique_filename: Callable[..., Path | str] = (

@@ -22,6 +22,7 @@ class BulkArchiveStrategy:
     def _filename_only(
         self,
         doc: Document,
+        *,
         archive: bool = False,
         folder: str = "",
     ) -> str:

@@ -33,7 +34,10 @@ class BulkArchiveStrategy:
         """
         counter = 0
         while True:
-            filename: str = folder + doc.get_public_filename(archive, counter)
+            filename: str = folder + doc.get_public_filename(
+                archive=archive,
+                counter=counter,
+            )
             if filename in self.zipf.namelist():
                 counter += 1
             else:

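This hunk is the other half of the keyword-only conversion: get_public_filename (changed in the models hunk further down) no longer accepts archive and counter positionally, so this call site must name them. The payoff is that misuse now fails fast instead of silently binding a value to the wrong flag. A hypothetical illustration, assuming the new signature:

    doc.get_public_filename(True, 1)
    # TypeError: get_public_filename() takes 1 positional argument but 3 were given

    doc.get_public_filename(archive=True, counter=1)  # unambiguous and valid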
@@ -42,6 +46,7 @@ class BulkArchiveStrategy:
     def _formatted_filepath(
         self,
         doc: Document,
+        *,
         archive: bool = False,
         folder: str = "",
     ) -> Path:

@ -245,6 +245,7 @@ def reprocess(doc_ids: list[int]) -> Literal["OK"]:
|
|||||||
def set_permissions(
|
def set_permissions(
|
||||||
doc_ids: list[int],
|
doc_ids: list[int],
|
||||||
set_permissions,
|
set_permissions,
|
||||||
|
*,
|
||||||
owner=None,
|
owner=None,
|
||||||
merge=False,
|
merge=False,
|
||||||
) -> Literal["OK"]:
|
) -> Literal["OK"]:
|
||||||
@ -309,6 +310,7 @@ def rotate(doc_ids: list[int], degrees: int) -> Literal["OK"]:
|
|||||||
|
|
||||||
def merge(
|
def merge(
|
||||||
doc_ids: list[int],
|
doc_ids: list[int],
|
||||||
|
*,
|
||||||
metadata_document_id: int | None = None,
|
metadata_document_id: int | None = None,
|
||||||
delete_originals: bool = False,
|
delete_originals: bool = False,
|
||||||
user: User | None = None,
|
user: User | None = None,
|
||||||
@@ -387,6 +389,7 @@ def merge(
 def split(
     doc_ids: list[int],
     pages: list[list[int]],
+    *,
     delete_originals: bool = False,
     user: User | None = None,
 ) -> Literal["OK"]:

@@ -43,7 +43,7 @@ def delete_empty_directories(directory, root):
         directory = os.path.normpath(os.path.dirname(directory))


-def generate_unique_filename(doc, archive_filename=False):
+def generate_unique_filename(doc, *, archive_filename=False):
     """
     Generates a unique filename for doc in settings.ORIGINALS_DIR.


@@ -77,7 +77,7 @@ def generate_unique_filename(doc, archive_filename=False):
     while True:
         new_filename = generate_filename(
             doc,
-            counter,
+            counter=counter,
             archive_filename=archive_filename,
         )
         if new_filename == old_filename:

@@ -92,6 +92,7 @@ def generate_unique_filename(doc, archive_filename=False):

 def generate_filename(
     doc: Document,
+    *,
     counter=0,
     append_gpg=True,
     archive_filename=False,

@@ -97,7 +97,7 @@ class StoragePathFilterSet(FilterSet):


 class ObjectFilter(Filter):
-    def __init__(self, exclude=False, in_list=False, field_name=""):
+    def __init__(self, *, exclude=False, in_list=False, field_name=""):
         super().__init__()
         self.exclude = exclude
         self.in_list = in_list

@@ -85,7 +85,7 @@ def get_schema() -> Schema:
     )


-def open_index(recreate=False) -> FileIndex:
+def open_index(*, recreate=False) -> FileIndex:
     try:
         if exists_in(settings.INDEX_DIR) and not recreate:
             return open_dir(settings.INDEX_DIR, schema=get_schema())

@@ -101,7 +101,7 @@ def open_index(recreate=False) -> FileIndex:


 @contextmanager
-def open_index_writer(optimize=False) -> AsyncWriter:
+def open_index_writer(*, optimize=False) -> AsyncWriter:
     writer = AsyncWriter(open_index())

     try:

@@ -9,7 +9,7 @@ class Command(BaseCommand):
     # This code is taken almost entirely from https://github.com/wagtail/wagtail/pull/11912 with all credit to the original author.
     help = "Converts UUID columns from char type to the native UUID type used in MariaDB 10.7+ and Django 5.0+."

-    def convert_field(self, model, field_name, null=False):
+    def convert_field(self, model, field_name, *, null=False):
         if model._meta.get_field(field_name).model != model:  # pragma: no cover
             # Field is inherited from a parent model
             return

@@ -84,7 +84,7 @@ def source_path(doc):
     return os.path.join(settings.ORIGINALS_DIR, fname)


-def generate_unique_filename(doc, archive_filename=False):
+def generate_unique_filename(doc, *, archive_filename=False):
     if archive_filename:
         old_filename = doc.archive_filename
         root = settings.ARCHIVE_DIR

@@ -97,7 +97,7 @@ def generate_unique_filename(doc, archive_filename=False):
     while True:
         new_filename = generate_filename(
             doc,
-            counter,
+            counter=counter,
             archive_filename=archive_filename,
         )
         if new_filename == old_filename:

@@ -110,7 +110,7 @@ def generate_unique_filename(doc, archive_filename=False):
     return new_filename


-def generate_filename(doc, counter=0, append_gpg=True, archive_filename=False):
+def generate_filename(doc, *, counter=0, append_gpg=True, archive_filename=False):
     path = ""

     try:

@@ -337,7 +337,7 @@ class Document(SoftDeleteModel, ModelWithOwner):
     def archive_file(self):
         return open(self.archive_path, "rb")

-    def get_public_filename(self, archive=False, counter=0, suffix=None) -> str:
+    def get_public_filename(self, *, archive=False, counter=0, suffix=None) -> str:
         """
         Returns a sanitized filename for the document, not including any paths.
         """

@ -133,6 +133,7 @@ def get_parser_class_for_mime_type(mime_type: str) -> type["DocumentParser"] | N
|
|||||||
def run_convert(
|
def run_convert(
|
||||||
input_file,
|
input_file,
|
||||||
output_file,
|
output_file,
|
||||||
|
*,
|
||||||
density=None,
|
density=None,
|
||||||
scale=None,
|
scale=None,
|
||||||
alpha=None,
|
alpha=None,
|
||||||
|
@@ -58,7 +58,7 @@ def get_groups_with_only_permission(obj, codename):
     return Group.objects.filter(id__in=group_object_perm_group_ids).distinct()


-def set_permissions_for_object(permissions: list[str], object, merge: bool = False):
+def set_permissions_for_object(permissions: list[str], object, *, merge: bool = False):
     """
     Set permissions for an object. The permissions are given as a list of strings
     in the format "action_modelname", e.g. "view_document".

@@ -57,7 +57,7 @@ class SanityCheckFailedException(Exception):
     pass


-def check_sanity(progress=False) -> SanityCheckMessages:
+def check_sanity(*, progress=False) -> SanityCheckMessages:
     messages = SanityCheckMessages()

     present_files = {

@@ -85,6 +85,7 @@ def _suggestion_printer(
 def set_correspondent(
     sender,
     document: Document,
+    *,
     logging_group=None,
     classifier: DocumentClassifier | None = None,
     replace=False,

@@ -140,6 +141,7 @@ def set_correspondent(
 def set_document_type(
     sender,
     document: Document,
+    *,
     logging_group=None,
     classifier: DocumentClassifier | None = None,
     replace=False,

@@ -196,6 +198,7 @@ def set_document_type(
 def set_tags(
     sender,
     document: Document,
+    *,
     logging_group=None,
     classifier: DocumentClassifier | None = None,
     replace=False,

@@ -251,6 +254,7 @@ def set_tags(
 def set_storage_path(
     sender,
     document: Document,
+    *,
     logging_group=None,
     classifier: DocumentClassifier | None = None,
     replace=False,

|
@ -63,7 +63,7 @@ def index_optimize():
|
|||||||
writer.commit(optimize=True)
|
writer.commit(optimize=True)
|
||||||
|
|
||||||
|
|
||||||
def index_reindex(progress_bar_disable=False):
|
def index_reindex(*, progress_bar_disable=False):
|
||||||
documents = Document.objects.all()
|
documents = Document.objects.all()
|
||||||
|
|
||||||
ix = index.open_index(recreate=True)
|
ix = index.open_index(recreate=True)
|
||||||
|
@@ -165,6 +165,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
         self,
         query: list,
         reference_predicate: Callable[[DocumentWrapper], bool],
+        *,
         match_nothing_ok=False,
     ):
         """

@@ -535,7 +535,12 @@ class TestPDFActions(DirectoriesMixin, TestCase):
         metadata_document_id = self.doc1.id
         user = User.objects.create(username="test_user")

-        result = bulk_edit.merge(doc_ids, None, False, user)
+        result = bulk_edit.merge(
+            doc_ids,
+            metadata_document_id=None,
+            delete_originals=False,
+            user=user,
+        )

         expected_filename = (
             f"{'_'.join([str(doc_id) for doc_id in doc_ids])[:100]}_merged.pdf"

@@ -638,7 +643,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
         doc_ids = [self.doc2.id]
         pages = [[1, 2], [3]]
         user = User.objects.create(username="test_user")
-        result = bulk_edit.split(doc_ids, pages, False, user)
+        result = bulk_edit.split(doc_ids, pages, delete_originals=False, user=user)
         self.assertEqual(mock_consume_file.call_count, 2)
         consume_file_args, _ = mock_consume_file.call_args
         self.assertEqual(consume_file_args[1].title, "B (split 2)")

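Tests calling the converted functions have to adopt the keyword form as well. One related subtlety, shown in a small self-contained sketch (assumed names, not code from this diff): arguments passed by keyword land in the kwargs half of a mock's call_args, so assertions written against positional arguments may need the same treatment.

    from unittest.mock import Mock

    mock_split = Mock()
    mock_split([1, 2], delete_originals=False, user=None)

    args, kwargs = mock_split.call_args
    assert args == ([1, 2],)                    # positional part
    assert kwargs["delete_originals"] is False  # keyword part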
@@ -233,7 +233,7 @@ class FaultyGenericExceptionParser(_BaseTestParser):
         raise Exception("Generic exception.")


-def fake_magic_from_file(file, mime=False):
+def fake_magic_from_file(file, *, mime=False):
     if mime:
         if file.name.startswith("invalid_pdf"):
             return "application/octet-stream"

@@ -93,7 +93,7 @@ class ConsumerThreadMixin(DocumentConsumeDelayMixin):
         else:
             print("Consumed a perfectly valid file.")  # noqa: T201

-    def slow_write_file(self, target, incomplete=False):
+    def slow_write_file(self, target, *, incomplete=False):
         with open(self.sample_file, "rb") as f:
             pdf_bytes = f.read()


@@ -188,7 +188,7 @@ class TestExportImport(

         return manifest

-    def test_exporter(self, use_filename_format=False):
+    def test_exporter(self, *, use_filename_format=False):
         shutil.rmtree(os.path.join(self.dirs.media_dir, "documents"))
         shutil.copytree(
             os.path.join(os.path.dirname(__file__), "samples", "documents"),

@@ -23,6 +23,7 @@ class _TestMatchingBase(TestCase):
         match_algorithm: str,
         should_match: Iterable[str],
         no_match: Iterable[str],
+        *,
         case_sensitive: bool = False,
     ):
         for klass in (Tag, Correspondent, DocumentType):

@@ -1608,7 +1608,7 @@ class BulkDownloadView(GenericAPIView):
             strategy_class = ArchiveOnlyStrategy

         with zipfile.ZipFile(temp.name, "w", compression) as zipf:
-            strategy = strategy_class(zipf, follow_filename_format)
+            strategy = strategy_class(zipf, follow_formatting=follow_filename_format)
             for document in documents:
                 strategy.add_document(document)

@@ -124,7 +124,7 @@ class BogusMailBox(AbstractContextManager):
         if username != self.USERNAME or access_token != self.ACCESS_TOKEN:
             raise MailboxLoginError("BAD", "OK")

-    def fetch(self, criteria, mark_seen, charset="", bulk=True):
+    def fetch(self, criteria, mark_seen, charset="", *, bulk=True):
         msg = self.messages

         criteria = str(criteria).strip("()").split(" ")

@@ -190,7 +190,7 @@ class BogusMailBox(AbstractContextManager):
         raise Exception


-def fake_magic_from_buffer(buffer, mime=False):
+def fake_magic_from_buffer(buffer, *, mime=False):
     if mime:
         if "PDF" in str(buffer):
             return "application/pdf"

@@ -206,6 +206,7 @@ class MessageBuilder:

     def create_message(
         self,
+        *,
         attachments: int | list[_AttachmentDef] = 1,
         body: str = "",
         subject: str = "the subject",

@@ -214,6 +214,7 @@ class RasterisedDocumentParser(DocumentParser):
         mime_type,
         output_file,
         sidecar_file,
+        *,
         safe_fallback=False,
     ):
         if TYPE_CHECKING: