Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2025-04-02 13:45:10 -05:00)
Let ruff autofix some things from the newest version

This commit is contained in:
parent 71e4be2d5e
commit 70f3f98363
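
For context, the rewrite these hunks apply is ruff's autofix for explicit str() calls inside f-strings (presumably the RUF010 / explicit-f-string-type-conversion rule, plus dropping str() entirely where the default conversion already applies). A minimal sketch of the before/after behaviour follows; the Widget class is purely illustrative and not part of this commit:

class Widget:
    def __str__(self) -> str:
        return "widget-7"


w = Widget()

# Before the autofix: an explicit str() call inside the f-string.
before = f"value: {str(w)}"

# After the autofix: the !s conversion flag; str() is also the default
# f-string conversion, so a plain {w} renders the same text.
after = f"value: {w!s}"

assert before == after == f"value: {w}" == "value: widget-7"
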
@@ -28,7 +28,7 @@ if __name__ == "__main__":
         except Exception as e:
             print(
                 f"Redis ping #{attempt} failed.\n"
-                f"Error: {str(e)}.\n"
+                f"Error: {e!s}.\n"
                 f"Waiting {RETRY_SLEEP_SECONDS}s",
                 flush=True,
             )
@@ -121,7 +121,7 @@ class BarcodeReader:
             if barcode.text:
                 barcodes.append(barcode.text)
                 logger.debug(
-                    f"Barcode of type {str(barcode.format)} found: {barcode.text}",
+                    f"Barcode of type {barcode.format} found: {barcode.text}",
                 )

         return barcodes
@@ -141,7 +141,7 @@ class BarcodeReader:
             decoded_barcode = barcode.data.decode("utf-8")
             barcodes.append(decoded_barcode)
             logger.debug(
-                f"Barcode of type {str(barcode.type)} found: {decoded_barcode}",
+                f"Barcode of type {barcode.type} found: {decoded_barcode}",
             )

         return barcodes
@@ -348,7 +348,7 @@ class BarcodeReader:

         for idx, document_path in enumerate(doc_paths):
             if override_name is not None:
-                newname = f"{str(idx)}_{override_name}"
+                newname = f"{idx}_{override_name}"
                 dest = save_to_dir / newname
             else:
                 dest = save_to_dir
@@ -346,7 +346,7 @@ def cleanup_document_deletion(sender, instance, using, **kwargs):
                     logger.debug(f"Deleted file {filename}.")
                 except OSError as e:
                     logger.warning(
-                        f"While deleting document {str(instance)}, the file "
+                        f"While deleting document {instance!s}, the file "
                         f"{filename} could not be deleted: {e}",
                     )

@@ -369,13 +369,13 @@ class CannotMoveFilesException(Exception):
 def validate_move(instance, old_path, new_path):
     if not os.path.isfile(old_path):
         # Can't do anything if the old file does not exist anymore.
-        logger.fatal(f"Document {str(instance)}: File {old_path} has gone.")
+        logger.fatal(f"Document {instance!s}: File {old_path} has gone.")
         raise CannotMoveFilesException

     if os.path.isfile(new_path):
         # Can't do anything if the new file already exists. Skip updating file.
         logger.warning(
-            f"Document {str(instance)}: Cannot rename file "
+            f"Document {instance!s}: Cannot rename file "
             f"since target path {new_path} already exists.",
         )
         raise CannotMoveFilesException
@@ -116,7 +116,7 @@ def consume_file(
                 {"type": "status_update", "data": payload},
             )
         except ConnectionError as e:
-            logger.warning(f"ConnectionError on status send: {str(e)}")
+            logger.warning(f"ConnectionError on status send: {e!s}")
         # consuming stops here, since the original document with
         # the barcodes has been split and will be consumed separately

@@ -519,7 +519,7 @@ class DocumentViewSet(
         try:
             return Response(self.getNotes(doc))
         except Exception as e:
-            logger.warning(f"An error occurred retrieving notes: {str(e)}")
+            logger.warning(f"An error occurred retrieving notes: {e!s}")
             return Response(
                 {"error": "Error retreiving notes, check logs for more detail."},
             )
@@ -538,7 +538,7 @@ class DocumentViewSet(

             return Response(self.getNotes(doc))
         except Exception as e:
-            logger.warning(f"An error occurred saving note: {str(e)}")
+            logger.warning(f"An error occurred saving note: {e!s}")
             return Response(
                 {
                     "error": "Error saving note, check logs for more detail.",
@@ -628,7 +628,7 @@ class UnifiedSearchViewSet(DocumentViewSet):
         except NotFound:
             raise
         except Exception as e:
-            logger.warning(f"An error occurred listing search results: {str(e)}")
+            logger.warning(f"An error occurred listing search results: {e!s}")
             return HttpResponseBadRequest(
                 "Error listing search results, check logs for more detail.",
             )
@@ -699,7 +699,7 @@ class BulkEditView(GenericAPIView):
             result = method(documents, **parameters)
             return Response({"result": result})
         except Exception as e:
-            logger.warning(f"An error occurred performing bulk edit: {str(e)}")
+            logger.warning(f"An error occurred performing bulk edit: {e!s}")
             return HttpResponseBadRequest(
                 "Error performing bulk edit, check logs for more detail.",
             )
@@ -544,7 +544,7 @@ class MailAccountHandler(LoggingMixin):
         criterias = make_criterias(rule, supports_gmail_labels)

         self.log.debug(
-            f"Rule {rule}: Searching folder with criteria {str(criterias)}",
+            f"Rule {rule}: Searching folder with criteria {criterias}",
         )

         try:
@@ -335,7 +335,7 @@ class RasterisedDocumentParser(DocumentParser):
                     self.text = text_original
             except (NoTextFoundException, InputFileError) as e:
                 self.log.warning(
-                    f"Encountered an error while running OCR: {str(e)}. "
+                    f"Encountered an error while running OCR: {e!s}. "
                     f"Attempting force OCR to get the text.",
                 )

@@ -370,11 +370,11 @@ class RasterisedDocumentParser(DocumentParser):

             except Exception as e:
                 # If this fails, we have a serious issue at hand.
-                raise ParseError(f"{e.__class__.__name__}: {str(e)}") from e
+                raise ParseError(f"{e.__class__.__name__}: {e!s}") from e

         except Exception as e:
             # Anything else is probably serious.
-            raise ParseError(f"{e.__class__.__name__}: {str(e)}") from e
+            raise ParseError(f"{e.__class__.__name__}: {e!s}") from e

         # As a last resort, if we still don't have any text for any reason,
         # try to extract the text from the original document.