Chore: Change the code formatter to Ruff (#6756)

* Changing the formatting to ruff-format

* Replaces references to black with ruff or ruff format, and removes black from dependencies
This commit is contained in:
Trenton H 2024-05-17 19:26:50 -07:00 committed by GitHub
parent 3facdefa40
commit 622f624132
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
27 changed files with 78 additions and 120 deletions

View File

@ -53,7 +53,6 @@ updates:
development: development:
patterns: patterns:
- "*pytest*" - "*pytest*"
- "black"
- "ruff" - "ruff"
- "mkdocs-material" - "mkdocs-material"
django: django:

View File

@ -50,10 +50,7 @@ repos:
rev: 'v0.4.4' rev: 'v0.4.4'
hooks: hooks:
- id: ruff - id: ruff
- repo: https://github.com/psf/black-pre-commit-mirror - id: ruff-format
rev: 24.4.2
hooks:
- id: black
# Dockerfile hooks # Dockerfile hooks
- repo: https://github.com/AleksaC/hadolint-py - repo: https://github.com/AleksaC/hadolint-py
rev: v2.12.0.3 rev: v2.12.0.3

View File

@ -11,7 +11,7 @@ If you want to implement something big:
## Python ## Python
Paperless supports python 3.9 - 3.11. We format Python code with [Black](https://github.com/psf/black). Paperless supports python 3.9 - 3.11. We format Python code with [ruff](https://docs.astral.sh/ruff/formatter/).
## Branches ## Branches

View File

@ -59,7 +59,6 @@ zxing-cpp = {version = "*", platform_machine = "== 'x86_64'"}
[dev-packages] [dev-packages]
# Linting # Linting
black = "*"
pre-commit = "*" pre-commit = "*"
ruff = "*" ruff = "*"
# Testing # Testing

43
Pipfile.lock generated
View File

@ -1,7 +1,7 @@
{ {
"_meta": { "_meta": {
"hash": { "hash": {
"sha256": "6fd62d37d4c5f6ec99765b037073e8d07db40dbddb2c591664406035970f7d4a" "sha256": "4bb46e902c6ceb6e3647772809b7941e318f1408472c4eaf0a1d2f0360ddac3a"
}, },
"pipfile-spec": 6, "pipfile-spec": 6,
"requires": {}, "requires": {},
@ -466,10 +466,10 @@
"socialaccount" "socialaccount"
], ],
"hashes": [ "hashes": [
"sha256:fbe0148c6258eac44e701a8f36e0950b5d0b1c60bde3e178683a4239f49311fc" "sha256:ef52957992ac3dfc57dec4f7c078bb317b7b47bd7bf6b15f2c12a31c9892ad4c"
], ],
"markers": "python_version >= '3.7'", "markers": "python_version >= '3.7'",
"version": "==0.63.0" "version": "==0.63.1"
}, },
"django-auditlog": { "django-auditlog": {
"hashes": [ "hashes": [
@ -2500,35 +2500,6 @@
"markers": "python_version >= '3.8'", "markers": "python_version >= '3.8'",
"version": "==2.15.0" "version": "==2.15.0"
}, },
"black": {
"hashes": [
"sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474",
"sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1",
"sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0",
"sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8",
"sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96",
"sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1",
"sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04",
"sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021",
"sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94",
"sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d",
"sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c",
"sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7",
"sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c",
"sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc",
"sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7",
"sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d",
"sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c",
"sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741",
"sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce",
"sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb",
"sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063",
"sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
"version": "==24.4.2"
},
"certifi": { "certifi": {
"hashes": [ "hashes": [
"sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f", "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f",
@ -3099,14 +3070,6 @@
"markers": "python_version >= '3.8'", "markers": "python_version >= '3.8'",
"version": "==1.3.1" "version": "==1.3.1"
}, },
"mypy-extensions": {
"hashes": [
"sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d",
"sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.0"
},
"nodeenv": { "nodeenv": {
"hashes": [ "hashes": [
"sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2", "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2",

View File

@ -4,6 +4,7 @@ Simple script which attempts to ping the Redis broker as set in the environment
a certain number of times, waiting a little bit in between a certain number of times, waiting a little bit in between
""" """
import os import os
import sys import sys
import time import time

View File

@ -47,7 +47,7 @@ early on.
Once installed, hooks will run when you commit. If the formatting isn't Once installed, hooks will run when you commit. If the formatting isn't
quite right or a linter catches something, the commit will be rejected. quite right or a linter catches something, the commit will be rejected.
You'll need to look at the output and fix the issue. Some hooks, such You'll need to look at the output and fix the issue. Some hooks, such
as the Python formatting tool `black`, will format failing as the Python linting and formatting tool `ruff`, will format failing
files, so all you need to do is `git add` those files again files, so all you need to do is `git add` those files again
and retry your commit. and retry your commit.

View File

@ -107,7 +107,6 @@ class BarcodePlugin(ConsumeTaskPlugin):
if settings.CONSUMER_ENABLE_BARCODES and ( if settings.CONSUMER_ENABLE_BARCODES and (
separator_pages := self.get_separation_pages() separator_pages := self.get_separation_pages()
): ):
# We have pages to split against # We have pages to split against
# Note this does NOT use the base_temp_dir, as that will be removed # Note this does NOT use the base_temp_dir, as that will be removed

View File

@ -25,7 +25,6 @@ logger = logging.getLogger("paperless.bulk_edit")
def set_correspondent(doc_ids: list[int], correspondent): def set_correspondent(doc_ids: list[int], correspondent):
if correspondent: if correspondent:
correspondent = Correspondent.objects.only("pk").get(id=correspondent) correspondent = Correspondent.objects.only("pk").get(id=correspondent)
@ -81,7 +80,6 @@ def set_document_type(doc_ids: list[int], document_type):
def add_tag(doc_ids: list[int], tag: int): def add_tag(doc_ids: list[int], tag: int):
qs = Document.objects.filter(Q(id__in=doc_ids) & ~Q(tags__id=tag)).only("pk") qs = Document.objects.filter(Q(id__in=doc_ids) & ~Q(tags__id=tag)).only("pk")
affected_docs = list(qs.values_list("pk", flat=True)) affected_docs = list(qs.values_list("pk", flat=True))
@ -97,7 +95,6 @@ def add_tag(doc_ids: list[int], tag: int):
def remove_tag(doc_ids: list[int], tag: int): def remove_tag(doc_ids: list[int], tag: int):
qs = Document.objects.filter(Q(id__in=doc_ids) & Q(tags__id=tag)).only("pk") qs = Document.objects.filter(Q(id__in=doc_ids) & Q(tags__id=tag)).only("pk")
affected_docs = list(qs.values_list("pk", flat=True)) affected_docs = list(qs.values_list("pk", flat=True))

View File

@ -151,14 +151,17 @@ class Command(BaseCommand):
self._check_manifest_valid() self._check_manifest_valid()
with disable_signal( with (
post_save, disable_signal(
receiver=update_filename_and_move_files, post_save,
sender=Document, receiver=update_filename_and_move_files,
), disable_signal( sender=Document,
m2m_changed, ),
receiver=update_filename_and_move_files, disable_signal(
sender=Document.tags.through, m2m_changed,
receiver=update_filename_and_move_files,
sender=Document.tags.through,
),
): ):
if settings.AUDIT_LOG_ENABLED: if settings.AUDIT_LOG_ENABLED:
auditlog.unregister(Document) auditlog.unregister(Document)

View File

@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
("documents", "1044_workflow_workflowaction_workflowtrigger_and_more"), ("documents", "1044_workflow_workflowaction_workflowtrigger_and_more"),
] ]

View File

@ -7,7 +7,6 @@ from django.db import models
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
("auth", "0012_alter_user_first_name_max_length"), ("auth", "0012_alter_user_first_name_max_length"),
migrations.swappable_dependency(settings.AUTH_USER_MODEL), migrations.swappable_dependency(settings.AUTH_USER_MODEL),

View File

@ -6,7 +6,6 @@ from django.db import models
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
("documents", "1046_workflowaction_remove_all_correspondents_and_more"), ("documents", "1046_workflowaction_remove_all_correspondents_and_more"),
] ]

View File

@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
("documents", "1047_savedview_display_mode_and_more"), ("documents", "1047_savedview_display_mode_and_more"),
] ]

View File

@ -93,7 +93,9 @@ class MatchingModelSerializer(serializers.ModelSerializer):
owner = ( owner = (
data["owner"] data["owner"]
if "owner" in data if "owner" in data
else self.user if hasattr(self, "user") else None else self.user
if hasattr(self, "user")
else None
) )
pk = self.instance.pk if hasattr(self.instance, "pk") else None pk = self.instance.pk if hasattr(self.instance, "pk") else None
if ("name" in data or "owner" in data) and self.Meta.model.objects.filter( if ("name" in data or "owner" in data) and self.Meta.model.objects.filter(

View File

@ -117,10 +117,13 @@ def consume_file(
ConsumerPlugin, ConsumerPlugin,
] ]
with ProgressManager( with (
overrides.filename or input_doc.original_file.name, ProgressManager(
self.request.id, overrides.filename or input_doc.original_file.name,
) as status_mgr, TemporaryDirectory(dir=settings.SCRATCH_DIR) as tmp_dir: self.request.id,
) as status_mgr,
TemporaryDirectory(dir=settings.SCRATCH_DIR) as tmp_dir,
):
tmp_dir = Path(tmp_dir) tmp_dir = Path(tmp_dir)
for plugin_class in plugins: for plugin_class in plugins:
plugin_name = plugin_class.NAME plugin_name = plugin_class.NAME

View File

@ -136,7 +136,6 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
@mock.patch("documents.bulk_edit.bulk_update_documents.delay") @mock.patch("documents.bulk_edit.bulk_update_documents.delay")
def test_api_add_tag(self, bulk_update_task_mock): def test_api_add_tag(self, bulk_update_task_mock):
self.assertFalse(self.doc1.tags.filter(pk=self.t1.pk).exists()) self.assertFalse(self.doc1.tags.filter(pk=self.t1.pk).exists())
response = self.client.post( response = self.client.post(

View File

@ -425,7 +425,6 @@ class TestConsumer(
self._assert_first_last_send_progress() self._assert_first_last_send_progress()
def testOverrideTitle(self): def testOverrideTitle(self):
with self.get_consumer( with self.get_consumer(
self.get_test_file(), self.get_test_file(),
DocumentMetadataOverrides(title="Override Title"), DocumentMetadataOverrides(title="Override Title"),
@ -441,7 +440,6 @@ class TestConsumer(
def testOverrideTitleInvalidPlaceholders(self): def testOverrideTitleInvalidPlaceholders(self):
with self.assertLogs("paperless.consumer", level="ERROR") as cm: with self.assertLogs("paperless.consumer", level="ERROR") as cm:
with self.get_consumer( with self.get_consumer(
self.get_test_file(), self.get_test_file(),
DocumentMetadataOverrides(title="Override {correspondent]"), DocumentMetadataOverrides(title="Override {correspondent]"),
@ -546,7 +544,6 @@ class TestConsumer(
self._assert_first_last_send_progress() self._assert_first_last_send_progress()
def testOverrideAsn(self): def testOverrideAsn(self):
with self.get_consumer( with self.get_consumer(
self.get_test_file(), self.get_test_file(),
DocumentMetadataOverrides(asn=123), DocumentMetadataOverrides(asn=123),
@ -614,7 +611,6 @@ class TestConsumer(
self._assert_first_last_send_progress() self._assert_first_last_send_progress()
def testNotAFile(self): def testNotAFile(self):
with self.get_consumer(Path("non-existing-file")) as consumer: with self.get_consumer(Path("non-existing-file")) as consumer:
with self.assertRaisesMessage(ConsumerError, "File not found"): with self.assertRaisesMessage(ConsumerError, "File not found"):
consumer.run() consumer.run()
@ -725,7 +721,6 @@ class TestConsumer(
@override_settings(FILENAME_FORMAT="{correspondent}/{title}") @override_settings(FILENAME_FORMAT="{correspondent}/{title}")
def testFilenameHandling(self): def testFilenameHandling(self):
with self.get_consumer( with self.get_consumer(
self.get_test_file(), self.get_test_file(),
DocumentMetadataOverrides(title="new docs"), DocumentMetadataOverrides(title="new docs"),
@ -1055,7 +1050,6 @@ class PreConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):
@override_settings(PRE_CONSUME_SCRIPT="does-not-exist") @override_settings(PRE_CONSUME_SCRIPT="does-not-exist")
def test_pre_consume_script_not_found(self, m): def test_pre_consume_script_not_found(self, m):
with self.get_consumer(self.test_file) as c: with self.get_consumer(self.test_file) as c:
self.assertRaises(ConsumerError, c.run) self.assertRaises(ConsumerError, c.run)
m.assert_not_called() m.assert_not_called()
@ -1254,7 +1248,6 @@ class PostConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):
os.chmod(script.name, st.st_mode | stat.S_IEXEC) os.chmod(script.name, st.st_mode | stat.S_IEXEC)
with override_settings(POST_CONSUME_SCRIPT=script.name): with override_settings(POST_CONSUME_SCRIPT=script.name):
doc = Document.objects.create(title="Test", mime_type="application/pdf") doc = Document.objects.create(title="Test", mime_type="application/pdf")
with self.get_consumer(self.test_file) as consumer: with self.get_consumer(self.test_file) as consumer:
with self.assertRaisesRegex( with self.assertRaisesRegex(

View File

@ -10,7 +10,6 @@ from documents.parsers import parse_date_generator
class TestDate(TestCase): class TestDate(TestCase):
def test_date_format_1(self): def test_date_format_1(self):
text = "lorem ipsum 130218 lorem ipsum" text = "lorem ipsum 130218 lorem ipsum"
self.assertEqual(parse_date("", text), None) self.assertEqual(parse_date("", text), None)

View File

@ -144,9 +144,12 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
# Set a correspondent and save the document # Set a correspondent and save the document
document.correspondent = Correspondent.objects.get_or_create(name="test")[0] document.correspondent = Correspondent.objects.get_or_create(name="test")[0]
with mock.patch( with (
"documents.signals.handlers.Document.objects.filter", mock.patch(
) as m, disable_auditlog(): "documents.signals.handlers.Document.objects.filter",
) as m,
disable_auditlog(),
):
m.side_effect = DatabaseError() m.side_effect = DatabaseError()
document.save() document.save()

View File

@ -618,7 +618,6 @@ class DocumentViewSet(
@action(methods=["get", "post", "delete"], detail=True) @action(methods=["get", "post", "delete"], detail=True)
def notes(self, request, pk=None): def notes(self, request, pk=None):
currentUser = request.user currentUser = request.user
try: try:
doc = ( doc = (
@ -1337,7 +1336,6 @@ class StatisticsView(APIView):
permission_classes = (IsAuthenticated,) permission_classes = (IsAuthenticated,)
def get(self, request, format=None): def get(self, request, format=None):
user = request.user if request.user is not None else None user = request.user if request.user is not None else None
documents = ( documents = (
@ -1533,9 +1531,9 @@ class UiSettingsView(GenericAPIView):
if hasattr(user, "ui_settings"): if hasattr(user, "ui_settings"):
ui_settings = user.ui_settings.settings ui_settings = user.ui_settings.settings
if "update_checking" in ui_settings: if "update_checking" in ui_settings:
ui_settings["update_checking"][ ui_settings["update_checking"]["backend_setting"] = (
"backend_setting" settings.ENABLE_UPDATE_CHECK
] = settings.ENABLE_UPDATE_CHECK )
else: else:
ui_settings["update_checking"] = { ui_settings["update_checking"] = {
"backend_setting": settings.ENABLE_UPDATE_CHECK, "backend_setting": settings.ENABLE_UPDATE_CHECK,

View File

@ -6,7 +6,6 @@ from django.db import models
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
("paperless", "0002_applicationconfiguration_app_logo_and_more"), ("paperless", "0002_applicationconfiguration_app_logo_and_more"),
] ]

View File

@ -171,8 +171,7 @@ def _parse_beat_schedule() -> dict:
"task": "paperless_mail.tasks.process_mail_accounts", "task": "paperless_mail.tasks.process_mail_accounts",
"options": { "options": {
# 1 minute before default schedule sends again # 1 minute before default schedule sends again
"expires": 9.0 "expires": 9.0 * 60.0,
* 60.0,
}, },
}, },
{ {
@ -183,8 +182,7 @@ def _parse_beat_schedule() -> dict:
"task": "documents.tasks.train_classifier", "task": "documents.tasks.train_classifier",
"options": { "options": {
# 1 minute before default schedule sends again # 1 minute before default schedule sends again
"expires": 59.0 "expires": 59.0 * 60.0,
* 60.0,
}, },
}, },
{ {
@ -195,9 +193,7 @@ def _parse_beat_schedule() -> dict:
"task": "documents.tasks.index_optimize", "task": "documents.tasks.index_optimize",
"options": { "options": {
# 1 hour before default schedule sends again # 1 hour before default schedule sends again
"expires": 23.0 "expires": 23.0 * 60.0 * 60.0,
* 60.0
* 60.0,
}, },
}, },
{ {
@ -208,9 +204,7 @@ def _parse_beat_schedule() -> dict:
"task": "documents.tasks.sanity_check", "task": "documents.tasks.sanity_check",
"options": { "options": {
# 1 hour before default schedule sends again # 1 hour before default schedule sends again
"expires": ((7.0 * 24.0) - 1.0) "expires": ((7.0 * 24.0) - 1.0) * 60.0 * 60.0,
* 60.0
* 60.0,
}, },
}, },
] ]
@ -822,9 +816,9 @@ CACHES = {
} }
if DEBUG and os.getenv("PAPERLESS_CACHE_BACKEND") is None: if DEBUG and os.getenv("PAPERLESS_CACHE_BACKEND") is None:
CACHES["default"][ CACHES["default"]["BACKEND"] = (
"BACKEND" "django.core.cache.backends.locmem.LocMemCache" # pragma: no cover
] = "django.core.cache.backends.locmem.LocMemCache" # pragma: no cover )
def default_threads_per_worker(task_workers) -> int: def default_threads_per_worker(task_workers) -> int:

View File

@ -29,7 +29,6 @@ class TestCustomAccountAdapter(TestCase):
with context.request_context(request): with context.request_context(request):
adapter = get_adapter() adapter = get_adapter()
with override_settings(ALLOWED_HOSTS=["*"]): with override_settings(ALLOWED_HOSTS=["*"]):
# True because request host is same # True because request host is same
url = "https://example.com" url = "https://example.com"
self.assertTrue(adapter.is_safe_url(url)) self.assertTrue(adapter.is_safe_url(url))

View File

@ -63,9 +63,12 @@ class MailRule(document_models.ModelWithOwner):
class ConsumptionScope(models.IntegerChoices): class ConsumptionScope(models.IntegerChoices):
ATTACHMENTS_ONLY = 1, _("Only process attachments.") ATTACHMENTS_ONLY = 1, _("Only process attachments.")
EML_ONLY = 2, _("Process full Mail (with embedded attachments in file) as .eml") EML_ONLY = 2, _("Process full Mail (with embedded attachments in file) as .eml")
EVERYTHING = 3, _( EVERYTHING = (
"Process full Mail (with embedded attachments in file) as .eml " 3,
"+ process attachments as separate documents", _(
"Process full Mail (with embedded attachments in file) as .eml "
"+ process attachments as separate documents",
),
) )
class AttachmentProcessing(models.IntegerChoices): class AttachmentProcessing(models.IntegerChoices):

View File

@ -222,10 +222,13 @@ class MailDocumentParser(DocumentParser):
self.log.debug("Merging email text and HTML content into single PDF") self.log.debug("Merging email text and HTML content into single PDF")
with GotenbergClient( with (
host=settings.TIKA_GOTENBERG_ENDPOINT, GotenbergClient(
timeout=settings.CELERY_TASK_TIME_LIMIT, host=settings.TIKA_GOTENBERG_ENDPOINT,
) as client, client.merge.merge() as route: timeout=settings.CELERY_TASK_TIME_LIMIT,
) as client,
client.merge.merge() as route,
):
# Configure requested PDF/A formatting, if any # Configure requested PDF/A formatting, if any
pdf_a_format = self._settings_to_gotenberg_pdfa() pdf_a_format = self._settings_to_gotenberg_pdfa()
if pdf_a_format is not None: if pdf_a_format is not None:
@ -310,10 +313,13 @@ class MailDocumentParser(DocumentParser):
css_file = Path(__file__).parent / "templates" / "output.css" css_file = Path(__file__).parent / "templates" / "output.css"
email_html_file = self.mail_to_html(mail) email_html_file = self.mail_to_html(mail)
with GotenbergClient( with (
host=settings.TIKA_GOTENBERG_ENDPOINT, GotenbergClient(
timeout=settings.CELERY_TASK_TIME_LIMIT, host=settings.TIKA_GOTENBERG_ENDPOINT,
) as client, client.chromium.html_to_pdf() as route: timeout=settings.CELERY_TASK_TIME_LIMIT,
) as client,
client.chromium.html_to_pdf() as route,
):
# Configure requested PDF/A formatting, if any # Configure requested PDF/A formatting, if any
pdf_a_format = self._settings_to_gotenberg_pdfa() pdf_a_format = self._settings_to_gotenberg_pdfa()
if pdf_a_format is not None: if pdf_a_format is not None:
@ -363,10 +369,13 @@ class MailDocumentParser(DocumentParser):
html_clean_file = tempdir / "index.html" html_clean_file = tempdir / "index.html"
html_clean_file.write_text(html_clean) html_clean_file.write_text(html_clean)
with GotenbergClient( with (
host=settings.TIKA_GOTENBERG_ENDPOINT, GotenbergClient(
timeout=settings.CELERY_TASK_TIME_LIMIT, host=settings.TIKA_GOTENBERG_ENDPOINT,
) as client, client.chromium.html_to_pdf() as route: timeout=settings.CELERY_TASK_TIME_LIMIT,
) as client,
client.chromium.html_to_pdf() as route,
):
# Configure requested PDF/A formatting, if any # Configure requested PDF/A formatting, if any
pdf_a_format = self._settings_to_gotenberg_pdfa() pdf_a_format = self._settings_to_gotenberg_pdfa()
if pdf_a_format is not None: if pdf_a_format is not None:

View File

@ -88,10 +88,13 @@ class TikaDocumentParser(DocumentParser):
self.log.info(f"Converting {document_path} to PDF as {pdf_path}") self.log.info(f"Converting {document_path} to PDF as {pdf_path}")
with GotenbergClient( with (
host=settings.TIKA_GOTENBERG_ENDPOINT, GotenbergClient(
timeout=settings.CELERY_TASK_TIME_LIMIT, host=settings.TIKA_GOTENBERG_ENDPOINT,
) as client, client.libre_office.to_pdf() as route: timeout=settings.CELERY_TASK_TIME_LIMIT,
) as client,
client.libre_office.to_pdf() as route,
):
# Set the output format of the resulting PDF # Set the output format of the resulting PDF
if settings.OCR_OUTPUT_TYPE in { if settings.OCR_OUTPUT_TYPE in {
OutputTypeChoices.PDF_A, OutputTypeChoices.PDF_A,