Mirror of https://github.com/paperless-ngx/paperless-ngx.git, synced 2025-09-22 00:52:42 -05:00
Chore(deps): Bump the small-changes group across 1 directory with 3 updates (#10880)
* Chore(deps): Bump the small-changes group across 1 directory with 3 updates

Bumps the small-changes group with 3 updates in the / directory: [ocrmypdf](https://github.com/ocrmypdf/OCRmyPDF), [mkdocs-material](https://github.com/squidfunk/mkdocs-material) and [ruff](https://github.com/astral-sh/ruff).

Updates `ocrmypdf` from 16.10.4 to 16.11.0
- [Release notes](https://github.com/ocrmypdf/OCRmyPDF/releases)
- [Changelog](https://github.com/ocrmypdf/OCRmyPDF/blob/main/docs/release_notes.md)
- [Commits](https://github.com/ocrmypdf/OCRmyPDF/compare/v16.10.4...v16.11.0)

Updates `mkdocs-material` from 9.6.19 to 9.6.20
- [Release notes](https://github.com/squidfunk/mkdocs-material/releases)
- [Changelog](https://github.com/squidfunk/mkdocs-material/blob/master/CHANGELOG)
- [Commits](https://github.com/squidfunk/mkdocs-material/compare/9.6.19...9.6.20)

Updates `ruff` from 0.12.12 to 0.13.0
- [Release notes](https://github.com/astral-sh/ruff/releases)
- [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md)
- [Commits](https://github.com/astral-sh/ruff/compare/0.12.12...0.13.0)

---
updated-dependencies:
- dependency-name: ocrmypdf
  dependency-version: 16.11.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: small-changes
- dependency-name: mkdocs-material
  dependency-version: 9.6.20
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: small-changes
- dependency-name: ruff
  dependency-version: 0.13.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: small-changes
...

Signed-off-by: dependabot[bot] <support@github.com>

* Applies the new Ruff rule for unpacking

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Trenton H <797416+stumpylog@users.noreply.github.com>
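The "new Ruff rule for unpacking" mentioned in the second commit flags tuple-unpacking targets that are bound but never read (in recent Ruff releases this is the unused-unpacked-variable check; the exact rule code is not named in the commit, so treat that as an assumption). A minimal before/after sketch of the pattern the rest of this diff applies:

# Before: "height" is bound but never read, which the unpacking rule reports.
width, height = (2480, 3508)
print(width)

# After: the unused target becomes the conventional "_" placeholder.
width, _ = (2480, 3508)
print(width)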
@@ -17,7 +17,7 @@ def move_sender_strings_to_sender_model(apps, schema_editor):
         if document.sender:
             (
                 DOCUMENT_SENDER_MAP[document.pk],
-                created,
+                _,
             ) = sender_model.objects.get_or_create(
                 name=document.sender,
                 defaults={"slug": slugify(document.sender)},
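For the migration hunk above: Django's get_or_create() returns a two-tuple of (object, created), and the created flag only matters when the caller wants to know whether a new row was inserted; this migration stores the object and never inspects the flag, hence "_". A hedged, runnable sketch of that (object, created) contract using an in-memory stand-in rather than a real model:

# Stand-in mimicking the (object, created) shape of Django's
# QuerySet.get_or_create(); names and data here are illustrative only.
_registry: dict[str, dict] = {}

def get_or_create(name: str, defaults: dict) -> tuple[dict, bool]:
    if name in _registry:
        return _registry[name], False
    _registry[name] = {"name": name, **defaults}
    return _registry[name], True

# The second element (the "created" boolean) is intentionally discarded.
sender, _ = get_or_create("ACME Corp", defaults={"slug": "acme-corp"})
print(sender)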
@@ -839,7 +839,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)

         m.assert_called()
-        args, kwargs = m.call_args
+        _, kwargs = m.call_args
         self.assertEqual(kwargs["merge"], False)

         response = self.client.post(
@@ -857,7 +857,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)

         m.assert_called()
-        args, kwargs = m.call_args
+        _, kwargs = m.call_args
         self.assertEqual(kwargs["merge"], True)

     @mock.patch("documents.serialisers.bulk_edit.set_storage_path")
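These TestBulkEditAPI hunks all touch the same unittest.mock idiom: Mock.call_args behaves like an (args, kwargs) pair, so when only the keyword arguments are asserted on, the positional slot can be discarded. A small runnable sketch (the mock and its arguments are illustrative, not project code):

from unittest import mock

m = mock.Mock()
m("some-positional-arg", merge=False)

# call_args unpacks into (positional_args, keyword_args); the positional
# part is irrelevant to the assertion, so it is bound to "_".
_, kwargs = m.call_args
assert kwargs["merge"] is False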
@@ -1528,7 +1528,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):

         input_doc, overrides = self.get_last_consume_delay_call_args()

-        new_overrides, msg = run_workflows(
+        new_overrides, _ = run_workflows(
             trigger_type=WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
             document=input_doc,
             logging_group=None,
@@ -1557,7 +1557,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):

         self.consume_file_mock.assert_called_once()

-        input_doc, overrides = self.get_last_consume_delay_call_args()
+        input_doc, _ = self.get_last_consume_delay_call_args()

         self.assertEqual(input_doc.source, WorkflowTrigger.DocumentSourceChoices.WEB_UI)
@@ -74,7 +74,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         )
         self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 3)
         self.async_task.assert_called_once()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])

     def test_unset_correspondent(self):
@@ -82,7 +82,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         bulk_edit.set_correspondent([self.doc1.id, self.doc2.id, self.doc3.id], None)
         self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 0)
         self.async_task.assert_called_once()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc2.id, self.doc3.id])

     def test_set_document_type(self):
@@ -93,7 +93,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         )
         self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 3)
         self.async_task.assert_called_once()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])

     def test_unset_document_type(self):
@@ -101,7 +101,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         bulk_edit.set_document_type([self.doc1.id, self.doc2.id, self.doc3.id], None)
         self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 0)
         self.async_task.assert_called_once()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc2.id, self.doc3.id])

     def test_set_document_storage_path(self):
@@ -123,7 +123,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         self.assertEqual(Document.objects.filter(storage_path=None).count(), 4)

         self.async_task.assert_called_once()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args

         self.assertCountEqual(kwargs["document_ids"], [self.doc1.id])

@@ -154,7 +154,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         self.assertEqual(Document.objects.filter(storage_path=None).count(), 5)

         self.async_task.assert_called()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args

         self.assertCountEqual(kwargs["document_ids"], [self.doc1.id])

@@ -166,7 +166,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         )
         self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 4)
         self.async_task.assert_called_once()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc3.id])

     def test_remove_tag(self):
@@ -174,7 +174,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         bulk_edit.remove_tag([self.doc1.id, self.doc3.id, self.doc4.id], self.t1.id)
         self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 1)
         self.async_task.assert_called_once()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc4.id])

     def test_modify_tags(self):
@@ -191,7 +191,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         self.assertCountEqual(list(self.doc3.tags.all()), [self.t2, tag_unrelated])

         self.async_task.assert_called_once()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args
         # TODO: doc3 should not be affected, but the query for that is rather complicated
         self.assertCountEqual(kwargs["document_ids"], [self.doc2.id, self.doc3.id])

@@ -248,7 +248,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         )

         self.async_task.assert_called_once()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])

     def test_modify_custom_fields_with_values(self):
@@ -325,7 +325,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         )

         self.async_task.assert_called_once()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])

         # removal of document link cf, should also remove symmetric link
@@ -123,14 +123,14 @@ class TestRetagger(DirectoriesMixin, TestCase):

     def test_add_type(self):
         call_command("document_retagger", "--document_type")
-        d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
+        d_first, d_second, _, _ = self.get_updated_docs()

         self.assertEqual(d_first.document_type, self.doctype_first)
         self.assertEqual(d_second.document_type, self.doctype_second)

     def test_add_correspondent(self):
         call_command("document_retagger", "--correspondent")
-        d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
+        d_first, d_second, _, _ = self.get_updated_docs()

         self.assertEqual(d_first.correspondent, self.correspondent_first)
         self.assertEqual(d_second.correspondent, self.correspondent_second)
@@ -160,7 +160,7 @@ class TestRetagger(DirectoriesMixin, TestCase):

     def test_add_tags_suggest(self):
         call_command("document_retagger", "--tags", "--suggest")
-        d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
+        d_first, d_second, _, d_auto = self.get_updated_docs()

         self.assertEqual(d_first.tags.count(), 0)
         self.assertEqual(d_second.tags.count(), 0)
@@ -168,14 +168,14 @@ class TestRetagger(DirectoriesMixin, TestCase):

     def test_add_type_suggest(self):
         call_command("document_retagger", "--document_type", "--suggest")
-        d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
+        d_first, d_second, _, _ = self.get_updated_docs()

         self.assertIsNone(d_first.document_type)
         self.assertIsNone(d_second.document_type)

     def test_add_correspondent_suggest(self):
         call_command("document_retagger", "--correspondent", "--suggest")
-        d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
+        d_first, d_second, _, _ = self.get_updated_docs()

         self.assertIsNone(d_first.correspondent)
         self.assertIsNone(d_second.correspondent)
@@ -187,7 +187,7 @@ class TestRetagger(DirectoriesMixin, TestCase):
             "--suggest",
             "--base-url=http://localhost",
         )
-        d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
+        d_first, d_second, _, d_auto = self.get_updated_docs()

         self.assertEqual(d_first.tags.count(), 0)
         self.assertEqual(d_second.tags.count(), 0)
@@ -200,7 +200,7 @@ class TestRetagger(DirectoriesMixin, TestCase):
             "--suggest",
             "--base-url=http://localhost",
         )
-        d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
+        d_first, d_second, _, _ = self.get_updated_docs()

         self.assertIsNone(d_first.document_type)
         self.assertIsNone(d_second.document_type)
@@ -212,7 +212,7 @@ class TestRetagger(DirectoriesMixin, TestCase):
             "--suggest",
             "--base-url=http://localhost",
         )
-        d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
+        d_first, d_second, _, _ = self.get_updated_docs()

         self.assertIsNone(d_first.correspondent)
         self.assertIsNone(d_second.correspondent)
@@ -21,7 +21,7 @@ TEST_CHANNEL_LAYERS = {
 class TestWebSockets(TestCase):
     async def test_no_auth(self):
         communicator = WebsocketCommunicator(application, "/ws/status/")
-        connected, subprotocol = await communicator.connect()
+        connected, _ = await communicator.connect()
         self.assertFalse(connected)
         await communicator.disconnect()

@@ -31,7 +31,7 @@ class TestWebSockets(TestCase):
         _authenticated.return_value = True

         communicator = WebsocketCommunicator(application, "/ws/status/")
-        connected, subprotocol = await communicator.connect()
+        connected, _ = await communicator.connect()
         self.assertTrue(connected)

         message = {"type": "status_update", "data": {"task_id": "test"}}
@@ -63,7 +63,7 @@ class TestWebSockets(TestCase):
         _authenticated.return_value = True

         communicator = WebsocketCommunicator(application, "/ws/status/")
-        connected, subprotocol = await communicator.connect()
+        connected, _ = await communicator.connect()
         self.assertTrue(connected)

         await communicator.disconnect()
@@ -73,7 +73,7 @@ class TestWebSockets(TestCase):
         _authenticated.return_value = True

         communicator = WebsocketCommunicator(application, "/ws/status/")
-        connected, subprotocol = await communicator.connect()
+        connected, _ = await communicator.connect()
         self.assertTrue(connected)

         message = {"type": "status_update", "data": {"task_id": "test"}}
@@ -98,7 +98,7 @@ class TestWebSockets(TestCase):
         communicator.scope["user"].is_superuser = False
         communicator.scope["user"].id = 1

-        connected, subprotocol = await communicator.connect()
+        connected, _ = await communicator.connect()
         self.assertTrue(connected)

         # Test as owner
@@ -141,7 +141,7 @@ class TestWebSockets(TestCase):
         _authenticated.return_value = True

         communicator = WebsocketCommunicator(application, "/ws/status/")
-        connected, subprotocol = await communicator.connect()
+        connected, _ = await communicator.connect()
         self.assertTrue(connected)

         message = {"type": "documents_deleted", "data": {"documents": [1, 2, 3]}}
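In the websocket hunks, WebsocketCommunicator.connect() from channels.testing resolves to a (connected, subprotocol) pair; none of these tests negotiate a subprotocol, so only the boolean is kept. A hedged sketch of the call shape (the consumer below is a placeholder, not paperless-ngx code):

from channels.generic.websocket import AsyncWebsocketConsumer
from channels.testing import WebsocketCommunicator

class EchoConsumer(AsyncWebsocketConsumer):
    async def connect(self):
        await self.accept()

async def check_connects() -> None:
    communicator = WebsocketCommunicator(EchoConsumer.as_asgi(), "/ws/status/")
    # connect() returns (connected, subprotocol); only the flag matters here.
    connected, _ = await communicator.connect()
    assert connected
    await communicator.disconnect()

# e.g. asyncio.run(check_connects())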
@@ -132,7 +132,7 @@ class RasterisedDocumentParser(DocumentParser):
     def get_dpi(self, image) -> int | None:
         try:
             with Image.open(image) as im:
-                x, y = im.info["dpi"]
+                x, _ = im.info["dpi"]
                 return round(x)
         except Exception as e:
             self.log.warning(f"Error while getting DPI from image {image}: {e}")
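Pillow reports resolution through im.info["dpi"] as a per-axis pair, typically (x_dpi, y_dpi); get_dpi() only needs one value because scanned pages normally have square pixels, so the vertical component is discarded. A standalone sketch of the same lookup (the file path is illustrative):

from PIL import Image

def horizontal_dpi(path: str) -> int | None:
    try:
        with Image.open(path) as im:
            # info["dpi"] is an (x, y) pair; the vertical value is unused.
            x, _ = im.info["dpi"]
            return round(x)
    except (KeyError, OSError):
        return None

# e.g. print(horizontal_dpi("scan.png"))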
@@ -141,7 +141,7 @@ class RasterisedDocumentParser(DocumentParser):
     def calculate_a4_dpi(self, image) -> int | None:
         try:
             with Image.open(image) as im:
-                width, height = im.size
+                width, _ = im.size
                 # divide image width by A4 width (210mm) in inches.
                 dpi = int(width / (21 / 2.54))
                 self.log.debug(f"Estimated DPI {dpi} based on image width {width}")
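The calculate_a4_dpi() estimate is plain arithmetic: an A4 page is 210 mm wide, i.e. 21 cm, or 21 / 2.54 ≈ 8.27 inches, so dividing the pixel width by that gives dots per inch; the image height never enters the estimate, which is why im.size is unpacked as (width, _). A quick worked check with an assumed pixel width:

width = 2480                          # assumed pixel width of an A4 scan
a4_width_inches = 21 / 2.54           # 210 mm expressed in inches, ~8.27
print(int(width / a4_width_inches))   # -> 299, i.e. roughly a 300 DPI scan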