Compare commits


17 Commits

Author SHA1 Message Date
shamoon 1e54bfc174 Use id ordering 2026-01-14 09:47:36 -08:00
shamoon 5a2084a65b We dont need this anymore 2026-01-14 09:47:36 -08:00
shamoon d8fadbf65a Fixhancement: use explicit order field workflow actions 2026-01-14 09:14:17 -08:00
dependabot[bot] eeb5639990 Chore(deps): Bump azure-core from 1.33.0 to 1.38.0 (#11776) 2026-01-13 15:02:22 -08:00
    Bumps [azure-core](https://github.com/Azure/azure-sdk-for-python) from 1.33.0 to 1.38.0.
    - [Release notes](https://github.com/Azure/azure-sdk-for-python/releases)
    - [Commits](https://github.com/Azure/azure-sdk-for-python/compare/azure-core_1.33.0...azure-core_1.38.0)
    ---
    updated-dependencies:
    - dependency-name: azure-core
      dependency-version: 1.38.0
      dependency-type: indirect
    ...
    Signed-off-by: dependabot[bot] <support@github.com>
    Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Trenton H 6cf8abc5d3 Chore: attempt to resolve Codecov patch coverage issues (#11773) 2026-01-13 12:25:36 -08:00
shamoon 9c0de249a6 Merge branch 'main' into dev 2026-01-13 11:53:40 -08:00
github-actions[bot] 71ecdc528e Documentation: Add v2.20.4 changelog (#11772) 2026-01-13 11:51:37 -08:00
    Co-authored-by: github-actions <41898282+github-actions[bot]@users.noreply.github.com>
    Co-authored-by: shamoon <4887959+shamoon@users.noreply.github.com>
shamoon 00ec8a577b Merge branch 'hotfix/v2.20.4' 2026-01-13 11:45:55 -08:00
shamoon 3618c50b62 Bump version to 2.20.4 2026-01-13 10:01:42 -08:00
shamoon 6f4497185e Fix merge conflict 2026-01-13 10:01:41 -08:00
shamoon e816269db5 Fix: recurring workflow to respect latest run time (#11735) 2026-01-13 09:36:53 -08:00
shamoon d4e60e13bf Fixhancement: add error handling and retry when opening index (#11731) 2026-01-13 09:36:44 -08:00
shamoon cb091665e2 Fix: validate cf integer values within PostgreSQL range (#11666) 2026-01-13 09:36:29 -08:00
shamoon 00bb92e3e1 Fix: support ordering by storage path name (#11661) 2026-01-13 09:36:14 -08:00
shamoon 11ec676909 Fix: propagate metadata override created value (#11659) 2026-01-13 09:36:07 -08:00
shamoon 7c457466b7 Security: prevent path traversal in storage paths 2026-01-13 09:29:48 -08:00
Antoine Mérino 65aed2405c Documentation: update notes for DB pool size (#11600) 2025-12-30 13:06:21 -08:00
19 changed files with 231 additions and 49 deletions

View File

@@ -1,6 +1,7 @@
+# https://docs.codecov.com/docs/codecovyml-reference#codecov
 codecov:
   require_ci_to_pass: true
 # https://docs.codecov.com/docs/components
 component_management:
   individual_components:
     - component_id: backend
@@ -9,35 +10,70 @@ component_management:
     - component_id: frontend
       paths:
         - src-ui/**
 # https://docs.codecov.com/docs/flags#step-2-flag-management-in-yaml
+# https://docs.codecov.com/docs/carryforward-flags
 flags:
-  backend:
+  # Backend Python versions
+  backend-python-3.10:
     paths:
       - src/**
     carryforward: true
-  frontend:
+  backend-python-3.11:
+    paths:
+      - src/**
+    carryforward: true
+  backend-python-3.12:
+    paths:
+      - src/**
+    carryforward: true
+  # Frontend (shards merge into single flag)
+  frontend-node-24.x:
     paths:
       - src-ui/**
     carryforward: true
 # https://docs.codecov.com/docs/pull-request-comments
 comment:
   layout: "header, diff, components, flags, files"
   # https://docs.codecov.com/docs/javascript-bundle-analysis
   require_bundle_changes: true
   bundle_change_threshold: "50Kb"
 coverage:
   # https://docs.codecov.com/docs/commit-status
   status:
     project:
-      default:
+      backend:
+        flags:
+          - backend-python-3.10
+          - backend-python-3.11
+          - backend-python-3.12
+        paths:
+          - src/**
         # https://docs.codecov.com/docs/commit-status#threshold
         threshold: 1%
+        removed_code_behavior: adjust_base
+      frontend:
+        flags:
+          - frontend-node-24.x
+        paths:
+          - src-ui/**
+        threshold: 1%
+        removed_code_behavior: adjust_base
     patch:
-      default:
-        # For the changed lines only, target 100% covered, but
-        # allow as low as 75%
+      backend:
+        flags:
+          - backend-python-3.10
+          - backend-python-3.11
+          - backend-python-3.12
+        paths:
+          - src/**
+        target: 100%
+        threshold: 25%
+      frontend:
+        flags:
+          - frontend-node-24.x
+        paths:
+          - src-ui/**
         target: 100%
         threshold: 25%
 # https://docs.codecov.com/docs/javascript-bundle-analysis
 bundle_analysis:
+  # Fail if the bundle size increases by more than 1MB
   warning_threshold: "1MB"
   status: true

View File

@@ -88,13 +88,13 @@ jobs:
         if: always()
         uses: codecov/codecov-action@v5
         with:
-          flags: backend,backend-python-${{ matrix.python-version }}
+          flags: backend-python-${{ matrix.python-version }}
           files: junit.xml
           report_type: test_results
       - name: Upload coverage to Codecov
         uses: codecov/codecov-action@v5
         with:
-          flags: backend,backend-python-${{ matrix.python-version }}
+          flags: backend-python-${{ matrix.python-version }}
           files: coverage.xml
           report_type: coverage
       - name: Stop containers

View File

@@ -109,13 +109,13 @@ jobs:
         if: always()
         uses: codecov/codecov-action@v5
         with:
-          flags: frontend,frontend-node-${{ matrix.node-version }}
+          flags: frontend-node-${{ matrix.node-version }}
           directory: src-ui/
           report_type: test_results
       - name: Upload coverage to Codecov
         uses: codecov/codecov-action@v5
         with:
-          flags: frontend,frontend-node-${{ matrix.node-version }}
+          flags: frontend-node-${{ matrix.node-version }}
           directory: src-ui/coverage/
   e2e-tests:
     name: "E2E Tests (${{ matrix.shard-index }}/${{ matrix.shard-count }})"

View File

@@ -1,9 +1,44 @@
# Changelog # Changelog
## paperless-ngx 2.20.4
### Security
- Resolve [GHSA-28cf-xvcf-hw6m](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-28cf-xvcf-hw6m)
### Bug Fixes
- Fix: propagate metadata override created value [@shamoon](https://github.com/shamoon) ([#11659](https://github.com/paperless-ngx/paperless-ngx/pull/11659))
- Fix: support ordering by storage path name [@shamoon](https://github.com/shamoon) ([#11661](https://github.com/paperless-ngx/paperless-ngx/pull/11661))
- Fix: validate cf integer values within PostgreSQL range [@shamoon](https://github.com/shamoon) ([#11666](https://github.com/paperless-ngx/paperless-ngx/pull/11666))
- Fixhancement: add error handling and retry when opening index [@shamoon](https://github.com/shamoon) ([#11731](https://github.com/paperless-ngx/paperless-ngx/pull/11731))
- Fix: fix recurring workflow to respect latest run time [@shamoon](https://github.com/shamoon) ([#11735](https://github.com/paperless-ngx/paperless-ngx/pull/11735))
### All App Changes
<details>
<summary>5 changes</summary>
- Fix: propagate metadata override created value [@shamoon](https://github.com/shamoon) ([#11659](https://github.com/paperless-ngx/paperless-ngx/pull/11659))
- Fix: support ordering by storage path name [@shamoon](https://github.com/shamoon) ([#11661](https://github.com/paperless-ngx/paperless-ngx/pull/11661))
- Fix: validate cf integer values within PostgreSQL range [@shamoon](https://github.com/shamoon) ([#11666](https://github.com/paperless-ngx/paperless-ngx/pull/11666))
- Fixhancement: add error handling and retry when opening index [@shamoon](https://github.com/shamoon) ([#11731](https://github.com/paperless-ngx/paperless-ngx/pull/11731))
- Fix: fix recurring workflow to respect latest run time [@shamoon](https://github.com/shamoon) ([#11735](https://github.com/paperless-ngx/paperless-ngx/pull/11735))
</details>
## paperless-ngx 2.20.3 ## paperless-ngx 2.20.3
### Security
- Resolve [GHSA-7cq3-mhxq-w946](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-7cq3-mhxq-w946)
## paperless-ngx 2.20.2 ## paperless-ngx 2.20.2
### Security
- Resolve [GHSA-6653-vcx4-69mc](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-6653-vcx4-69mc)
- Resolve [GHSA-24x5-wp64-9fcc](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-24x5-wp64-9fcc)
### Features / Enhancements ### Features / Enhancements
- Tweakhancement: dim inactive users in users-groups list [@shamoon](https://github.com/shamoon) ([#11537](https://github.com/paperless-ngx/paperless-ngx/pull/11537)) - Tweakhancement: dim inactive users in users-groups list [@shamoon](https://github.com/shamoon) ([#11537](https://github.com/paperless-ngx/paperless-ngx/pull/11537))

View File

@@ -170,11 +170,18 @@ Available options are `postgresql` and `mariadb`.
 !!! note

-    A small pool is typically sufficient — for example, a size of 4.
-    Make sure your PostgreSQL server's max_connections setting is large enough to handle:
-    ```(Paperless workers + Celery workers) × pool size + safety margin```
-    For example, with 4 Paperless workers and 2 Celery workers, and a pool size of 4:
-    (4 + 2) × 4 + 10 = 34 connections required.
+    A pool of 8-10 connections per worker is typically sufficient.
+    If you encounter error messages such as `couldn't get a connection`
+    or database connection timeouts, you probably need to increase the pool size.

+!!! warning
+
+    Make sure your PostgreSQL `max_connections` setting is large enough to handle the connection pools:
+    `(NB_PAPERLESS_WORKERS + NB_CELERY_WORKERS) × POOL_SIZE + SAFETY_MARGIN`. For example, with
+    4 Paperless workers and 2 Celery workers, and a pool size of 8: `(4 + 2) × 8 + 10 = 58`,
+    so `max_connections = 60` (or even more) is appropriate.
+
+    This assumes only Paperless-ngx connects to your PostgreSQL instance. If you have other applications,
+    you should increase `max_connections` accordingly.

 #### [`PAPERLESS_DB_READ_CACHE_ENABLED=<bool>`](#PAPERLESS_DB_READ_CACHE_ENABLED) {#PAPERLESS_DB_READ_CACHE_ENABLED}
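The arithmetic in the updated warning is easy to sanity-check. A minimal sketch, where the variable names are illustrative (they are not Paperless settings); only the formula comes from the documentation:

```python
# Connection budget from the updated docs note.
paperless_workers = 4
celery_workers = 2
pool_size = 8
safety_margin = 10

required = (paperless_workers + celery_workers) * pool_size + safety_margin
print(required)  # 58 -> round max_connections up to 60 or more
```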

View File

@@ -1,6 +1,6 @@
[project] [project]
name = "paperless-ngx" name = "paperless-ngx"
version = "2.20.3" version = "2.20.4"
description = "A community-supported supercharged document management system: scan, index and archive all your physical documents" description = "A community-supported supercharged document management system: scan, index and archive all your physical documents"
readme = "README.md" readme = "README.md"
requires-python = ">=3.10" requires-python = ">=3.10"

View File

@@ -1,6 +1,6 @@
{ {
"name": "paperless-ngx-ui", "name": "paperless-ngx-ui",
"version": "2.20.3", "version": "2.20.4",
"scripts": { "scripts": {
"preinstall": "npx only-allow pnpm", "preinstall": "npx only-allow pnpm",
"ng": "ng", "ng": "ng",

View File

@@ -252,7 +252,7 @@ describe('WorkflowEditDialogComponent', () => {
     expect(component.object.actions.length).toEqual(2)
   })

-  it('should update order and remove ids from actions on drag n drop', () => {
+  it('should update order on drag n drop', () => {
     const action1 = workflow.actions[0]
     const action2 = workflow.actions[1]
     component.object = workflow
@@ -261,8 +261,6 @@ describe('WorkflowEditDialogComponent', () => {
       WorkflowAction[]
     >)
     expect(component.object.actions).toEqual([action2, action1])
-    expect(action1.id).toBeNull()
-    expect(action2.id).toBeNull()
   })

   it('should not include auto matching in algorithms', () => {

View File

@@ -1283,11 +1283,6 @@ export class WorkflowEditDialogComponent
     const actionField = this.actionFields.at(event.previousIndex)
     this.actionFields.removeAt(event.previousIndex)
     this.actionFields.insert(event.currentIndex, actionField)
-    // removing id will effectively re-create the actions in this order
-    this.object.actions.forEach((a) => (a.id = null))
-    this.actionFields.controls.forEach((c) =>
-      c.get('id').setValue(null, { emitEvent: false })
-    )
   }

   save(): void {

View File

@@ -6,7 +6,7 @@ export const environment = {
   apiVersion: '9', // match src/paperless/settings.py
   appTitle: 'Paperless-ngx',
   tag: 'prod',
-  version: '2.20.3',
+  version: '2.20.4',
   webSocketHost: window.location.host,
   webSocketProtocol: window.location.protocol == 'https:' ? 'wss:' : 'ws:',
   webSocketBaseUrl: base_url.pathname + 'ws/',

View File

@@ -0,0 +1,28 @@
+# Generated by Django 5.2.7 on 2026-01-14 16:53
+
+from django.db import migrations
+from django.db import models
+from django.db.models import F
+
+
+def populate_action_order(apps, schema_editor):
+    WorkflowAction = apps.get_model("documents", "WorkflowAction")
+    WorkflowAction.objects.all().update(order=F("id"))
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("documents", "1075_alter_paperlesstask_task_name"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="workflowaction",
+            name="order",
+            field=models.PositiveIntegerField(default=0, verbose_name="order"),
+        ),
+        migrations.RunPython(
+            populate_action_order,
+            reverse_code=migrations.RunPython.noop,
+        ),
+    ]
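The backfill uses an `F()` expression, so the database copies each row's primary key into the new `order` column in a single UPDATE and the legacy id-based ordering is preserved exactly. A sketch of the same idea outside a migration, assuming a configured Django project; the direct model import is illustrative (inside the migration the historical model must come from `apps.get_model()` instead):

```python
# Sketch only: one UPDATE statement, no Python-side loop.
from django.db.models import F

from documents.models import WorkflowAction  # illustrative import

WorkflowAction.objects.update(order=F("id"))
```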

View File

@@ -1295,6 +1295,8 @@ class WorkflowAction(models.Model):
         default=WorkflowActionType.ASSIGNMENT,
     )

+    order = models.PositiveIntegerField(_("order"), default=0)
+
     assign_title = models.TextField(
         _("assign title"),
         null=True,

View File

@@ -2577,7 +2577,8 @@ class WorkflowSerializer(serializers.ModelSerializer):
                 set_triggers.append(trigger_instance)

         if actions is not None and actions is not serializers.empty:
-            for action in actions:
+            for index, action in enumerate(actions):
+                action["order"] = index
                 assign_tags = action.pop("assign_tags", None)
                 assign_view_users = action.pop("assign_view_users", None)
                 assign_view_groups = action.pop("assign_view_groups", None)
@@ -2704,6 +2705,16 @@ class WorkflowSerializer(serializers.ModelSerializer):
         return instance

+    def to_representation(self, instance):
+        data = super().to_representation(instance)
+        actions = instance.actions.order_by("order", "pk")
+        data["actions"] = WorkflowActionSerializer(
+            actions,
+            many=True,
+            context=self.context,
+        ).data
+        return data
+

 class TrashSerializer(SerializerWithPerms):
     documents = serializers.ListField(
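Both the serializer and the workflow runner sort by `("order", "pk")`, so the primary key acts as a deterministic tie-breaker whenever two actions end up with the same `order` value. A plain-Python illustration of that sort key (the records are stand-ins, not real model instances):

```python
actions = [
    {"pk": 7, "order": 1},
    {"pk": 3, "order": 0},
    {"pk": 5, "order": 1},
]

# Same rule as order_by("order", "pk"): order first, pk breaks ties.
ordered = sorted(actions, key=lambda a: (a["order"], a["pk"]))
print([a["pk"] for a in ordered])  # [3, 5, 7]
```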

View File

@@ -421,7 +421,15 @@ def update_filename_and_move_files(
             return
         instance = instance.document

-    def validate_move(instance, old_path: Path, new_path: Path):
+    def validate_move(instance, old_path: Path, new_path: Path, root: Path):
+        if not new_path.is_relative_to(root):
+            msg = (
+                f"Document {instance!s}: Refusing to move file outside root {root}: "
+                f"{new_path}."
+            )
+            logger.warning(msg)
+            raise CannotMoveFilesException(msg)
+
         if not old_path.is_file():
             # Can't do anything if the old file does not exist anymore.
             msg = f"Document {instance!s}: File {old_path} doesn't exist."
@@ -510,12 +518,22 @@ def update_filename_and_move_files(
                 return

             if move_original:
-                validate_move(instance, old_source_path, instance.source_path)
+                validate_move(
+                    instance,
+                    old_source_path,
+                    instance.source_path,
+                    settings.ORIGINALS_DIR,
+                )
                 create_source_path_directory(instance.source_path)
                 shutil.move(old_source_path, instance.source_path)

             if move_archive:
-                validate_move(instance, old_archive_path, instance.archive_path)
+                validate_move(
+                    instance,
+                    old_archive_path,
+                    instance.archive_path,
+                    settings.ARCHIVE_DIR,
+                )
                 create_source_path_directory(instance.archive_path)
                 shutil.move(old_archive_path, instance.archive_path)
@@ -763,7 +781,7 @@ def run_workflows(
         if matching.document_matches_workflow(document, workflow, trigger_type):
             action: WorkflowAction
-            for action in workflow.actions.all():
+            for action in workflow.actions.order_by("order", "pk"):
                 message = f"Applying {action} from {workflow}"
                 if not use_overrides:
                     logger.info(message, extra={"group": logging_group})
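The new containment guard is built on `Path.is_relative_to()` (Python 3.9+). One subtlety worth knowing: the check is purely lexical, so a path containing `..` segments only fails it after resolution. A small, runnable demonstration with made-up paths:

```python
from pathlib import Path

root = Path("/data/originals")

print(Path("/data/originals/2026/doc.pdf").is_relative_to(root))  # True
print(Path("/tmp/proof").is_relative_to(root))                    # False

# Lexical check: an escaping path still "looks" contained until resolved.
escaping = Path("/data/originals/../../tmp/proof")
print(escaping.is_relative_to(root))            # True (lexically)
print(escaping.resolve().is_relative_to(root))  # False once resolved
```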

View File

@@ -262,6 +262,17 @@ def get_custom_fields_context(
     return field_data


+def _is_safe_relative_path(value: str) -> bool:
+    if value == "":
+        return True
+
+    path = PurePath(value)
+    if path.is_absolute() or path.drive:
+        return False
+
+    return ".." not in path.parts
+
+
 def validate_filepath_template_and_render(
     template_string: str,
     document: Document | None = None,
@@ -309,6 +320,12 @@ def validate_filepath_template_and_render(
         )

         rendered_template = template.render(context)

+        if not _is_safe_relative_path(rendered_template):
+            logger.warning(
+                "Template rendered an unsafe path (absolute or containing traversal).",
+            )
+            return None
+
         # We're good!
         return rendered_template
     except UndefinedError:
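The helper is self-contained, so its behavior can be exercised directly. Restated here with a few probes (the sample paths are made up):

```python
from pathlib import PurePath

# Restated from the diff above.
def _is_safe_relative_path(value: str) -> bool:
    if value == "":
        return True
    path = PurePath(value)
    if path.is_absolute() or path.drive:
        return False
    return ".." not in path.parts

print(_is_safe_relative_path("invoices/2026/acme.pdf"))    # True
print(_is_safe_relative_path("../../../../../tmp/proof"))  # False
print(_is_safe_relative_path("/etc/passwd"))               # False
print(_is_safe_relative_path(""))                          # True (empty render)
```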

View File

@@ -219,6 +219,30 @@ class TestApiStoragePaths(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertEqual(StoragePath.objects.count(), 1)

+    def test_api_create_storage_path_rejects_traversal(self):
+        """
+        GIVEN:
+            - API request to create a storage path
+            - Storage path attempts directory traversal
+        WHEN:
+            - API is called
+        THEN:
+            - Correct HTTP 400 response
+            - No storage path is created
+        """
+        response = self.client.post(
+            self.ENDPOINT,
+            json.dumps(
+                {
+                    "name": "Traversal path",
+                    "path": "../../../../../tmp/proof",
+                },
+            ),
+            content_type="application/json",
+        )
+        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+        self.assertEqual(StoragePath.objects.count(), 1)
+
     def test_api_storage_path_placeholders(self):
         """
         GIVEN:

View File

@@ -20,9 +20,6 @@ def get_workflows_for_trigger(
     wrap it in a list; otherwise fetch enabled workflows for the trigger with
     the prefetches used by the runner.
     """
-    if workflow_to_run is not None:
-        return [workflow_to_run]
-
     annotated_actions = (
         WorkflowAction.objects.select_related(
             "assign_correspondent",
@@ -105,10 +102,25 @@ def get_workflows_for_trigger(
         )
     )

+    action_prefetch = Prefetch(
+        "actions",
+        queryset=annotated_actions.order_by("order", "pk"),
+    )
+
+    if workflow_to_run is not None:
+        return (
+            Workflow.objects.filter(pk=workflow_to_run.pk)
+            .prefetch_related(
+                action_prefetch,
+                "triggers",
+            )
+            .distinct()
+        )
+
     return (
         Workflow.objects.filter(enabled=True, triggers__type=trigger_type)
         .prefetch_related(
-            Prefetch("actions", queryset=annotated_actions),
+            action_prefetch,
             "triggers",
         )
         .order_by("order")
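Hoisting the `Prefetch` into `action_prefetch` means both code paths (the single-workflow case and the enabled-workflows query) fetch actions with the same `("order", "pk")` ordering. A minimal sketch of the pattern, assuming a configured Django project; the model imports are illustrative:

```python
# A Prefetch object attaches an ordered queryset to the relation, so a
# later workflow.actions.all() returns rows in that order from the
# prefetch cache, without issuing extra per-workflow queries.
from django.db.models import Prefetch

from documents.models import Workflow, WorkflowAction  # illustrative

action_prefetch = Prefetch(
    "actions",
    queryset=WorkflowAction.objects.order_by("order", "pk"),
)
workflows = Workflow.objects.filter(enabled=True).prefetch_related(
    action_prefetch,
    "triggers",
)
```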

View File

@@ -1,6 +1,6 @@
from typing import Final from typing import Final
__version__: Final[tuple[int, int, int]] = (2, 20, 3) __version__: Final[tuple[int, int, int]] = (2, 20, 4)
# Version string like X.Y.Z # Version string like X.Y.Z
__full_version_str__: Final[str] = ".".join(map(str, __version__)) __full_version_str__: Final[str] = ".".join(map(str, __version__))
# Version string like X.Y # Version string like X.Y
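The tuple is the single source of truth and the strings are derived from it, which is why only one line changes per release. A quick check of the derivation; the X.Y slice is my own illustration of the commented "X.Y" form, not the module's exact code:

```python
from typing import Final

__version__: Final[tuple[int, int, int]] = (2, 20, 4)

print(".".join(map(str, __version__)))      # 2.20.4  (X.Y.Z)
print(".".join(map(str, __version__[:2])))  # 2.20    (X.Y, illustrative)
```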

uv.lock generated
View File

@@ -210,23 +210,22 @@ dependencies = [
     { name = "isodate", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/44/7b/8115cd713e2caa5e44def85f2b7ebd02a74ae74d7113ba20bdd41fd6dd80/azure_ai_documentintelligence-1.0.2.tar.gz", hash = "sha256:4d75a2513f2839365ebabc0e0e1772f5601b3a8c9a71e75da12440da13b63484", size = 170940 }
+sdist = { url = "https://files.pythonhosted.org/packages/44/7b/8115cd713e2caa5e44def85f2b7ebd02a74ae74d7113ba20bdd41fd6dd80/azure_ai_documentintelligence-1.0.2.tar.gz", hash = "sha256:4d75a2513f2839365ebabc0e0e1772f5601b3a8c9a71e75da12440da13b63484", size = 170940, upload-time = "2025-03-27T02:46:20.606Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/d9/75/c9ec040f23082f54ffb1977ff8f364c2d21c79a640a13d1c1809e7fd6b1a/azure_ai_documentintelligence-1.0.2-py3-none-any.whl", hash = "sha256:e1fb446abbdeccc9759d897898a0fe13141ed29f9ad11fc705f951925822ed59", size = 106005 },
+    { url = "https://files.pythonhosted.org/packages/d9/75/c9ec040f23082f54ffb1977ff8f364c2d21c79a640a13d1c1809e7fd6b1a/azure_ai_documentintelligence-1.0.2-py3-none-any.whl", hash = "sha256:e1fb446abbdeccc9759d897898a0fe13141ed29f9ad11fc705f951925822ed59", size = 106005, upload-time = "2025-03-27T02:46:22.356Z" },
 ]

 [[package]]
 name = "azure-core"
-version = "1.33.0"
+version = "1.38.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
-    { name = "six", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/75/aa/7c9db8edd626f1a7d99d09ef7926f6f4fb34d5f9fa00dc394afdfe8e2a80/azure_core-1.33.0.tar.gz", hash = "sha256:f367aa07b5e3005fec2c1e184b882b0b039910733907d001c20fb08ebb8c0eb9", size = 295633 }
+sdist = { url = "https://files.pythonhosted.org/packages/dc/1b/e503e08e755ea94e7d3419c9242315f888fc664211c90d032e40479022bf/azure_core-1.38.0.tar.gz", hash = "sha256:8194d2682245a3e4e3151a667c686464c3786fed7918b394d035bdcd61bb5993", size = 363033, upload-time = "2026-01-12T17:03:05.535Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/07/b7/76b7e144aa53bd206bf1ce34fa75350472c3f69bf30e5c8c18bc9881035d/azure_core-1.33.0-py3-none-any.whl", hash = "sha256:9b5b6d0223a1d38c37500e6971118c1e0f13f54951e6893968b38910bc9cda8f", size = 207071 },
+    { url = "https://files.pythonhosted.org/packages/fc/d8/b8fcba9464f02b121f39de2db2bf57f0b216fe11d014513d666e8634380d/azure_core-1.38.0-py3-none-any.whl", hash = "sha256:ab0c9b2cd71fecb1842d52c965c95285d3cfb38902f6766e4a471f1cd8905335", size = 217825, upload-time = "2026-01-12T17:03:07.291Z" },
 ]

 [[package]]
@@ -1849,9 +1848,9 @@ wheels = [
 name = "isodate"
 version = "0.7.2"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/54/4d/e940025e2ce31a8ce1202635910747e5a87cc3a6a6bb2d00973375014749/isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6", size = 29705 }
+sdist = { url = "https://files.pythonhosted.org/packages/54/4d/e940025e2ce31a8ce1202635910747e5a87cc3a6a6bb2d00973375014749/isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6", size = 29705, upload-time = "2024-10-08T23:04:11.5Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320 },
+    { url = "https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320, upload-time = "2024-10-08T23:04:09.501Z" },
 ]

 [[package]]
@@ -2961,7 +2960,7 @@ wheels = [
 [[package]]
 name = "paperless-ngx"
-version = "2.20.3"
+version = "2.20.4"
 source = { virtual = "." }
 dependencies = [
     { name = "azure-ai-documentintelligence", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },