Compare commits

..

1 Commit

Author SHA1 Message Date
dependabot[bot]
7a1083dbde Chore(deps): Bump actions/checkout from 5 to 6 in the actions group
Bumps the actions group with 1 update: [actions/checkout](https://github.com/actions/checkout).


Updates `actions/checkout` from 5 to 6
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/checkout/compare/v5...v6)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-version: '6'
  dependency-type: direct:production
  update-type: version-update:semver-major
  dependency-group: actions
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-05 22:58:53 +00:00
25 changed files with 1327 additions and 1851 deletions

View File

@@ -1054,22 +1054,12 @@ should be a valid crontab(5) expression describing when to run.
#### [`PAPERLESS_SANITY_TASK_CRON=<cron expression>`](#PAPERLESS_SANITY_TASK_CRON) {#PAPERLESS_SANITY_TASK_CRON}
: Configures the scheduled sanity checker frequency. The value should be a
valid crontab(5) expression describing when to run.
: Configures the scheduled sanity checker frequency.
: If set to the string "disable", the sanity checker will not run automatically.
Defaults to `30 0 * * sun` or Sunday at 30 minutes past midnight.
#### [`PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON=<cron expression>`](#PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON) {#PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON}
: Configures the scheduled workflow check frequency. The value should be a
valid crontab(5) expression describing when to run.
: If set to the string "disable", scheduled workflows will not run.
Defaults to `5 */1 * * *` or every hour at 5 minutes past the hour.
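As a hedged illustration (assuming these values are consumed as ordinary five-field crontab expressions; Paperless-ngx schedules its periodic tasks with Celery, as the imports later in this diff show), the two documented defaults above correspond to:

```python
from celery.schedules import crontab

# Illustrative only, not the actual Paperless-ngx settings code.
sanity_default = crontab(minute=30, hour=0, day_of_week="sun")  # "30 0 * * sun"
scheduled_workflow_default = crontab(minute=5, hour="*/1")      # "5 */1 * * *"
```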
#### [`PAPERLESS_ENABLE_COMPRESSION=<bool>`](#PAPERLESS_ENABLE_COMPRESSION) {#PAPERLESS_ENABLE_COMPRESSION}
: Enables compression of the responses from the webserver.
@@ -1281,6 +1271,30 @@ within your documents.
Defaults to false.
## Workflow webhooks
#### [`PAPERLESS_WEBHOOKS_ALLOWED_SCHEMES=<str>`](#PAPERLESS_WEBHOOKS_ALLOWED_SCHEMES) {#PAPERLESS_WEBHOOKS_ALLOWED_SCHEMES}
: A comma-separated list of allowed schemes for webhooks. This setting
controls which URL schemes are permitted for webhook URLs.
Defaults to `http,https`.
#### [`PAPERLESS_WEBHOOKS_ALLOWED_PORTS=<str>`](#PAPERLESS_WEBHOOKS_ALLOWED_PORTS) {#PAPERLESS_WEBHOOKS_ALLOWED_PORTS}
: A comma-separated list of allowed ports for webhooks. This setting
controls which ports are permitted for webhook URLs. For example, if you
set this to `80,443`, webhooks will only be sent to URLs that use these
ports.
Defaults to empty list, which allows all ports.
#### [`PAPERLESS_WEBHOOKS_ALLOW_INTERNAL_REQUESTS=<bool>`](#PAPERLESS_WEBHOOKS_ALLOW_INTERNAL_REQUESTS) {#PAPERLESS_WEBHOOKS_ALLOW_INTERNAL_REQUESTS}
: If set to false, webhooks cannot be sent to internal URLs (e.g., localhost).
Defaults to true, which allows internal requests.
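A minimal sketch of how these three settings combine when a webhook URL is validated (it mirrors the scheme and port checks in `send_webhook` later in this diff; the names used here are illustrative):

```python
from urllib.parse import urlparse

ALLOWED_SCHEMES = {"http", "https"}   # PAPERLESS_WEBHOOKS_ALLOWED_SCHEMES
ALLOWED_PORTS: set[int] = set()       # PAPERLESS_WEBHOOKS_ALLOWED_PORTS (empty = any port)

def is_webhook_url_allowed(url: str) -> bool:
    p = urlparse(url)
    if p.scheme.lower() not in ALLOWED_SCHEMES or not p.hostname:
        return False
    # Fall back to the default port for the scheme when none is given.
    port = p.port or (443 if p.scheme == "https" else 80)
    return not ALLOWED_PORTS or port in ALLOWED_PORTS
```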
### Polling {#polling}
#### [`PAPERLESS_CONSUMER_POLLING=<num>`](#PAPERLESS_CONSUMER_POLLING) {#PAPERLESS_CONSUMER_POLLING}
@@ -1324,30 +1338,6 @@ consumers working on the same file. Configure this to prevent that.
Defaults to 0.5 seconds.
## Workflow webhooks
#### [`PAPERLESS_WEBHOOKS_ALLOWED_SCHEMES=<str>`](#PAPERLESS_WEBHOOKS_ALLOWED_SCHEMES) {#PAPERLESS_WEBHOOKS_ALLOWED_SCHEMES}
: A comma-separated list of allowed schemes for webhooks. This setting
controls which URL schemes are permitted for webhook URLs.
Defaults to `http,https`.
#### [`PAPERLESS_WEBHOOKS_ALLOWED_PORTS=<str>`](#PAPERLESS_WEBHOOKS_ALLOWED_PORTS) {#PAPERLESS_WEBHOOKS_ALLOWED_PORTS}
: A comma-separated list of allowed ports for webhooks. This setting
controls which ports are permitted for webhook URLs. For example, if you
set this to `80,443`, webhooks will only be sent to URLs that use these
ports.
Defaults to empty list, which allows all ports.
#### [`PAPERLESS_WEBHOOKS_ALLOW_INTERNAL_REQUESTS=<bool>`](#PAPERLESS_WEBHOOKS_ALLOW_INTERNAL_REQUESTS) {#PAPERLESS_WEBHOOKS_ALLOW_INTERNAL_REQUESTS}
: If set to false, webhooks cannot be sent to internal URLs (e.g., localhost).
Defaults to true, which allows internal requests.
## Incoming Mail {#incoming_mail}
### Email OAuth {#email_oauth}

View File

@@ -443,10 +443,6 @@ flowchart TD
'Updated'
trigger(s)"}
scheduled{"Documents
matching
trigger(s)"}
A[New Document] --> consumption
consumption --> |Yes| C[Workflow Actions Run]
consumption --> |No| D
@@ -459,11 +455,6 @@ flowchart TD
updated --> |Yes| J[Workflow Actions Run]
updated --> |No| K
J --> K[Document Saved]
L[Scheduled Task Check<br/>hourly at :05] --> M[Get All Scheduled Triggers]
M --> scheduled
scheduled --> |Yes| N[Workflow Actions Run]
scheduled --> |No| O[Document Saved]
N --> O
```
#### Filters {#workflow-trigger-filters}

View File

@@ -63,7 +63,6 @@ dependencies = [
"pyzbar~=0.1.9",
"rapidfuzz~=3.14.0",
"redis[hiredis]~=5.2.1",
"regex>=2025.9.18",
"scikit-learn~=1.7.0",
"setproctitle~=1.3.4",
"tika-client~=0.10.0",
@@ -84,7 +83,7 @@ optional-dependencies.postgres = [
"psycopg-pool==3.2.7",
]
optional-dependencies.webserver = [
"granian[uvloop]~=2.6.0",
"granian[uvloop]~=2.5.1",
]
[dependency-groups]

View File

@@ -5,14 +5,14 @@
<trans-unit id="ngb.alert.close" datatype="html">
<source>Close</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/alert/alert.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/alert/alert.ts</context>
<context context-type="linenumber">50</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.carousel.slide-number" datatype="html">
<source> Slide <x id="INTERPOLATION" equiv-text="ueryList&lt;NgbSli"/> of <x id="INTERPOLATION_1" equiv-text="EventSource = N"/> </source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/carousel/carousel.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/carousel/carousel.ts</context>
<context context-type="linenumber">131,135</context>
</context-group>
<note priority="1" from="description">Currently selected slide number read by screen reader</note>
@@ -20,212 +20,212 @@
<trans-unit id="ngb.carousel.previous" datatype="html">
<source>Previous</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/carousel/carousel.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/carousel/carousel.ts</context>
<context context-type="linenumber">157,159</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.carousel.next" datatype="html">
<source>Next</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/carousel/carousel.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/carousel/carousel.ts</context>
<context context-type="linenumber">198</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.datepicker.previous-month" datatype="html">
<source>Previous month</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/datepicker/datepicker-navigation.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/datepicker/datepicker-navigation.ts</context>
<context context-type="linenumber">83,85</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/datepicker/datepicker-navigation.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/datepicker/datepicker-navigation.ts</context>
<context context-type="linenumber">112</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.datepicker.next-month" datatype="html">
<source>Next month</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/datepicker/datepicker-navigation.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/datepicker/datepicker-navigation.ts</context>
<context context-type="linenumber">112</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/datepicker/datepicker-navigation.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/datepicker/datepicker-navigation.ts</context>
<context context-type="linenumber">112</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.timepicker.HH" datatype="html">
<source>HH</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.toast.close-aria" datatype="html">
<source>Close</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.datepicker.select-month" datatype="html">
<source>Select month</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.pagination.first" datatype="html">
<source>««</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.timepicker.hours" datatype="html">
<source>Hours</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.pagination.previous" datatype="html">
<source>«</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.timepicker.MM" datatype="html">
<source>MM</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.pagination.next" datatype="html">
<source>»</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.datepicker.select-year" datatype="html">
<source>Select year</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.timepicker.minutes" datatype="html">
<source>Minutes</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.pagination.last" datatype="html">
<source>»»</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.pagination.first-aria" datatype="html">
<source>First</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.timepicker.increment-hours" datatype="html">
<source>Increment hours</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.pagination.previous-aria" datatype="html">
<source>Previous</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.timepicker.decrement-hours" datatype="html">
<source>Decrement hours</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.pagination.next-aria" datatype="html">
<source>Next</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.timepicker.increment-minutes" datatype="html">
<source>Increment minutes</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.pagination.last-aria" datatype="html">
<source>Last</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.timepicker.decrement-minutes" datatype="html">
<source>Decrement minutes</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.timepicker.SS" datatype="html">
<source>SS</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.timepicker.seconds" datatype="html">
<source>Seconds</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.timepicker.increment-seconds" datatype="html">
<source>Increment seconds</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.timepicker.decrement-seconds" datatype="html">
<source>Decrement seconds</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
<trans-unit id="ngb.timepicker.PM" datatype="html">
<source><x id="INTERPOLATION"/></source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/ngb-config.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
</trans-unit>
@@ -233,7 +233,7 @@
<source><x id="INTERPOLATION" equiv-text="barConfig);
pu"/></source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/progressbar/progressbar.ts</context>
<context context-type="sourcefile">node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/progressbar/progressbar.ts</context>
<context context-type="linenumber">41,42</context>
</context-group>
</trans-unit>
@@ -816,7 +816,7 @@
<source>Jump to bottom</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/admin/logs/logs.component.html</context>
<context context-type="linenumber">62</context>
<context context-type="linenumber">60</context>
</context-group>
</trans-unit>
<trans-unit id="1255048712725285892" datatype="html">

View File

@@ -12,14 +12,14 @@
"private": true,
"dependencies": {
"@angular/cdk": "^20.2.13",
"@angular/common": "~20.3.15",
"@angular/compiler": "~20.3.15",
"@angular/core": "~20.3.15",
"@angular/forms": "~20.3.15",
"@angular/localize": "~20.3.15",
"@angular/platform-browser": "~20.3.15",
"@angular/platform-browser-dynamic": "~20.3.15",
"@angular/router": "~20.3.15",
"@angular/common": "~20.3.14",
"@angular/compiler": "~20.3.12",
"@angular/core": "~20.3.12",
"@angular/forms": "~20.3.12",
"@angular/localize": "~20.3.12",
"@angular/platform-browser": "~20.3.12",
"@angular/platform-browser-dynamic": "~20.3.12",
"@angular/router": "~20.3.12",
"@ng-bootstrap/ng-bootstrap": "^19.0.1",
"@ng-select/ng-select": "^20.7.0",
"@ngneat/dirty-check-forms": "^3.0.3",
@@ -42,23 +42,23 @@
"devDependencies": {
"@angular-builders/custom-webpack": "^20.0.0",
"@angular-builders/jest": "^20.0.0",
"@angular-devkit/core": "^20.3.13",
"@angular-devkit/schematics": "^20.3.13",
"@angular-devkit/core": "^20.3.10",
"@angular-devkit/schematics": "^20.3.10",
"@angular-eslint/builder": "20.6.0",
"@angular-eslint/eslint-plugin": "20.6.0",
"@angular-eslint/eslint-plugin-template": "20.6.0",
"@angular-eslint/schematics": "20.6.0",
"@angular-eslint/template-parser": "20.6.0",
"@angular/build": "^20.3.13",
"@angular/cli": "~20.3.13",
"@angular/compiler-cli": "~20.3.15",
"@angular/build": "^20.3.10",
"@angular/cli": "~20.3.10",
"@angular/compiler-cli": "~20.3.12",
"@codecov/webpack-plugin": "^1.9.1",
"@playwright/test": "^1.57.0",
"@playwright/test": "^1.56.1",
"@types/jest": "^30.0.0",
"@types/node": "^24.10.1",
"@typescript-eslint/eslint-plugin": "^8.48.1",
"@typescript-eslint/parser": "^8.48.1",
"@typescript-eslint/utils": "^8.48.1",
"@typescript-eslint/eslint-plugin": "^8.47.0",
"@typescript-eslint/parser": "^8.47.0",
"@typescript-eslint/utils": "^8.47.0",
"eslint": "^9.39.1",
"jest": "30.2.0",
"jest-environment-jsdom": "^30.2.0",
@@ -68,7 +68,7 @@
"prettier-plugin-organize-imports": "^4.3.0",
"ts-node": "~10.9.1",
"typescript": "^5.8.3",
"webpack": "^5.103.0"
"webpack": "^5.102.1"
},
"packageManager": "pnpm@10.17.1",
"pnpm": {

src-ui/pnpm-lock.yaml (generated, 837 changed lines)

File diff suppressed because it is too large.

View File

@@ -48,9 +48,7 @@
<ng-container i18n>Loading...</ng-container>
</div>
} @else {
@for (log of logs; track log) {
<p class="m-0 p-0" [ngClass]="'log-entry-' + log.level">{{log.message}}</p>
}
<p *ngFor="let log of logs" class="m-0 p-0" [ngClass]="'log-entry-' + log.level">{{log.message}}</p>
}
</div>
<button

View File

@@ -20,7 +20,6 @@ from documents.models import Tag
from documents.models import Workflow
from documents.models import WorkflowTrigger
from documents.permissions import get_objects_for_user_owner_aware
from documents.regex import safe_regex_search
if TYPE_CHECKING:
from django.db.models import QuerySet
@@ -153,7 +152,7 @@ def match_storage_paths(document: Document, classifier: DocumentClassifier, user
def matches(matching_model: MatchingModel, document: Document):
search_flags = 0
search_kwargs = {}
document_content = document.content
@@ -162,18 +161,14 @@ def matches(matching_model: MatchingModel, document: Document):
return False
if matching_model.is_insensitive:
search_flags = re.IGNORECASE
search_kwargs = {"flags": re.IGNORECASE}
if matching_model.matching_algorithm == MatchingModel.MATCH_NONE:
return False
elif matching_model.matching_algorithm == MatchingModel.MATCH_ALL:
for word in _split_match(matching_model):
search_result = re.search(
rf"\b{word}\b",
document_content,
flags=search_flags,
)
search_result = re.search(rf"\b{word}\b", document_content, **search_kwargs)
if not search_result:
return False
log_reason(
@@ -185,7 +180,7 @@ def matches(matching_model: MatchingModel, document: Document):
elif matching_model.matching_algorithm == MatchingModel.MATCH_ANY:
for word in _split_match(matching_model):
if re.search(rf"\b{word}\b", document_content, flags=search_flags):
if re.search(rf"\b{word}\b", document_content, **search_kwargs):
log_reason(matching_model, document, f"it contains this word: {word}")
return True
return False
@@ -195,7 +190,7 @@ def matches(matching_model: MatchingModel, document: Document):
re.search(
rf"\b{re.escape(matching_model.match)}\b",
document_content,
flags=search_flags,
**search_kwargs,
),
)
if result:
@@ -207,11 +202,16 @@ def matches(matching_model: MatchingModel, document: Document):
return result
elif matching_model.matching_algorithm == MatchingModel.MATCH_REGEX:
match = safe_regex_search(
matching_model.match,
document_content,
flags=search_flags,
)
try:
match = re.search(
re.compile(matching_model.match, **search_kwargs),
document_content,
)
except re.error:
logger.error(
f"Error while processing regular expression {matching_model.match}",
)
return False
if match:
log_reason(
matching_model,
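The hunk above swaps the timeout-guarded helper for a plain `re` compile-and-search, treating an invalid user-supplied pattern as "no match". A minimal standalone sketch of that pattern (function and logger names are illustrative):

```python
import logging
import re

logger = logging.getLogger("paperless.matching")

def regex_matches(pattern: str, content: str, *, insensitive: bool = False) -> bool:
    """Return True if the pattern matches; an invalid pattern counts as no match."""
    flags = re.IGNORECASE if insensitive else 0
    try:
        return re.search(pattern, content, flags=flags) is not None
    except re.error:
        logger.error(f"Error while processing regular expression {pattern}")
        return False
```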

View File

@@ -61,22 +61,21 @@ def get_groups_with_only_permission(obj, codename):
return Group.objects.filter(id__in=group_object_perm_group_ids).distinct()
def set_permissions_for_object(permissions: dict, object, *, merge: bool = False):
def set_permissions_for_object(permissions: list[str], object, *, merge: bool = False):
"""
Set permissions for an object. The permissions are given as a mapping of actions
to a dict of user / group id lists, e.g.
{"view": {"users": [1], "groups": [2]}, "change": {"users": [], "groups": []}}.
Set permissions for an object. The permissions are given as a list of strings
in the format "action_modelname", e.g. "view_document".
If merge is True, the permissions are merged with the existing permissions and
no users or groups are removed. If False, the permissions are set to exactly
the given list of users and groups.
"""
for action, entry in permissions.items():
for action in permissions:
permission = f"{action}_{object.__class__.__name__.lower()}"
if "users" in entry:
if "users" in permissions[action]:
# users
users_to_add = User.objects.filter(id__in=entry["users"])
users_to_add = User.objects.filter(id__in=permissions[action]["users"])
users_to_remove = (
get_users_with_perms(
object,
@@ -86,12 +85,12 @@ def set_permissions_for_object(permissions: dict, object, *, merge: bool = False
if not merge
else User.objects.none()
)
if users_to_add.exists() and users_to_remove.exists():
if len(users_to_add) > 0 and len(users_to_remove) > 0:
users_to_remove = users_to_remove.exclude(id__in=users_to_add)
if users_to_remove.exists():
if len(users_to_remove) > 0:
for user in users_to_remove:
remove_perm(permission, user, object)
if users_to_add.exists():
if len(users_to_add) > 0:
for user in users_to_add:
assign_perm(permission, user, object)
if action == "change":
@@ -101,9 +100,9 @@ def set_permissions_for_object(permissions: dict, object, *, merge: bool = False
user,
object,
)
if "groups" in entry:
if "groups" in permissions[action]:
# groups
groups_to_add = Group.objects.filter(id__in=entry["groups"])
groups_to_add = Group.objects.filter(id__in=permissions[action]["groups"])
groups_to_remove = (
get_groups_with_only_permission(
object,
@@ -112,12 +111,12 @@ def set_permissions_for_object(permissions: dict, object, *, merge: bool = False
if not merge
else Group.objects.none()
)
if groups_to_add.exists() and groups_to_remove.exists():
if len(groups_to_add) > 0 and len(groups_to_remove) > 0:
groups_to_remove = groups_to_remove.exclude(id__in=groups_to_add)
if groups_to_remove.exists():
if len(groups_to_remove) > 0:
for group in groups_to_remove:
remove_perm(permission, group, object)
if groups_to_add.exists():
if len(groups_to_add) > 0:
for group in groups_to_add:
assign_perm(permission, group, object)
if action == "change":
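For reference, a hedged usage sketch of the mapping form of this helper, following the structure given in its docstring (the document object and the user/group IDs are hypothetical):

```python
# Hypothetical call: grant view access to user 1 and group 2 on a document,
# keeping any permissions that already exist (merge=True).
set_permissions_for_object(
    permissions={
        "view": {"users": [1], "groups": [2]},
        "change": {"users": [], "groups": []},
    },
    object=document,
    merge=True,
)
```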

View File

@@ -1,50 +0,0 @@
from __future__ import annotations
import logging
import textwrap
import regex
from django.conf import settings
logger = logging.getLogger("paperless.regex")
REGEX_TIMEOUT_SECONDS: float = getattr(settings, "MATCH_REGEX_TIMEOUT_SECONDS", 0.1)
def validate_regex_pattern(pattern: str) -> None:
"""
Validate user provided regex for basic compile errors.
Raises ValueError on validation failure.
"""
try:
regex.compile(pattern)
except regex.error as exc:
raise ValueError(exc.msg) from exc
def safe_regex_search(pattern: str, text: str, *, flags: int = 0):
"""
Run a regex search with a timeout. Returns a match object or None.
Validation errors and timeouts are logged and treated as no match.
"""
try:
validate_regex_pattern(pattern)
compiled = regex.compile(pattern, flags=flags)
except (regex.error, ValueError) as exc:
logger.error(
"Error while processing regular expression %s: %s",
textwrap.shorten(pattern, width=80, placeholder=""),
exc,
)
return None
try:
return compiled.search(text, timeout=REGEX_TIMEOUT_SECONDS)
except TimeoutError:
logger.warning(
"Regular expression matching timed out for pattern %s",
textwrap.shorten(pattern, width=80, placeholder=""),
)
return None
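A short usage sketch for the `safe_regex_search` helper shown above (the pattern and text are made up; the caller in `matching.py` passes standard `re` flags in the same way):

```python
import re

# The helper returns a match object or None and never raises:
# compile errors and timeouts are logged and treated as no match.
match = safe_regex_search(r"\binvoice\s+\d+\b", "See invoice 42 attached", flags=re.IGNORECASE)
if match:
    print(match.group(0))  # -> "invoice 42"
```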

View File

@@ -21,7 +21,6 @@ from django.core.validators import MaxLengthValidator
from django.core.validators import RegexValidator
from django.core.validators import integer_validator
from django.db.models import Count
from django.db.models.functions import Lower
from django.utils.crypto import get_random_string
from django.utils.dateparse import parse_datetime
from django.utils.text import slugify
@@ -39,7 +38,6 @@ from guardian.utils import get_user_obj_perms_model
from rest_framework import fields
from rest_framework import serializers
from rest_framework.fields import SerializerMethodField
from rest_framework.filters import OrderingFilter
if settings.AUDIT_LOG_ENABLED:
from auditlog.context import set_actor
@@ -71,7 +69,6 @@ from documents.parsers import is_mime_type_supported
from documents.permissions import get_document_count_filter_for_user
from documents.permissions import get_groups_with_only_permission
from documents.permissions import set_permissions_for_object
from documents.regex import validate_regex_pattern
from documents.templating.filepath import validate_filepath_template_and_render
from documents.templating.utils import convert_format_str_to_template_format
from documents.validators import uri_validator
@@ -142,10 +139,10 @@ class MatchingModelSerializer(serializers.ModelSerializer):
and self.initial_data["matching_algorithm"] == MatchingModel.MATCH_REGEX
):
try:
validate_regex_pattern(match)
except ValueError as e:
re.compile(match)
except re.error as e:
raise serializers.ValidationError(
_("Invalid regular expression: %(error)s") % {"error": str(e)},
_("Invalid regular expression: %(error)s") % {"error": str(e.msg)},
)
return match
@@ -578,33 +575,16 @@ class TagSerializer(MatchingModelSerializer, OwnedObjectSerializer):
)
def get_children(self, obj):
filter_q = self.context.get("document_count_filter")
request = self.context.get("request")
if filter_q is None:
request = self.context.get("request")
user = getattr(request, "user", None) if request else None
filter_q = get_document_count_filter_for_user(user)
self.context["document_count_filter"] = filter_q
children_queryset = (
serializer = TagSerializer(
obj.get_children_queryset()
.select_related("owner")
.annotate(document_count=Count("documents", filter=filter_q))
)
view = self.context.get("view")
ordering = (
OrderingFilter().get_ordering(request, children_queryset, view)
if request and view
else None
)
ordering = ordering or (Lower("name"),)
children_queryset = children_queryset.order_by(*ordering)
serializer = TagSerializer(
children_queryset,
.annotate(document_count=Count("documents", filter=filter_q)),
many=True,
user=self.user,
full_perms=self.full_perms,
all_fields=self.all_fields,
context=self.context,
)
return serializer.data

View File

@@ -1,10 +1,14 @@
from __future__ import annotations
import ipaddress
import logging
import shutil
import socket
from pathlib import Path
from typing import TYPE_CHECKING
from urllib.parse import urlparse
import httpx
from celery import shared_task
from celery import states
from celery.signals import before_task_publish
@@ -23,16 +27,20 @@ from django.db.models import Q
from django.dispatch import receiver
from django.utils import timezone
from filelock import FileLock
from guardian.shortcuts import remove_perm
from documents import matching
from documents.caching import clear_document_caches
from documents.file_handling import create_source_path_directory
from documents.file_handling import delete_empty_directories
from documents.file_handling import generate_filename
from documents.file_handling import generate_unique_filename
from documents.mail import EmailAttachment
from documents.mail import send_email
from documents.models import Correspondent
from documents.models import CustomField
from documents.models import CustomFieldInstance
from documents.models import Document
from documents.models import DocumentType
from documents.models import MatchingModel
from documents.models import PaperlessTask
from documents.models import SavedView
@@ -43,15 +51,8 @@ from documents.models import WorkflowAction
from documents.models import WorkflowRun
from documents.models import WorkflowTrigger
from documents.permissions import get_objects_for_user_owner_aware
from documents.templating.utils import convert_format_str_to_template_format
from documents.workflows.actions import build_workflow_action_context
from documents.workflows.actions import execute_email_action
from documents.workflows.actions import execute_webhook_action
from documents.workflows.mutations import apply_assignment_to_document
from documents.workflows.mutations import apply_assignment_to_overrides
from documents.workflows.mutations import apply_removal_to_document
from documents.workflows.mutations import apply_removal_to_overrides
from documents.workflows.utils import get_workflows_for_trigger
from documents.permissions import set_permissions_for_object
from documents.templating.workflows import parse_w_workflow_placeholders
if TYPE_CHECKING:
from documents.classifier import DocumentClassifier
@@ -391,19 +392,6 @@ class CannotMoveFilesException(Exception):
pass
def _filename_template_uses_custom_fields(doc: Document) -> bool:
template = None
if doc.storage_path is not None:
template = doc.storage_path.path
elif settings.FILENAME_FORMAT is not None:
template = convert_format_str_to_template_format(settings.FILENAME_FORMAT)
if not template:
return False
return "custom_fields" in template
# should be disabled in /src/documents/management/commands/document_importer.py handle
@receiver(models.signals.post_save, sender=CustomFieldInstance, weak=False)
@receiver(models.signals.m2m_changed, sender=Document.tags.through, weak=False)
@@ -414,8 +402,6 @@ def update_filename_and_move_files(
**kwargs,
):
if isinstance(instance, CustomFieldInstance):
if not _filename_template_uses_custom_fields(instance.document):
return
instance = instance.document
def validate_move(instance, old_path: Path, new_path: Path):
@@ -453,47 +439,21 @@ def update_filename_and_move_files(
old_filename = instance.filename
old_source_path = instance.source_path
candidate_filename = generate_filename(instance)
candidate_source_path = (
settings.ORIGINALS_DIR / candidate_filename
).resolve()
if candidate_filename == Path(old_filename):
new_filename = Path(old_filename)
elif (
candidate_source_path.exists()
and candidate_source_path != old_source_path
):
# Only fall back to unique search when there is an actual conflict
new_filename = generate_unique_filename(instance)
else:
new_filename = candidate_filename
# Need to convert to string to be able to save it to the db
instance.filename = str(new_filename)
instance.filename = str(generate_unique_filename(instance))
move_original = old_filename != instance.filename
old_archive_filename = instance.archive_filename
old_archive_path = instance.archive_path
if instance.has_archive_version:
archive_candidate = generate_filename(instance, archive_filename=True)
archive_candidate_path = (
settings.ARCHIVE_DIR / archive_candidate
).resolve()
if archive_candidate == Path(old_archive_filename):
new_archive_filename = Path(old_archive_filename)
elif (
archive_candidate_path.exists()
and archive_candidate_path != old_archive_path
):
new_archive_filename = generate_unique_filename(
# Need to convert to string to be able to save it to the db
instance.archive_filename = str(
generate_unique_filename(
instance,
archive_filename=True,
)
else:
new_archive_filename = archive_candidate
instance.archive_filename = str(new_archive_filename)
),
)
move_archive = old_archive_filename != instance.archive_filename
else:
@@ -713,6 +673,92 @@ def run_workflows_updated(sender, document: Document, logging_group=None, **kwar
)
def _is_public_ip(ip: str) -> bool:
try:
obj = ipaddress.ip_address(ip)
return not (
obj.is_private
or obj.is_loopback
or obj.is_link_local
or obj.is_multicast
or obj.is_unspecified
)
except ValueError: # pragma: no cover
return False
def _resolve_first_ip(host: str) -> str | None:
try:
info = socket.getaddrinfo(host, None)
return info[0][4][0] if info else None
except Exception: # pragma: no cover
return None
@shared_task(
retry_backoff=True,
autoretry_for=(httpx.HTTPStatusError,),
max_retries=3,
throws=(httpx.HTTPError,),
)
def send_webhook(
url: str,
data: str | dict,
headers: dict,
files: dict,
*,
as_json: bool = False,
):
p = urlparse(url)
if p.scheme.lower() not in settings.WEBHOOKS_ALLOWED_SCHEMES or not p.hostname:
logger.warning("Webhook blocked: invalid scheme/hostname")
raise ValueError("Invalid URL scheme or hostname.")
port = p.port or (443 if p.scheme == "https" else 80)
if (
len(settings.WEBHOOKS_ALLOWED_PORTS) > 0
and port not in settings.WEBHOOKS_ALLOWED_PORTS
):
logger.warning("Webhook blocked: port not permitted")
raise ValueError("Destination port not permitted.")
ip = _resolve_first_ip(p.hostname)
if not ip or (
not _is_public_ip(ip) and not settings.WEBHOOKS_ALLOW_INTERNAL_REQUESTS
):
logger.warning("Webhook blocked: destination not allowed")
raise ValueError("Destination host is not allowed.")
try:
post_args = {
"url": url,
"headers": {
k: v for k, v in (headers or {}).items() if k.lower() != "host"
},
"files": files or None,
"timeout": 5.0,
"follow_redirects": False,
}
if as_json:
post_args["json"] = data
elif isinstance(data, dict):
post_args["data"] = data
else:
post_args["content"] = data
httpx.post(
**post_args,
).raise_for_status()
logger.info(
f"Webhook sent to {url}",
)
except Exception as e:
logger.error(
f"Failed attempt sending webhook to {url}: {e}",
)
raise e
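A small, self-contained illustration of the public-address filtering that `_is_public_ip` applies above (the example addresses are arbitrary):

```python
import ipaddress

def is_public(ip: str) -> bool:
    obj = ipaddress.ip_address(ip)
    return not (
        obj.is_private
        or obj.is_loopback
        or obj.is_link_local
        or obj.is_multicast
        or obj.is_unspecified
    )

assert is_public("93.184.216.34")   # routable public address: allowed
assert not is_public("127.0.0.1")   # loopback: blocked unless internal requests are allowed
assert not is_public("10.0.0.5")    # RFC 1918 private range: blocked
```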
def run_workflows(
trigger_type: WorkflowTrigger.WorkflowTriggerType,
document: Document | ConsumableDocument,
@@ -721,17 +767,573 @@ def run_workflows(
overrides: DocumentMetadataOverrides | None = None,
original_file: Path | None = None,
) -> tuple[DocumentMetadataOverrides, str] | None:
"""
Execute workflows matching a document for the given trigger. When `overrides` is provided
(consumption flow), actions mutate that object and the function returns `(overrides, messages)`.
Otherwise actions mutate the actual document and return nothing.
"""Run workflows which match a Document (or ConsumableDocument) for a specific trigger type or a single workflow if given.
Attachments for email/webhook actions use `original_file` when given, otherwise fall back to
`document.source_path` (Document) or `document.original_file` (ConsumableDocument).
Passing `workflow_to_run` skips the workflow query (currently only used by scheduled runs).
Assignment or removal actions are either applied directly to the document or an overrides object. If an overrides
object is provided, the function returns the object with the applied changes or None if no actions were applied and a string
of messages for each action. If no overrides object is provided, the changes are applied directly to the document and the
function returns None.
"""
def assignment_action():
if action.assign_tags.exists():
tag_ids_to_add: set[int] = set()
for tag in action.assign_tags.all():
tag_ids_to_add.add(tag.pk)
tag_ids_to_add.update(int(pk) for pk in tag.get_ancestors_pks())
if not use_overrides:
doc_tag_ids[:] = list(set(doc_tag_ids) | tag_ids_to_add)
else:
if overrides.tag_ids is None:
overrides.tag_ids = []
overrides.tag_ids = list(set(overrides.tag_ids) | tag_ids_to_add)
if action.assign_correspondent:
if not use_overrides:
document.correspondent = action.assign_correspondent
else:
overrides.correspondent_id = action.assign_correspondent.pk
if action.assign_document_type:
if not use_overrides:
document.document_type = action.assign_document_type
else:
overrides.document_type_id = action.assign_document_type.pk
if action.assign_storage_path:
if not use_overrides:
document.storage_path = action.assign_storage_path
else:
overrides.storage_path_id = action.assign_storage_path.pk
if action.assign_owner:
if not use_overrides:
document.owner = action.assign_owner
else:
overrides.owner_id = action.assign_owner.pk
if action.assign_title:
if not use_overrides:
try:
document.title = parse_w_workflow_placeholders(
action.assign_title,
document.correspondent.name if document.correspondent else "",
document.document_type.name if document.document_type else "",
document.owner.username if document.owner else "",
timezone.localtime(document.added),
document.original_filename or "",
document.filename or "",
document.created,
)
except Exception:
logger.exception(
f"Error occurred parsing title assignment '{action.assign_title}', falling back to original",
extra={"group": logging_group},
)
else:
overrides.title = action.assign_title
if any(
[
action.assign_view_users.exists(),
action.assign_view_groups.exists(),
action.assign_change_users.exists(),
action.assign_change_groups.exists(),
],
):
permissions = {
"view": {
"users": action.assign_view_users.values_list("id", flat=True),
"groups": action.assign_view_groups.values_list("id", flat=True),
},
"change": {
"users": action.assign_change_users.values_list("id", flat=True),
"groups": action.assign_change_groups.values_list("id", flat=True),
},
}
if not use_overrides:
set_permissions_for_object(
permissions=permissions,
object=document,
merge=True,
)
else:
overrides.view_users = list(
set(
(overrides.view_users or [])
+ list(permissions["view"]["users"]),
),
)
overrides.view_groups = list(
set(
(overrides.view_groups or [])
+ list(permissions["view"]["groups"]),
),
)
overrides.change_users = list(
set(
(overrides.change_users or [])
+ list(permissions["change"]["users"]),
),
)
overrides.change_groups = list(
set(
(overrides.change_groups or [])
+ list(permissions["change"]["groups"]),
),
)
if action.assign_custom_fields.exists():
if not use_overrides:
for field in action.assign_custom_fields.all():
value_field_name = CustomFieldInstance.get_value_field_name(
data_type=field.data_type,
)
args = {
value_field_name: action.assign_custom_fields_values.get(
str(field.pk),
None,
),
}
# for some reason update_or_create doesn't work here
instance = CustomFieldInstance.objects.filter(
field=field,
document=document,
).first()
if instance and args[value_field_name] is not None:
setattr(instance, value_field_name, args[value_field_name])
instance.save()
elif not instance:
CustomFieldInstance.objects.create(
**args,
field=field,
document=document,
)
else:
if overrides.custom_fields is None:
overrides.custom_fields = {}
overrides.custom_fields.update(
{
field.pk: action.assign_custom_fields_values.get(
str(field.pk),
None,
)
for field in action.assign_custom_fields.all()
},
)
def removal_action():
if action.remove_all_tags:
if not use_overrides:
doc_tag_ids.clear()
else:
overrides.tag_ids = None
else:
tag_ids_to_remove: set[int] = set()
for tag in action.remove_tags.all():
tag_ids_to_remove.add(tag.pk)
tag_ids_to_remove.update(int(pk) for pk in tag.get_descendants_pks())
if not use_overrides:
doc_tag_ids[:] = [t for t in doc_tag_ids if t not in tag_ids_to_remove]
elif overrides.tag_ids:
overrides.tag_ids = [
t for t in overrides.tag_ids if t not in tag_ids_to_remove
]
if not use_overrides and (
action.remove_all_correspondents
or (
document.correspondent
and action.remove_correspondents.filter(
pk=document.correspondent.pk,
).exists()
)
):
document.correspondent = None
elif use_overrides and (
action.remove_all_correspondents
or (
overrides.correspondent_id
and action.remove_correspondents.filter(
pk=overrides.correspondent_id,
).exists()
)
):
overrides.correspondent_id = None
if not use_overrides and (
action.remove_all_document_types
or (
document.document_type
and action.remove_document_types.filter(
pk=document.document_type.pk,
).exists()
)
):
document.document_type = None
elif use_overrides and (
action.remove_all_document_types
or (
overrides.document_type_id
and action.remove_document_types.filter(
pk=overrides.document_type_id,
).exists()
)
):
overrides.document_type_id = None
if not use_overrides and (
action.remove_all_storage_paths
or (
document.storage_path
and action.remove_storage_paths.filter(
pk=document.storage_path.pk,
).exists()
)
):
document.storage_path = None
elif use_overrides and (
action.remove_all_storage_paths
or (
overrides.storage_path_id
and action.remove_storage_paths.filter(
pk=overrides.storage_path_id,
).exists()
)
):
overrides.storage_path_id = None
if not use_overrides and (
action.remove_all_owners
or (
document.owner
and action.remove_owners.filter(pk=document.owner.pk).exists()
)
):
document.owner = None
elif use_overrides and (
action.remove_all_owners
or (
overrides.owner_id
and action.remove_owners.filter(pk=overrides.owner_id).exists()
)
):
overrides.owner_id = None
if action.remove_all_permissions:
if not use_overrides:
permissions = {
"view": {"users": [], "groups": []},
"change": {"users": [], "groups": []},
}
set_permissions_for_object(
permissions=permissions,
object=document,
merge=False,
)
else:
overrides.view_users = None
overrides.view_groups = None
overrides.change_users = None
overrides.change_groups = None
elif any(
[
action.remove_view_users.exists(),
action.remove_view_groups.exists(),
action.remove_change_users.exists(),
action.remove_change_groups.exists(),
],
):
if not use_overrides:
for user in action.remove_view_users.all():
remove_perm("view_document", user, document)
for user in action.remove_change_users.all():
remove_perm("change_document", user, document)
for group in action.remove_view_groups.all():
remove_perm("view_document", group, document)
for group in action.remove_change_groups.all():
remove_perm("change_document", group, document)
else:
if overrides.view_users:
for user in action.remove_view_users.filter(
pk__in=overrides.view_users,
):
overrides.view_users.remove(user.pk)
if overrides.change_users:
for user in action.remove_change_users.filter(
pk__in=overrides.change_users,
):
overrides.change_users.remove(user.pk)
if overrides.view_groups:
for group in action.remove_view_groups.filter(
pk__in=overrides.view_groups,
):
overrides.view_groups.remove(group.pk)
if overrides.change_groups:
for group in action.remove_change_groups.filter(
pk__in=overrides.change_groups,
):
overrides.change_groups.remove(group.pk)
if action.remove_all_custom_fields:
if not use_overrides:
CustomFieldInstance.objects.filter(document=document).hard_delete()
else:
overrides.custom_fields = None
elif action.remove_custom_fields.exists():
if not use_overrides:
CustomFieldInstance.objects.filter(
field__in=action.remove_custom_fields.all(),
document=document,
).hard_delete()
elif overrides.custom_fields:
for field in action.remove_custom_fields.filter(
pk__in=overrides.custom_fields.keys(),
):
overrides.custom_fields.pop(field.pk, None)
def email_action():
if not settings.EMAIL_ENABLED:
logger.error(
"Email backend has not been configured, cannot send email notifications",
extra={"group": logging_group},
)
return
if not use_overrides:
title = document.title
doc_url = (
f"{settings.PAPERLESS_URL}{settings.BASE_URL}documents/{document.pk}/"
)
correspondent = (
document.correspondent.name if document.correspondent else ""
)
document_type = (
document.document_type.name if document.document_type else ""
)
owner_username = document.owner.username if document.owner else ""
filename = document.original_filename or ""
current_filename = document.filename or ""
added = timezone.localtime(document.added)
created = document.created
else:
title = overrides.title if overrides.title else str(document.original_file)
doc_url = ""
correspondent = (
Correspondent.objects.filter(pk=overrides.correspondent_id).first()
if overrides.correspondent_id
else ""
)
document_type = (
DocumentType.objects.filter(pk=overrides.document_type_id).first().name
if overrides.document_type_id
else ""
)
owner_username = (
User.objects.filter(pk=overrides.owner_id).first().username
if overrides.owner_id
else ""
)
filename = document.original_file if document.original_file else ""
current_filename = filename
added = timezone.localtime(timezone.now())
created = overrides.created
subject = (
parse_w_workflow_placeholders(
action.email.subject,
correspondent,
document_type,
owner_username,
added,
filename,
current_filename,
created,
title,
doc_url,
)
if action.email.subject
else ""
)
body = (
parse_w_workflow_placeholders(
action.email.body,
correspondent,
document_type,
owner_username,
added,
filename,
current_filename,
created,
title,
doc_url,
)
if action.email.body
else ""
)
try:
attachments: list[EmailAttachment] = []
if action.email.include_document:
attachment: EmailAttachment | None = None
if trigger_type in [
WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
] and isinstance(document, Document):
friendly_name = (
Path(current_filename).name
if current_filename
else document.source_path.name
)
attachment = EmailAttachment(
path=document.source_path,
mime_type=document.mime_type,
friendly_name=friendly_name,
)
elif original_file:
friendly_name = (
Path(current_filename).name
if current_filename
else original_file.name
)
attachment = EmailAttachment(
path=original_file,
mime_type=document.mime_type,
friendly_name=friendly_name,
)
if attachment:
attachments = [attachment]
n_messages = send_email(
subject=subject,
body=body,
to=action.email.to.split(","),
attachments=attachments,
)
logger.debug(
f"Sent {n_messages} notification email(s) to {action.email.to}",
extra={"group": logging_group},
)
except Exception as e:
logger.exception(
f"Error occurred sending notification email: {e}",
extra={"group": logging_group},
)
def webhook_action():
if not use_overrides:
title = document.title
doc_url = (
f"{settings.PAPERLESS_URL}{settings.BASE_URL}documents/{document.pk}/"
)
correspondent = (
document.correspondent.name if document.correspondent else ""
)
document_type = (
document.document_type.name if document.document_type else ""
)
owner_username = document.owner.username if document.owner else ""
filename = document.original_filename or ""
current_filename = document.filename or ""
added = timezone.localtime(document.added)
created = document.created
else:
title = overrides.title if overrides.title else str(document.original_file)
doc_url = ""
correspondent = (
Correspondent.objects.filter(pk=overrides.correspondent_id).first()
if overrides.correspondent_id
else ""
)
document_type = (
DocumentType.objects.filter(pk=overrides.document_type_id).first().name
if overrides.document_type_id
else ""
)
owner_username = (
User.objects.filter(pk=overrides.owner_id).first().username
if overrides.owner_id
else ""
)
filename = document.original_file if document.original_file else ""
current_filename = filename
added = timezone.localtime(timezone.now())
created = overrides.created
try:
data = {}
if action.webhook.use_params:
if action.webhook.params:
try:
for key, value in action.webhook.params.items():
data[key] = parse_w_workflow_placeholders(
value,
correspondent,
document_type,
owner_username,
added,
filename,
current_filename,
created,
title,
doc_url,
)
except Exception as e:
logger.error(
f"Error occurred parsing webhook params: {e}",
extra={"group": logging_group},
)
elif action.webhook.body:
data = parse_w_workflow_placeholders(
action.webhook.body,
correspondent,
document_type,
owner_username,
added,
filename,
current_filename,
created,
title,
doc_url,
)
headers = {}
if action.webhook.headers:
try:
headers = {
str(k): str(v) for k, v in action.webhook.headers.items()
}
except Exception as e:
logger.error(
f"Error occurred parsing webhook headers: {e}",
extra={"group": logging_group},
)
files = None
if action.webhook.include_document:
with original_file.open("rb") as f:
files = {
"file": (
filename,
f.read(),
document.mime_type,
),
}
send_webhook.delay(
url=action.webhook.url,
data=data,
headers=headers,
files=files,
as_json=action.webhook.as_json,
)
logger.debug(
f"Webhook to {action.webhook.url} queued",
extra={"group": logging_group},
)
except Exception as e:
logger.exception(
f"Error occurred sending webhook: {e}",
extra={"group": logging_group},
)
use_overrides = overrides is not None
if original_file is None:
original_file = (
@@ -739,7 +1341,30 @@ def run_workflows(
)
messages = []
workflows = get_workflows_for_trigger(trigger_type, workflow_to_run)
workflows = (
(
Workflow.objects.filter(enabled=True, triggers__type=trigger_type)
.prefetch_related(
"actions",
"actions__assign_view_users",
"actions__assign_view_groups",
"actions__assign_change_users",
"actions__assign_change_groups",
"actions__assign_custom_fields",
"actions__remove_tags",
"actions__remove_correspondents",
"actions__remove_document_types",
"actions__remove_storage_paths",
"actions__remove_custom_fields",
"actions__remove_owners",
"triggers",
)
.order_by("order")
.distinct()
)
if workflow_to_run is None
else [workflow_to_run]
)
for workflow in workflows:
if not use_overrides:
@@ -759,39 +1384,13 @@ def run_workflows(
messages.append(message)
if action.type == WorkflowAction.WorkflowActionType.ASSIGNMENT:
if use_overrides and overrides:
apply_assignment_to_overrides(action, overrides)
else:
apply_assignment_to_document(
action,
document,
doc_tag_ids,
logging_group,
)
assignment_action()
elif action.type == WorkflowAction.WorkflowActionType.REMOVAL:
if use_overrides and overrides:
apply_removal_to_overrides(action, overrides)
else:
apply_removal_to_document(action, document, doc_tag_ids)
removal_action()
elif action.type == WorkflowAction.WorkflowActionType.EMAIL:
context = build_workflow_action_context(document, overrides)
execute_email_action(
action,
document,
context,
logging_group,
original_file,
trigger_type,
)
email_action()
elif action.type == WorkflowAction.WorkflowActionType.WEBHOOK:
context = build_workflow_action_context(document, overrides)
execute_webhook_action(
action,
document,
context,
logging_group,
original_file,
)
webhook_action()
if not use_overrides:
# limit title to 128 characters
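For orientation, a minimal sketch of how the nested action helpers above (assignment_action, removal_action, email_action, webhook_action) are driven: the tests touched in this compare call run_workflows() with a trigger type and a document, and the dispatch on action.type routes each enabled workflow action to the matching helper. This assumes a Django shell inside a configured paperless-ngx checkout; the document values are placeholders, not data from this change.

# Sketch only; mirrors the run_workflows() calls seen in the tests below.
from documents.models import Document
from documents.models import WorkflowTrigger
from documents.signals.handlers import run_workflows

doc = Document.objects.create(
    title="sample",
    mime_type="application/pdf",
    checksum="abc123",  # placeholder
    original_filename="sample.pdf",
)

# Every enabled workflow whose trigger type matches is evaluated; because an
# existing Document (and no overrides) is passed, the assignment/removal/email/
# webhook branches above act on the document directly.
run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)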

View File

@@ -41,6 +41,7 @@ from documents.models import DocumentType
from documents.models import PaperlessTask
from documents.models import StoragePath
from documents.models import Tag
from documents.models import Workflow
from documents.models import WorkflowRun
from documents.models import WorkflowTrigger
from documents.parsers import DocumentParser
@@ -53,7 +54,6 @@ from documents.sanity_checker import SanityCheckFailedException
from documents.signals import document_updated
from documents.signals.handlers import cleanup_document_deletion
from documents.signals.handlers import run_workflows
from documents.workflows.utils import get_workflows_for_trigger
if settings.AUDIT_LOG_ENABLED:
from auditlog.models import LogEntry
@@ -400,8 +400,13 @@ def check_scheduled_workflows():
Once a document satisfies this condition, and recurring/non-recurring constraints are met, the workflow is run.
"""
scheduled_workflows = get_workflows_for_trigger(
WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
scheduled_workflows: list[Workflow] = (
Workflow.objects.filter(
triggers__type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
enabled=True,
)
.distinct()
.prefetch_related("triggers")
)
if scheduled_workflows.count() > 0:
logger.debug(f"Checking {len(scheduled_workflows)} scheduled workflows")

View File

@@ -1289,7 +1289,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
content_type__app_label="admin",
),
)
set_permissions([4, 5], set_permissions={}, owner=user2, merge=False)
set_permissions([4, 5], set_permissions=[], owner=user2, merge=False)
with index.open_index_writer() as writer:
index.update_document(writer, d1)

View File

@@ -16,7 +16,6 @@ from django.utils import timezone
from documents.file_handling import create_source_path_directory
from documents.file_handling import delete_empty_directories
from documents.file_handling import generate_filename
from documents.file_handling import generate_unique_filename
from documents.models import Correspondent
from documents.models import CustomField
from documents.models import CustomFieldInstance
@@ -1633,73 +1632,6 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
)
class TestCustomFieldFilenameUpdates(
DirectoriesMixin,
FileSystemAssertsMixin,
TestCase,
):
def setUp(self):
self.cf = CustomField.objects.create(
name="flavor",
data_type=CustomField.FieldDataType.STRING,
)
self.doc = Document.objects.create(
title="document",
mime_type="application/pdf",
checksum="abc123",
)
self.cfi = CustomFieldInstance.objects.create(
field=self.cf,
document=self.doc,
value_text="initial",
)
return super().setUp()
@override_settings(FILENAME_FORMAT=None)
def test_custom_field_not_in_template_skips_filename_work(self):
storage_path = StoragePath.objects.create(path="{{created}}/{{ title }}")
self.doc.storage_path = storage_path
self.doc.save()
initial_filename = generate_filename(self.doc)
Document.objects.filter(pk=self.doc.pk).update(filename=str(initial_filename))
self.doc.refresh_from_db()
Path(self.doc.source_path).parent.mkdir(parents=True, exist_ok=True)
Path(self.doc.source_path).touch()
with mock.patch("documents.signals.handlers.generate_unique_filename") as m:
m.side_effect = generate_unique_filename
self.cfi.value_text = "updated"
self.cfi.save()
self.doc.refresh_from_db()
self.assertEqual(Path(self.doc.filename), initial_filename)
self.assertEqual(m.call_count, 0)
@override_settings(FILENAME_FORMAT=None)
def test_custom_field_in_template_triggers_filename_update(self):
storage_path = StoragePath.objects.create(
path="{{ custom_fields|get_cf_value('flavor') }}/{{ title }}",
)
self.doc.storage_path = storage_path
self.doc.save()
initial_filename = generate_filename(self.doc)
Document.objects.filter(pk=self.doc.pk).update(filename=str(initial_filename))
self.doc.refresh_from_db()
Path(self.doc.source_path).parent.mkdir(parents=True, exist_ok=True)
Path(self.doc.source_path).touch()
with mock.patch("documents.signals.handlers.generate_unique_filename") as m:
m.side_effect = generate_unique_filename
self.cfi.value_text = "updated"
self.cfi.save()
self.doc.refresh_from_db()
expected_filename = Path("updated/document.pdf")
self.assertEqual(Path(self.doc.filename), expected_filename)
self.assertTrue(Path(self.doc.source_path).is_file())
self.assertLessEqual(m.call_count, 1)
class TestPathDateLocalization:
"""
Groups all tests related to the `localize_date` function.

View File

@@ -206,22 +206,6 @@ class TestMatching(_TestMatchingBase):
def test_tach_invalid_regex(self):
self._test_matching("[", "MATCH_REGEX", [], ["Don't match this"])
def test_match_regex_timeout_returns_false(self):
tag = Tag.objects.create(
name="slow",
match=r"(a+)+$",
matching_algorithm=Tag.MATCH_REGEX,
)
document = Document(content=("a" * 5000) + "X")
with self.assertLogs("paperless.regex", level="WARNING") as cm:
self.assertFalse(matching.matches(tag, document))
self.assertTrue(
any("timed out" in message for message in cm.output),
f"Expected timeout log, got {cm.output}",
)
def test_match_fuzzy(self):
self._test_matching(
"Springfield, Miss.",

View File

@@ -17,7 +17,6 @@ from django.utils import timezone
from guardian.shortcuts import assign_perm
from guardian.shortcuts import get_groups_with_perms
from guardian.shortcuts import get_users_with_perms
from httpx import ConnectError
from httpx import HTTPError
from httpx import HTTPStatusError
from pytest_httpx import HTTPXMock
@@ -27,7 +26,7 @@ from rest_framework.test import APITestCase
from documents.file_handling import create_source_path_directory
from documents.file_handling import generate_unique_filename
from documents.signals.handlers import run_workflows
from documents.workflows.webhooks import send_webhook
from documents.signals.handlers import send_webhook
if TYPE_CHECKING:
from django.db.models import QuerySet
@@ -2859,7 +2858,7 @@ class TestWorkflows(
mock_email_send.return_value = 1
with self.assertNoLogs("paperless.workflows", level="ERROR"):
with self.assertNoLogs("paperless.handlers", level="ERROR"):
run_workflows(
WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
consumable_document,
@@ -3097,7 +3096,7 @@ class TestWorkflows(
original_filename="sample.pdf",
)
with self.assertLogs("paperless.workflows.actions", level="ERROR") as cm:
with self.assertLogs("paperless.handlers", level="ERROR") as cm:
run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)
expected_str = "Email backend has not been configured"
@@ -3145,7 +3144,7 @@ class TestWorkflows(
original_filename="sample.pdf",
)
with self.assertLogs("paperless.workflows", level="ERROR") as cm:
with self.assertLogs("paperless.handlers", level="ERROR") as cm:
run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)
expected_str = "Error occurred sending email"
@@ -3216,7 +3215,7 @@ class TestWorkflows(
PAPERLESS_FORCE_SCRIPT_NAME="/paperless",
BASE_URL="/paperless/",
)
@mock.patch("documents.workflows.webhooks.send_webhook.delay")
@mock.patch("documents.signals.handlers.send_webhook.delay")
def test_workflow_webhook_action_body(self, mock_post):
"""
GIVEN:
@@ -3275,7 +3274,7 @@ class TestWorkflows(
@override_settings(
PAPERLESS_URL="http://localhost:8000",
)
@mock.patch("documents.workflows.webhooks.send_webhook.delay")
@mock.patch("documents.signals.handlers.send_webhook.delay")
def test_workflow_webhook_action_w_files(self, mock_post):
"""
GIVEN:
@@ -3378,7 +3377,7 @@ class TestWorkflows(
)
# fails because no file
with self.assertLogs("paperless.workflows", level="ERROR") as cm:
with self.assertLogs("paperless.handlers", level="ERROR") as cm:
run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)
expected_str = "Error occurred sending webhook"
@@ -3421,7 +3420,7 @@ class TestWorkflows(
original_filename="sample.pdf",
)
with self.assertLogs("paperless.workflows", level="ERROR") as cm:
with self.assertLogs("paperless.handlers", level="ERROR") as cm:
run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)
expected_str = "Error occurred parsing webhook params"
@@ -3429,7 +3428,7 @@ class TestWorkflows(
expected_str = "Error occurred parsing webhook headers"
self.assertIn(expected_str, cm.output[1])
@mock.patch("httpx.Client.post")
@mock.patch("httpx.post")
def test_workflow_webhook_send_webhook_task(self, mock_post):
mock_post.return_value = mock.Mock(
status_code=200,
@@ -3437,7 +3436,7 @@ class TestWorkflows(
raise_for_status=mock.Mock(),
)
with self.assertLogs("paperless.workflows") as cm:
with self.assertLogs("paperless.handlers") as cm:
send_webhook(
url="http://paperless-ngx.com",
data="Test message",
@@ -3450,6 +3449,8 @@ class TestWorkflows(
content="Test message",
headers={},
files=None,
follow_redirects=False,
timeout=5,
)
expected_str = "Webhook sent to http://paperless-ngx.com"
@@ -3467,9 +3468,11 @@ class TestWorkflows(
data={"message": "Test message"},
headers={},
files=None,
follow_redirects=False,
timeout=5,
)
@mock.patch("httpx.Client.post")
@mock.patch("httpx.post")
def test_workflow_webhook_send_webhook_retry(self, mock_http):
mock_http.return_value.raise_for_status = mock.Mock(
side_effect=HTTPStatusError(
@@ -3479,7 +3482,7 @@ class TestWorkflows(
),
)
with self.assertLogs("paperless.workflows") as cm:
with self.assertLogs("paperless.handlers") as cm:
with self.assertRaises(HTTPStatusError):
send_webhook(
url="http://paperless-ngx.com",
@@ -3495,7 +3498,7 @@ class TestWorkflows(
)
self.assertIn(expected_str, cm.output[0])
@mock.patch("documents.workflows.webhooks.send_webhook.delay")
@mock.patch("documents.signals.handlers.send_webhook.delay")
def test_workflow_webhook_action_consumption(self, mock_post):
"""
GIVEN:
@@ -3665,7 +3668,7 @@ class TestWebhookSecurity:
- ValueError is raised
"""
resolve_to("127.0.0.1")
with pytest.raises(ConnectError):
with pytest.raises(ValueError):
send_webhook(
"http://paperless-ngx.com",
data="",
@@ -3695,8 +3698,7 @@ class TestWebhookSecurity:
)
req = httpx_mock.get_request()
assert req.url.host == "52.207.186.75"
assert req.headers["host"] == "paperless-ngx.com"
assert req.url.host == "paperless-ngx.com"
def test_follow_redirects_disabled(self, httpx_mock: HTTPXMock, resolve_to):
"""

View File

@@ -1,261 +0,0 @@
import logging
from pathlib import Path
from django.conf import settings
from django.contrib.auth.models import User
from django.utils import timezone
from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
from documents.mail import EmailAttachment
from documents.mail import send_email
from documents.models import Correspondent
from documents.models import Document
from documents.models import DocumentType
from documents.models import WorkflowAction
from documents.models import WorkflowTrigger
from documents.templating.workflows import parse_w_workflow_placeholders
from documents.workflows.webhooks import send_webhook
logger = logging.getLogger("paperless.workflows.actions")
def build_workflow_action_context(
document: Document | ConsumableDocument,
overrides: DocumentMetadataOverrides | None,
) -> dict:
"""
Build context dictionary for workflow action placeholder parsing.
"""
use_overrides = overrides is not None
if not use_overrides:
return {
"title": document.title,
"doc_url": f"{settings.PAPERLESS_URL}{settings.BASE_URL}documents/{document.pk}/",
"correspondent": document.correspondent.name
if document.correspondent
else "",
"document_type": document.document_type.name
if document.document_type
else "",
"owner_username": document.owner.username if document.owner else "",
"filename": document.original_filename or "",
"current_filename": document.filename or "",
"added": timezone.localtime(document.added),
"created": document.created,
}
correspondent_obj = (
Correspondent.objects.filter(pk=overrides.correspondent_id).first()
if overrides and overrides.correspondent_id
else None
)
document_type_obj = (
DocumentType.objects.filter(pk=overrides.document_type_id).first()
if overrides and overrides.document_type_id
else None
)
owner_obj = (
User.objects.filter(pk=overrides.owner_id).first()
if overrides and overrides.owner_id
else None
)
filename = document.original_file if document.original_file else ""
return {
"title": overrides.title
if overrides and overrides.title
else str(document.original_file),
"doc_url": "",
"correspondent": correspondent_obj.name if correspondent_obj else "",
"document_type": document_type_obj.name if document_type_obj else "",
"owner_username": owner_obj.username if owner_obj else "",
"filename": filename,
"current_filename": filename,
"added": timezone.localtime(timezone.now()),
"created": overrides.created if overrides else None,
}
def execute_email_action(
action: WorkflowAction,
document: Document | ConsumableDocument,
context: dict,
logging_group,
original_file: Path,
trigger_type: WorkflowTrigger.WorkflowTriggerType,
) -> None:
"""
Execute an email action for a workflow.
"""
if not settings.EMAIL_ENABLED:
logger.error(
"Email backend has not been configured, cannot send email notifications",
extra={"group": logging_group},
)
return
subject = (
parse_w_workflow_placeholders(
action.email.subject,
context["correspondent"],
context["document_type"],
context["owner_username"],
context["added"],
context["filename"],
context["current_filename"],
context["created"],
context["title"],
context["doc_url"],
)
if action.email.subject
else ""
)
body = (
parse_w_workflow_placeholders(
action.email.body,
context["correspondent"],
context["document_type"],
context["owner_username"],
context["added"],
context["filename"],
context["current_filename"],
context["created"],
context["title"],
context["doc_url"],
)
if action.email.body
else ""
)
try:
attachments: list[EmailAttachment] = []
if action.email.include_document:
attachment: EmailAttachment | None = None
if trigger_type in [
WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
] and isinstance(document, Document):
friendly_name = (
Path(context["current_filename"]).name
if context["current_filename"]
else document.source_path.name
)
attachment = EmailAttachment(
path=document.source_path,
mime_type=document.mime_type,
friendly_name=friendly_name,
)
elif original_file:
friendly_name = (
Path(context["current_filename"]).name
if context["current_filename"]
else original_file.name
)
attachment = EmailAttachment(
path=original_file,
mime_type=document.mime_type,
friendly_name=friendly_name,
)
if attachment:
attachments = [attachment]
n_messages = send_email(
subject=subject,
body=body,
to=action.email.to.split(","),
attachments=attachments,
)
logger.debug(
f"Sent {n_messages} notification email(s) to {action.email.to}",
extra={"group": logging_group},
)
except Exception as e:
logger.exception(
f"Error occurred sending notification email: {e}",
extra={"group": logging_group},
)
def execute_webhook_action(
action: WorkflowAction,
document: Document | ConsumableDocument,
context: dict,
logging_group,
original_file: Path,
):
try:
data = {}
if action.webhook.use_params:
if action.webhook.params:
try:
for key, value in action.webhook.params.items():
data[key] = parse_w_workflow_placeholders(
value,
context["correspondent"],
context["document_type"],
context["owner_username"],
context["added"],
context["filename"],
context["current_filename"],
context["created"],
context["title"],
context["doc_url"],
)
except Exception as e:
logger.error(
f"Error occurred parsing webhook params: {e}",
extra={"group": logging_group},
)
elif action.webhook.body:
data = parse_w_workflow_placeholders(
action.webhook.body,
context["correspondent"],
context["document_type"],
context["owner_username"],
context["added"],
context["filename"],
context["current_filename"],
context["created"],
context["title"],
context["doc_url"],
)
headers = {}
if action.webhook.headers:
try:
headers = {str(k): str(v) for k, v in action.webhook.headers.items()}
except Exception as e:
logger.error(
f"Error occurred parsing webhook headers: {e}",
extra={"group": logging_group},
)
files = None
if action.webhook.include_document:
with original_file.open("rb") as f:
files = {
"file": (
str(context["filename"])
if context["filename"]
else original_file.name,
f.read(),
document.mime_type,
),
}
send_webhook.delay(
url=action.webhook.url,
data=data,
headers=headers,
files=files,
as_json=action.webhook.as_json,
)
logger.debug(
f"Webhook to {action.webhook.url} queued",
extra={"group": logging_group},
)
except Exception as e:
logger.exception(
f"Error occurred sending webhook: {e}",
extra={"group": logging_group},
)

View File

@@ -1,357 +0,0 @@
import logging
from django.utils import timezone
from guardian.shortcuts import remove_perm
from documents.data_models import DocumentMetadataOverrides
from documents.models import CustomFieldInstance
from documents.models import Document
from documents.models import WorkflowAction
from documents.permissions import set_permissions_for_object
from documents.templating.workflows import parse_w_workflow_placeholders
logger = logging.getLogger("paperless.workflows.mutations")
def apply_assignment_to_document(
action: WorkflowAction,
document: Document,
doc_tag_ids: list[int],
logging_group,
):
"""
Apply assignment actions to a Document instance.
action: WorkflowAction, annotated with 'has_assign_*' boolean fields
"""
if action.has_assign_tags:
tag_ids_to_add: set[int] = set()
for tag in action.assign_tags.all():
tag_ids_to_add.add(tag.pk)
tag_ids_to_add.update(int(pk) for pk in tag.get_ancestors_pks())
doc_tag_ids[:] = list(set(doc_tag_ids) | tag_ids_to_add)
if action.assign_correspondent:
document.correspondent = action.assign_correspondent
if action.assign_document_type:
document.document_type = action.assign_document_type
if action.assign_storage_path:
document.storage_path = action.assign_storage_path
if action.assign_owner:
document.owner = action.assign_owner
if action.assign_title:
try:
document.title = parse_w_workflow_placeholders(
action.assign_title,
document.correspondent.name if document.correspondent else "",
document.document_type.name if document.document_type else "",
document.owner.username if document.owner else "",
timezone.localtime(document.added),
document.original_filename or "",
document.filename or "",
document.created,
)
except Exception: # pragma: no cover
logger.exception(
f"Error occurred parsing title assignment '{action.assign_title}', falling back to original",
extra={"group": logging_group},
)
if any(
[
action.has_assign_view_users,
action.has_assign_view_groups,
action.has_assign_change_users,
action.has_assign_change_groups,
],
):
permissions = {
"view": {
"users": action.assign_view_users.values_list("id", flat=True),
"groups": action.assign_view_groups.values_list("id", flat=True),
},
"change": {
"users": action.assign_change_users.values_list("id", flat=True),
"groups": action.assign_change_groups.values_list("id", flat=True),
},
}
set_permissions_for_object(
permissions=permissions,
object=document,
merge=True,
)
if action.has_assign_custom_fields:
for field in action.assign_custom_fields.all():
value_field_name = CustomFieldInstance.get_value_field_name(
data_type=field.data_type,
)
args = {
value_field_name: action.assign_custom_fields_values.get(
str(field.pk),
None,
),
}
# for some reason update_or_create doesn't work here
instance = CustomFieldInstance.objects.filter(
field=field,
document=document,
).first()
if instance and args[value_field_name] is not None:
setattr(instance, value_field_name, args[value_field_name])
instance.save()
elif not instance:
CustomFieldInstance.objects.create(
**args,
field=field,
document=document,
)
def apply_assignment_to_overrides(
action: WorkflowAction,
overrides: DocumentMetadataOverrides,
):
"""
Apply assignment actions to DocumentMetadataOverrides.
action: WorkflowAction, annotated with 'has_assign_*' boolean fields
"""
if action.has_assign_tags:
if overrides.tag_ids is None:
overrides.tag_ids = []
tag_ids_to_add: set[int] = set()
for tag in action.assign_tags.all():
tag_ids_to_add.add(tag.pk)
tag_ids_to_add.update(int(pk) for pk in tag.get_ancestors_pks())
overrides.tag_ids = list(set(overrides.tag_ids) | tag_ids_to_add)
if action.assign_correspondent:
overrides.correspondent_id = action.assign_correspondent.pk
if action.assign_document_type:
overrides.document_type_id = action.assign_document_type.pk
if action.assign_storage_path:
overrides.storage_path_id = action.assign_storage_path.pk
if action.assign_owner:
overrides.owner_id = action.assign_owner.pk
if action.assign_title:
overrides.title = action.assign_title
if any(
[
action.has_assign_view_users,
action.has_assign_view_groups,
action.has_assign_change_users,
action.has_assign_change_groups,
],
):
overrides.view_users = list(
set(
(overrides.view_users or [])
+ list(action.assign_view_users.values_list("id", flat=True)),
),
)
overrides.view_groups = list(
set(
(overrides.view_groups or [])
+ list(action.assign_view_groups.values_list("id", flat=True)),
),
)
overrides.change_users = list(
set(
(overrides.change_users or [])
+ list(action.assign_change_users.values_list("id", flat=True)),
),
)
overrides.change_groups = list(
set(
(overrides.change_groups or [])
+ list(action.assign_change_groups.values_list("id", flat=True)),
),
)
if action.has_assign_custom_fields:
if overrides.custom_fields is None:
overrides.custom_fields = {}
overrides.custom_fields.update(
{
field.pk: action.assign_custom_fields_values.get(
str(field.pk),
None,
)
for field in action.assign_custom_fields.all()
},
)
def apply_removal_to_document(
action: WorkflowAction,
document: Document,
doc_tag_ids: list[int],
):
"""
Apply removal actions to a Document instance.
action: WorkflowAction, annotated with 'has_remove_*' boolean fields
"""
if action.remove_all_tags:
doc_tag_ids.clear()
else:
tag_ids_to_remove: set[int] = set()
for tag in action.remove_tags.all():
tag_ids_to_remove.add(tag.pk)
tag_ids_to_remove.update(int(pk) for pk in tag.get_descendants_pks())
doc_tag_ids[:] = [t for t in doc_tag_ids if t not in tag_ids_to_remove]
if action.remove_all_correspondents or (
document.correspondent
and action.remove_correspondents.filter(pk=document.correspondent.pk).exists()
):
document.correspondent = None
if action.remove_all_document_types or (
document.document_type
and action.remove_document_types.filter(pk=document.document_type.pk).exists()
):
document.document_type = None
if action.remove_all_storage_paths or (
document.storage_path
and action.remove_storage_paths.filter(pk=document.storage_path.pk).exists()
):
document.storage_path = None
if action.remove_all_owners or (
document.owner and action.remove_owners.filter(pk=document.owner.pk).exists()
):
document.owner = None
if action.remove_all_permissions:
permissions = {
"view": {"users": [], "groups": []},
"change": {"users": [], "groups": []},
}
set_permissions_for_object(
permissions=permissions,
object=document,
merge=False,
)
if any(
[
action.has_remove_view_users,
action.has_remove_view_groups,
action.has_remove_change_users,
action.has_remove_change_groups,
],
):
for user in action.remove_view_users.all():
remove_perm("view_document", user, document)
for user in action.remove_change_users.all():
remove_perm("change_document", user, document)
for group in action.remove_view_groups.all():
remove_perm("view_document", group, document)
for group in action.remove_change_groups.all():
remove_perm("change_document", group, document)
if action.remove_all_custom_fields:
CustomFieldInstance.objects.filter(document=document).hard_delete()
elif action.has_remove_custom_fields:
CustomFieldInstance.objects.filter(
field__in=action.remove_custom_fields.all(),
document=document,
).hard_delete()
def apply_removal_to_overrides(
action: WorkflowAction,
overrides: DocumentMetadataOverrides,
):
"""
Apply removal actions to DocumentMetadataOverrides.
action: WorkflowAction, annotated with 'has_remove_*' boolean fields
"""
if action.remove_all_tags:
overrides.tag_ids = None
elif overrides.tag_ids:
tag_ids_to_remove: set[int] = set()
for tag in action.remove_tags.all():
tag_ids_to_remove.add(tag.pk)
tag_ids_to_remove.update(int(pk) for pk in tag.get_descendants_pks())
overrides.tag_ids = [t for t in overrides.tag_ids if t not in tag_ids_to_remove]
if action.remove_all_correspondents or (
overrides.correspondent_id
and action.remove_correspondents.filter(pk=overrides.correspondent_id).exists()
):
overrides.correspondent_id = None
if action.remove_all_document_types or (
overrides.document_type_id
and action.remove_document_types.filter(pk=overrides.document_type_id).exists()
):
overrides.document_type_id = None
if action.remove_all_storage_paths or (
overrides.storage_path_id
and action.remove_storage_paths.filter(pk=overrides.storage_path_id).exists()
):
overrides.storage_path_id = None
if action.remove_all_owners or (
overrides.owner_id
and action.remove_owners.filter(pk=overrides.owner_id).exists()
):
overrides.owner_id = None
if action.remove_all_permissions:
overrides.view_users = None
overrides.view_groups = None
overrides.change_users = None
overrides.change_groups = None
elif any(
[
action.has_remove_view_users,
action.has_remove_view_groups,
action.has_remove_change_users,
action.has_remove_change_groups,
],
):
if overrides.view_users:
for user in action.remove_view_users.filter(pk__in=overrides.view_users):
overrides.view_users.remove(user.pk)
if overrides.change_users:
for user in action.remove_change_users.filter(
pk__in=overrides.change_users,
):
overrides.change_users.remove(user.pk)
if overrides.view_groups:
for group in action.remove_view_groups.filter(pk__in=overrides.view_groups):
overrides.view_groups.remove(group.pk)
if overrides.change_groups:
for group in action.remove_change_groups.filter(
pk__in=overrides.change_groups,
):
overrides.change_groups.remove(group.pk)
if action.remove_all_custom_fields:
overrides.custom_fields = None
elif action.has_remove_custom_fields and overrides.custom_fields:
for field in action.remove_custom_fields.filter(
pk__in=overrides.custom_fields.keys(),
):
overrides.custom_fields.pop(field.pk, None)

View File

@@ -1,116 +0,0 @@
import logging
from django.db.models import Exists
from django.db.models import OuterRef
from django.db.models import Prefetch
from documents.models import Workflow
from documents.models import WorkflowAction
from documents.models import WorkflowTrigger
logger = logging.getLogger("paperless.workflows")
def get_workflows_for_trigger(
trigger_type: WorkflowTrigger.WorkflowTriggerType,
workflow_to_run: Workflow | None = None,
):
"""
Return workflows relevant to a trigger. If a specific workflow is given,
wrap it in a list; otherwise fetch enabled workflows for the trigger with
the prefetches used by the runner.
"""
if workflow_to_run is not None:
return [workflow_to_run]
annotated_actions = (
WorkflowAction.objects.select_related(
"assign_correspondent",
"assign_document_type",
"assign_storage_path",
"assign_owner",
"email",
"webhook",
)
.prefetch_related(
"assign_tags",
"assign_view_users",
"assign_view_groups",
"assign_change_users",
"assign_change_groups",
"assign_custom_fields",
"remove_tags",
"remove_correspondents",
"remove_document_types",
"remove_storage_paths",
"remove_custom_fields",
"remove_owners",
)
.annotate(
has_assign_tags=Exists(
WorkflowAction.assign_tags.through.objects.filter(
workflowaction_id=OuterRef("pk"),
),
),
has_assign_view_users=Exists(
WorkflowAction.assign_view_users.through.objects.filter(
workflowaction_id=OuterRef("pk"),
),
),
has_assign_view_groups=Exists(
WorkflowAction.assign_view_groups.through.objects.filter(
workflowaction_id=OuterRef("pk"),
),
),
has_assign_change_users=Exists(
WorkflowAction.assign_change_users.through.objects.filter(
workflowaction_id=OuterRef("pk"),
),
),
has_assign_change_groups=Exists(
WorkflowAction.assign_change_groups.through.objects.filter(
workflowaction_id=OuterRef("pk"),
),
),
has_assign_custom_fields=Exists(
WorkflowAction.assign_custom_fields.through.objects.filter(
workflowaction_id=OuterRef("pk"),
),
),
has_remove_view_users=Exists(
WorkflowAction.remove_view_users.through.objects.filter(
workflowaction_id=OuterRef("pk"),
),
),
has_remove_view_groups=Exists(
WorkflowAction.remove_view_groups.through.objects.filter(
workflowaction_id=OuterRef("pk"),
),
),
has_remove_change_users=Exists(
WorkflowAction.remove_change_users.through.objects.filter(
workflowaction_id=OuterRef("pk"),
),
),
has_remove_change_groups=Exists(
WorkflowAction.remove_change_groups.through.objects.filter(
workflowaction_id=OuterRef("pk"),
),
),
has_remove_custom_fields=Exists(
WorkflowAction.remove_custom_fields.through.objects.filter(
workflowaction_id=OuterRef("pk"),
),
),
)
)
return (
Workflow.objects.filter(enabled=True, triggers__type=trigger_type)
.prefetch_related(
Prefetch("actions", queryset=annotated_actions),
"triggers",
)
.order_by("order")
.distinct()
)
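For context, the Exists()/OuterRef() annotations above let the action-apply code branch on flags such as has_assign_tags without issuing an extra .exists() query per relation per action, while the related objects themselves come from the prefetch. A minimal sketch of consuming those flags, assuming a Django shell inside paperless-ngx:

# Sketch only; SCHEDULED is just one of the trigger types shown in this diff.
from documents.models import WorkflowTrigger
from documents.workflows.utils import get_workflows_for_trigger

for workflow in get_workflows_for_trigger(
    WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
):
    for action in workflow.actions.all():
        # has_assign_tags is the annotated boolean, so this check hits no DB;
        # the tag objects are already loaded by the prefetch above.
        if action.has_assign_tags:
            print(action.pk, [tag.pk for tag in action.assign_tags.all()])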

View File

@@ -1,171 +0,0 @@
import ipaddress
import logging
import socket
from urllib.parse import urlparse
import httpx
from celery import shared_task
from django.conf import settings
logger = logging.getLogger("paperless.workflows.webhooks")
class WebhookTransport(httpx.HTTPTransport):
"""
Transport that resolves/validates hostnames and rewrites to a vetted IP
while keeping Host/SNI as the original hostname.
"""
def __init__(
self,
hostname: str,
*args,
allow_internal: bool = False,
**kwargs,
) -> None:
super().__init__(*args, **kwargs)
self.hostname = hostname
self.allow_internal = allow_internal
def handle_request(self, request: httpx.Request) -> httpx.Response:
hostname = request.url.host
if not hostname:
raise httpx.ConnectError("No hostname in request URL")
try:
addr_info = socket.getaddrinfo(hostname, None)
except socket.gaierror as e:
raise httpx.ConnectError(f"Could not resolve hostname: {hostname}") from e
ips = [info[4][0] for info in addr_info if info and info[4]]
if not ips:
raise httpx.ConnectError(f"Could not resolve hostname: {hostname}")
if not self.allow_internal:
for ip_str in ips:
if not WebhookTransport.is_public_ip(ip_str):
raise httpx.ConnectError(
f"Connection blocked: {hostname} resolves to a non-public address",
)
ip_str = ips[0]
formatted_ip = self._format_ip_for_url(ip_str)
new_headers = httpx.Headers(request.headers)
if "host" in new_headers:
del new_headers["host"]
new_headers["Host"] = hostname
new_url = request.url.copy_with(host=formatted_ip)
request = httpx.Request(
method=request.method,
url=new_url,
headers=new_headers,
content=request.content,
extensions=request.extensions,
)
request.extensions["sni_hostname"] = hostname
return super().handle_request(request)
def _format_ip_for_url(self, ip: str) -> str:
"""
Format IP address for use in URL (wrap IPv6 in brackets)
"""
try:
ip_obj = ipaddress.ip_address(ip)
if ip_obj.version == 6:
return f"[{ip}]"
return ip
except ValueError:
return ip
@staticmethod
def is_public_ip(ip: str | int) -> bool:
try:
obj = ipaddress.ip_address(ip)
return not (
obj.is_private
or obj.is_loopback
or obj.is_link_local
or obj.is_multicast
or obj.is_unspecified
)
except ValueError: # pragma: no cover
return False
@staticmethod
def resolve_first_ip(host: str) -> str | None:
try:
info = socket.getaddrinfo(host, None)
return info[0][4][0] if info else None
except Exception: # pragma: no cover
return None
@shared_task(
retry_backoff=True,
autoretry_for=(httpx.HTTPStatusError,),
max_retries=3,
throws=(httpx.HTTPError,),
)
def send_webhook(
url: str,
data: str | dict,
headers: dict,
files: dict,
*,
as_json: bool = False,
):
p = urlparse(url)
if p.scheme.lower() not in settings.WEBHOOKS_ALLOWED_SCHEMES or not p.hostname:
logger.warning("Webhook blocked: invalid scheme/hostname")
raise ValueError("Invalid URL scheme or hostname.")
port = p.port or (443 if p.scheme == "https" else 80)
if (
len(settings.WEBHOOKS_ALLOWED_PORTS) > 0
and port not in settings.WEBHOOKS_ALLOWED_PORTS
):
logger.warning("Webhook blocked: port not permitted")
raise ValueError("Destination port not permitted.")
transport = WebhookTransport(
hostname=p.hostname,
allow_internal=settings.WEBHOOKS_ALLOW_INTERNAL_REQUESTS,
)
try:
post_args = {
"url": url,
"headers": {
k: v for k, v in (headers or {}).items() if k.lower() != "host"
},
"files": files or None,
}
if as_json:
post_args["json"] = data
elif isinstance(data, dict):
post_args["data"] = data
else:
post_args["content"] = data
with httpx.Client(
transport=transport,
timeout=5.0,
follow_redirects=False,
) as client:
client.post(
**post_args,
).raise_for_status()
logger.info(
f"Webhook sent to {url}",
)
except Exception as e:
logger.error(
f"Failed attempt sending webhook to {url}: {e}",
)
raise e
finally:
transport.close()
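The validation order in send_webhook() above is: scheme and hostname check against settings.WEBHOOKS_ALLOWED_SCHEMES, an optional port allowlist via settings.WEBHOOKS_ALLOWED_PORTS, then (unless WEBHOOKS_ALLOW_INTERNAL_REQUESTS is set) DNS resolution and a public-address check in WebhookTransport before the request is rewritten to the vetted IP while the original Host header is kept. A standalone sketch of those same checks, with the settings inlined as plain constants and a hypothetical validate_webhook_url() helper for illustration only:

# Sketch only; not part of this change. Constants stand in for the Django settings.
import ipaddress
import socket
from urllib.parse import urlparse

ALLOWED_SCHEMES = {"http", "https"}  # stands in for settings.WEBHOOKS_ALLOWED_SCHEMES
ALLOWED_PORTS: set[int] = set()      # empty = all ports allowed, as in the code above
ALLOW_INTERNAL = False               # stands in for settings.WEBHOOKS_ALLOW_INTERNAL_REQUESTS

def validate_webhook_url(url: str) -> str:
    p = urlparse(url)
    if p.scheme.lower() not in ALLOWED_SCHEMES or not p.hostname:
        raise ValueError("Invalid URL scheme or hostname.")
    port = p.port or (443 if p.scheme == "https" else 80)
    if ALLOWED_PORTS and port not in ALLOWED_PORTS:
        raise ValueError("Destination port not permitted.")
    # Resolve once and require every address to be public, mirroring
    # WebhookTransport.is_public_ip() before the URL is rewritten.
    ips = [info[4][0] for info in socket.getaddrinfo(p.hostname, None)]
    if not ALLOW_INTERNAL:
        for ip in ips:
            obj = ipaddress.ip_address(ip)
            if (
                obj.is_private
                or obj.is_loopback
                or obj.is_link_local
                or obj.is_multicast
                or obj.is_unspecified
            ):
                raise ValueError(f"{p.hostname} resolves to a non-public address")
    return ips[0]  # the vetted IP the transport would connect to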

View File

@@ -2,7 +2,7 @@ msgid ""
msgstr ""
"Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2025-12-12 16:43+0000\n"
"POT-Creation-Date: 2025-11-14 16:09+0000\n"
"PO-Revision-Date: 2022-02-17 04:17\n"
"Last-Translator: \n"
"Language-Team: English\n"
@@ -1219,40 +1219,40 @@ msgstr ""
msgid "workflow runs"
msgstr ""
#: documents/serialisers.py:147
#: documents/serialisers.py:145
#, python-format
msgid "Invalid regular expression: %(error)s"
msgstr ""
#: documents/serialisers.py:638
#: documents/serialisers.py:619
msgid "Invalid color."
msgstr ""
#: documents/serialisers.py:1824
#: documents/serialisers.py:1805
#, python-format
msgid "File type %(type)s not supported"
msgstr ""
#: documents/serialisers.py:1868
#: documents/serialisers.py:1849
#, python-format
msgid "Custom field id must be an integer: %(id)s"
msgstr ""
#: documents/serialisers.py:1875
#: documents/serialisers.py:1856
#, python-format
msgid "Custom field with id %(id)s does not exist"
msgstr ""
#: documents/serialisers.py:1892 documents/serialisers.py:1902
#: documents/serialisers.py:1873 documents/serialisers.py:1883
msgid ""
"Custom fields must be a list of integers or an object mapping ids to values."
msgstr ""
#: documents/serialisers.py:1897
#: documents/serialisers.py:1878
msgid "Some custom fields don't exist or were specified twice."
msgstr ""
#: documents/serialisers.py:2012
#: documents/serialisers.py:1993
msgid "Invalid variable detected."
msgstr ""

View File

@@ -14,14 +14,13 @@ ALLOWED_SVG_TAGS: set[str] = {
"text",
"tspan",
"defs",
"lineargradient",
"radialgradient",
"linearGradient",
"radialGradient",
"stop",
"clippath",
"clipPath",
"use",
"title",
"desc",
"style",
}
ALLOWED_SVG_ATTRIBUTES: set[str] = {
@@ -30,7 +29,6 @@ ALLOWED_SVG_ATTRIBUTES: set[str] = {
"style",
"d",
"fill",
"fill-opacity",
"fill-rule",
"stroke",
"stroke-width",
@@ -54,14 +52,14 @@ ALLOWED_SVG_ATTRIBUTES: set[str] = {
"y1",
"x2",
"y2",
"gradienttransform",
"gradientunits",
"gradientTransform",
"gradientUnits",
"offset",
"stop-color",
"stop-opacity",
"clip-path",
"viewbox",
"preserveaspectratio",
"viewBox",
"preserveAspectRatio",
"href",
"xlink:href",
"font-family",
@@ -70,8 +68,6 @@ ALLOWED_SVG_ATTRIBUTES: set[str] = {
"text-anchor",
"xmlns",
"xmlns:xlink",
"version",
"type",
}

uv.lock (generated)
View File

@@ -1086,83 +1086,86 @@ wheels = [
[[package]]
name = "granian"
version = "2.6.0"
version = "2.5.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ea/1e/0a33c4b68b054b9d5f7963371dd06978da5f4f58f58ddcb77854018abfdb/granian-2.6.0.tar.gz", hash = "sha256:d9b773633e411c7bf51590704e608e757dab09cd452fb18971a50a7d7c439677", size = 115955, upload-time = "2025-11-16T16:07:27.082Z" }
sdist = { url = "https://files.pythonhosted.org/packages/78/9b/6ac903de211e5874824e7349387c9e0467459dc1ad0cd960cb4196f38ae6/granian-2.5.4.tar.gz", hash = "sha256:85989a08052f1bbb174fd73759e1ae505e50b4c0690af366ca6ba844203dd463", size = 112016, upload-time = "2025-09-18T11:52:16.004Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/76/71/e543f91d1a01515ff7211a19e18ee7dcf843dc25655d6cc18039901e2fb1/granian-2.6.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:759e8be4481c5aede0080e3c20a9e1bc7c00258cd4810f88ebcfb6bdac298f03", size = 3078973, upload-time = "2025-11-16T16:05:30.886Z" },
{ url = "https://files.pythonhosted.org/packages/ce/ae/ef87e76e5ade5633c11e892b663b922f8fda5ef804576373516a445d244f/granian-2.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6af5d9a088536798ee3188f1cbcffc5690ed38a53851825e4125c3bf3c9cfef3", size = 2810530, upload-time = "2025-11-16T16:05:32.703Z" },
{ url = "https://files.pythonhosted.org/packages/cf/9c/16a3ee4dad81e0dd446f391dad9ced17e7e685d97cce28188adb2e846004/granian-2.6.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:50c1cad7b2b0fb7c66169a12ab069e2f76f4d2a7390638e5b327504372976518", size = 3331648, upload-time = "2025-11-16T16:05:34.15Z" },
{ url = "https://files.pythonhosted.org/packages/8b/27/c9325343522ed89ac6f885995178c95f90052a5894fc681ec84df24a3ba6/granian-2.6.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a811d0b80099fe1da945e6d137d64dfe8e1dd07d3bf20e2e1eeae6f2c83adbb9", size = 3151584, upload-time = "2025-11-16T16:05:35.584Z" },
{ url = "https://files.pythonhosted.org/packages/4f/73/376f08e3de394e50888bd9f8fa27be5dd60e1fd6cbbec3683f780ddaf5fc/granian-2.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61c7b04e5520ec3d749e53da414ba0ccc7773d7b24e8049539d47a4171aa922a", size = 3375838, upload-time = "2025-11-16T16:05:37.425Z" },
{ url = "https://files.pythonhosted.org/packages/5b/9c/f2e32c826fc7fe0c65a6cf0ff0b4c459f71adc78f2721083ff50fa60c29a/granian-2.6.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:d1bbe669228ba475adfdbebbae962f958be3002c742370000b7f5d06f895cacb", size = 3234478, upload-time = "2025-11-16T16:05:38.664Z" },
{ url = "https://files.pythonhosted.org/packages/dc/09/70bb969fcd4b35a357c93490efc7cf97185b521c90fcf21c2483de49cce8/granian-2.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bdef48aab0846fd5c215acd1779328d067708859bbf44c4e9363daa51b8c98bd", size = 3300577, upload-time = "2025-11-16T16:05:39.792Z" },
{ url = "https://files.pythonhosted.org/packages/c7/94/1722f6bf1a64475e390595c0b7a1b0dff40a4279fc215cb3695be7fd5168/granian-2.6.0-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:8459a8b2335689ecb04b2ccba63cbcdf030c242a64ae77be68fb6e263984a150", size = 3475443, upload-time = "2025-11-16T16:05:41.374Z" },
{ url = "https://files.pythonhosted.org/packages/64/92/8a353cdb800b0c390b3c6d3bc0ab5a815221319bec65419a86a959e64acd/granian-2.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:dcde0783cb546304f0e20a1f99feb1a8795adfb0540c9278e5f9ef583edffb36", size = 3467863, upload-time = "2025-11-16T16:05:42.82Z" },
{ url = "https://files.pythonhosted.org/packages/3c/56/efb12bda35ce3d6ac89ec8a5b02036d17dfaec6bb2cab16f142dc9ee389f/granian-2.6.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:38029b6c25ac5a9f8a6975b65846eee23d9fa7b91089a3ff6d11770d089020f3", size = 3078748, upload-time = "2025-11-16T16:05:45.593Z" },
{ url = "https://files.pythonhosted.org/packages/5e/84/6d640c3439d532792a7668d66089df53d74ffb06455075b9db2a25fbb02d/granian-2.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:efd9c92bc5d245f10d6924847c25d7f20046c976a4817a87fd8476c22c222b16", size = 2810326, upload-time = "2025-11-16T16:05:47.085Z" },
{ url = "https://files.pythonhosted.org/packages/92/60/909057f8f21e2d6f196f8c9380a755d5453a493cd071afa7f04c9de83725/granian-2.6.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:43e6a25d995206ba0a2fef65fea2789f36dde1006932ea2dcd9a096937c1afdd", size = 3331727, upload-time = "2025-11-16T16:05:48.245Z" },
{ url = "https://files.pythonhosted.org/packages/64/07/27701a5b9aa27873ce92730e80e5c0ad3e7fe80674ba1660996c1463c53a/granian-2.6.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5d7ac1be5c65fef4e04fb9860ca7c985b9c305f8468d03c8527f006c23100c83", size = 3151437, upload-time = "2025-11-16T16:05:49.413Z" },
{ url = "https://files.pythonhosted.org/packages/b6/1b/dfc6782dad69b02ab6d50a320b54b2e28c573954e0697a3f24a68f7aa3c9/granian-2.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:318a7db03e771e2611a976a8c4ecc7ae39e43e2ebffd20a4c2371a71cdc5659c", size = 3375815, upload-time = "2025-11-16T16:05:50.497Z" },
{ url = "https://files.pythonhosted.org/packages/ad/ab/de57fcf406a9da5b28f83af71bd7b8e2fc944b786f95b01188b4f8c1c049/granian-2.6.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:cdb1ab7a0cedfa834c6e8e7c9e2530d80d6fd6f04076c2f6998629688f8ecb00", size = 3234158, upload-time = "2025-11-16T16:05:51.664Z" },
{ url = "https://files.pythonhosted.org/packages/a7/d0/a2d3a14bfce05f62f3ec10cb1c1609fcfe983e6ae929b1656bff8784812c/granian-2.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fd11a9275ad01c2d99a322c1d0c8af0ad162c541515ad1d55ef585fd321cd2b9", size = 3300040, upload-time = "2025-11-16T16:05:53.233Z" },
{ url = "https://files.pythonhosted.org/packages/db/e3/d9b58bacf40da8f937a8a04f2fbc61424f551d0589f3bd6eb0755b57c3be/granian-2.6.0-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:489b1e24b4360ecdaf08d404e13549d4377e77756d1911454abed9e0b559345a", size = 3475356, upload-time = "2025-11-16T16:05:54.459Z" },
{ url = "https://files.pythonhosted.org/packages/df/50/b45f53dea5ec3d9a94f720f4a0b3a7c2043a298151b52ac389db14025b61/granian-2.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:17ba9fb67931852cf9d8eee23d1adb78c0e3106bd4ad440cf3b37ce124b4380c", size = 3467883, upload-time = "2025-11-16T16:05:56.017Z" },
{ url = "https://files.pythonhosted.org/packages/ef/db/c7d10c2b61dd40014346af3274500b72654710cdfe400f37358c63481f28/granian-2.6.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b05b4fc5ce5855eb64a02b6e2c70b0d7e24632ee0d1193badfc0dace56688c11", size = 3076177, upload-time = "2025-11-16T16:05:58.824Z" },
{ url = "https://files.pythonhosted.org/packages/9c/54/095eb0cea6976f3aeaab434f9009521b4d50aa37f9efda54a70da5b465ec/granian-2.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b6aad6e7ded7a0a916119cd3ee28aa989e619074a6ca1ba3dc19cf5ad608832c", size = 2801793, upload-time = "2025-11-16T16:06:00.396Z" },
{ url = "https://files.pythonhosted.org/packages/6d/f5/4177070ec6942b0467c0da59b53cf83ac5b939cfcdf687daeaebaef31299/granian-2.6.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8e77509ad3a5654da1268db9d78d49357bf91ac2d3dcb1c58a00cda162d922a7", size = 3325958, upload-time = "2025-11-16T16:06:01.906Z" },
{ url = "https://files.pythonhosted.org/packages/ad/5a/973e77414882df01ef75801d4c7e51bc2796475c0e7d72356d4a8f7701a5/granian-2.6.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3a7cc82cdc5d0c7371d805f00866f51ece71bb0cb2e1f192b84834cf1a6844b", size = 3146873, upload-time = "2025-11-16T16:06:03.183Z" },
{ url = "https://files.pythonhosted.org/packages/d6/97/410127ee96129c8f0746935b7be6703ad6f31232e0c33edec30496596d26/granian-2.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dbbce087a055eb64896b809a9a1f88161751815b112de4aa02ee4348f49cb73", size = 3387122, upload-time = "2025-11-16T16:06:05.194Z" },
{ url = "https://files.pythonhosted.org/packages/cf/37/36e74876d324fe6326af32a01607afc3f0f0fcb9e674092755da4146c40c/granian-2.6.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:a7fa2728d32dfaf3b1b2bf5b0b7c6d23bb75eaf62bd08b71b61797d292381970", size = 3234994, upload-time = "2025-11-16T16:06:06.978Z" },
{ url = "https://files.pythonhosted.org/packages/bc/6e/5da9af1fdf7eeff9c7568f35171a0cdd63d73ab87a3deea560393b746d71/granian-2.6.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:70b3867c33e5c95d6eb722a5c8b847c36c670fc189821bf7aef9934e943c2574", size = 3303337, upload-time = "2025-11-16T16:06:08.263Z" },
{ url = "https://files.pythonhosted.org/packages/e2/ab/d133ed75e9940abc9bed56cb096709b8c4a1dfe6221e61d43bd23939afad/granian-2.6.0-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:7fb0448a292f2dda9c4130e394ac09ef1164713d873882fd3106ca6949ff0897", size = 3472100, upload-time = "2025-11-16T16:06:09.494Z" },
{ url = "https://files.pythonhosted.org/packages/0d/25/064406ade99fa7153e1a2b129f69af56cc1e50176a2fbec25911d9a121a9/granian-2.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a5bd3c59fe3a7acb22e434749ff2258606a93bc5848fa96332a6ed4c752f4dc8", size = 3480023, upload-time = "2025-11-16T16:06:10.718Z" },
{ url = "https://files.pythonhosted.org/packages/4b/b0/a7be659186bf9de644a5214c31ce337342170de71c5cb1e3ea61e1feeebe/granian-2.6.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:74f579e7295945119394dc05dd1565be1ac700f6f26c8271d8327dfabc95ec34", size = 3075590, upload-time = "2025-11-16T16:06:13.662Z" },
{ url = "https://files.pythonhosted.org/packages/5a/d8/eb55f3657d7c104f96f2d20bd459908842a954f4d95c5769c46bf485d656/granian-2.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f4e0e925d016e3dc43ae5950021c9ea0e9ee2ef1334a76ba7fbb80cc9e17c044", size = 2801601, upload-time = "2025-11-16T16:06:14.908Z" },
{ url = "https://files.pythonhosted.org/packages/2a/a3/45c79b3b2388a066e05ae3af171cde13540467efb0ec6404a52c12fcc449/granian-2.6.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b568459abe4813e4968310312e26add3dab80c3ce5044b537ebfe464601fe9a", size = 3325246, upload-time = "2025-11-16T16:06:16.9Z" },
{ url = "https://files.pythonhosted.org/packages/2f/2c/570df011d8c53a59d945db1b8b6adedf04f43d92bfd72f4149ee60c3aeaf/granian-2.6.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0041ba59e4b89818db1772ea677bb619f5e3030060dcb6c57e8a17d72dc6210b", size = 3146313, upload-time = "2025-11-16T16:06:18.339Z" },
{ url = "https://files.pythonhosted.org/packages/a9/cd/8e9b183db4190fac1401eeab62669ebe35d962ba9b490c6deca421e3daa4/granian-2.6.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c032dca04171e4fbd54e587fe03aeef1825739d02ff3e3c49d578a8b5cc752c", size = 3386170, upload-time = "2025-11-16T16:06:19.946Z" },
{ url = "https://files.pythonhosted.org/packages/df/c5/9ccc0d04c1cefdb4bb42f671a0c27df4f68ba872a61edc7fc3bae6077ea9/granian-2.6.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:d5686da7358fede8e9a1e1344310c6e3cb2c4d02a1aca52c31c990fe6b7d6281", size = 3235277, upload-time = "2025-11-16T16:06:21.754Z" },
{ url = "https://files.pythonhosted.org/packages/96/7d/a082bec08c1d54ce73dd237d6da0f35633cd5f2bfd1aec2f0a2590e6782a/granian-2.6.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:62c69bb23efe26a33ac39e4b6ca0237d84ed6d3bf47a5bb817e00a46c27369f2", size = 3302908, upload-time = "2025-11-16T16:06:22.988Z" },
{ url = "https://files.pythonhosted.org/packages/b1/2e/c8a53c92f0e98c4b36a24c03a4243b53410804f78f1876ca3ea497831381/granian-2.6.0-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:2ee5087e4b876f29dd1a11e9c2dd8d864ecb207278767a33bba60975260f225d", size = 3470938, upload-time = "2025-11-16T16:06:24.666Z" },
{ url = "https://files.pythonhosted.org/packages/7a/c7/0615d97cc666c6b5c1af24abbb08c6fd536a5f3c055fd09a3cd6b178283e/granian-2.6.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3627b69f391a769acfad4ae26bbfce03b50c31eb5fbea18ec0a44f37c89cf0fd", size = 3479291, upload-time = "2025-11-16T16:06:25.984Z" },
{ url = "https://files.pythonhosted.org/packages/08/2c/8256710307e32cc4aff58d730f3db9e87471121725adc92d700fa0190136/granian-2.6.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:c66877f2b2a1ad6046a228ee228ed4faa43dd4949fbe07f61d5c38ad57506e02", size = 3027712, upload-time = "2025-11-16T16:06:28.819Z" },
{ url = "https://files.pythonhosted.org/packages/63/88/bb3dc2a67f146d03ffd1b3d912c92795ecf52aa2b7ea1375735c522a5e6c/granian-2.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:919ccfe3273c6325c82ecb2e62b5af4d1b57fdc9858ce725b8223af2e1d6e2cd", size = 2753501, upload-time = "2025-11-16T16:06:30.267Z" },
{ url = "https://files.pythonhosted.org/packages/0d/6e/86cea4a4cd0c9adbae74d865468f298083fcefd4d9f8f8f21910078b069a/granian-2.6.0-cp313-cp313t-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7d6368281f9f1bfde53a71f67570b70df773e01329f7a113b248de453e5991c1", size = 2966948, upload-time = "2025-11-16T16:06:31.932Z" },
{ url = "https://files.pythonhosted.org/packages/e0/01/092337f9aae6cb6fb66516894a3a39d723de9ab263d3a144511d07d2ccef/granian-2.6.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b3d0b7dd32a630336120c9a12e7ba7ca4e415bebd22d9735b19df593e01ffa40", size = 3317466, upload-time = "2025-11-16T16:06:33.222Z" },
{ url = "https://files.pythonhosted.org/packages/b7/60/0d3635ef8f1f73789cb1779574493668a76675ef18115826a4a2dcb415d7/granian-2.6.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acb18fca86ff560ea5a3bf9dc342245e388409844c257d1125ff9a988c81080b", size = 3273204, upload-time = "2025-11-16T16:06:34.513Z" },
{ url = "https://files.pythonhosted.org/packages/f3/26/09bc5016ae7faac0af40a07934d4d4d41f9e5bd7e97560aac957f7aa9605/granian-2.6.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:3606f13ba2fd9afde1d879ef556afcccd17b55c57a9f6be8487626867fe94a20", size = 3107339, upload-time = "2025-11-16T16:06:36.121Z" },
{ url = "https://files.pythonhosted.org/packages/c6/cb/91a13e42965a3e20a4c7398c63843cac9ca1a1c36925bd3ff69e6c17775f/granian-2.6.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:ca8188119daba0a343d2736dd4ed4d8d71ca5c0ca016a3f93599710906aa036f", size = 3298057, upload-time = "2025-11-16T16:06:37.567Z" },
{ url = "https://files.pythonhosted.org/packages/23/8b/19bb0f679b74ddb58e1c6de2e4c85ba986b2040d7446fd7e5a498e5a67cf/granian-2.6.0-cp313-cp313t-musllinux_1_1_armv7l.whl", hash = "sha256:6ac9d479d4795ab9c7222829d220636250ee034d266ad89a9657b64fb6770b93", size = 3465623, upload-time = "2025-11-16T16:06:39.144Z" },
{ url = "https://files.pythonhosted.org/packages/41/25/4af1f3e0cfea237912d04d57e97193d350b06f93255bde16040780e75589/granian-2.6.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:b8cc3635676639c1c6fc336571e7cdd4d4f0be6e05c33ae06721a570b346ce21", size = 3476874, upload-time = "2025-11-16T16:06:40.868Z" },
{ url = "https://files.pythonhosted.org/packages/bb/53/9ed1a1f710a78eaad2897b9264bb6ae1190dc251af463b87be41f1963dfe/granian-2.6.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:cba1d4ac5b101c41fa916fb1ca5d5c359892b63d1470a9587055605c68850df8", size = 3072924, upload-time = "2025-11-16T16:06:43.949Z" },
{ url = "https://files.pythonhosted.org/packages/b1/58/8fa09896c88937a95b92185a1377b09f7cd1b8ac1e0f06a251e02ce96164/granian-2.6.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2c829ece96a49d431c01553e0c020a675624798659c74e3d800867a415376fef", size = 2800675, upload-time = "2025-11-16T16:06:45.831Z" },
{ url = "https://files.pythonhosted.org/packages/4f/53/779e15fb6372cf00d2c66f392d754e0816bf0597e8346459c22bde9de219/granian-2.6.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:637153b653a85e1bb0cba2e10e321e2cbb1af1e9abab0feafd34eb612fe3fcdd", size = 3323029, upload-time = "2025-11-16T16:06:47.682Z" },
{ url = "https://files.pythonhosted.org/packages/32/ad/3af7388f51b4df3a781ecfc6f1ec18331ec74ea413fb2c62fe24c65e7935/granian-2.6.0-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6c09792ca3807059ef8e58a7c7bc5620586414f03ebd4bb2e6cd044044f0165", size = 3142617, upload-time = "2025-11-16T16:06:48.964Z" },
{ url = "https://files.pythonhosted.org/packages/4c/4d/6a7766fd9fe09f3f887c2168d5607cc2eb2ee9fe5c9364a877942c05de41/granian-2.6.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79e2f68d99f454d1c51aacc86bed346693c074f27c51fb19b8afe5dc375e1b70", size = 3383669, upload-time = "2025-11-16T16:06:50.363Z" },
{ url = "https://files.pythonhosted.org/packages/b8/1c/b4bbdcd6bbe9c3290a2ac76eac3ae8916fdb38269f9f981e5b933ff02664/granian-2.6.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:995f7b496b16484c97e8da9f44ead66307d6f532135a9890b0d27c88b8232df3", size = 3233040, upload-time = "2025-11-16T16:06:52.787Z" },
{ url = "https://files.pythonhosted.org/packages/75/26/ca7afabab2b31101eabc78df26772bd679e0a2bc879c58e8fcbb9732d57e/granian-2.6.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:6d5db194527d52183b2dc17be9d68c59647bc66459c01a960407d446aa686c98", size = 3302090, upload-time = "2025-11-16T16:06:54.112Z" },
{ url = "https://files.pythonhosted.org/packages/e8/3b/3e6992ac60f8d2e7f6eb5ae7845ba8f77d9373379e7d8ec7dbdfac89c00b/granian-2.6.0-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:c9fd40f3db242eece303ab4e9da4c7c398c865d628d58ff747680c54775ea9e4", size = 3469619, upload-time = "2025-11-16T16:06:55.488Z" },
{ url = "https://files.pythonhosted.org/packages/5d/96/8e78858630d7ca51751502c323f22841a56847db827a73d946a9303108c1/granian-2.6.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:99dfa23b29e6a4f8cc2ec9b55946746f94ce305e474defef5c3c0e496471821e", size = 3479330, upload-time = "2025-11-16T16:06:56.989Z" },
{ url = "https://files.pythonhosted.org/packages/5a/5c/5770f1270c2e59b7d27e25792ed62f3164b8b962ccf19b4a351429fd34fe/granian-2.6.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:a8b356e8d96481c0fa67c2646a516b1f8985171995c0a40c5548352b75cae513", size = 3026090, upload-time = "2025-11-16T16:07:00.105Z" },
{ url = "https://files.pythonhosted.org/packages/28/89/85b40c55ddd270a31e047b368b4d82f32c0f6388511a0affcf6c8459821b/granian-2.6.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f026d7a2f1395b02cba2b59613edfd463d9ef43aae33b3c5e41f2ac8d0752507", size = 2752890, upload-time = "2025-11-16T16:07:01.507Z" },
{ url = "https://files.pythonhosted.org/packages/bf/4e/369700caefaad0526fc36d43510e9274f430a5bdeea54b97f907e2dd387d/granian-2.6.0-cp314-cp314t-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:15c888605406c9b29b7da8e3afa0ce31dabad7d446cf42a2033d1f184e280ef3", size = 2965483, upload-time = "2025-11-16T16:07:02.836Z" },
{ url = "https://files.pythonhosted.org/packages/75/47/d6d95615b94a8bac94efca7a634cb3160fb7cd3235039e4d1708e0399453/granian-2.6.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d9ce9ff8d4d9da73eb2e90c72eae986f823ab46b2c8c7ee091ec06e3c835a94e", size = 3313071, upload-time = "2025-11-16T16:07:04.128Z" },
{ url = "https://files.pythonhosted.org/packages/6a/76/f9098765797adfc142d503ee8a18fe137324558a028db6322753d88305d9/granian-2.6.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:681f44fa950b50721250536477b068315322c447f60b6a7018a9d61385202d67", size = 3271503, upload-time = "2025-11-16T16:07:05.482Z" },
{ url = "https://files.pythonhosted.org/packages/7f/f9/55be32f079af772054284aa917eb7bd77f1f8ba840f0773db9ac47279149/granian-2.6.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:cf23f25826e7c87c2cd9d984a358c14106d589febcd71af0f5392bb65fafb07a", size = 3106398, upload-time = "2025-11-16T16:07:07.396Z" },
{ url = "https://files.pythonhosted.org/packages/79/ab/e63f54a8432b2b877d83c5f2921a54791a420685002854dc7005bbd48817/granian-2.6.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:559985641cc1f0497f2c35d75126214f9cf9286ec6cea083fb1d0324712dbc47", size = 3296156, upload-time = "2025-11-16T16:07:08.858Z" },
{ url = "https://files.pythonhosted.org/packages/ef/3c/a37c038be10441a27cfde65a64c4406556ee64ab5deba4a782eaaa5ce7cf/granian-2.6.0-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:98718c08713d4afdf0e444f6236eeac6d01fdf08d0587f3c15da37fd12ee03f6", size = 3460301, upload-time = "2025-11-16T16:07:10.476Z" },
{ url = "https://files.pythonhosted.org/packages/44/05/bcc03661028df91808440f24ae9923cda4fc53938c6bb85a87e3d47540a5/granian-2.6.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:c115726904be2186b1e621a2b4a292f8d0ccc4b0f41ac89dcbe4b50cbaa67414", size = 3474889, upload-time = "2025-11-16T16:07:11.873Z" },
{ url = "https://files.pythonhosted.org/packages/d5/ee/88767d70d21e6c35e44b40176abd25e1adb8f93103b0abc6035c580a52aa/granian-2.6.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:323c096c7ebac19a16306708b4ed6abc9e57be572f0b9ff5dc65532be76f5d59", size = 3089586, upload-time = "2025-11-16T16:07:14.65Z" },
{ url = "https://files.pythonhosted.org/packages/5c/22/2405b36c01b5c32fc4bbc622f7c30b89a4ec9162cc3408a38c41d03e1c27/granian-2.6.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:b9736ab48a1b3d70152e495374d4c5b61e90ea2a79f1fc19889f8bba6c68b3b5", size = 2805061, upload-time = "2025-11-16T16:07:16.582Z" },
{ url = "https://files.pythonhosted.org/packages/33/38/79e13366729f0f2561b33abef7deb326860443abbbb1d2247679feaeebdc/granian-2.6.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee4b1f5f7ec7096bdffc98171b559cb703c0be68e1c49ff59c208b90870c6bba", size = 3381989, upload-time = "2025-11-16T16:07:17.906Z" },
{ url = "https://files.pythonhosted.org/packages/90/9f/fcff1978ca3cbf138291a29fe09f2af5d939cab9e5f77acc49510092c0d8/granian-2.6.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e1e27e9527cdcd8e767c52e091e69ade0082c9868107164e32331a9bf9ead621", size = 3237042, upload-time = "2025-11-16T16:07:19.381Z" },
{ url = "https://files.pythonhosted.org/packages/a1/9d/06dc6b5f411cac8d6a6ef4824dc102b1818173027ab4293e4ae57c620cfe/granian-2.6.0-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f9f9c5384f9370179d849c876c35da82f0ebd7389d04a3923c094a9e4e80afc5", size = 3316073, upload-time = "2025-11-16T16:07:20.95Z" },
{ url = "https://files.pythonhosted.org/packages/1c/e1/45e9861df695c743b57738d9b8c15b3c98ebd34ba16a79884372b2006b32/granian-2.6.0-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:561a3b86523a0b0e5d636229e3f0dcd80118ace2b1d2d061ecddeba0044ae8ac", size = 3483622, upload-time = "2025-11-16T16:07:22.567Z" },
{ url = "https://files.pythonhosted.org/packages/5f/14/cfe0648b2e1779ed2a2215a97de9acc74f94941bb60c6f2c9fb7061ae4bb/granian-2.6.0-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:8e12a70bdb3b5845f62dc2013527d5b150b6a4bc484f2dec555e6d27f4852e59", size = 3460175, upload-time = "2025-11-16T16:07:24.327Z" },
{ url = "https://files.pythonhosted.org/packages/e3/a0/b6782563716dfd178f094fe7fe6d28fc6c13857926bb9efac6ddc73dec54/granian-2.5.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:907d17f94a039b1047a82386b4979a6a7db7f4c37598225c6184a2b89f0ae12d", size = 2860831, upload-time = "2025-09-18T11:49:41.521Z" },
{ url = "https://files.pythonhosted.org/packages/a9/a5/6ae10379f21415255dd36b4d26a69a0a8ec80d4ba4fe26ca563e46a1ca62/granian-2.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a009e99d3c4a2a70a15a97391566753045a81641e5a3e651ff346d8bb7fe7450", size = 2550345, upload-time = "2025-09-18T11:49:43.015Z" },
{ url = "https://files.pythonhosted.org/packages/7c/ca/1cdbd669ee4bf85208b96e0bcaf5b51cba67907b71679c18a1da6bea61e6/granian-2.5.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3cb602ac3ea567476c339e8683a0fa2ffe7fd8432798bd63c371d5b32502bdb9", size = 3048013, upload-time = "2025-09-18T11:49:44.538Z" },
{ url = "https://files.pythonhosted.org/packages/cf/9e/aba367c3c372d641e78aaaaa4ec8a4452bb8a2259bdb8b7484d537969864/granian-2.5.4-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f52aee85459304f1e74ff4cb5bb60d23db267b671b1199ff589b1a5a65f5638f", size = 2862464, upload-time = "2025-09-18T11:49:45.976Z" },
{ url = "https://files.pythonhosted.org/packages/3e/aa/542ef36aee53a21ff868a38c4d567eb253b1338501091e36b6ad8090c862/granian-2.5.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8478d777971d6c1601b479c70a5c1aaaba7b656fa5044b1c38b4ba5e172f0fc7", size = 3147423, upload-time = "2025-09-18T11:49:47.266Z" },
{ url = "https://files.pythonhosted.org/packages/e2/a3/1ba8d0d534ab993e1f84eab3320b4e3071e9bec166131e663c60160e5192/granian-2.5.4-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:1da588e951b3e0bce94f2743158750c9733efcbe5c27b31f50e9bda6af8aac1f", size = 2914051, upload-time = "2025-09-18T11:49:48.616Z" },
{ url = "https://files.pythonhosted.org/packages/4b/1e/0e43ee8a4a97c4b2a413964448917cabe154aabc99be46ae0f487ce094e8/granian-2.5.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:79db7d0eac7445a383e22b6d3e9323882bc9a9c1d2fd62097c0452822c4de526", size = 2919482, upload-time = "2025-09-18T11:49:49.789Z" },
{ url = "https://files.pythonhosted.org/packages/c7/74/6857f59e1ae9a556b7293c60258963063d74ad154c0411f94dc235aa02ab/granian-2.5.4-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:cc75f15876415054c094e9ef941cf49c315ee5f0f20701fdfb3ffc698054c727", size = 3157058, upload-time = "2025-09-18T11:49:51.406Z" },
{ url = "https://files.pythonhosted.org/packages/02/e9/05eaa62200693b31a75e3767f5716a55aeb07572f1a41e2d31b6f8bb115d/granian-2.5.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2caeee9d12144c9c285d3074c7979cdf1ad3d84a86204dec9035ca6cec5d713f", size = 3194238, upload-time = "2025-09-18T11:49:52.575Z" },
{ url = "https://files.pythonhosted.org/packages/a9/a3/89471ae2ff6d3111964ef9e0b8ac00c5a68046aca93965048b94ec0ec952/granian-2.5.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a404bff75dc29c01566a4e896237f6cb8eda49a71b90770b8316ebe1b08a3d46", size = 2861005, upload-time = "2025-09-18T11:49:55.442Z" },
{ url = "https://files.pythonhosted.org/packages/85/90/81706bbe0f23737c3c1cf8a4e76a6e2c9ec9c5a950a023aea2aed6ef74c4/granian-2.5.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d91b4642283ea8169aad64b74b242c364a3ce24d6aeed9b5a4358f99a5ab4d84", size = 2550393, upload-time = "2025-09-18T11:49:57.24Z" },
{ url = "https://files.pythonhosted.org/packages/66/67/7bdd9b1b63c811439ac7b8b4e52112abb5a38b575f033b9c7672d0355a70/granian-2.5.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6aa6b4ad4d479fe3e7d42ca4321ae7febad9cdae5c269032234b8b4ac8dbd017", size = 3048134, upload-time = "2025-09-18T11:49:59.263Z" },
{ url = "https://files.pythonhosted.org/packages/39/94/9bd7e8248c438ac7861653ea4fb071a2ed2dcb4b7a1ec5cf034282d4079a/granian-2.5.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2466523c14724d2d68497cd081ffd2aa913381be199e7eb71347847a3651224c", size = 2862817, upload-time = "2025-09-18T11:50:00.932Z" },
{ url = "https://files.pythonhosted.org/packages/c1/f7/c98302718f58a3ee47ab1db83e8c0834b91016fc3c83f3a23f7b256a02a7/granian-2.5.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce9ec6baebb83ba7d1ed507dc7d301f7f29725f9b7a8c9c974f96479dea3a090", size = 3147500, upload-time = "2025-09-18T11:50:02.569Z" },
{ url = "https://files.pythonhosted.org/packages/4d/c0/eb3b94d2eb40d5b94bfad94da018f5daf539e09e8fc33742a8330707913a/granian-2.5.4-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:8b3faa2eec6dbbb072aae575d6a6a5e5577aef13c93d38d454a6a9fffc954ce7", size = 2914076, upload-time = "2025-09-18T11:50:04.074Z" },
{ url = "https://files.pythonhosted.org/packages/0f/77/42e06595c441c14f934858351acacc28fb2552798ab7eefed2e0e3920d15/granian-2.5.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:25a1d03fc93184009d5e76a5bfb5b29222e7debacfc225dd5d3732f6f6f99c10", size = 2919154, upload-time = "2025-09-18T11:50:05.497Z" },
{ url = "https://files.pythonhosted.org/packages/c4/af/1f87d4bfabf09d16dcf3a355cc356daa33185c561a5f3c5904f6fd0f0e5f/granian-2.5.4-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:1e580f5fa30ed04e724c71de099dcacc4722613f8a53e41454bac86242887da7", size = 3157119, upload-time = "2025-09-18T11:50:07.088Z" },
{ url = "https://files.pythonhosted.org/packages/ab/34/18b038e4b67a97eb776bf84307a0d5c8bff79290024973def614d8052596/granian-2.5.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5a4e74bf6d91dd7df6ffc7edb74e74147057fc947c04684d2d9af03e5e71ad71", size = 3193906, upload-time = "2025-09-18T11:50:08.305Z" },
{ url = "https://files.pythonhosted.org/packages/ea/9f/2a419461f2696bd95ba6b4d2a1c09b7372b79e66ac3b7dd4a985bf35f7d6/granian-2.5.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c4387cca4e38ec7579cac71a2da27fd177ced682b1de8bf917b9610f2ac0ba5e", size = 2846208, upload-time = "2025-09-18T11:50:10.934Z" },
{ url = "https://files.pythonhosted.org/packages/0a/bd/f9b9f57e14f778665e5b56a5b98d20187136517188a39ac404b13812bb34/granian-2.5.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a126b75927583292a9d2cfae627cd4b1da3e68d04dd87ba5a53b4860768d9e04", size = 2537995, upload-time = "2025-09-18T11:50:12.461Z" },
{ url = "https://files.pythonhosted.org/packages/55/eb/4df4fd10fb0ca0aa7ccbbe6b805e8019dc83d3a7861a8e0ec73a4f671bcf/granian-2.5.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b44dc391bf9bc1303bcb2cb344bbb5c35de92f43a3e8584f2a984dfda2fea8e3", size = 3033917, upload-time = "2025-09-18T11:50:16.535Z" },
{ url = "https://files.pythonhosted.org/packages/1a/ad/a3b8a773ee347f1a9b52d37caeb373eff590363c478cd6d9d20422a842de/granian-2.5.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07c47847163a1bcce0b7c323093b20be8a8ec9d4f4eba596b4d27f85ddbe669f", size = 2860524, upload-time = "2025-09-18T11:50:17.811Z" },
{ url = "https://files.pythonhosted.org/packages/f3/ae/188342234ed4f842ad63fd6a0328a05a8e2b991293496527b30654b6710c/granian-2.5.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6c50539f654ce5f8fadd9b360fac0361d812c39c7a5f1e525889c51899a10f0", size = 3139768, upload-time = "2025-09-18T11:50:19.006Z" },
{ url = "https://files.pythonhosted.org/packages/28/40/babaaf6b95bf690cf3af1ff0c3a1d9c33b23f2c18dabb373c805653834bd/granian-2.5.4-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:e52f65acd4da0a3af7a5d2d6a6445d530e84fe52057ee39f52ce92a6598fe37b", size = 2915038, upload-time = "2025-09-18T11:50:20.194Z" },
{ url = "https://files.pythonhosted.org/packages/a2/01/3d2eda00cbaa09f5a734d57fb4f52f68a1a48137a262e46d59ffecb54bd6/granian-2.5.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b78ab23495e384521085c33fecb3987779e1b1e43f34acd5b25e864b699933f9", size = 2915368, upload-time = "2025-09-18T11:50:21.335Z" },
{ url = "https://files.pythonhosted.org/packages/8a/3c/51670c8d83334ea1ce1b54aae93f6066ff101ca81ccd6ab01832309b2156/granian-2.5.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:6a477b204fca30218b3cc16721df38f1e159c5ee27252b305c78982af1995974", size = 3142893, upload-time = "2025-09-18T11:50:22.843Z" },
{ url = "https://files.pythonhosted.org/packages/68/42/81f848d9cf6cd77f24d0625d5c49caf6471477c97a760827d41cbb90a214/granian-2.5.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7f58116ab1300ca744a861482ce3194e9be5f1adad9ac4adda89d47b1ba3fa50", size = 3206519, upload-time = "2025-09-18T11:50:24.124Z" },
{ url = "https://files.pythonhosted.org/packages/50/aa/ed7cee53c0663fbb4d64b9a143d176f76000100f8a5ccef8a166df2bb9a9/granian-2.5.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:533bf842b56c8531705048211e3152fb1234d7611f83257a71cbf7e734c0f4a1", size = 2845936, upload-time = "2025-09-18T11:50:27.394Z" },
{ url = "https://files.pythonhosted.org/packages/04/a3/a63b04b67fb44578ce5d2d6c2b669932d6adb9a4844d86dc0f0cd0adb409/granian-2.5.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d1efb111f84236a72d5864d64a0198e04e699014119c33d957fac34a0efb2474", size = 2537660, upload-time = "2025-09-18T11:50:28.953Z" },
{ url = "https://files.pythonhosted.org/packages/a6/99/4c630712f95ce6105f070631242fbafe4c045a00cd5e00f437a03085a9cc/granian-2.5.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b0341a553fe913b4a741c10f532f5315d57deaa34877494d4c4b09c666f5266c", size = 3033766, upload-time = "2025-09-18T11:50:30.515Z" },
{ url = "https://files.pythonhosted.org/packages/44/73/d05f0cd49764feedfc91b418e060c212a35077154209e22654891c08d2dc/granian-2.5.4-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8b3b24b7620df45752bbf34f93250f733a6996a7409879efbea6ab38f57eff69", size = 2860740, upload-time = "2025-09-18T11:50:31.799Z" },
{ url = "https://files.pythonhosted.org/packages/6e/85/061748715e5c213c8f5e58c9a7f95741fc5787a0c45c7b066b1df0f59453/granian-2.5.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bb902636271f9e61f6653625670982f7a0e19cbc7ae56fc65cd26bf330c612f", size = 3139932, upload-time = "2025-09-18T11:50:33.07Z" },
{ url = "https://files.pythonhosted.org/packages/7e/5b/f361708fd275763f1436bc1584bf3171ea5b6b12255e7cf3d5a2d7280546/granian-2.5.4-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:23b2e86ea97320bbe80866a94e6855752f0c73c0ec07772e0241e8409384cde5", size = 2914531, upload-time = "2025-09-18T11:50:34.364Z" },
{ url = "https://files.pythonhosted.org/packages/0d/84/9f1dbcb6a2228fd9bbf548d81f17225ae26dbbcef33f61d7e14971d78d2f/granian-2.5.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:328ed82315ccbd5dedc8f5318a1f80d49e07eb34ebc0753bc2930d2f30070a34", size = 2915108, upload-time = "2025-09-18T11:50:35.667Z" },
{ url = "https://files.pythonhosted.org/packages/77/10/13e04a2ef44f711474ca68a095e65f72fb6ced28f5c0930980a012386f8b/granian-2.5.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:9bd438bb41cbac25f5f3155924947f0e2594b69f8a5f78e43c453c35fa28a1f0", size = 3142589, upload-time = "2025-09-18T11:50:37.349Z" },
{ url = "https://files.pythonhosted.org/packages/43/a5/826dab4703261d0f500b5d1d0cc8a965157856b0266ca3679185d316d5f7/granian-2.5.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d6d1b462ccb57af3051def8eae13f1df81dc902e9deff3cc6dfbb692c40a5a1f", size = 3206072, upload-time = "2025-09-18T11:50:38.602Z" },
{ url = "https://files.pythonhosted.org/packages/d5/40/a9d9e976bfbd6274d1fe052676fb02c055c1476a9936961218a211785cef/granian-2.5.4-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:d04a1432ed98b7e4b4e5cff188819f34bd680785352237477d7886eb9305f692", size = 2763126, upload-time = "2025-09-18T11:50:41.404Z" },
{ url = "https://files.pythonhosted.org/packages/ad/84/0a302005e3c1c254c592d29aaa11a281ef4a84dfab45ccfe3072223f9c5b/granian-2.5.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c6309d729f1b022b09fd051a277096a732bd8ed39986ac4b9849f6e79b660880", size = 2479158, upload-time = "2025-09-18T11:50:42.767Z" },
{ url = "https://files.pythonhosted.org/packages/74/9f/a889cc30f2ee67acdcfccd10c3da239c828c584cb9e7f04e54717ff0c42b/granian-2.5.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a067c27e733b0851e6f66175c1aac8badda60b698457181881c08a4e17baecf", size = 3016245, upload-time = "2025-09-18T11:50:44.3Z" },
{ url = "https://files.pythonhosted.org/packages/b7/37/22a86b369b140b3684f8aecfd9c80ed2765421c09cb3d3ef06245a6aaaf8/granian-2.5.4-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:54bd0604db172a964b1bc4b8709329b7f4e9cff6b8f468104ca7603a5e61d529", size = 2795566, upload-time = "2025-09-18T11:50:45.626Z" },
{ url = "https://files.pythonhosted.org/packages/c3/cc/65073d6f08d9251ceaa5eb9a485e1a8fda9cfb4bac3c03c9f28e01abf416/granian-2.5.4-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:487bdc40b234ef84510eac1d63f0720ca92daca08feb7d2d98a1f0a84cc54b0e", size = 2909489, upload-time = "2025-09-18T11:50:48.98Z" },
{ url = "https://files.pythonhosted.org/packages/d9/76/c52fdfcd536584ed01a878e13139e338083fac4fde32d6fbfb0f3b29f380/granian-2.5.4-cp313-cp313t-musllinux_1_1_armv7l.whl", hash = "sha256:76cd55ab521cc501a977f50ace9d72a6a4f9a6849e6490b14af2e9acc614ce55", size = 3124827, upload-time = "2025-09-18T11:50:50.45Z" },
{ url = "https://files.pythonhosted.org/packages/f3/fc/4d93870feebc547bbc68dcbfa3a6c491107a6ac634636c7e332441ef9077/granian-2.5.4-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:27b3e7906916ad6e84a8b16d89517d8652bece62888cb9421597eb14767a9d92", size = 3193471, upload-time = "2025-09-18T11:50:52.493Z" },
{ url = "https://files.pythonhosted.org/packages/aa/32/d1776652352df81c420e61a1d79711c9992ba6dcd1a419b1c9df83c925ce/granian-2.5.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:c237db56e5ff3fdad6539a3fbfcb9b57ce71463db55a016ba08296828043112f", size = 2827836, upload-time = "2025-09-18T11:50:55.444Z" },
{ url = "https://files.pythonhosted.org/packages/73/60/0f7b994feaca68c2585fa96338369fc8f928281cb1f7da2a377ec698f6a2/granian-2.5.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a76d7033a8d68c8353293fae8365e3b649bb155ab39af14387f3e9e870d503fb", size = 2525297, upload-time = "2025-09-18T11:50:56.765Z" },
{ url = "https://files.pythonhosted.org/packages/ff/06/73580eef85ac4ac4da7205d320e449a46db0e613b6df706a5feb46809e96/granian-2.5.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6778b9f7ecef8a423dd203aa5b0644a18d53eb749e830b2fe33abecad5d7e84", size = 3028201, upload-time = "2025-09-18T11:50:58.047Z" },
{ url = "https://files.pythonhosted.org/packages/19/78/5c0b43af33b18bd528b10ff770aaeb158a4fbeb0ac5cc0371921759b6b59/granian-2.5.4-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3accc02c981436e783772b12ea8cead35e8e644437881d7da842ff474f8e30f9", size = 2852319, upload-time = "2025-09-18T11:50:59.573Z" },
{ url = "https://files.pythonhosted.org/packages/ff/ae/ad9cc1729d12b9699318c66015999efef14c7fd17836393a09a1bbbf5185/granian-2.5.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3eaaf38851308af616ad5fdc35f333857f128493072ea208c1bb2fb557dcf2e", size = 3135211, upload-time = "2025-09-18T11:51:01.022Z" },
{ url = "https://files.pythonhosted.org/packages/7d/41/cbda436041e46116647d804d2eada3fab144883b0e6ce75f3d967a116c6c/granian-2.5.4-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:3cad64e8e41d6f3daf3e7a3eea88023aa7e64ee81450443ac9f4e6cae005079d", size = 2914810, upload-time = "2025-09-18T11:51:02.36Z" },
{ url = "https://files.pythonhosted.org/packages/9f/28/ab53dcab1d55636eca417a4263114d24872958609ceb853b84887b12cc38/granian-2.5.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c4bb60b122971326d0d1baf10336c67bdecdd7adc708cf0b09bf1cde5563e8f5", size = 2914603, upload-time = "2025-09-18T11:51:03.701Z" },
{ url = "https://files.pythonhosted.org/packages/a0/33/e8810b11004e9139b6dd71dfa4a1a56ae60331e8d72dfa8bc7121158abff/granian-2.5.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:14129339f0ed9bbd2d129f82ed16e0c330edca7300482bd53cef466cc7b3ec6d", size = 3137585, upload-time = "2025-09-18T11:51:04.921Z" },
{ url = "https://files.pythonhosted.org/packages/17/4c/78650e1d54a9a8460add62643a57a0042880592e5a6f5574370954d7e91d/granian-2.5.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:294c574fcd8005587671476b751e5853b262ecb1b1011e634ac160d6a0259abd", size = 3195738, upload-time = "2025-09-18T11:51:06.328Z" },
{ url = "https://files.pythonhosted.org/packages/d8/d0/cdab820b2f5c692dc4c67879f937fb223eae1599784755362a53872f512c/granian-2.5.4-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:3f0940963f6c6e50de19195014902d9a565a612aa0583082c9082bd83f259446", size = 2747894, upload-time = "2025-09-18T11:51:09.927Z" },
{ url = "https://files.pythonhosted.org/packages/71/15/1d27cd429a4fb0cb066848ca7ba432e5887b2506873832136444a6aa24d2/granian-2.5.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:bc27bff6ea5a80fd5bf28297ac53fa31771cbdfa35650a6eb4f2c4efc751097d", size = 2463984, upload-time = "2025-09-18T11:51:11.294Z" },
{ url = "https://files.pythonhosted.org/packages/18/c0/2f3594892055c465c0035d5fc174da95c01a4902f9c63e7ea05d49fad892/granian-2.5.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73c136bac23cd1d2c969a52374972ec7da6e0920768bf0bcce65e00cabb4ebb9", size = 3009713, upload-time = "2025-09-18T11:51:12.713Z" },
{ url = "https://files.pythonhosted.org/packages/31/e7/09e44ece7ebcd5a70cd313ca3a0fa5b21632697c20412ddf169c51f16894/granian-2.5.4-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:2dc03e2f375354b95d477c9455fb2fb427a922740f45e036cdf60da660adbf13", size = 2794570, upload-time = "2025-09-18T11:51:14.015Z" },
{ url = "https://files.pythonhosted.org/packages/e4/91/e76301bf0411ede4739b273c972f959c4675702418b00228ed7c278aac05/granian-2.5.4-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:539ee12b02281929358349e01a0c42c0594ebcf4f44033c8a4d7a446f837e034", size = 2909702, upload-time = "2025-09-18T11:51:16.252Z" },
{ url = "https://files.pythonhosted.org/packages/4a/6e/928b37a9a8a863339f5e37d4154de6faf1b8c58c8684799e117caf66b3a4/granian-2.5.4-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:97735bdbc2877583ea1c8dbfca31bcaf118a6e818afe6000eb8a9d09fd9d07e0", size = 3118746, upload-time = "2025-09-18T11:51:17.928Z" },
{ url = "https://files.pythonhosted.org/packages/94/7c/5c24c3d17ae24eee4af34bc46ecf6d9fc5d3a23b7b974c7ef3ff0f51cea9/granian-2.5.4-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:5f642a4fa1d41943d288db714bd1e0d218537bfa8bc6355d7063e8959b84c32b", size = 3187333, upload-time = "2025-09-18T11:51:19.427Z" },
{ url = "https://files.pythonhosted.org/packages/7b/7f/37896fc4180fd0f59146e3bd88e21fb71ce28c705d482540cfc8fc532cc9/granian-2.5.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b970a50230ae437615d754e1bc4aaa740fbe3f1418cc0c8933b260a691bb8f5", size = 2853053, upload-time = "2025-09-18T11:51:37.959Z" },
{ url = "https://files.pythonhosted.org/packages/a2/f4/fb23d1958b52a1546f25da03c459e11ea634d166dc7ef6024c7662c949fc/granian-2.5.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a09d2bef7805f10093aa356d976fdb3607d274252ef9429c6c1a24d239032c29", size = 2553878, upload-time = "2025-09-18T11:51:39.251Z" },
{ url = "https://files.pythonhosted.org/packages/54/43/e105894f0ae2711080c827f71d04d63ec7b4b881e0c4ae8a22a41500c800/granian-2.5.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6d5bd05c6e833d54b77c2ee19130cfa5d54ae4eb301ffca56744f712c4a9d03", size = 3139712, upload-time = "2025-09-18T11:51:40.999Z" },
{ url = "https://files.pythonhosted.org/packages/b1/ae/d1df545edeafbfd787c73863e47b269ff5646ec68b025db692a1c6aafbd5/granian-2.5.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a7782a25ab78a55b61cb9b06f8aac471e9fafa3e1c20d6cdf970e93c834f6ddf", size = 2919335, upload-time = "2025-09-18T11:51:42.616Z" },
{ url = "https://files.pythonhosted.org/packages/6f/95/c55889554ef4471b786853707c3fa68f8ef4fafa3257054bd6990042db1f/granian-2.5.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4403600ac0273d4169c4c73773f57c5a3b44cc8aa8384a2f468c98c4294a3f27", size = 2927449, upload-time = "2025-09-18T11:51:44.273Z" },
{ url = "https://files.pythonhosted.org/packages/0b/52/34d3e17753c20ea56b88e48778e8c2b6fa01593e6febc0123bb6edfae7d7/granian-2.5.4-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c711335619a6936728b7773052fb0ec9d612b19abb2c786972ce3efee836df9d", size = 3172721, upload-time = "2025-09-18T11:51:45.76Z" },
{ url = "https://files.pythonhosted.org/packages/d2/b0/0323368449a05b2693ebbbbd4f61bcaef858a3df0255b6ee3dd460112549/granian-2.5.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ff9996929a16a72a61fb1f03a9e79e830bf7a6a4e9eb0470c6ef03f67d5ea5c0", size = 3183851, upload-time = "2025-09-18T11:51:47.651Z" },
{ url = "https://files.pythonhosted.org/packages/6a/69/e4c77826239bf3d612dced6a3a0007ef378bb454d602d5751a16c8b74d2f/granian-2.5.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f3fcf2e6c8a150f48e4b77271b33ebfc7c2d8381e692b5177d4bd7fcefbb691d", size = 2853059, upload-time = "2025-09-18T11:51:52.136Z" },
{ url = "https://files.pythonhosted.org/packages/6f/74/4ac398a54f718db5fd13e92ed1198a76c7c332dee4ab7af9c06ded51c884/granian-2.5.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:c5550d533a61053fc4d95044bdc80ba00118ca312ed85867ed63163fa5878f85", size = 2553706, upload-time = "2025-09-18T11:51:53.714Z" },
{ url = "https://files.pythonhosted.org/packages/32/10/871e8d09eae976613b3b0812eb927d39e7960fecf1ac885e21962bbc8ff3/granian-2.5.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48356114113ac3d48f70ea05cf42e560384c93318f5ef8f5638cb666f8243f2b", size = 3139539, upload-time = "2025-09-18T11:51:55.133Z" },
{ url = "https://files.pythonhosted.org/packages/ec/f6/dd0ef6a7f3f4fea28b57014bc69b925995c7c6006451653b115440bf432b/granian-2.5.4-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:cbadc8e49f90716b8f8aa2c2cee7a2e82c5a37dab5f6fbd449e76185ce205715", size = 2919295, upload-time = "2025-09-18T11:51:56.496Z" },
{ url = "https://files.pythonhosted.org/packages/27/dd/3a8f99363b05d33049ea8cd94e0d40792aebddfa39f6f14120b71613fdca/granian-2.5.4-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d1712a8ede152c2648557de6a23dbeb05ed499bfd83c42dad0689d9f2ba0621d", size = 2927578, upload-time = "2025-09-18T11:51:58.081Z" },
{ url = "https://files.pythonhosted.org/packages/49/42/ecb762dbf7a1447a89b4884907d07fa505fd89f904c1e2d3bd4f30aeb9d1/granian-2.5.4-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:b78b8a6f30d0534b2db3f9cb99702d06be110b6e91f5639837a6f52f4891fc1d", size = 3172980, upload-time = "2025-09-18T11:51:59.428Z" },
{ url = "https://files.pythonhosted.org/packages/4c/2f/2abc969b206033d789c87d0ed7ee17a8831b0740e30590348abb544dba13/granian-2.5.4-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:10ae5ce26b1048888ac5aa3747415d8be8bbb014106b27ef0e77d23d2e00c51d", size = 3183479, upload-time = "2025-09-18T11:52:01.133Z" },
]
[package.optional-dependencies]
@@ -2160,7 +2163,6 @@ dependencies = [
{ name = "pyzbar", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "rapidfuzz", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "redis", extra = ["hiredis"], marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "regex", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "scikit-learn", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "setproctitle", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "tika-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -2280,7 +2282,7 @@ requires-dist = [
{ name = "filelock", specifier = "~=3.20.0" },
{ name = "flower", specifier = "~=2.0.1" },
{ name = "gotenberg-client", specifier = "~=0.12.0" },
{ name = "granian", extras = ["uvloop"], marker = "extra == 'webserver'", specifier = "~=2.6.0" },
{ name = "granian", extras = ["uvloop"], marker = "extra == 'webserver'", specifier = "~=2.5.1" },
{ name = "httpx-oauth", specifier = "~=0.16" },
{ name = "imap-tools", specifier = "~=1.11.0" },
{ name = "inotifyrecursive", specifier = "~=0.3" },
@@ -2304,7 +2306,6 @@ requires-dist = [
{ name = "pyzbar", specifier = "~=0.1.9" },
{ name = "rapidfuzz", specifier = "~=3.14.0" },
{ name = "redis", extras = ["hiredis"], specifier = "~=5.2.1" },
{ name = "regex", specifier = ">=2025.9.18" },
{ name = "scikit-learn", specifier = "~=1.7.0" },
{ name = "setproctitle", specifier = "~=1.3.4" },
{ name = "tika-client", specifier = "~=0.10.0" },