Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2025-12-14 01:21:14 -06:00)

Compare commits: 6331865623...dependabot (29 commits)
Commits in this range:

0631ce4200, 7130c0bd06, d391fdec64, 4d7aa8e1a2, 9bdbfd362f, 9ba1d93e15,
a9c73e2846, 332136df8b, 3a1d33225e, e770ff572e, 402f2ead59, 3b4d958b97,
3f81b432ec, 66d363bdc5, c845cf0a19, 317f239d09, 128c3539d5, 26975868a0,
f3fc3febf1, 8efc998687, 3f47900f06, 963a519e5c, 59e5d15cf0, ef2f65fcb8,
555ba8bb19, 01992bb5c6, 21032ac008, b63e095a60, ce642409e8
.github/workflows/ci.yml (vendored): 20 lines changed

Bumps actions/checkout from v5 to v6 in every job; the surrounding step configuration is unchanged.

```diff
@@ -67,7 +67,7 @@ jobs:
     runs-on: ubuntu-24.04
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: Install python
         uses: actions/setup-python@v6
         with:
@@ -81,7 +81,7 @@ jobs:
       - pre-commit
     steps:
       - name: Checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: Set up Python
         id: setup-python
         uses: actions/setup-python@v6
@@ -131,7 +131,7 @@ jobs:
       fail-fast: false
     steps:
       - name: Checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: Start containers
        run: |
           docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml pull --quiet
@@ -202,7 +202,7 @@ jobs:
     needs:
       - pre-commit
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - name: Install pnpm
         uses: pnpm/action-setup@v4
         with:
@@ -235,7 +235,7 @@ jobs:
         shard-index: [1, 2, 3, 4]
         shard-count: [4]
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - name: Install pnpm
         uses: pnpm/action-setup@v4
         with:
@@ -284,7 +284,7 @@ jobs:
         shard-index: [1, 2]
         shard-count: [2]
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - name: Install pnpm
         uses: pnpm/action-setup@v4
         with:
@@ -327,7 +327,7 @@ jobs:
       - tests-frontend
       - tests-frontend-e2e
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - name: Install pnpm
         uses: pnpm/action-setup@v4
         with:
@@ -424,7 +424,7 @@ jobs:
             type=semver,pattern={{version}}
             type=semver,pattern={{major}}.{{minor}}
       - name: Checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       # If https://github.com/docker/buildx/issues/1044 is resolved,
       # the append input with a native arm64 arch could be used to
       # significantly speed up building
@@ -497,7 +497,7 @@ jobs:
     runs-on: ubuntu-24.04
     steps:
       - name: Checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: Set up Python
         id: setup-python
         uses: actions/setup-python@v6
@@ -643,7 +643,7 @@ jobs:
     if: needs.publish-release.outputs.prerelease == 'false'
     steps:
       - name: Checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           ref: main
       - name: Set up Python
```
.github/workflows/codeql-analysis.yml (vendored): 2 lines changed

```diff
@@ -34,7 +34,7 @@ jobs:
     # Learn more about CodeQL language support at https://git.io/codeql-language-support
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
         uses: github/codeql-action/init@v4
```
.github/workflows/crowdin.yml (vendored): 2 lines changed

```diff
@@ -13,7 +13,7 @@ jobs:
     runs-on: ubuntu-24.04
     steps:
       - name: Checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           token: ${{ secrets.PNGX_BOT_PAT }}
       - name: crowdin action
```
.github/workflows/translate-strings.yml (vendored): 2 lines changed

```diff
@@ -11,7 +11,7 @@ jobs:
       contents: write
     steps:
       - name: Checkout code
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           token: ${{ secrets.PNGX_BOT_PAT }}
           ref: ${{ github.head_ref }}
```
Dockerfile

Bumps the uv base image from 0.9.14 to 0.9.15.

```diff
@@ -32,7 +32,7 @@ RUN set -eux \
 # Purpose: Installs s6-overlay and rootfs
 # Comments:
 #  - Don't leave anything extra in here either
-FROM ghcr.io/astral-sh/uv:0.9.14-python3.12-trixie-slim AS s6-overlay-base
+FROM ghcr.io/astral-sh/uv:0.9.15-python3.12-trixie-slim AS s6-overlay-base
 
 WORKDIR /usr/src/s6
 
```
docs/configuration.md

Documents the value format for `PAPERLESS_SANITY_TASK_CRON`, adds the new `PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON` setting, and moves the "Workflow webhooks" section from before "Polling" to just before "Incoming Mail".

```diff
@@ -1054,12 +1054,22 @@ should be a valid crontab(5) expression describing when to run.
 
 #### [`PAPERLESS_SANITY_TASK_CRON=<cron expression>`](#PAPERLESS_SANITY_TASK_CRON) {#PAPERLESS_SANITY_TASK_CRON}
 
-: Configures the scheduled sanity checker frequency.
+: Configures the scheduled sanity checker frequency. The value should be a
+    valid crontab(5) expression describing when to run.
 
 : If set to the string "disable", the sanity checker will not run automatically.
 
     Defaults to `30 0 * * sun` or Sunday at 30 minutes past midnight.
 
+#### [`PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON=<cron expression>`](#PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON) {#PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON}
+
+: Configures the scheduled workflow check frequency. The value should be a
+    valid crontab(5) expression describing when to run.
+
+: If set to the string "disable", scheduled workflows will not run.
+
+    Defaults to `5 */1 * * *` or every hour at 5 minutes past the hour.
+
 #### [`PAPERLESS_ENABLE_COMPRESSION=<bool>`](#PAPERLESS_ENABLE_COMPRESSION) {#PAPERLESS_ENABLE_COMPRESSION}
 
 : Enables compression of the responses from the webserver.
@@ -1271,30 +1281,6 @@ within your documents.
 
     Defaults to false.
 
-## Workflow webhooks
-
-#### [`PAPERLESS_WEBHOOKS_ALLOWED_SCHEMES=<str>`](#PAPERLESS_WEBHOOKS_ALLOWED_SCHEMES) {#PAPERLESS_WEBHOOKS_ALLOWED_SCHEMES}
-
-: A comma-separated list of allowed schemes for webhooks. This setting
-    controls which URL schemes are permitted for webhook URLs.
-
-    Defaults to `http,https`.
-
-#### [`PAPERLESS_WEBHOOKS_ALLOWED_PORTS=<str>`](#PAPERLESS_WEBHOOKS_ALLOWED_PORTS) {#PAPERLESS_WEBHOOKS_ALLOWED_PORTS}
-
-: A comma-separated list of allowed ports for webhooks. This setting
-    controls which ports are permitted for webhook URLs. For example, if you
-    set this to `80,443`, webhooks will only be sent to URLs that use these
-    ports.
-
-    Defaults to empty list, which allows all ports.
-
-#### [`PAPERLESS_WEBHOOKS_ALLOW_INTERNAL_REQUESTS=<bool>`](#PAPERLESS_WEBHOOKS_ALLOW_INTERNAL_REQUESTS) {#PAPERLESS_WEBHOOKS_ALLOW_INTERNAL_REQUESTS}
-
-: If set to false, webhooks cannot be sent to internal URLs (e.g., localhost).
-
-    Defaults to true, which allows internal requests.
-
 ### Polling {#polling}
 
 #### [`PAPERLESS_CONSUMER_POLLING=<num>`](#PAPERLESS_CONSUMER_POLLING) {#PAPERLESS_CONSUMER_POLLING}
@@ -1338,6 +1324,30 @@ consumers working on the same file. Configure this to prevent that.
 
     Defaults to 0.5 seconds.
 
+## Workflow webhooks
+
+#### [`PAPERLESS_WEBHOOKS_ALLOWED_SCHEMES=<str>`](#PAPERLESS_WEBHOOKS_ALLOWED_SCHEMES) {#PAPERLESS_WEBHOOKS_ALLOWED_SCHEMES}
+
+: A comma-separated list of allowed schemes for webhooks. This setting
+    controls which URL schemes are permitted for webhook URLs.
+
+    Defaults to `http,https`.
+
+#### [`PAPERLESS_WEBHOOKS_ALLOWED_PORTS=<str>`](#PAPERLESS_WEBHOOKS_ALLOWED_PORTS) {#PAPERLESS_WEBHOOKS_ALLOWED_PORTS}
+
+: A comma-separated list of allowed ports for webhooks. This setting
+    controls which ports are permitted for webhook URLs. For example, if you
+    set this to `80,443`, webhooks will only be sent to URLs that use these
+    ports.
+
+    Defaults to empty list, which allows all ports.
+
+#### [`PAPERLESS_WEBHOOKS_ALLOW_INTERNAL_REQUESTS=<bool>`](#PAPERLESS_WEBHOOKS_ALLOW_INTERNAL_REQUESTS) {#PAPERLESS_WEBHOOKS_ALLOW_INTERNAL_REQUESTS}
+
+: If set to false, webhooks cannot be sent to internal URLs (e.g., localhost).
+
+    Defaults to true, which allows internal requests.
+
 ## Incoming Mail {#incoming_mail}
 
 ### Email OAuth {#email_oauth}
```
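As an illustration of how the scheme and port settings interact, here is a minimal sketch of the kind of URL check they describe; the helper and constants are hypothetical, not Paperless-ngx's actual validation code:

```python
from urllib.parse import urlsplit

ALLOWED_SCHEMES = {"http", "https"}  # default PAPERLESS_WEBHOOKS_ALLOWED_SCHEMES
ALLOWED_PORTS: set[int] = set()      # default PAPERLESS_WEBHOOKS_ALLOWED_PORTS (empty = any port)

def webhook_url_is_allowed(url: str) -> bool:
    parts = urlsplit(url)
    if parts.scheme not in ALLOWED_SCHEMES:
        return False
    # An empty allow-list means every port is acceptable; a URL without an
    # explicit port reports port=None and falls through to scheme defaults.
    if ALLOWED_PORTS and parts.port is not None and parts.port not in ALLOWED_PORTS:
        return False
    return True

assert webhook_url_is_allowed("https://example.com/hook")
assert not webhook_url_is_allowed("ftp://example.com/hook")
```

`PAPERLESS_WEBHOOKS_ALLOW_INTERNAL_REQUESTS` adds a third, separate check on the destination address, which this sketch does not model.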
docs/usage.md

The workflow trigger flowchart gains a branch for the new scheduled triggers.

````diff
@@ -443,6 +443,10 @@ flowchart TD
     'Updated'
     trigger(s)"}
 
+    scheduled{"Documents
+    matching
+    trigger(s)"}
+
     A[New Document] --> consumption
     consumption --> |Yes| C[Workflow Actions Run]
     consumption --> |No| D
@@ -455,6 +459,11 @@ flowchart TD
     updated --> |Yes| J[Workflow Actions Run]
     updated --> |No| K
     J --> K[Document Saved]
+
+    L[Scheduled Task Check<br/>hourly at :05] --> M[Get All Scheduled Triggers]
+    M --> scheduled
+    scheduled --> |Yes| N[Workflow Actions Run]
+    scheduled --> |No| O[Document Saved]
+    N --> O
 ```
 
 #### Filters {#workflow-trigger-filters}
````
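Read as code, the new branch amounts to a periodic fan-out over scheduled triggers. A toy sketch of that shape, purely illustrative since the real task code is not part of this compare:

```python
from dataclasses import dataclass

@dataclass
class Doc:
    content: str
    saved: bool = False

@dataclass
class ScheduledTrigger:
    keyword: str

    def matches(self, doc: Doc) -> bool:  # "Documents matching trigger(s)"
        return self.keyword in doc.content

def scheduled_task_check(triggers: list[ScheduledTrigger], docs: list[Doc]) -> None:
    # "Scheduled Task Check (hourly at :05)" -> "Get All Scheduled Triggers"
    for trigger in triggers:
        for doc in docs:
            if trigger.matches(doc):
                print(f"workflow actions run for {doc.content!r}")  # "Workflow Actions Run"
            doc.saved = True  # both branches end in "Document Saved"

scheduled_task_check([ScheduledTrigger("invoice")], [Doc("invoice #42"), Doc("receipt")])
```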
pyproject.toml

Adds the third-party `regex` package as a dependency.

```diff
@@ -63,6 +63,7 @@ dependencies = [
     "pyzbar~=0.1.9",
     "rapidfuzz~=3.14.0",
     "redis[hiredis]~=5.2.1",
+    "regex>=2025.9.18",
     "scikit-learn~=1.7.0",
     "setproctitle~=1.3.4",
     "tika-client~=0.10.0",
```
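The new `regex` entry uses a plain lower-bound specifier, unlike the compatible-release (`~=`) pins around it. A quick sketch of the difference using the `packaging` library (assumed to be available; it is not part of this diff):

```python
from packaging.specifiers import SpecifierSet
from packaging.version import Version

# ">=" sets only a floor, so any future regex release satisfies it; the "~="
# pins nearby additionally forbid minor-version jumps.
assert Version("2025.10.1") in SpecifierSet(">=2025.9.18")
assert Version("3.14.2") in SpecifierSet("~=3.14.0")      # patch bump: allowed
assert Version("3.15.0") not in SpecifierSet("~=3.14.0")  # minor bump: rejected
```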
src-ui/src/locale/messages.xlf

Mechanical regeneration after the Angular 20.3.15 upgrade. In every affected trans-unit (`ngb.alert.close`, `ngb.carousel.*`, `ngb.datepicker.*`, `ngb.timepicker.*`, `ngb.toast.close-aria`, `ngb.pagination.*` and the progressbar unit, spanning hunks `@@ -5,14 +5,14 @@`, `@@ -20,212 +20,212 @@` and `@@ -233,7 +233,7 @@`), the `<context context-type="sourcefile">` path changes from

`node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.14_@angular+core@20.3.12_@angula_f6978d5a33be250eb7b5e8e65faf7a7d/node_modules/src/...`

to

`node_modules/.pnpm/@ng-bootstrap+ng-bootstrap@19.0.1_@angular+common@20.3.15_@angular+core@20.3.15_@angula_40533c760dbaadbd90323f0d78d15fb8/node_modules/src/...`

with sources, line numbers and notes otherwise identical. The only other change tracks the logs template edit below:

```diff
@@ -816,7 +816,7 @@
         <source>Jump to bottom</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/admin/logs/logs.component.html</context>
-          <context context-type="linenumber">60</context>
+          <context context-type="linenumber">62</context>
         </context-group>
       </trans-unit>
       <trans-unit id="1255048712725285892" datatype="html">
```
src-ui/package.json

Routine dependency bumps: Angular 20.3.12/20.3.14 to 20.3.15, Angular CLI/devkit 20.3.10 to 20.3.13, Playwright 1.56.1 to 1.57.0, typescript-eslint 8.47.0 to 8.48.1, and webpack 5.102.1 to 5.103.0.

```diff
@@ -12,14 +12,14 @@
   "private": true,
   "dependencies": {
     "@angular/cdk": "^20.2.13",
-    "@angular/common": "~20.3.14",
-    "@angular/compiler": "~20.3.12",
-    "@angular/core": "~20.3.12",
-    "@angular/forms": "~20.3.12",
-    "@angular/localize": "~20.3.12",
-    "@angular/platform-browser": "~20.3.12",
-    "@angular/platform-browser-dynamic": "~20.3.12",
-    "@angular/router": "~20.3.12",
+    "@angular/common": "~20.3.15",
+    "@angular/compiler": "~20.3.15",
+    "@angular/core": "~20.3.15",
+    "@angular/forms": "~20.3.15",
+    "@angular/localize": "~20.3.15",
+    "@angular/platform-browser": "~20.3.15",
+    "@angular/platform-browser-dynamic": "~20.3.15",
+    "@angular/router": "~20.3.15",
     "@ng-bootstrap/ng-bootstrap": "^19.0.1",
     "@ng-select/ng-select": "^20.7.0",
     "@ngneat/dirty-check-forms": "^3.0.3",
@@ -42,23 +42,23 @@
   "devDependencies": {
     "@angular-builders/custom-webpack": "^20.0.0",
     "@angular-builders/jest": "^20.0.0",
-    "@angular-devkit/core": "^20.3.10",
-    "@angular-devkit/schematics": "^20.3.10",
+    "@angular-devkit/core": "^20.3.13",
+    "@angular-devkit/schematics": "^20.3.13",
     "@angular-eslint/builder": "20.6.0",
     "@angular-eslint/eslint-plugin": "20.6.0",
     "@angular-eslint/eslint-plugin-template": "20.6.0",
     "@angular-eslint/schematics": "20.6.0",
     "@angular-eslint/template-parser": "20.6.0",
-    "@angular/build": "^20.3.10",
-    "@angular/cli": "~20.3.10",
-    "@angular/compiler-cli": "~20.3.12",
+    "@angular/build": "^20.3.13",
+    "@angular/cli": "~20.3.13",
+    "@angular/compiler-cli": "~20.3.15",
     "@codecov/webpack-plugin": "^1.9.1",
-    "@playwright/test": "^1.56.1",
+    "@playwright/test": "^1.57.0",
     "@types/jest": "^30.0.0",
     "@types/node": "^24.10.1",
-    "@typescript-eslint/eslint-plugin": "^8.47.0",
-    "@typescript-eslint/parser": "^8.47.0",
-    "@typescript-eslint/utils": "^8.47.0",
+    "@typescript-eslint/eslint-plugin": "^8.48.1",
+    "@typescript-eslint/parser": "^8.48.1",
+    "@typescript-eslint/utils": "^8.48.1",
     "eslint": "^9.39.1",
     "jest": "30.2.0",
     "jest-environment-jsdom": "^30.2.0",
@@ -68,7 +68,7 @@
     "prettier-plugin-organize-imports": "^4.3.0",
     "ts-node": "~10.9.1",
     "typescript": "^5.8.3",
-    "webpack": "^5.102.1"
+    "webpack": "^5.103.0"
   },
   "packageManager": "pnpm@10.17.1",
   "pnpm": {
```
src-ui/pnpm-lock.yaml (generated): 837 lines changed; diff suppressed because it is too large.
src-ui/src/app/components/admin/logs/logs.component.html

Migrates the log list from `*ngFor` to Angular's built-in `@for` control flow.

```diff
@@ -48,7 +48,9 @@
     <ng-container i18n>Loading...</ng-container>
   </div>
 } @else {
-  <p *ngFor="let log of logs" class="m-0 p-0" [ngClass]="'log-entry-' + log.level">{{log.message}}</p>
+  @for (log of logs; track log) {
+    <p class="m-0 p-0" [ngClass]="'log-entry-' + log.level">{{log.message}}</p>
+  }
 }
 </div>
 <button
```
src-ui users and groups management template

Inactive users are now dimmed in the user list via Bootstrap's `opacity-50` class.

```diff
@@ -26,7 +26,7 @@
 @for (user of users; track user) {
   <li class="list-group-item">
     <div class="row">
-      <div class="col d-flex align-items-center"><button class="btn btn-link p-0 text-start" type="button" (click)="editUser(user)" [disabled]="!permissionsService.currentUserCan(PermissionAction.Change, PermissionType.User)">{{user.username}}</button></div>
+      <div class="col d-flex align-items-center" [class.opacity-50]="!user.is_active"><button class="btn btn-link p-0 text-start" type="button" (click)="editUser(user)" [disabled]="!permissionsService.currentUserCan(PermissionAction.Change, PermissionType.User)">{{user.username}}</button></div>
       <div class="col d-flex align-items-center">{{user.first_name}} {{user.last_name}}</div>
       <div class="col d-flex align-items-center">{{user.groups?.map(getGroupName, this).join(', ')}}</div>
       <div class="col">
```
src/documents/matching.py

Replaces the conditionally built `**search_kwargs` dict with an explicit `flags` value, and routes the user-supplied pattern in the MATCH_REGEX branch through the new `safe_regex_search` helper, which takes over compile-error handling.

```diff
@@ -20,6 +20,7 @@ from documents.models import Tag
 from documents.models import Workflow
 from documents.models import WorkflowTrigger
 from documents.permissions import get_objects_for_user_owner_aware
+from documents.regex import safe_regex_search
 
 if TYPE_CHECKING:
     from django.db.models import QuerySet
@@ -152,7 +153,7 @@ def match_storage_paths(document: Document, classifier: DocumentClassifier, user
 
 
 def matches(matching_model: MatchingModel, document: Document):
-    search_kwargs = {}
+    search_flags = 0
 
     document_content = document.content
 
@@ -161,14 +162,18 @@ def matches(matching_model: MatchingModel, document: Document):
         return False
 
     if matching_model.is_insensitive:
-        search_kwargs = {"flags": re.IGNORECASE}
+        search_flags = re.IGNORECASE
 
     if matching_model.matching_algorithm == MatchingModel.MATCH_NONE:
         return False
 
     elif matching_model.matching_algorithm == MatchingModel.MATCH_ALL:
         for word in _split_match(matching_model):
-            search_result = re.search(rf"\b{word}\b", document_content, **search_kwargs)
+            search_result = re.search(
+                rf"\b{word}\b",
+                document_content,
+                flags=search_flags,
+            )
             if not search_result:
                 return False
             log_reason(
@@ -180,7 +185,7 @@ def matches(matching_model: MatchingModel, document: Document):
 
     elif matching_model.matching_algorithm == MatchingModel.MATCH_ANY:
         for word in _split_match(matching_model):
-            if re.search(rf"\b{word}\b", document_content, **search_kwargs):
+            if re.search(rf"\b{word}\b", document_content, flags=search_flags):
                 log_reason(matching_model, document, f"it contains this word: {word}")
                 return True
         return False
@@ -190,7 +195,7 @@ def matches(matching_model: MatchingModel, document: Document):
             re.search(
                 rf"\b{re.escape(matching_model.match)}\b",
                 document_content,
-                **search_kwargs,
+                flags=search_flags,
             ),
         )
         if result:
@@ -202,16 +207,11 @@ def matches(matching_model: MatchingModel, document: Document):
         return result
 
     elif matching_model.matching_algorithm == MatchingModel.MATCH_REGEX:
-        try:
-            match = re.search(
-                re.compile(matching_model.match, **search_kwargs),
-                document_content,
-            )
-        except re.error:
-            logger.error(
-                f"Error while processing regular expression {matching_model.match}",
-            )
-            return False
+        match = safe_regex_search(
+            matching_model.match,
+            document_content,
+            flags=search_flags,
+        )
         if match:
             log_reason(
                 matching_model,
```
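The refactor works because `re` treats `flags=0` as "no flags", so an integer can always be passed where the old code had to build a kwargs dict conditionally. A minimal standalone sketch of the pattern (the function and strings are illustrative, not Paperless code; note the sketch escapes the word, which the word-matching branches above do not):

```python
import re

def word_match(word: str, text: str, *, insensitive: bool) -> bool:
    # 0 is the identity value for re flags, so it is always safe to pass.
    flags = re.IGNORECASE if insensitive else 0
    return re.search(rf"\b{re.escape(word)}\b", text, flags=flags) is not None

content = "Invoice 2024-001 from ACME"
assert word_match("invoice", content, insensitive=True)
assert not word_match("invoice", content, insensitive=False)
```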
@@ -61,21 +61,22 @@ def get_groups_with_only_permission(obj, codename):
|
|||||||
return Group.objects.filter(id__in=group_object_perm_group_ids).distinct()
|
return Group.objects.filter(id__in=group_object_perm_group_ids).distinct()
|
||||||
|
|
||||||
|
|
||||||
def set_permissions_for_object(permissions: list[str], object, *, merge: bool = False):
|
def set_permissions_for_object(permissions: dict, object, *, merge: bool = False):
|
||||||
"""
|
"""
|
||||||
Set permissions for an object. The permissions are given as a list of strings
|
Set permissions for an object. The permissions are given as a mapping of actions
|
||||||
in the format "action_modelname", e.g. "view_document".
|
to a dict of user / group id lists, e.g.
|
||||||
|
{"view": {"users": [1], "groups": [2]}, "change": {"users": [], "groups": []}}.
|
||||||
|
|
||||||
If merge is True, the permissions are merged with the existing permissions and
|
If merge is True, the permissions are merged with the existing permissions and
|
||||||
no users or groups are removed. If False, the permissions are set to exactly
|
no users or groups are removed. If False, the permissions are set to exactly
|
||||||
the given list of users and groups.
|
the given list of users and groups.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
for action in permissions:
|
for action, entry in permissions.items():
|
||||||
permission = f"{action}_{object.__class__.__name__.lower()}"
|
permission = f"{action}_{object.__class__.__name__.lower()}"
|
||||||
if "users" in permissions[action]:
|
if "users" in entry:
|
||||||
# users
|
# users
|
||||||
users_to_add = User.objects.filter(id__in=permissions[action]["users"])
|
users_to_add = User.objects.filter(id__in=entry["users"])
|
||||||
users_to_remove = (
|
users_to_remove = (
|
||||||
get_users_with_perms(
|
get_users_with_perms(
|
||||||
object,
|
object,
|
||||||
@@ -85,12 +86,12 @@ def set_permissions_for_object(permissions: list[str], object, *, merge: bool =
|
|||||||
if not merge
|
if not merge
|
||||||
else User.objects.none()
|
else User.objects.none()
|
||||||
)
|
)
|
||||||
if len(users_to_add) > 0 and len(users_to_remove) > 0:
|
if users_to_add.exists() and users_to_remove.exists():
|
||||||
users_to_remove = users_to_remove.exclude(id__in=users_to_add)
|
users_to_remove = users_to_remove.exclude(id__in=users_to_add)
|
||||||
if len(users_to_remove) > 0:
|
if users_to_remove.exists():
|
||||||
for user in users_to_remove:
|
for user in users_to_remove:
|
||||||
remove_perm(permission, user, object)
|
remove_perm(permission, user, object)
|
||||||
if len(users_to_add) > 0:
|
if users_to_add.exists():
|
||||||
for user in users_to_add:
|
for user in users_to_add:
|
||||||
assign_perm(permission, user, object)
|
assign_perm(permission, user, object)
|
||||||
if action == "change":
|
if action == "change":
|
||||||
@@ -100,9 +101,9 @@ def set_permissions_for_object(permissions: list[str], object, *, merge: bool =
|
|||||||
user,
|
user,
|
||||||
object,
|
object,
|
||||||
)
|
)
|
||||||
if "groups" in permissions[action]:
|
if "groups" in entry:
|
||||||
# groups
|
# groups
|
||||||
groups_to_add = Group.objects.filter(id__in=permissions[action]["groups"])
|
groups_to_add = Group.objects.filter(id__in=entry["groups"])
|
||||||
groups_to_remove = (
|
groups_to_remove = (
|
||||||
get_groups_with_only_permission(
|
get_groups_with_only_permission(
|
||||||
object,
|
object,
|
||||||
@@ -111,12 +112,12 @@ def set_permissions_for_object(permissions: list[str], object, *, merge: bool =
|
|||||||
if not merge
|
if not merge
|
||||||
else Group.objects.none()
|
else Group.objects.none()
|
||||||
)
|
)
|
||||||
if len(groups_to_add) > 0 and len(groups_to_remove) > 0:
|
if groups_to_add.exists() and groups_to_remove.exists():
|
||||||
groups_to_remove = groups_to_remove.exclude(id__in=groups_to_add)
|
groups_to_remove = groups_to_remove.exclude(id__in=groups_to_add)
|
||||||
if len(groups_to_remove) > 0:
|
if groups_to_remove.exists():
|
||||||
for group in groups_to_remove:
|
for group in groups_to_remove:
|
||||||
remove_perm(permission, group, object)
|
remove_perm(permission, group, object)
|
||||||
if len(groups_to_add) > 0:
|
if groups_to_add.exists():
|
||||||
for group in groups_to_add:
|
for group in groups_to_add:
|
||||||
assign_perm(permission, group, object)
|
assign_perm(permission, group, object)
|
||||||
if action == "change":
|
if action == "change":
|
||||||
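A minimal usage sketch of the new mapping-based signature documented above; `document` stands in for any model instance carrying object-level permissions:

permissions = {
    "view": {"users": [1], "groups": [2]},
    "change": {"users": [], "groups": []},
}
# merge=True only grants the listed users/groups and removes nobody;
# merge=False makes the object's permissions match the mapping exactly.
set_permissions_for_object(permissions=permissions, object=document, merge=True)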
|
 50  src/documents/regex.py  Normal file
@@ -0,0 +1,50 @@
+from __future__ import annotations
+
+import logging
+import textwrap
+
+import regex
+from django.conf import settings
+
+logger = logging.getLogger("paperless.regex")
+
+REGEX_TIMEOUT_SECONDS: float = getattr(settings, "MATCH_REGEX_TIMEOUT_SECONDS", 0.1)
+
+
+def validate_regex_pattern(pattern: str) -> None:
+    """
+    Validate user provided regex for basic compile errors.
+    Raises ValueError on validation failure.
+    """
+
+    try:
+        regex.compile(pattern)
+    except regex.error as exc:
+        raise ValueError(exc.msg) from exc
+
+
+def safe_regex_search(pattern: str, text: str, *, flags: int = 0):
+    """
+    Run a regex search with a timeout. Returns a match object or None.
+    Validation errors and timeouts are logged and treated as no match.
+    """
+
+    try:
+        validate_regex_pattern(pattern)
+        compiled = regex.compile(pattern, flags=flags)
+    except (regex.error, ValueError) as exc:
+        logger.error(
+            "Error while processing regular expression %s: %s",
+            textwrap.shorten(pattern, width=80, placeholder="…"),
+            exc,
+        )
+        return None
+
+    try:
+        return compiled.search(text, timeout=REGEX_TIMEOUT_SECONDS)
+    except TimeoutError:
+        logger.warning(
+            "Regular expression matching timed out for pattern %s",
+            textwrap.shorten(pattern, width=80, placeholder="…"),
+        )
+        return None
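A short sketch of how the new helper behaves, using the same catastrophic-backtracking pattern as the new matching test further down; both failure modes are logged and treated as "no match":

from documents.regex import safe_regex_search

# Nested quantifiers force exponential backtracking on a long non-matching
# input; the timeout turns a potential worker hang into a None result.
assert safe_regex_search(r"(a+)+$", "a" * 5000 + "X") is None

# An uncompilable pattern is logged and swallowed the same way.
assert safe_regex_search("[", "anything") is None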
@@ -21,6 +21,7 @@ from django.core.validators import MaxLengthValidator
 from django.core.validators import RegexValidator
 from django.core.validators import integer_validator
 from django.db.models import Count
+from django.db.models.functions import Lower
 from django.utils.crypto import get_random_string
 from django.utils.dateparse import parse_datetime
 from django.utils.text import slugify
@@ -38,6 +39,7 @@ from guardian.utils import get_user_obj_perms_model
 from rest_framework import fields
 from rest_framework import serializers
 from rest_framework.fields import SerializerMethodField
+from rest_framework.filters import OrderingFilter

 if settings.AUDIT_LOG_ENABLED:
     from auditlog.context import set_actor
@@ -69,6 +71,7 @@ from documents.parsers import is_mime_type_supported
 from documents.permissions import get_document_count_filter_for_user
 from documents.permissions import get_groups_with_only_permission
 from documents.permissions import set_permissions_for_object
+from documents.regex import validate_regex_pattern
 from documents.templating.filepath import validate_filepath_template_and_render
 from documents.templating.utils import convert_format_str_to_template_format
 from documents.validators import uri_validator
@@ -139,10 +142,11 @@ class MatchingModelSerializer(serializers.ModelSerializer):
             and self.initial_data["matching_algorithm"] == MatchingModel.MATCH_REGEX
         ):
             try:
-                re.compile(match)
-            except re.error as e:
+                validate_regex_pattern(match)
+            except ValueError as e:
+                logger.debug(f"Invalid regular expression: {e!s}")
                 raise serializers.ValidationError(
-                    _("Invalid regular expression: %(error)s") % {"error": str(e.msg)},
+                    "Invalid regular expression, see log for details.",
                 )
         return match

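The visible behavior after this change, condensed: the compile error detail reaches only the debug log, and the API response carries a generic message instead of echoing `e.msg` back to the client:

try:
    validate_regex_pattern("[")  # unbalanced bracket -> regex.error -> ValueError
except ValueError as e:
    logger.debug(f"Invalid regular expression: {e!s}")  # detail stays server-side
    raise serializers.ValidationError(
        "Invalid regular expression, see log for details.",
    )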
@@ -575,16 +579,33 @@ class TagSerializer(MatchingModelSerializer, OwnedObjectSerializer):
     )
     def get_children(self, obj):
         filter_q = self.context.get("document_count_filter")
+        request = self.context.get("request")
         if filter_q is None:
-            request = self.context.get("request")
             user = getattr(request, "user", None) if request else None
             filter_q = get_document_count_filter_for_user(user)
             self.context["document_count_filter"] = filter_q
-        serializer = TagSerializer(
+
+        children_queryset = (
             obj.get_children_queryset()
             .select_related("owner")
-            .annotate(document_count=Count("documents", filter=filter_q)),
+            .annotate(document_count=Count("documents", filter=filter_q))
+        )
+
+        view = self.context.get("view")
+        ordering = (
+            OrderingFilter().get_ordering(request, children_queryset, view)
+            if request and view
+            else None
+        )
+        ordering = ordering or (Lower("name"),)
+        children_queryset = children_queryset.order_by(*ordering)
+
+        serializer = TagSerializer(
+            children_queryset,
             many=True,
+            user=self.user,
+            full_perms=self.full_perms,
+            all_fields=self.all_fields,
             context=self.context,
         )
         return serializer.data
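The net effect on tag children, condensed: an explicit `?ordering=` query parameter (handled by DRF's `OrderingFilter`) wins when a request and view are available in the serializer context; otherwise children sort case-insensitively by name:

from django.db.models.functions import Lower

# requested_ordering is None when no ?ordering= parameter was supplied
children_queryset = children_queryset.order_by(*(requested_ordering or (Lower("name"),)))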
|
@@ -1,14 +1,10 @@
 from __future__ import annotations

-import ipaddress
 import logging
 import shutil
-import socket
 from pathlib import Path
 from typing import TYPE_CHECKING
-from urllib.parse import urlparse

-import httpx
 from celery import shared_task
 from celery import states
 from celery.signals import before_task_publish
@@ -27,20 +23,16 @@ from django.db.models import Q
 from django.dispatch import receiver
 from django.utils import timezone
 from filelock import FileLock
-from guardian.shortcuts import remove_perm

 from documents import matching
 from documents.caching import clear_document_caches
 from documents.file_handling import create_source_path_directory
 from documents.file_handling import delete_empty_directories
+from documents.file_handling import generate_filename
 from documents.file_handling import generate_unique_filename
-from documents.mail import EmailAttachment
-from documents.mail import send_email
-from documents.models import Correspondent
 from documents.models import CustomField
 from documents.models import CustomFieldInstance
 from documents.models import Document
-from documents.models import DocumentType
 from documents.models import MatchingModel
 from documents.models import PaperlessTask
 from documents.models import SavedView
@@ -51,8 +43,15 @@ from documents.models import WorkflowAction
 from documents.models import WorkflowRun
 from documents.models import WorkflowTrigger
 from documents.permissions import get_objects_for_user_owner_aware
-from documents.permissions import set_permissions_for_object
-from documents.templating.workflows import parse_w_workflow_placeholders
+from documents.templating.utils import convert_format_str_to_template_format
+from documents.workflows.actions import build_workflow_action_context
+from documents.workflows.actions import execute_email_action
+from documents.workflows.actions import execute_webhook_action
+from documents.workflows.mutations import apply_assignment_to_document
+from documents.workflows.mutations import apply_assignment_to_overrides
+from documents.workflows.mutations import apply_removal_to_document
+from documents.workflows.mutations import apply_removal_to_overrides
+from documents.workflows.utils import get_workflows_for_trigger

 if TYPE_CHECKING:
     from documents.classifier import DocumentClassifier
@@ -392,6 +391,19 @@ class CannotMoveFilesException(Exception):
     pass


+def _filename_template_uses_custom_fields(doc: Document) -> bool:
+    template = None
+    if doc.storage_path is not None:
+        template = doc.storage_path.path
+    elif settings.FILENAME_FORMAT is not None:
+        template = convert_format_str_to_template_format(settings.FILENAME_FORMAT)
+
+    if not template:
+        return False
+
+    return "custom_fields" in template
+
+
 # should be disabled in /src/documents/management/commands/document_importer.py handle
 @receiver(models.signals.post_save, sender=CustomFieldInstance, weak=False)
 @receiver(models.signals.m2m_changed, sender=Document.tags.through, weak=False)
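Why a substring test suffices here: the guard only has to decide whether re-rendering the filename could possibly read custom field values at all, e.g.:

assert "custom_fields" in "{{ custom_fields|get_cf_value('flavor') }}/{{ title }}"
assert "custom_fields" not in "{{created}}/{{ title }}"  # -> handler returns early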
@@ -402,6 +414,8 @@ def update_filename_and_move_files(
     **kwargs,
 ):
     if isinstance(instance, CustomFieldInstance):
+        if not _filename_template_uses_custom_fields(instance.document):
+            return
         instance = instance.document

     def validate_move(instance, old_path: Path, new_path: Path):
@@ -439,21 +453,47 @@ def update_filename_and_move_files(
         old_filename = instance.filename
         old_source_path = instance.source_path

+        candidate_filename = generate_filename(instance)
+        candidate_source_path = (
+            settings.ORIGINALS_DIR / candidate_filename
+        ).resolve()
+        if candidate_filename == Path(old_filename):
+            new_filename = Path(old_filename)
+        elif (
+            candidate_source_path.exists()
+            and candidate_source_path != old_source_path
+        ):
+            # Only fall back to unique search when there is an actual conflict
+            new_filename = generate_unique_filename(instance)
+        else:
+            new_filename = candidate_filename
+
         # Need to convert to string to be able to save it to the db
-        instance.filename = str(generate_unique_filename(instance))
+        instance.filename = str(new_filename)
         move_original = old_filename != instance.filename

         old_archive_filename = instance.archive_filename
         old_archive_path = instance.archive_path

         if instance.has_archive_version:
-            # Need to convert to string to be able to save it to the db
-            instance.archive_filename = str(
-                generate_unique_filename(
-                    instance,
-                    archive_filename=True,
-                ),
-            )
+            archive_candidate = generate_filename(instance, archive_filename=True)
+            archive_candidate_path = (
+                settings.ARCHIVE_DIR / archive_candidate
+            ).resolve()
+            if archive_candidate == Path(old_archive_filename):
+                new_archive_filename = Path(old_archive_filename)
+            elif (
+                archive_candidate_path.exists()
+                and archive_candidate_path != old_archive_path
+            ):
+                new_archive_filename = generate_unique_filename(
+                    instance,
+                    archive_filename=True,
+                )
+            else:
+                new_archive_filename = archive_candidate
+
+            instance.archive_filename = str(new_archive_filename)
+
             move_archive = old_archive_filename != instance.archive_filename
         else:
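The decision tree introduced above, condensed (names as in the diff; the real code additionally requires the conflicting path to differ from the document's current path):

candidate = generate_filename(instance)
if candidate == Path(old_filename):
    new_filename = candidate                            # nothing changed, no disk I/O
elif (settings.ORIGINALS_DIR / candidate).resolve().exists():
    new_filename = generate_unique_filename(instance)   # genuine conflict: _01, _02, ...
else:
    new_filename = candidate                            # target slot is free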
@@ -673,92 +713,6 @@ def run_workflows_updated(sender, document: Document, logging_group=None, **kwargs
     )


-def _is_public_ip(ip: str) -> bool:
-    try:
-        obj = ipaddress.ip_address(ip)
-        return not (
-            obj.is_private
-            or obj.is_loopback
-            or obj.is_link_local
-            or obj.is_multicast
-            or obj.is_unspecified
-        )
-    except ValueError:  # pragma: no cover
-        return False
-
-
-def _resolve_first_ip(host: str) -> str | None:
-    try:
-        info = socket.getaddrinfo(host, None)
-        return info[0][4][0] if info else None
-    except Exception:  # pragma: no cover
-        return None
-
-
-@shared_task(
-    retry_backoff=True,
-    autoretry_for=(httpx.HTTPStatusError,),
-    max_retries=3,
-    throws=(httpx.HTTPError,),
-)
-def send_webhook(
-    url: str,
-    data: str | dict,
-    headers: dict,
-    files: dict,
-    *,
-    as_json: bool = False,
-):
-    p = urlparse(url)
-    if p.scheme.lower() not in settings.WEBHOOKS_ALLOWED_SCHEMES or not p.hostname:
-        logger.warning("Webhook blocked: invalid scheme/hostname")
-        raise ValueError("Invalid URL scheme or hostname.")
-
-    port = p.port or (443 if p.scheme == "https" else 80)
-    if (
-        len(settings.WEBHOOKS_ALLOWED_PORTS) > 0
-        and port not in settings.WEBHOOKS_ALLOWED_PORTS
-    ):
-        logger.warning("Webhook blocked: port not permitted")
-        raise ValueError("Destination port not permitted.")
-
-    ip = _resolve_first_ip(p.hostname)
-    if not ip or (
-        not _is_public_ip(ip) and not settings.WEBHOOKS_ALLOW_INTERNAL_REQUESTS
-    ):
-        logger.warning("Webhook blocked: destination not allowed")
-        raise ValueError("Destination host is not allowed.")
-
-    try:
-        post_args = {
-            "url": url,
-            "headers": {
-                k: v for k, v in (headers or {}).items() if k.lower() != "host"
-            },
-            "files": files or None,
-            "timeout": 5.0,
-            "follow_redirects": False,
-        }
-        if as_json:
-            post_args["json"] = data
-        elif isinstance(data, dict):
-            post_args["data"] = data
-        else:
-            post_args["content"] = data
-
-        httpx.post(
-            **post_args,
-        ).raise_for_status()
-        logger.info(
-            f"Webhook sent to {url}",
-        )
-    except Exception as e:
-        logger.error(
-            f"Failed attempt sending webhook to {url}: {e}",
-        )
-        raise e
-
-
 def run_workflows(
     trigger_type: WorkflowTrigger.WorkflowTriggerType,
     document: Document | ConsumableDocument,
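For context on what moved: judging by the test imports later in this diff, the task now lives in `documents.workflows.webhooks`. The core of the deleted SSRF guard, in isolation:

import ipaddress

def _is_public_ip(ip: str) -> bool:
    obj = ipaddress.ip_address(ip)
    return not (
        obj.is_private
        or obj.is_loopback
        or obj.is_link_local
        or obj.is_multicast
        or obj.is_unspecified
    )

assert _is_public_ip("93.184.216.34")   # public address: webhook allowed
assert not _is_public_ip("127.0.0.1")   # loopback: blocked
assert not _is_public_ip("10.0.0.1")    # private range: blocked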
@@ -767,572 +721,16 @@ def run_workflows(
     overrides: DocumentMetadataOverrides | None = None,
     original_file: Path | None = None,
 ) -> tuple[DocumentMetadataOverrides, str] | None:
-    """Run workflows which match a Document (or ConsumableDocument) for a specific trigger type or a single workflow if given.
-
-    Assignment or removal actions are either applied directly to the document or an overrides object. If an overrides
-    object is provided, the function returns the object with the applied changes or None if no actions were applied and a string
-    of messages for each action. If no overrides object is provided, the changes are applied directly to the document and the
-    function returns None.
-    """
+    """
+    Execute workflows matching a document for the given trigger. When `overrides` is provided
+    (consumption flow), actions mutate that object and the function returns `(overrides, messages)`.
+    Otherwise actions mutate the actual document and return nothing.
+
+    Attachments for email/webhook actions use `original_file` when given, otherwise fall back to
+    `document.source_path` (Document) or `document.original_file` (ConsumableDocument).
+
+    Passing `workflow_to_run` skips the workflow query (currently only used by scheduled runs).
+    """
-
-    def assignment_action():
-        if action.assign_tags.exists():
-            tag_ids_to_add: set[int] = set()
-            for tag in action.assign_tags.all():
-                tag_ids_to_add.add(tag.pk)
-                tag_ids_to_add.update(int(pk) for pk in tag.get_ancestors_pks())
-
-            if not use_overrides:
-                doc_tag_ids[:] = list(set(doc_tag_ids) | tag_ids_to_add)
-            else:
-                if overrides.tag_ids is None:
-                    overrides.tag_ids = []
-                overrides.tag_ids = list(set(overrides.tag_ids) | tag_ids_to_add)
-
-        if action.assign_correspondent:
-            if not use_overrides:
-                document.correspondent = action.assign_correspondent
-            else:
-                overrides.correspondent_id = action.assign_correspondent.pk
-
-        if action.assign_document_type:
-            if not use_overrides:
-                document.document_type = action.assign_document_type
-            else:
-                overrides.document_type_id = action.assign_document_type.pk
-
-        if action.assign_storage_path:
-            if not use_overrides:
-                document.storage_path = action.assign_storage_path
-            else:
-                overrides.storage_path_id = action.assign_storage_path.pk
-
-        if action.assign_owner:
-            if not use_overrides:
-                document.owner = action.assign_owner
-            else:
-                overrides.owner_id = action.assign_owner.pk
-
-        if action.assign_title:
-            if not use_overrides:
-                try:
-                    document.title = parse_w_workflow_placeholders(
-                        action.assign_title,
-                        document.correspondent.name if document.correspondent else "",
-                        document.document_type.name if document.document_type else "",
-                        document.owner.username if document.owner else "",
-                        timezone.localtime(document.added),
-                        document.original_filename or "",
-                        document.filename or "",
-                        document.created,
-                    )
-                except Exception:
-                    logger.exception(
-                        f"Error occurred parsing title assignment '{action.assign_title}', falling back to original",
-                        extra={"group": logging_group},
-                    )
-            else:
-                overrides.title = action.assign_title
-
-        if any(
-            [
-                action.assign_view_users.exists(),
-                action.assign_view_groups.exists(),
-                action.assign_change_users.exists(),
-                action.assign_change_groups.exists(),
-            ],
-        ):
-            permissions = {
-                "view": {
-                    "users": action.assign_view_users.values_list("id", flat=True),
-                    "groups": action.assign_view_groups.values_list("id", flat=True),
-                },
-                "change": {
-                    "users": action.assign_change_users.values_list("id", flat=True),
-                    "groups": action.assign_change_groups.values_list("id", flat=True),
-                },
-            }
-            if not use_overrides:
-                set_permissions_for_object(
-                    permissions=permissions,
-                    object=document,
-                    merge=True,
-                )
-            else:
-                overrides.view_users = list(
-                    set(
-                        (overrides.view_users or [])
-                        + list(permissions["view"]["users"]),
-                    ),
-                )
-                overrides.view_groups = list(
-                    set(
-                        (overrides.view_groups or [])
-                        + list(permissions["view"]["groups"]),
-                    ),
-                )
-                overrides.change_users = list(
-                    set(
-                        (overrides.change_users or [])
-                        + list(permissions["change"]["users"]),
-                    ),
-                )
-                overrides.change_groups = list(
-                    set(
-                        (overrides.change_groups or [])
-                        + list(permissions["change"]["groups"]),
-                    ),
-                )
-
-        if action.assign_custom_fields.exists():
-            if not use_overrides:
-                for field in action.assign_custom_fields.all():
-                    value_field_name = CustomFieldInstance.get_value_field_name(
-                        data_type=field.data_type,
-                    )
-                    args = {
-                        value_field_name: action.assign_custom_fields_values.get(
-                            str(field.pk),
-                            None,
-                        ),
-                    }
-                    # for some reason update_or_create doesn't work here
-                    instance = CustomFieldInstance.objects.filter(
-                        field=field,
-                        document=document,
-                    ).first()
-                    if instance and args[value_field_name] is not None:
-                        setattr(instance, value_field_name, args[value_field_name])
-                        instance.save()
-                    elif not instance:
-                        CustomFieldInstance.objects.create(
-                            **args,
-                            field=field,
-                            document=document,
-                        )
-            else:
-                if overrides.custom_fields is None:
-                    overrides.custom_fields = {}
-                overrides.custom_fields.update(
-                    {
-                        field.pk: action.assign_custom_fields_values.get(
-                            str(field.pk),
-                            None,
-                        )
-                        for field in action.assign_custom_fields.all()
-                    },
-                )
-
-    def removal_action():
-        if action.remove_all_tags:
-            if not use_overrides:
-                doc_tag_ids.clear()
-            else:
-                overrides.tag_ids = None
-        else:
-            tag_ids_to_remove: set[int] = set()
-            for tag in action.remove_tags.all():
-                tag_ids_to_remove.add(tag.pk)
-                tag_ids_to_remove.update(int(pk) for pk in tag.get_descendants_pks())
-
-            if not use_overrides:
-                doc_tag_ids[:] = [t for t in doc_tag_ids if t not in tag_ids_to_remove]
-            elif overrides.tag_ids:
-                overrides.tag_ids = [
-                    t for t in overrides.tag_ids if t not in tag_ids_to_remove
-                ]
-
-        if not use_overrides and (
-            action.remove_all_correspondents
-            or (
-                document.correspondent
-                and action.remove_correspondents.filter(
-                    pk=document.correspondent.pk,
-                ).exists()
-            )
-        ):
-            document.correspondent = None
-        elif use_overrides and (
-            action.remove_all_correspondents
-            or (
-                overrides.correspondent_id
-                and action.remove_correspondents.filter(
-                    pk=overrides.correspondent_id,
-                ).exists()
-            )
-        ):
-            overrides.correspondent_id = None
-
-        if not use_overrides and (
-            action.remove_all_document_types
-            or (
-                document.document_type
-                and action.remove_document_types.filter(
-                    pk=document.document_type.pk,
-                ).exists()
-            )
-        ):
-            document.document_type = None
-        elif use_overrides and (
-            action.remove_all_document_types
-            or (
-                overrides.document_type_id
-                and action.remove_document_types.filter(
-                    pk=overrides.document_type_id,
-                ).exists()
-            )
-        ):
-            overrides.document_type_id = None
-
-        if not use_overrides and (
-            action.remove_all_storage_paths
-            or (
-                document.storage_path
-                and action.remove_storage_paths.filter(
-                    pk=document.storage_path.pk,
-                ).exists()
-            )
-        ):
-            document.storage_path = None
-        elif use_overrides and (
-            action.remove_all_storage_paths
-            or (
-                overrides.storage_path_id
-                and action.remove_storage_paths.filter(
-                    pk=overrides.storage_path_id,
-                ).exists()
-            )
-        ):
-            overrides.storage_path_id = None
-
-        if not use_overrides and (
-            action.remove_all_owners
-            or (
-                document.owner
-                and action.remove_owners.filter(pk=document.owner.pk).exists()
-            )
-        ):
-            document.owner = None
-        elif use_overrides and (
-            action.remove_all_owners
-            or (
-                overrides.owner_id
-                and action.remove_owners.filter(pk=overrides.owner_id).exists()
-            )
-        ):
-            overrides.owner_id = None
-
-        if action.remove_all_permissions:
-            if not use_overrides:
-                permissions = {
-                    "view": {"users": [], "groups": []},
-                    "change": {"users": [], "groups": []},
-                }
-                set_permissions_for_object(
-                    permissions=permissions,
-                    object=document,
-                    merge=False,
-                )
-            else:
-                overrides.view_users = None
-                overrides.view_groups = None
-                overrides.change_users = None
-                overrides.change_groups = None
-        elif any(
-            [
-                action.remove_view_users.exists(),
-                action.remove_view_groups.exists(),
-                action.remove_change_users.exists(),
-                action.remove_change_groups.exists(),
-            ],
-        ):
-            if not use_overrides:
-                for user in action.remove_view_users.all():
-                    remove_perm("view_document", user, document)
-                for user in action.remove_change_users.all():
-                    remove_perm("change_document", user, document)
-                for group in action.remove_view_groups.all():
-                    remove_perm("view_document", group, document)
-                for group in action.remove_change_groups.all():
-                    remove_perm("change_document", group, document)
-            else:
-                if overrides.view_users:
-                    for user in action.remove_view_users.filter(
-                        pk__in=overrides.view_users,
-                    ):
-                        overrides.view_users.remove(user.pk)
-                if overrides.change_users:
-                    for user in action.remove_change_users.filter(
-                        pk__in=overrides.change_users,
-                    ):
-                        overrides.change_users.remove(user.pk)
-                if overrides.view_groups:
-                    for group in action.remove_view_groups.filter(
-                        pk__in=overrides.view_groups,
-                    ):
-                        overrides.view_groups.remove(group.pk)
-                if overrides.change_groups:
-                    for group in action.remove_change_groups.filter(
-                        pk__in=overrides.change_groups,
-                    ):
-                        overrides.change_groups.remove(group.pk)
-
-        if action.remove_all_custom_fields:
-            if not use_overrides:
-                CustomFieldInstance.objects.filter(document=document).hard_delete()
-            else:
-                overrides.custom_fields = None
-        elif action.remove_custom_fields.exists():
-            if not use_overrides:
-                CustomFieldInstance.objects.filter(
-                    field__in=action.remove_custom_fields.all(),
-                    document=document,
-                ).hard_delete()
-            elif overrides.custom_fields:
-                for field in action.remove_custom_fields.filter(
-                    pk__in=overrides.custom_fields.keys(),
-                ):
-                    overrides.custom_fields.pop(field.pk, None)
-
-    def email_action():
-        if not settings.EMAIL_ENABLED:
-            logger.error(
-                "Email backend has not been configured, cannot send email notifications",
-                extra={"group": logging_group},
-            )
-            return
-
-        if not use_overrides:
-            title = document.title
-            doc_url = (
-                f"{settings.PAPERLESS_URL}{settings.BASE_URL}documents/{document.pk}/"
-            )
-            correspondent = (
-                document.correspondent.name if document.correspondent else ""
-            )
-            document_type = (
-                document.document_type.name if document.document_type else ""
-            )
-            owner_username = document.owner.username if document.owner else ""
-            filename = document.original_filename or ""
-            current_filename = document.filename or ""
-            added = timezone.localtime(document.added)
-            created = document.created
-        else:
-            title = overrides.title if overrides.title else str(document.original_file)
-            doc_url = ""
-            correspondent = (
-                Correspondent.objects.filter(pk=overrides.correspondent_id).first()
-                if overrides.correspondent_id
-                else ""
-            )
-            document_type = (
-                DocumentType.objects.filter(pk=overrides.document_type_id).first().name
-                if overrides.document_type_id
-                else ""
-            )
-            owner_username = (
-                User.objects.filter(pk=overrides.owner_id).first().username
-                if overrides.owner_id
-                else ""
-            )
-            filename = document.original_file if document.original_file else ""
-            current_filename = filename
-            added = timezone.localtime(timezone.now())
-            created = overrides.created
-
-        subject = (
-            parse_w_workflow_placeholders(
-                action.email.subject,
-                correspondent,
-                document_type,
-                owner_username,
-                added,
-                filename,
-                current_filename,
-                created,
-                title,
-                doc_url,
-            )
-            if action.email.subject
-            else ""
-        )
-        body = (
-            parse_w_workflow_placeholders(
-                action.email.body,
-                correspondent,
-                document_type,
-                owner_username,
-                added,
-                filename,
-                current_filename,
-                created,
-                title,
-                doc_url,
-            )
-            if action.email.body
-            else ""
-        )
-        try:
-            attachments: list[EmailAttachment] = []
-            if action.email.include_document:
-                attachment: EmailAttachment | None = None
-                if trigger_type in [
-                    WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
-                    WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
-                ] and isinstance(document, Document):
-                    friendly_name = (
-                        Path(current_filename).name
-                        if current_filename
-                        else document.source_path.name
-                    )
-                    attachment = EmailAttachment(
-                        path=document.source_path,
-                        mime_type=document.mime_type,
-                        friendly_name=friendly_name,
-                    )
-                elif original_file:
-                    friendly_name = (
-                        Path(current_filename).name
-                        if current_filename
-                        else original_file.name
-                    )
-                    attachment = EmailAttachment(
-                        path=original_file,
-                        mime_type=document.mime_type,
-                        friendly_name=friendly_name,
-                    )
-                if attachment:
-                    attachments = [attachment]
-            n_messages = send_email(
-                subject=subject,
-                body=body,
-                to=action.email.to.split(","),
-                attachments=attachments,
-            )
-            logger.debug(
-                f"Sent {n_messages} notification email(s) to {action.email.to}",
-                extra={"group": logging_group},
-            )
-        except Exception as e:
-            logger.exception(
-                f"Error occurred sending notification email: {e}",
-                extra={"group": logging_group},
-            )
-
-    def webhook_action():
-        if not use_overrides:
-            title = document.title
-            doc_url = (
-                f"{settings.PAPERLESS_URL}{settings.BASE_URL}documents/{document.pk}/"
-            )
-            correspondent = (
-                document.correspondent.name if document.correspondent else ""
-            )
-            document_type = (
-                document.document_type.name if document.document_type else ""
-            )
-            owner_username = document.owner.username if document.owner else ""
-            filename = document.original_filename or ""
-            current_filename = document.filename or ""
-            added = timezone.localtime(document.added)
-            created = document.created
-        else:
-            title = overrides.title if overrides.title else str(document.original_file)
-            doc_url = ""
-            correspondent = (
-                Correspondent.objects.filter(pk=overrides.correspondent_id).first()
-                if overrides.correspondent_id
-                else ""
-            )
-            document_type = (
-                DocumentType.objects.filter(pk=overrides.document_type_id).first().name
-                if overrides.document_type_id
-                else ""
-            )
-            owner_username = (
-                User.objects.filter(pk=overrides.owner_id).first().username
-                if overrides.owner_id
-                else ""
-            )
-            filename = document.original_file if document.original_file else ""
-            current_filename = filename
-            added = timezone.localtime(timezone.now())
-            created = overrides.created
-
-        try:
-            data = {}
-            if action.webhook.use_params:
-                if action.webhook.params:
-                    try:
-                        for key, value in action.webhook.params.items():
-                            data[key] = parse_w_workflow_placeholders(
-                                value,
-                                correspondent,
-                                document_type,
-                                owner_username,
-                                added,
-                                filename,
-                                current_filename,
-                                created,
-                                title,
-                                doc_url,
-                            )
-                    except Exception as e:
-                        logger.error(
-                            f"Error occurred parsing webhook params: {e}",
-                            extra={"group": logging_group},
-                        )
-            elif action.webhook.body:
-                data = parse_w_workflow_placeholders(
-                    action.webhook.body,
-                    correspondent,
-                    document_type,
-                    owner_username,
-                    added,
-                    filename,
-                    current_filename,
-                    created,
-                    title,
-                    doc_url,
-                )
-            headers = {}
-            if action.webhook.headers:
-                try:
-                    headers = {
-                        str(k): str(v) for k, v in action.webhook.headers.items()
-                    }
-                except Exception as e:
-                    logger.error(
-                        f"Error occurred parsing webhook headers: {e}",
-                        extra={"group": logging_group},
-                    )
-            files = None
-            if action.webhook.include_document:
-                with original_file.open("rb") as f:
-                    files = {
-                        "file": (
-                            filename,
-                            f.read(),
-                            document.mime_type,
-                        ),
-                    }
-            send_webhook.delay(
-                url=action.webhook.url,
-                data=data,
-                headers=headers,
-                files=files,
-                as_json=action.webhook.as_json,
-            )
-            logger.debug(
-                f"Webhook to {action.webhook.url} queued",
-                extra={"group": logging_group},
-            )
-        except Exception as e:
-            logger.exception(
-                f"Error occurred sending webhook: {e}",
-                extra={"group": logging_group},
-            )
-
     use_overrides = overrides is not None
     if original_file is None:
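The two call modes described in the new docstring, sketched (names as in this diff, arguments simplified):

# Consumption flow: actions accumulate on the overrides object.
overrides, messages = run_workflows(
    WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
    consumable_document,
    overrides=DocumentMetadataOverrides(),
)

# Post-save flow: actions mutate the stored document; returns None.
run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, document)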
@@ -1341,30 +739,7 @@ def run_workflows(
         )
     messages = []

-    workflows = (
-        (
-            Workflow.objects.filter(enabled=True, triggers__type=trigger_type)
-            .prefetch_related(
-                "actions",
-                "actions__assign_view_users",
-                "actions__assign_view_groups",
-                "actions__assign_change_users",
-                "actions__assign_change_groups",
-                "actions__assign_custom_fields",
-                "actions__remove_tags",
-                "actions__remove_correspondents",
-                "actions__remove_document_types",
-                "actions__remove_storage_paths",
-                "actions__remove_custom_fields",
-                "actions__remove_owners",
-                "triggers",
-            )
-            .order_by("order")
-            .distinct()
-        )
-        if workflow_to_run is None
-        else [workflow_to_run]
-    )
+    workflows = get_workflows_for_trigger(trigger_type, workflow_to_run)

     for workflow in workflows:
         if not use_overrides:
@@ -1384,13 +759,39 @@ def run_workflows(
             messages.append(message)

             if action.type == WorkflowAction.WorkflowActionType.ASSIGNMENT:
-                assignment_action()
+                if use_overrides and overrides:
+                    apply_assignment_to_overrides(action, overrides)
+                else:
+                    apply_assignment_to_document(
+                        action,
+                        document,
+                        doc_tag_ids,
+                        logging_group,
+                    )
             elif action.type == WorkflowAction.WorkflowActionType.REMOVAL:
-                removal_action()
+                if use_overrides and overrides:
+                    apply_removal_to_overrides(action, overrides)
+                else:
+                    apply_removal_to_document(action, document, doc_tag_ids)
             elif action.type == WorkflowAction.WorkflowActionType.EMAIL:
-                email_action()
+                context = build_workflow_action_context(document, overrides)
+                execute_email_action(
+                    action,
+                    document,
+                    context,
+                    logging_group,
+                    original_file,
+                    trigger_type,
+                )
             elif action.type == WorkflowAction.WorkflowActionType.WEBHOOK:
-                webhook_action()
+                context = build_workflow_action_context(document, overrides)
+                execute_webhook_action(
+                    action,
+                    document,
+                    context,
+                    logging_group,
+                    original_file,
+                )

         if not use_overrides:
             # limit title to 128 characters
|
@@ -41,7 +41,6 @@ from documents.models import DocumentType
 from documents.models import PaperlessTask
 from documents.models import StoragePath
 from documents.models import Tag
-from documents.models import Workflow
 from documents.models import WorkflowRun
 from documents.models import WorkflowTrigger
 from documents.parsers import DocumentParser
@@ -54,6 +53,7 @@ from documents.sanity_checker import SanityCheckFailedException
 from documents.signals import document_updated
 from documents.signals.handlers import cleanup_document_deletion
 from documents.signals.handlers import run_workflows
+from documents.workflows.utils import get_workflows_for_trigger

 if settings.AUDIT_LOG_ENABLED:
     from auditlog.models import LogEntry
@@ -400,13 +400,8 @@ def check_scheduled_workflows():

     Once a document satisfies this condition, and recurring/non-recurring constraints are met, the workflow is run.
     """
-    scheduled_workflows: list[Workflow] = (
-        Workflow.objects.filter(
-            triggers__type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
-            enabled=True,
-        )
-        .distinct()
-        .prefetch_related("triggers")
-    )
+    scheduled_workflows = get_workflows_for_trigger(
+        WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
+    )
     if scheduled_workflows.count() > 0:
         logger.debug(f"Checking {len(scheduled_workflows)} scheduled workflows")
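`get_workflows_for_trigger` itself is not part of this diff. Judging from its two call sites — the dispatcher above and `check_scheduled_workflows`, which calls `.count()` on the result — a plausible sketch is:

def get_workflows_for_trigger(trigger_type, workflow_to_run=None):
    # Hypothetical reconstruction; the real body lives in documents/workflows/utils.py
    # and is not shown in this diff.
    if workflow_to_run is not None:
        return Workflow.objects.filter(pk=workflow_to_run.pk)
    return (
        Workflow.objects.filter(enabled=True, triggers__type=trigger_type)
        .prefetch_related("actions", "triggers")
        .order_by("order")
        .distinct()
    )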
|
@@ -1289,7 +1289,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
                 content_type__app_label="admin",
             ),
         )
-        set_permissions([4, 5], set_permissions=[], owner=user2, merge=False)
+        set_permissions([4, 5], set_permissions={}, owner=user2, merge=False)

         with index.open_index_writer() as writer:
             index.update_document(writer, d1)
|
@@ -16,6 +16,7 @@ from django.utils import timezone
 from documents.file_handling import create_source_path_directory
 from documents.file_handling import delete_empty_directories
 from documents.file_handling import generate_filename
+from documents.file_handling import generate_unique_filename
 from documents.models import Correspondent
 from documents.models import CustomField
 from documents.models import CustomFieldInstance
@@ -1632,6 +1633,73 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
         )


+class TestCustomFieldFilenameUpdates(
+    DirectoriesMixin,
+    FileSystemAssertsMixin,
+    TestCase,
+):
+    def setUp(self):
+        self.cf = CustomField.objects.create(
+            name="flavor",
+            data_type=CustomField.FieldDataType.STRING,
+        )
+        self.doc = Document.objects.create(
+            title="document",
+            mime_type="application/pdf",
+            checksum="abc123",
+        )
+        self.cfi = CustomFieldInstance.objects.create(
+            field=self.cf,
+            document=self.doc,
+            value_text="initial",
+        )
+        return super().setUp()
+
+    @override_settings(FILENAME_FORMAT=None)
+    def test_custom_field_not_in_template_skips_filename_work(self):
+        storage_path = StoragePath.objects.create(path="{{created}}/{{ title }}")
+        self.doc.storage_path = storage_path
+        self.doc.save()
+        initial_filename = generate_filename(self.doc)
+        Document.objects.filter(pk=self.doc.pk).update(filename=str(initial_filename))
+        self.doc.refresh_from_db()
+        Path(self.doc.source_path).parent.mkdir(parents=True, exist_ok=True)
+        Path(self.doc.source_path).touch()
+
+        with mock.patch("documents.signals.handlers.generate_unique_filename") as m:
+            m.side_effect = generate_unique_filename
+            self.cfi.value_text = "updated"
+            self.cfi.save()
+
+        self.doc.refresh_from_db()
+        self.assertEqual(Path(self.doc.filename), initial_filename)
+        self.assertEqual(m.call_count, 0)
+
+    @override_settings(FILENAME_FORMAT=None)
+    def test_custom_field_in_template_triggers_filename_update(self):
+        storage_path = StoragePath.objects.create(
+            path="{{ custom_fields|get_cf_value('flavor') }}/{{ title }}",
+        )
+        self.doc.storage_path = storage_path
+        self.doc.save()
+        initial_filename = generate_filename(self.doc)
+        Document.objects.filter(pk=self.doc.pk).update(filename=str(initial_filename))
+        self.doc.refresh_from_db()
+        Path(self.doc.source_path).parent.mkdir(parents=True, exist_ok=True)
+        Path(self.doc.source_path).touch()
+
+        with mock.patch("documents.signals.handlers.generate_unique_filename") as m:
+            m.side_effect = generate_unique_filename
+            self.cfi.value_text = "updated"
+            self.cfi.save()
+
+        self.doc.refresh_from_db()
+        expected_filename = Path("updated/document.pdf")
+        self.assertEqual(Path(self.doc.filename), expected_filename)
+        self.assertTrue(Path(self.doc.source_path).is_file())
+        self.assertLessEqual(m.call_count, 1)
+
+
 class TestPathDateLocalization:
     """
     Groups all tests related to the `localize_date` function.
||||||
|
|||||||
@@ -206,6 +206,22 @@ class TestMatching(_TestMatchingBase):
|
|||||||
def test_tach_invalid_regex(self):
|
def test_tach_invalid_regex(self):
|
||||||
self._test_matching("[", "MATCH_REGEX", [], ["Don't match this"])
|
self._test_matching("[", "MATCH_REGEX", [], ["Don't match this"])
|
||||||
|
|
||||||
|
def test_match_regex_timeout_returns_false(self):
|
||||||
|
tag = Tag.objects.create(
|
||||||
|
name="slow",
|
||||||
|
match=r"(a+)+$",
|
||||||
|
matching_algorithm=Tag.MATCH_REGEX,
|
||||||
|
)
|
||||||
|
document = Document(content=("a" * 5000) + "X")
|
||||||
|
|
||||||
|
with self.assertLogs("paperless.regex", level="WARNING") as cm:
|
||||||
|
self.assertFalse(matching.matches(tag, document))
|
||||||
|
|
||||||
|
self.assertTrue(
|
||||||
|
any("timed out" in message for message in cm.output),
|
||||||
|
f"Expected timeout log, got {cm.output}",
|
||||||
|
)
|
||||||
|
|
||||||
def test_match_fuzzy(self):
|
def test_match_fuzzy(self):
|
||||||
self._test_matching(
|
self._test_matching(
|
||||||
"Springfield, Miss.",
|
"Springfield, Miss.",
|
||||||
|
@@ -17,6 +17,7 @@ from django.utils import timezone
 from guardian.shortcuts import assign_perm
 from guardian.shortcuts import get_groups_with_perms
 from guardian.shortcuts import get_users_with_perms
+from httpx import ConnectError
 from httpx import HTTPError
 from httpx import HTTPStatusError
 from pytest_httpx import HTTPXMock
@@ -26,7 +27,7 @@ from rest_framework.test import APITestCase
 from documents.file_handling import create_source_path_directory
 from documents.file_handling import generate_unique_filename
 from documents.signals.handlers import run_workflows
-from documents.signals.handlers import send_webhook
+from documents.workflows.webhooks import send_webhook

 if TYPE_CHECKING:
     from django.db.models import QuerySet
@@ -2858,7 +2859,7 @@ class TestWorkflows(

         mock_email_send.return_value = 1

-        with self.assertNoLogs("paperless.handlers", level="ERROR"):
+        with self.assertNoLogs("paperless.workflows", level="ERROR"):
             run_workflows(
                 WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
                 consumable_document,
@@ -3096,7 +3097,7 @@ class TestWorkflows(
             original_filename="sample.pdf",
         )

-        with self.assertLogs("paperless.handlers", level="ERROR") as cm:
+        with self.assertLogs("paperless.workflows.actions", level="ERROR") as cm:
             run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)

         expected_str = "Email backend has not been configured"
@@ -3144,7 +3145,7 @@ class TestWorkflows(
             original_filename="sample.pdf",
         )

-        with self.assertLogs("paperless.handlers", level="ERROR") as cm:
+        with self.assertLogs("paperless.workflows", level="ERROR") as cm:
             run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)

         expected_str = "Error occurred sending email"
@@ -3215,7 +3216,7 @@ class TestWorkflows(
         PAPERLESS_FORCE_SCRIPT_NAME="/paperless",
         BASE_URL="/paperless/",
     )
-    @mock.patch("documents.signals.handlers.send_webhook.delay")
+    @mock.patch("documents.workflows.webhooks.send_webhook.delay")
     def test_workflow_webhook_action_body(self, mock_post):
         """
         GIVEN:
@@ -3274,7 +3275,7 @@ class TestWorkflows(
     @override_settings(
         PAPERLESS_URL="http://localhost:8000",
     )
-    @mock.patch("documents.signals.handlers.send_webhook.delay")
+    @mock.patch("documents.workflows.webhooks.send_webhook.delay")
     def test_workflow_webhook_action_w_files(self, mock_post):
         """
         GIVEN:
@@ -3377,7 +3378,7 @@ class TestWorkflows(
         )

         # fails because no file
-        with self.assertLogs("paperless.handlers", level="ERROR") as cm:
+        with self.assertLogs("paperless.workflows", level="ERROR") as cm:
             run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)

         expected_str = "Error occurred sending webhook"
@@ -3420,7 +3421,7 @@ class TestWorkflows(
             original_filename="sample.pdf",
         )

-        with self.assertLogs("paperless.handlers", level="ERROR") as cm:
+        with self.assertLogs("paperless.workflows", level="ERROR") as cm:
             run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)

         expected_str = "Error occurred parsing webhook params"
@@ -3428,7 +3429,7 @@ class TestWorkflows(
         expected_str = "Error occurred parsing webhook headers"
         self.assertIn(expected_str, cm.output[1])

-    @mock.patch("httpx.post")
+    @mock.patch("httpx.Client.post")
     def test_workflow_webhook_send_webhook_task(self, mock_post):
         mock_post.return_value = mock.Mock(
             status_code=200,
@@ -3436,7 +3437,7 @@ class TestWorkflows(
             raise_for_status=mock.Mock(),
         )

-        with self.assertLogs("paperless.handlers") as cm:
+        with self.assertLogs("paperless.workflows") as cm:
             send_webhook(
                 url="http://paperless-ngx.com",
                 data="Test message",
@@ -3449,8 +3450,6 @@ class TestWorkflows(
             content="Test message",
             headers={},
             files=None,
-            follow_redirects=False,
-            timeout=5,
         )

         expected_str = "Webhook sent to http://paperless-ngx.com"
@@ -3468,11 +3467,9 @@ class TestWorkflows(
             data={"message": "Test message"},
             headers={},
             files=None,
-            follow_redirects=False,
-            timeout=5,
         )

-    @mock.patch("httpx.post")
+    @mock.patch("httpx.Client.post")
     def test_workflow_webhook_send_webhook_retry(self, mock_http):
         mock_http.return_value.raise_for_status = mock.Mock(
             side_effect=HTTPStatusError(
@@ -3482,7 +3479,7 @@ class TestWorkflows(
             ),
         )

-        with self.assertLogs("paperless.handlers") as cm:
+        with self.assertLogs("paperless.workflows") as cm:
             with self.assertRaises(HTTPStatusError):
|
||||||
send_webhook(
|
send_webhook(
|
||||||
url="http://paperless-ngx.com",
|
url="http://paperless-ngx.com",
|
||||||
@@ -3498,7 +3495,7 @@ class TestWorkflows(
|
|||||||
)
|
)
|
||||||
self.assertIn(expected_str, cm.output[0])
|
self.assertIn(expected_str, cm.output[0])
|
||||||
|
|
||||||
@mock.patch("documents.signals.handlers.send_webhook.delay")
|
@mock.patch("documents.workflows.webhooks.send_webhook.delay")
|
||||||
def test_workflow_webhook_action_consumption(self, mock_post):
|
def test_workflow_webhook_action_consumption(self, mock_post):
|
||||||
"""
|
"""
|
||||||
GIVEN:
|
GIVEN:
|
||||||
@@ -3668,7 +3665,7 @@ class TestWebhookSecurity:
|
|||||||
- ValueError is raised
|
- ValueError is raised
|
||||||
"""
|
"""
|
||||||
resolve_to("127.0.0.1")
|
resolve_to("127.0.0.1")
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ConnectError):
|
||||||
send_webhook(
|
send_webhook(
|
||||||
"http://paperless-ngx.com",
|
"http://paperless-ngx.com",
|
||||||
data="",
|
data="",
|
||||||
@@ -3698,7 +3695,8 @@ class TestWebhookSecurity:
|
|||||||
)
|
)
|
||||||
|
|
||||||
req = httpx_mock.get_request()
|
req = httpx_mock.get_request()
|
||||||
assert req.url.host == "paperless-ngx.com"
|
assert req.url.host == "52.207.186.75"
|
||||||
|
assert req.headers["host"] == "paperless-ngx.com"
|
||||||
|
|
||||||
def test_follow_redirects_disabled(self, httpx_mock: HTTPXMock, resolve_to):
|
def test_follow_redirects_disabled(self, httpx_mock: HTTPXMock, resolve_to):
|
||||||
"""
|
"""
|
||||||
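Note: the patch target changes from "httpx.post" to "httpx.Client.post" because the reworked send_webhook task (src/documents/workflows/webhooks.py, below) now issues the request through an httpx.Client bound to a custom transport, which a patch of the module-level httpx.post helper would no longer intercept. A minimal sketch of the pattern, assuming send_webhook is importable in the test module and default webhook settings:

    from unittest import mock

    # Patching the method on the class intercepts any `client.post(...)` call,
    # regardless of which Client instance makes it.
    with mock.patch("httpx.Client.post") as mock_post:
        mock_post.return_value = mock.Mock(status_code=200, raise_for_status=mock.Mock())
        send_webhook(url="http://example.com", data="ping", headers={}, files=None)
        mock_post.assert_called_once()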
src/documents/workflows/__init__.py (new file, empty)

src/documents/workflows/actions.py (new file, 261 lines)
@@ -0,0 +1,261 @@
import logging
from pathlib import Path

from django.conf import settings
from django.contrib.auth.models import User
from django.utils import timezone

from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
from documents.mail import EmailAttachment
from documents.mail import send_email
from documents.models import Correspondent
from documents.models import Document
from documents.models import DocumentType
from documents.models import WorkflowAction
from documents.models import WorkflowTrigger
from documents.templating.workflows import parse_w_workflow_placeholders
from documents.workflows.webhooks import send_webhook

logger = logging.getLogger("paperless.workflows.actions")


def build_workflow_action_context(
    document: Document | ConsumableDocument,
    overrides: DocumentMetadataOverrides | None,
) -> dict:
    """
    Build context dictionary for workflow action placeholder parsing.
    """
    use_overrides = overrides is not None

    if not use_overrides:
        return {
            "title": document.title,
            "doc_url": f"{settings.PAPERLESS_URL}{settings.BASE_URL}documents/{document.pk}/",
            "correspondent": document.correspondent.name
            if document.correspondent
            else "",
            "document_type": document.document_type.name
            if document.document_type
            else "",
            "owner_username": document.owner.username if document.owner else "",
            "filename": document.original_filename or "",
            "current_filename": document.filename or "",
            "added": timezone.localtime(document.added),
            "created": document.created,
        }

    correspondent_obj = (
        Correspondent.objects.filter(pk=overrides.correspondent_id).first()
        if overrides and overrides.correspondent_id
        else None
    )
    document_type_obj = (
        DocumentType.objects.filter(pk=overrides.document_type_id).first()
        if overrides and overrides.document_type_id
        else None
    )
    owner_obj = (
        User.objects.filter(pk=overrides.owner_id).first()
        if overrides and overrides.owner_id
        else None
    )

    filename = document.original_file if document.original_file else ""
    return {
        "title": overrides.title
        if overrides and overrides.title
        else str(document.original_file),
        "doc_url": "",
        "correspondent": correspondent_obj.name if correspondent_obj else "",
        "document_type": document_type_obj.name if document_type_obj else "",
        "owner_username": owner_obj.username if owner_obj else "",
        "filename": filename,
        "current_filename": filename,
        "added": timezone.localtime(timezone.now()),
        "created": overrides.created if overrides else None,
    }


def execute_email_action(
    action: WorkflowAction,
    document: Document | ConsumableDocument,
    context: dict,
    logging_group,
    original_file: Path,
    trigger_type: WorkflowTrigger.WorkflowTriggerType,
) -> None:
    """
    Execute an email action for a workflow.
    """

    if not settings.EMAIL_ENABLED:
        logger.error(
            "Email backend has not been configured, cannot send email notifications",
            extra={"group": logging_group},
        )
        return

    subject = (
        parse_w_workflow_placeholders(
            action.email.subject,
            context["correspondent"],
            context["document_type"],
            context["owner_username"],
            context["added"],
            context["filename"],
            context["current_filename"],
            context["created"],
            context["title"],
            context["doc_url"],
        )
        if action.email.subject
        else ""
    )
    body = (
        parse_w_workflow_placeholders(
            action.email.body,
            context["correspondent"],
            context["document_type"],
            context["owner_username"],
            context["added"],
            context["filename"],
            context["current_filename"],
            context["created"],
            context["title"],
            context["doc_url"],
        )
        if action.email.body
        else ""
    )

    try:
        attachments: list[EmailAttachment] = []
        if action.email.include_document:
            attachment: EmailAttachment | None = None
            if trigger_type in [
                WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
                WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
            ] and isinstance(document, Document):
                friendly_name = (
                    Path(context["current_filename"]).name
                    if context["current_filename"]
                    else document.source_path.name
                )
                attachment = EmailAttachment(
                    path=document.source_path,
                    mime_type=document.mime_type,
                    friendly_name=friendly_name,
                )
            elif original_file:
                friendly_name = (
                    Path(context["current_filename"]).name
                    if context["current_filename"]
                    else original_file.name
                )
                attachment = EmailAttachment(
                    path=original_file,
                    mime_type=document.mime_type,
                    friendly_name=friendly_name,
                )
            if attachment:
                attachments = [attachment]

        n_messages = send_email(
            subject=subject,
            body=body,
            to=action.email.to.split(","),
            attachments=attachments,
        )
        logger.debug(
            f"Sent {n_messages} notification email(s) to {action.email.to}",
            extra={"group": logging_group},
        )
    except Exception as e:
        logger.exception(
            f"Error occurred sending notification email: {e}",
            extra={"group": logging_group},
        )


def execute_webhook_action(
    action: WorkflowAction,
    document: Document | ConsumableDocument,
    context: dict,
    logging_group,
    original_file: Path,
):
    try:
        data = {}
        if action.webhook.use_params:
            if action.webhook.params:
                try:
                    for key, value in action.webhook.params.items():
                        data[key] = parse_w_workflow_placeholders(
                            value,
                            context["correspondent"],
                            context["document_type"],
                            context["owner_username"],
                            context["added"],
                            context["filename"],
                            context["current_filename"],
                            context["created"],
                            context["title"],
                            context["doc_url"],
                        )
                except Exception as e:
                    logger.error(
                        f"Error occurred parsing webhook params: {e}",
                        extra={"group": logging_group},
                    )
        elif action.webhook.body:
            data = parse_w_workflow_placeholders(
                action.webhook.body,
                context["correspondent"],
                context["document_type"],
                context["owner_username"],
                context["added"],
                context["filename"],
                context["current_filename"],
                context["created"],
                context["title"],
                context["doc_url"],
            )
        headers = {}
        if action.webhook.headers:
            try:
                headers = {str(k): str(v) for k, v in action.webhook.headers.items()}
            except Exception as e:
                logger.error(
                    f"Error occurred parsing webhook headers: {e}",
                    extra={"group": logging_group},
                )
        files = None
        if action.webhook.include_document:
            with original_file.open("rb") as f:
                files = {
                    "file": (
                        str(context["filename"])
                        if context["filename"]
                        else original_file.name,
                        f.read(),
                        document.mime_type,
                    ),
                }
        send_webhook.delay(
            url=action.webhook.url,
            data=data,
            headers=headers,
            files=files,
            as_json=action.webhook.as_json,
        )
        logger.debug(
            f"Webhook to {action.webhook.url} queued",
            extra={"group": logging_group},
        )
    except Exception as e:
        logger.exception(
            f"Error occurred sending webhook: {e}",
            extra={"group": logging_group},
        )
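Note: build_workflow_action_context() flattens the two possible inputs (a stored Document, or a ConsumableDocument plus DocumentMetadataOverrides during consumption) into one dict so the email and webhook actions can pass the same positional arguments to parse_w_workflow_placeholders(). A rough usage sketch, assuming an existing Document instance named doc:

    context = build_workflow_action_context(doc, overrides=None)
    # Keys: title, doc_url, correspondent, document_type, owner_username,
    # filename, current_filename, added, created
    print(f"New document {context['title']} from {context['correspondent']}")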
src/documents/workflows/mutations.py (new file, 357 lines)
@@ -0,0 +1,357 @@
import logging

from django.utils import timezone
from guardian.shortcuts import remove_perm

from documents.data_models import DocumentMetadataOverrides
from documents.models import CustomFieldInstance
from documents.models import Document
from documents.models import WorkflowAction
from documents.permissions import set_permissions_for_object
from documents.templating.workflows import parse_w_workflow_placeholders

logger = logging.getLogger("paperless.workflows.mutations")


def apply_assignment_to_document(
    action: WorkflowAction,
    document: Document,
    doc_tag_ids: list[int],
    logging_group,
):
    """
    Apply assignment actions to a Document instance.

    action: WorkflowAction, annotated with 'has_assign_*' boolean fields
    """
    if action.has_assign_tags:
        tag_ids_to_add: set[int] = set()
        for tag in action.assign_tags.all():
            tag_ids_to_add.add(tag.pk)
            tag_ids_to_add.update(int(pk) for pk in tag.get_ancestors_pks())

        doc_tag_ids[:] = list(set(doc_tag_ids) | tag_ids_to_add)

    if action.assign_correspondent:
        document.correspondent = action.assign_correspondent

    if action.assign_document_type:
        document.document_type = action.assign_document_type

    if action.assign_storage_path:
        document.storage_path = action.assign_storage_path

    if action.assign_owner:
        document.owner = action.assign_owner

    if action.assign_title:
        try:
            document.title = parse_w_workflow_placeholders(
                action.assign_title,
                document.correspondent.name if document.correspondent else "",
                document.document_type.name if document.document_type else "",
                document.owner.username if document.owner else "",
                timezone.localtime(document.added),
                document.original_filename or "",
                document.filename or "",
                document.created,
            )
        except Exception:  # pragma: no cover
            logger.exception(
                f"Error occurred parsing title assignment '{action.assign_title}', falling back to original",
                extra={"group": logging_group},
            )

    if any(
        [
            action.has_assign_view_users,
            action.has_assign_view_groups,
            action.has_assign_change_users,
            action.has_assign_change_groups,
        ],
    ):
        permissions = {
            "view": {
                "users": action.assign_view_users.values_list("id", flat=True),
                "groups": action.assign_view_groups.values_list("id", flat=True),
            },
            "change": {
                "users": action.assign_change_users.values_list("id", flat=True),
                "groups": action.assign_change_groups.values_list("id", flat=True),
            },
        }
        set_permissions_for_object(
            permissions=permissions,
            object=document,
            merge=True,
        )

    if action.has_assign_custom_fields:
        for field in action.assign_custom_fields.all():
            value_field_name = CustomFieldInstance.get_value_field_name(
                data_type=field.data_type,
            )
            args = {
                value_field_name: action.assign_custom_fields_values.get(
                    str(field.pk),
                    None,
                ),
            }
            # for some reason update_or_create doesn't work here
            instance = CustomFieldInstance.objects.filter(
                field=field,
                document=document,
            ).first()
            if instance and args[value_field_name] is not None:
                setattr(instance, value_field_name, args[value_field_name])
                instance.save()
            elif not instance:
                CustomFieldInstance.objects.create(
                    **args,
                    field=field,
                    document=document,
                )


def apply_assignment_to_overrides(
    action: WorkflowAction,
    overrides: DocumentMetadataOverrides,
):
    """
    Apply assignment actions to DocumentMetadataOverrides.

    action: WorkflowAction, annotated with 'has_assign_*' boolean fields
    """
    if action.has_assign_tags:
        if overrides.tag_ids is None:
            overrides.tag_ids = []
        tag_ids_to_add: set[int] = set()
        for tag in action.assign_tags.all():
            tag_ids_to_add.add(tag.pk)
            tag_ids_to_add.update(int(pk) for pk in tag.get_ancestors_pks())

        overrides.tag_ids = list(set(overrides.tag_ids) | tag_ids_to_add)

    if action.assign_correspondent:
        overrides.correspondent_id = action.assign_correspondent.pk

    if action.assign_document_type:
        overrides.document_type_id = action.assign_document_type.pk

    if action.assign_storage_path:
        overrides.storage_path_id = action.assign_storage_path.pk

    if action.assign_owner:
        overrides.owner_id = action.assign_owner.pk

    if action.assign_title:
        overrides.title = action.assign_title

    if any(
        [
            action.has_assign_view_users,
            action.has_assign_view_groups,
            action.has_assign_change_users,
            action.has_assign_change_groups,
        ],
    ):
        overrides.view_users = list(
            set(
                (overrides.view_users or [])
                + list(action.assign_view_users.values_list("id", flat=True)),
            ),
        )
        overrides.view_groups = list(
            set(
                (overrides.view_groups or [])
                + list(action.assign_view_groups.values_list("id", flat=True)),
            ),
        )
        overrides.change_users = list(
            set(
                (overrides.change_users or [])
                + list(action.assign_change_users.values_list("id", flat=True)),
            ),
        )
        overrides.change_groups = list(
            set(
                (overrides.change_groups or [])
                + list(action.assign_change_groups.values_list("id", flat=True)),
            ),
        )

    if action.has_assign_custom_fields:
        if overrides.custom_fields is None:
            overrides.custom_fields = {}
        overrides.custom_fields.update(
            {
                field.pk: action.assign_custom_fields_values.get(
                    str(field.pk),
                    None,
                )
                for field in action.assign_custom_fields.all()
            },
        )


def apply_removal_to_document(
    action: WorkflowAction,
    document: Document,
    doc_tag_ids: list[int],
):
    """
    Apply removal actions to a Document instance.

    action: WorkflowAction, annotated with 'has_remove_*' boolean fields
    """

    if action.remove_all_tags:
        doc_tag_ids.clear()
    else:
        tag_ids_to_remove: set[int] = set()
        for tag in action.remove_tags.all():
            tag_ids_to_remove.add(tag.pk)
            tag_ids_to_remove.update(int(pk) for pk in tag.get_descendants_pks())

        doc_tag_ids[:] = [t for t in doc_tag_ids if t not in tag_ids_to_remove]

    if action.remove_all_correspondents or (
        document.correspondent
        and action.remove_correspondents.filter(pk=document.correspondent.pk).exists()
    ):
        document.correspondent = None

    if action.remove_all_document_types or (
        document.document_type
        and action.remove_document_types.filter(pk=document.document_type.pk).exists()
    ):
        document.document_type = None

    if action.remove_all_storage_paths or (
        document.storage_path
        and action.remove_storage_paths.filter(pk=document.storage_path.pk).exists()
    ):
        document.storage_path = None

    if action.remove_all_owners or (
        document.owner and action.remove_owners.filter(pk=document.owner.pk).exists()
    ):
        document.owner = None

    if action.remove_all_permissions:
        permissions = {
            "view": {"users": [], "groups": []},
            "change": {"users": [], "groups": []},
        }
        set_permissions_for_object(
            permissions=permissions,
            object=document,
            merge=False,
        )

    if any(
        [
            action.has_remove_view_users,
            action.has_remove_view_groups,
            action.has_remove_change_users,
            action.has_remove_change_groups,
        ],
    ):
        for user in action.remove_view_users.all():
            remove_perm("view_document", user, document)
        for user in action.remove_change_users.all():
            remove_perm("change_document", user, document)
        for group in action.remove_view_groups.all():
            remove_perm("view_document", group, document)
        for group in action.remove_change_groups.all():
            remove_perm("change_document", group, document)

    if action.remove_all_custom_fields:
        CustomFieldInstance.objects.filter(document=document).hard_delete()
    elif action.has_remove_custom_fields:
        CustomFieldInstance.objects.filter(
            field__in=action.remove_custom_fields.all(),
            document=document,
        ).hard_delete()


def apply_removal_to_overrides(
    action: WorkflowAction,
    overrides: DocumentMetadataOverrides,
):
    """
    Apply removal actions to DocumentMetadataOverrides.

    action: WorkflowAction, annotated with 'has_remove_*' boolean fields
    """
    if action.remove_all_tags:
        overrides.tag_ids = None
    elif overrides.tag_ids:
        tag_ids_to_remove: set[int] = set()
        for tag in action.remove_tags.all():
            tag_ids_to_remove.add(tag.pk)
            tag_ids_to_remove.update(int(pk) for pk in tag.get_descendants_pks())

        overrides.tag_ids = [t for t in overrides.tag_ids if t not in tag_ids_to_remove]

    if action.remove_all_correspondents or (
        overrides.correspondent_id
        and action.remove_correspondents.filter(pk=overrides.correspondent_id).exists()
    ):
        overrides.correspondent_id = None

    if action.remove_all_document_types or (
        overrides.document_type_id
        and action.remove_document_types.filter(pk=overrides.document_type_id).exists()
    ):
        overrides.document_type_id = None

    if action.remove_all_storage_paths or (
        overrides.storage_path_id
        and action.remove_storage_paths.filter(pk=overrides.storage_path_id).exists()
    ):
        overrides.storage_path_id = None

    if action.remove_all_owners or (
        overrides.owner_id
        and action.remove_owners.filter(pk=overrides.owner_id).exists()
    ):
        overrides.owner_id = None

    if action.remove_all_permissions:
        overrides.view_users = None
        overrides.view_groups = None
        overrides.change_users = None
        overrides.change_groups = None
    elif any(
        [
            action.has_remove_view_users,
            action.has_remove_view_groups,
            action.has_remove_change_users,
            action.has_remove_change_groups,
        ],
    ):
        if overrides.view_users:
            for user in action.remove_view_users.filter(pk__in=overrides.view_users):
                overrides.view_users.remove(user.pk)
        if overrides.change_users:
            for user in action.remove_change_users.filter(
                pk__in=overrides.change_users,
            ):
                overrides.change_users.remove(user.pk)
        if overrides.view_groups:
            for group in action.remove_view_groups.filter(pk__in=overrides.view_groups):
                overrides.view_groups.remove(group.pk)
        if overrides.change_groups:
            for group in action.remove_change_groups.filter(
                pk__in=overrides.change_groups,
            ):
                overrides.change_groups.remove(group.pk)

    if action.remove_all_custom_fields:
        overrides.custom_fields = None
    elif action.has_remove_custom_fields and overrides.custom_fields:
        for field in action.remove_custom_fields.filter(
            pk__in=overrides.custom_fields.keys(),
        ):
            overrides.custom_fields.pop(field.pk, None)
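Note: every mutation has a Document variant and a DocumentMetadataOverrides variant because consumption-triggered workflows run before a Document row exists, so they can only stage changes in the overrides. A speculative sketch of how a runner could dispatch between the two; the local names (target, overrides, doc_tag_ids, logging_group) are hypothetical, and the actual call sites live in the workflow runner, which is not part of this diff:

    if isinstance(target, Document):
        apply_assignment_to_document(action, target, doc_tag_ids, logging_group)
        apply_removal_to_document(action, target, doc_tag_ids)
    else:
        # ConsumableDocument: mutate the metadata overrides instead
        apply_assignment_to_overrides(action, overrides)
        apply_removal_to_overrides(action, overrides)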
src/documents/workflows/utils.py (new file, 116 lines)
@@ -0,0 +1,116 @@
import logging

from django.db.models import Exists
from django.db.models import OuterRef
from django.db.models import Prefetch

from documents.models import Workflow
from documents.models import WorkflowAction
from documents.models import WorkflowTrigger

logger = logging.getLogger("paperless.workflows")


def get_workflows_for_trigger(
    trigger_type: WorkflowTrigger.WorkflowTriggerType,
    workflow_to_run: Workflow | None = None,
):
    """
    Return workflows relevant to a trigger. If a specific workflow is given,
    wrap it in a list; otherwise fetch enabled workflows for the trigger with
    the prefetches used by the runner.
    """
    if workflow_to_run is not None:
        return [workflow_to_run]

    annotated_actions = (
        WorkflowAction.objects.select_related(
            "assign_correspondent",
            "assign_document_type",
            "assign_storage_path",
            "assign_owner",
            "email",
            "webhook",
        )
        .prefetch_related(
            "assign_tags",
            "assign_view_users",
            "assign_view_groups",
            "assign_change_users",
            "assign_change_groups",
            "assign_custom_fields",
            "remove_tags",
            "remove_correspondents",
            "remove_document_types",
            "remove_storage_paths",
            "remove_custom_fields",
            "remove_owners",
        )
        .annotate(
            has_assign_tags=Exists(
                WorkflowAction.assign_tags.through.objects.filter(
                    workflowaction_id=OuterRef("pk"),
                ),
            ),
            has_assign_view_users=Exists(
                WorkflowAction.assign_view_users.through.objects.filter(
                    workflowaction_id=OuterRef("pk"),
                ),
            ),
            has_assign_view_groups=Exists(
                WorkflowAction.assign_view_groups.through.objects.filter(
                    workflowaction_id=OuterRef("pk"),
                ),
            ),
            has_assign_change_users=Exists(
                WorkflowAction.assign_change_users.through.objects.filter(
                    workflowaction_id=OuterRef("pk"),
                ),
            ),
            has_assign_change_groups=Exists(
                WorkflowAction.assign_change_groups.through.objects.filter(
                    workflowaction_id=OuterRef("pk"),
                ),
            ),
            has_assign_custom_fields=Exists(
                WorkflowAction.assign_custom_fields.through.objects.filter(
                    workflowaction_id=OuterRef("pk"),
                ),
            ),
            has_remove_view_users=Exists(
                WorkflowAction.remove_view_users.through.objects.filter(
                    workflowaction_id=OuterRef("pk"),
                ),
            ),
            has_remove_view_groups=Exists(
                WorkflowAction.remove_view_groups.through.objects.filter(
                    workflowaction_id=OuterRef("pk"),
                ),
            ),
            has_remove_change_users=Exists(
                WorkflowAction.remove_change_users.through.objects.filter(
                    workflowaction_id=OuterRef("pk"),
                ),
            ),
            has_remove_change_groups=Exists(
                WorkflowAction.remove_change_groups.through.objects.filter(
                    workflowaction_id=OuterRef("pk"),
                ),
            ),
            has_remove_custom_fields=Exists(
                WorkflowAction.remove_custom_fields.through.objects.filter(
                    workflowaction_id=OuterRef("pk"),
                ),
            ),
        )
    )

    return (
        Workflow.objects.filter(enabled=True, triggers__type=trigger_type)
        .prefetch_related(
            Prefetch("actions", queryset=annotated_actions),
            "triggers",
        )
        .order_by("order")
        .distinct()
    )
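Note: the has_assign_*/has_remove_* annotations let the mutation helpers ask "does this action touch tags/users/fields at all?" as a boolean computed in the same query, instead of issuing a per-action .exists() round trip against each many-to-many table. The underlying Django pattern, reduced to a single field:

    from django.db.models import Exists, OuterRef

    qs = WorkflowAction.objects.annotate(
        # True iff the M2M through table has at least one row for this action
        has_assign_tags=Exists(
            WorkflowAction.assign_tags.through.objects.filter(
                workflowaction_id=OuterRef("pk"),
            ),
        ),
    )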
src/documents/workflows/webhooks.py (new file, 171 lines)
@@ -0,0 +1,171 @@
import ipaddress
import logging
import socket
from urllib.parse import urlparse

import httpx
from celery import shared_task
from django.conf import settings

logger = logging.getLogger("paperless.workflows.webhooks")


class WebhookTransport(httpx.HTTPTransport):
    """
    Transport that resolves/validates hostnames and rewrites to a vetted IP
    while keeping Host/SNI as the original hostname.
    """

    def __init__(
        self,
        hostname: str,
        *args,
        allow_internal: bool = False,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.hostname = hostname
        self.allow_internal = allow_internal

    def handle_request(self, request: httpx.Request) -> httpx.Response:
        hostname = request.url.host

        if not hostname:
            raise httpx.ConnectError("No hostname in request URL")

        try:
            addr_info = socket.getaddrinfo(hostname, None)
        except socket.gaierror as e:
            raise httpx.ConnectError(f"Could not resolve hostname: {hostname}") from e

        ips = [info[4][0] for info in addr_info if info and info[4]]
        if not ips:
            raise httpx.ConnectError(f"Could not resolve hostname: {hostname}")

        if not self.allow_internal:
            for ip_str in ips:
                if not WebhookTransport.is_public_ip(ip_str):
                    raise httpx.ConnectError(
                        f"Connection blocked: {hostname} resolves to a non-public address",
                    )

        ip_str = ips[0]
        formatted_ip = self._format_ip_for_url(ip_str)

        new_headers = httpx.Headers(request.headers)
        if "host" in new_headers:
            del new_headers["host"]
        new_headers["Host"] = hostname
        new_url = request.url.copy_with(host=formatted_ip)

        request = httpx.Request(
            method=request.method,
            url=new_url,
            headers=new_headers,
            content=request.content,
            extensions=request.extensions,
        )
        request.extensions["sni_hostname"] = hostname

        return super().handle_request(request)

    def _format_ip_for_url(self, ip: str) -> str:
        """
        Format IP address for use in URL (wrap IPv6 in brackets)
        """
        try:
            ip_obj = ipaddress.ip_address(ip)
            if ip_obj.version == 6:
                return f"[{ip}]"
            return ip
        except ValueError:
            return ip

    @staticmethod
    def is_public_ip(ip: str | int) -> bool:
        try:
            obj = ipaddress.ip_address(ip)
            return not (
                obj.is_private
                or obj.is_loopback
                or obj.is_link_local
                or obj.is_multicast
                or obj.is_unspecified
            )
        except ValueError:  # pragma: no cover
            return False

    @staticmethod
    def resolve_first_ip(host: str) -> str | None:
        try:
            info = socket.getaddrinfo(host, None)
            return info[0][4][0] if info else None
        except Exception:  # pragma: no cover
            return None


@shared_task(
    retry_backoff=True,
    autoretry_for=(httpx.HTTPStatusError,),
    max_retries=3,
    throws=(httpx.HTTPError,),
)
def send_webhook(
    url: str,
    data: str | dict,
    headers: dict,
    files: dict,
    *,
    as_json: bool = False,
):
    p = urlparse(url)
    if p.scheme.lower() not in settings.WEBHOOKS_ALLOWED_SCHEMES or not p.hostname:
        logger.warning("Webhook blocked: invalid scheme/hostname")
        raise ValueError("Invalid URL scheme or hostname.")

    port = p.port or (443 if p.scheme == "https" else 80)
    if (
        len(settings.WEBHOOKS_ALLOWED_PORTS) > 0
        and port not in settings.WEBHOOKS_ALLOWED_PORTS
    ):
        logger.warning("Webhook blocked: port not permitted")
        raise ValueError("Destination port not permitted.")

    transport = WebhookTransport(
        hostname=p.hostname,
        allow_internal=settings.WEBHOOKS_ALLOW_INTERNAL_REQUESTS,
    )

    try:
        post_args = {
            "url": url,
            "headers": {
                k: v for k, v in (headers or {}).items() if k.lower() != "host"
            },
            "files": files or None,
        }
        if as_json:
            post_args["json"] = data
        elif isinstance(data, dict):
            post_args["data"] = data
        else:
            post_args["content"] = data

        with httpx.Client(
            transport=transport,
            timeout=5.0,
            follow_redirects=False,
        ) as client:
            client.post(
                **post_args,
            ).raise_for_status()
        logger.info(
            f"Webhook sent to {url}",
        )
    except Exception as e:
        logger.error(
            f"Failed attempt sending webhook to {url}: {e}",
        )
        raise e
    finally:
        transport.close()
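Note: resolving the hostname once, vetting the result, and then connecting to that exact IP (while keeping Host and SNI set to the original name) is what closes the DNS-rebinding window: the address that was validated is the address that receives the request. That is also why the test above now asserts that the captured request's URL host is the resolved IP while its Host header is still "paperless-ngx.com". A minimal sketch of driving the transport directly, assuming a publicly resolvable name; this performs real DNS resolution and a real request:

    import httpx

    transport = WebhookTransport(hostname="paperless-ngx.com", allow_internal=False)
    with httpx.Client(transport=transport, timeout=5.0, follow_redirects=False) as client:
        # Raises httpx.ConnectError if the name resolves to a private, loopback,
        # link-local, multicast, or unspecified address.
        client.post("http://paperless-ngx.com", content="ping").raise_for_status()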
@@ -2,7 +2,7 @@ msgid ""
 msgstr ""
 "Project-Id-Version: paperless-ngx\n"
 "Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2025-11-14 16:09+0000\n"
+"POT-Creation-Date: 2025-12-12 17:41+0000\n"
 "PO-Revision-Date: 2022-02-17 04:17\n"
 "Last-Translator: \n"
 "Language-Team: English\n"
@@ -1219,40 +1219,35 @@ msgstr ""
 msgid "workflow runs"
 msgstr ""
 
-#: documents/serialisers.py:145
-#, python-format
-msgid "Invalid regular expression: %(error)s"
-msgstr ""
-
-#: documents/serialisers.py:619
+#: documents/serialisers.py:640
 msgid "Invalid color."
 msgstr ""
 
-#: documents/serialisers.py:1805
+#: documents/serialisers.py:1826
 #, python-format
 msgid "File type %(type)s not supported"
 msgstr ""
 
-#: documents/serialisers.py:1849
+#: documents/serialisers.py:1870
 #, python-format
 msgid "Custom field id must be an integer: %(id)s"
 msgstr ""
 
-#: documents/serialisers.py:1856
+#: documents/serialisers.py:1877
 #, python-format
 msgid "Custom field with id %(id)s does not exist"
 msgstr ""
 
-#: documents/serialisers.py:1873 documents/serialisers.py:1883
+#: documents/serialisers.py:1894 documents/serialisers.py:1904
 msgid ""
 "Custom fields must be a list of integers or an object mapping ids to values."
 msgstr ""
 
-#: documents/serialisers.py:1878
+#: documents/serialisers.py:1899
 msgid "Some custom fields don't exist or were specified twice."
 msgstr ""
 
-#: documents/serialisers.py:1993
+#: documents/serialisers.py:2014
 msgid "Invalid variable detected."
 msgstr ""
@@ -137,3 +137,25 @@ class CustomSocialAccountAdapter(DefaultSocialAccountAdapter):
         user.save()
         handle_social_account_updated(None, request, sociallogin)
         return user
+
+    def on_authentication_error(
+        self,
+        request,
+        provider,
+        error=None,
+        exception=None,
+        extra_context=None,
+    ):
+        """
+        Just log errors and pass them along.
+        """
+        logger.warning(
+            f"Social authentication error for provider `{provider!s}`: {error!s} ({exception!s})",
+        )
+        return super().on_authentication_error(
+            request,
+            provider,
+            error,
+            exception,
+            extra_context,
+        )

@@ -167,3 +167,17 @@ class TestCustomSocialAccountAdapter(TestCase):
         self.assertEqual(user.groups.count(), 1)
         self.assertTrue(user.groups.filter(name="group1").exists())
         self.assertFalse(user.groups.filter(name="group2").exists())
+
+    def test_error_logged_on_authentication_error(self):
+        adapter = get_social_adapter()
+        request = HttpRequest()
+        with self.assertLogs("paperless.auth", level="INFO") as log_cm:
+            adapter.on_authentication_error(
+                request,
+                provider="test-provider",
+                error="Error",
+                exception="Test authentication error",
+            )
+        self.assertTrue(
+            any("Test authentication error" in message for message in log_cm.output),
+        )
@@ -14,13 +14,14 @@ ALLOWED_SVG_TAGS: set[str] = {
     "text",
     "tspan",
     "defs",
-    "linearGradient",
-    "radialGradient",
+    "lineargradient",
+    "radialgradient",
     "stop",
-    "clipPath",
+    "clippath",
     "use",
     "title",
     "desc",
+    "style",
 }
 
 ALLOWED_SVG_ATTRIBUTES: set[str] = {
@@ -29,6 +30,7 @@ ALLOWED_SVG_ATTRIBUTES: set[str] = {
     "style",
     "d",
     "fill",
+    "fill-opacity",
     "fill-rule",
     "stroke",
     "stroke-width",
@@ -52,14 +54,14 @@ ALLOWED_SVG_ATTRIBUTES: set[str] = {
     "y1",
     "x2",
     "y2",
-    "gradientTransform",
-    "gradientUnits",
+    "gradienttransform",
+    "gradientunits",
     "offset",
     "stop-color",
     "stop-opacity",
     "clip-path",
-    "viewBox",
-    "preserveAspectRatio",
+    "viewbox",
+    "preserveaspectratio",
     "href",
     "xlink:href",
     "font-family",
@@ -68,6 +70,8 @@ ALLOWED_SVG_ATTRIBUTES: set[str] = {
     "text-anchor",
     "xmlns",
     "xmlns:xlink",
+    "version",
+    "type",
 }
 
 
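Note: the camelCase SVG names (linearGradient, clipPath, viewBox, gradientUnits, preserveAspectRatio) are rewritten in lowercase, presumably because the allowlist is compared against parser output, and HTML-style parsers lowercase element and attribute names. A quick way to observe that behaviour with the standard library:

    from html.parser import HTMLParser

    class Probe(HTMLParser):
        def handle_starttag(self, tag, attrs):
            print(tag, attrs)  # -> lineargradient [('gradientunits', 'userSpaceOnUse')]

    Probe().feed('<linearGradient gradientUnits="userSpaceOnUse"></linearGradient>')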
uv.lock (generated, 54 lines changed)
@@ -331,15 +331,15 @@ wheels = [
 
 [[package]]
 name = "channels"
-version = "4.3.1"
+version = "4.3.2"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "asgiref", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "django", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/12/a0/46450fcf9e56af18a6b0440ba49db6635419bb7bc84142c35f4143b1a66c/channels-4.3.1.tar.gz", hash = "sha256:97413ffd674542db08e16a9ef09cd86ec0113e5f8125fbd33cf0854adcf27cdb", size = 26896, upload-time = "2025-08-01T13:25:19.952Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/74/92/b18d4bb54d14986a8b35215a1c9e6a7f9f4d57ca63ac9aee8290ebb4957d/channels-4.3.2.tar.gz", hash = "sha256:f2bb6bfb73ad7fb4705041d07613c7b4e69528f01ef8cb9fb6c21d9295f15667", size = 27023, upload-time = "2025-11-20T15:13:05.102Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/89/1c/eae1c2a8c195760376e7f65d0bdcc3e966695d29cfbe5c54841ce5c71408/channels-4.3.1-py3-none-any.whl", hash = "sha256:b091d4b26f91d807de3e84aead7ba785314f27eaf5bac31dd51b1c956b883859", size = 31286, upload-time = "2025-08-01T13:25:18.845Z" },
+    { url = "https://files.pythonhosted.org/packages/16/34/c32915288b7ef482377b6adc401192f98c6a99b3a145423d3b8aed807898/channels-4.3.2-py3-none-any.whl", hash = "sha256:fef47e9055a603900cf16cef85f050d522d9ac4b3daccf24835bd9580705c176", size = 31313, upload-time = "2025-11-20T15:13:02.357Z" },
 ]
 
 [[package]]
@@ -839,14 +839,14 @@ wheels = [
 
 [[package]]
 name = "django-soft-delete"
-version = "1.0.21"
+version = "1.0.22"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "django", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/da/bf/13996c18bffee3bbcf294830c1737bfb5564164b8319c51e6714b6bdf783/django_soft_delete-1.0.21.tar.gz", hash = "sha256:542bd4650d2769105a4363ea7bb7fbdb3c28429dbaa66417160f8f4b5dc689d5", size = 21153, upload-time = "2025-09-17T08:46:30.476Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/98/d1/c990b731676f93bd4594dee4b5133df52f5d0eee1eb8a969b4030014ac54/django_soft_delete-1.0.22.tar.gz", hash = "sha256:32d0bb95f180c28a40163e78a558acc18901fd56011f91f8ee735c171a6d4244", size = 21982, upload-time = "2025-10-25T13:11:46.199Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/fa/e6/8f4fed14499c63e35ca33cf9f424ad2e14e963ec5545594d7c7dc2f710f4/django_soft_delete-1.0.21-py3-none-any.whl", hash = "sha256:dd91e671d9d431ff96f4db727ce03e7fbb4008ae4541b1d162d5d06cc9becd2a", size = 18681, upload-time = "2025-09-17T08:46:29.272Z" },
+    { url = "https://files.pythonhosted.org/packages/f5/c2/fca2bf69b7ca7e18aed9ac059e89f1043663e207a514e8fb652450e49631/django_soft_delete-1.0.22-py3-none-any.whl", hash = "sha256:81973c541d21452d249151085d617ebbfb5ec463899f47cd6b1306677481e94c", size = 19221, upload-time = "2025-10-25T13:11:44.755Z" },
 ]
 
 [[package]]
@@ -885,11 +885,11 @@ wheels = [
 
 [[package]]
 name = "django-treenode"
-version = "0.23.2"
+version = "0.23.3"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/75/f3/274b84607fd64c0844e98659985f964190a46c2460f2523a446c4a946216/django_treenode-0.23.2.tar.gz", hash = "sha256:3c5a6ff5e0c83e34da88749f602b3013dd1ab0527f51952c616e3c21bf265d52", size = 26700, upload-time = "2025-09-04T21:16:53.497Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/25/58/86edbbd1075bb8bc0962c6feb13bc06822405a10fea8352ad73ab2babdd9/django_treenode-0.23.3.tar.gz", hash = "sha256:714c825d5b925a3d2848d0709f29973941ea41a606b8e2b64cbec46010a8cce3", size = 27812, upload-time = "2025-12-01T23:01:24.847Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/00/61/e17d3dee5c6bb24b8faf0c101e17f9a8cafeba6384166176e066c80e8cbb/django_treenode-0.23.2-py3-none-any.whl", hash = "sha256:9363cb50f753654a9acfad6ec4df2a664a5f89dfdf8b55ffd964f27461bef85e", size = 21879, upload-time = "2025-09-04T21:16:51.811Z" },
+    { url = "https://files.pythonhosted.org/packages/bc/52/696db237167483324ef38d8d090fb0fcc33dbb70ebe66c75868005fb7c75/django_treenode-0.23.3-py3-none-any.whl", hash = "sha256:8072e1ac688c1ed3ab95a98a797c5e965380de5228a389d60a4ef8b9a6449387", size = 22014, upload-time = "2025-12-01T23:01:23.266Z" },
 ]
 
 [[package]]
@@ -2163,6 +2163,7 @@ dependencies = [
     { name = "pyzbar", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "rapidfuzz", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "redis", extra = ["hiredis"], marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+    { name = "regex", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "scikit-learn", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "setproctitle", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "tika-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -2306,6 +2307,7 @@ requires-dist = [
     { name = "pyzbar", specifier = "~=0.1.9" },
     { name = "rapidfuzz", specifier = "~=3.14.0" },
     { name = "redis", extras = ["hiredis"], specifier = "~=5.2.1" },
+    { name = "regex", specifier = ">=2025.9.18" },
     { name = "scikit-learn", specifier = "~=1.7.0" },
     { name = "setproctitle", specifier = "~=1.3.4" },
     { name = "tika-client", specifier = "~=0.10.0" },
@@ -3502,25 +3504,25 @@ wheels = [
 
 [[package]]
 name = "ruff"
-version = "0.14.5"
+version = "0.14.9"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/82/fa/fbb67a5780ae0f704876cb8ac92d6d76da41da4dc72b7ed3565ab18f2f52/ruff-0.14.5.tar.gz", hash = "sha256:8d3b48d7d8aad423d3137af7ab6c8b1e38e4de104800f0d596990f6ada1a9fc1", size = 5615944, upload-time = "2025-11-13T19:58:51.155Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/f6/1b/ab712a9d5044435be8e9a2beb17cbfa4c241aa9b5e4413febac2a8b79ef2/ruff-0.14.9.tar.gz", hash = "sha256:35f85b25dd586381c0cc053f48826109384c81c00ad7ef1bd977bfcc28119d5b", size = 5809165, upload-time = "2025-12-11T21:39:47.381Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/68/31/c07e9c535248d10836a94e4f4e8c5a31a1beed6f169b31405b227872d4f4/ruff-0.14.5-py3-none-linux_armv6l.whl", hash = "sha256:f3b8248123b586de44a8018bcc9fefe31d23dda57a34e6f0e1e53bd51fd63594", size = 13171630, upload-time = "2025-11-13T19:57:54.894Z" },
+    { url = "https://files.pythonhosted.org/packages/b8/1c/d1b1bba22cffec02351c78ab9ed4f7d7391876e12720298448b29b7229c1/ruff-0.14.9-py3-none-linux_armv6l.whl", hash = "sha256:f1ec5de1ce150ca6e43691f4a9ef5c04574ad9ca35c8b3b0e18877314aba7e75", size = 13576541, upload-time = "2025-12-11T21:39:14.806Z" },
-    { url = "https://files.pythonhosted.org/packages/8e/5c/283c62516dca697cd604c2796d1487396b7a436b2f0ecc3fd412aca470e0/ruff-0.14.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:f7a75236570318c7a30edd7f5491945f0169de738d945ca8784500b517163a72", size = 13413925, upload-time = "2025-11-13T19:57:59.181Z" },
+    { url = "https://files.pythonhosted.org/packages/94/ab/ffe580e6ea1fca67f6337b0af59fc7e683344a43642d2d55d251ff83ceae/ruff-0.14.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ed9d7417a299fc6030b4f26333bf1117ed82a61ea91238558c0268c14e00d0c2", size = 13779363, upload-time = "2025-12-11T21:39:20.29Z" },
-    { url = "https://files.pythonhosted.org/packages/b6/f3/aa319f4afc22cb6fcba2b9cdfc0f03bbf747e59ab7a8c5e90173857a1361/ruff-0.14.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:6d146132d1ee115f8802356a2dc9a634dbf58184c51bff21f313e8cd1c74899a", size = 12574040, upload-time = "2025-11-13T19:58:02.056Z" },
+    { url = "https://files.pythonhosted.org/packages/7d/f8/2be49047f929d6965401855461e697ab185e1a6a683d914c5c19c7962d9e/ruff-0.14.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d5dc3473c3f0e4a1008d0ef1d75cee24a48e254c8bed3a7afdd2b4392657ed2c", size = 12925292, upload-time = "2025-12-11T21:39:38.757Z" },
-    { url = "https://files.pythonhosted.org/packages/f9/7f/cb5845fcc7c7e88ed57f58670189fc2ff517fe2134c3821e77e29fd3b0c8/ruff-0.14.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2380596653dcd20b057794d55681571a257a42327da8894b93bbd6111aa801f", size = 13009755, upload-time = "2025-11-13T19:58:05.172Z" },
+    { url = "https://files.pythonhosted.org/packages/9e/e9/08840ff5127916bb989c86f18924fd568938b06f58b60e206176f327c0fe/ruff-0.14.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84bf7c698fc8f3cb8278830fb6b5a47f9bcc1ed8cb4f689b9dd02698fa840697", size = 13362894, upload-time = "2025-12-11T21:39:02.524Z" },
-    { url = "https://files.pythonhosted.org/packages/21/d2/bcbedbb6bcb9253085981730687ddc0cc7b2e18e8dc13cf4453de905d7a0/ruff-0.14.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d1fa985a42b1f075a098fa1ab9d472b712bdb17ad87a8ec86e45e7fa6273e68", size = 12937641, upload-time = "2025-11-13T19:58:08.345Z" },
+    { url = "https://files.pythonhosted.org/packages/31/1c/5b4e8e7750613ef43390bb58658eaf1d862c0cc3352d139cd718a2cea164/ruff-0.14.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aa733093d1f9d88a5d98988d8834ef5d6f9828d03743bf5e338bf980a19fce27", size = 13311482, upload-time = "2025-12-11T21:39:17.51Z" },
-    { url = "https://files.pythonhosted.org/packages/a4/58/e25de28a572bdd60ffc6bb71fc7fd25a94ec6a076942e372437649cbb02a/ruff-0.14.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88f0770d42b7fa02bbefddde15d235ca3aa24e2f0137388cc15b2dcbb1f7c7a7", size = 13610854, upload-time = "2025-11-13T19:58:11.419Z" },
+    { url = "https://files.pythonhosted.org/packages/5b/3a/459dce7a8cb35ba1ea3e9c88f19077667a7977234f3b5ab197fad240b404/ruff-0.14.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a1cfb04eda979b20c8c19550c8b5f498df64ff8da151283311ce3199e8b3648", size = 14016100, upload-time = "2025-12-11T21:39:41.948Z" },
-    { url = "https://files.pythonhosted.org/packages/7d/24/43bb3fd23ecee9861970978ea1a7a63e12a204d319248a7e8af539984280/ruff-0.14.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3676cb02b9061fee7294661071c4709fa21419ea9176087cb77e64410926eb78", size = 15061088, upload-time = "2025-11-13T19:58:14.551Z" },
+    { url = "https://files.pythonhosted.org/packages/a6/31/f064f4ec32524f9956a0890fc6a944e5cf06c63c554e39957d208c0ffc45/ruff-0.14.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1e5cb521e5ccf0008bd74d5595a4580313844a42b9103b7388eca5a12c970743", size = 15477729, upload-time = "2025-12-11T21:39:23.279Z" },
-    { url = "https://files.pythonhosted.org/packages/23/44/a022f288d61c2f8c8645b24c364b719aee293ffc7d633a2ca4d116b9c716/ruff-0.14.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b595bedf6bc9cab647c4a173a61acf4f1ac5f2b545203ba82f30fcb10b0318fb", size = 14734717, upload-time = "2025-11-13T19:58:17.518Z" },
+    { url = "https://files.pythonhosted.org/packages/7a/6d/f364252aad36ccd443494bc5f02e41bf677f964b58902a17c0b16c53d890/ruff-0.14.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd429a8926be6bba4befa8cdcf3f4dd2591c413ea5066b1e99155ed245ae42bb", size = 15122386, upload-time = "2025-12-11T21:39:33.125Z" },
-    { url = "https://files.pythonhosted.org/packages/58/81/5c6ba44de7e44c91f68073e0658109d8373b0590940efe5bd7753a2585a3/ruff-0.14.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f55382725ad0bdb2e8ee2babcbbfb16f124f5a59496a2f6a46f1d9d99d93e6e2", size = 14028812, upload-time = "2025-11-13T19:58:20.533Z" },
+    { url = "https://files.pythonhosted.org/packages/20/02/e848787912d16209aba2799a4d5a1775660b6a3d0ab3944a4ccc13e64a02/ruff-0.14.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab208c1b7a492e37caeaf290b1378148f75e13c2225af5d44628b95fd7834273", size = 14497124, upload-time = "2025-12-11T21:38:59.33Z" },
-    { url = "https://files.pythonhosted.org/packages/ad/ef/41a8b60f8462cb320f68615b00299ebb12660097c952c600c762078420f8/ruff-0.14.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7497d19dce23976bdaca24345ae131a1d38dcfe1b0850ad8e9e6e4fa321a6e19", size = 13825656, upload-time = "2025-11-13T19:58:23.345Z" },
+    { url = "https://files.pythonhosted.org/packages/f3/51/0489a6a5595b7760b5dbac0dd82852b510326e7d88d51dbffcd2e07e3ff3/ruff-0.14.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72034534e5b11e8a593f517b2f2f2b273eb68a30978c6a2d40473ad0aaa4cb4a", size = 14195343, upload-time = "2025-12-11T21:39:44.866Z" },
-    { url = "https://files.pythonhosted.org/packages/7c/00/207e5de737fdb59b39eb1fac806904fe05681981b46d6a6db9468501062e/ruff-0.14.5-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:410e781f1122d6be4f446981dd479470af86537fb0b8857f27a6e872f65a38e4", size = 13959922, upload-time = "2025-11-13T19:58:26.537Z" },
+    { url = "https://files.pythonhosted.org/packages/f6/53/3bb8d2fa73e4c2f80acc65213ee0830fa0c49c6479313f7a68a00f39e208/ruff-0.14.9-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:712ff04f44663f1b90a1195f51525836e3413c8a773574a7b7775554269c30ed", size = 14346425, upload-time = "2025-12-11T21:39:05.927Z" },
-    { url = "https://files.pythonhosted.org/packages/bc/7e/fa1f5c2776db4be405040293618846a2dece5c70b050874c2d1f10f24776/ruff-0.14.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c01be527ef4c91a6d55e53b337bfe2c0f82af024cc1a33c44792d6844e2331e1", size = 12932501, upload-time = "2025-11-13T19:58:29.822Z" },
+    { url = "https://files.pythonhosted.org/packages/ad/04/bdb1d0ab876372da3e983896481760867fc84f969c5c09d428e8f01b557f/ruff-0.14.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a111fee1db6f1d5d5810245295527cda1d367c5aa8f42e0fca9a78ede9b4498b", size = 13258768, upload-time = "2025-12-11T21:39:08.691Z" },
-    { url = "https://files.pythonhosted.org/packages/67/d8/d86bf784d693a764b59479a6bbdc9515ae42c340a5dc5ab1dabef847bfaa/ruff-0.14.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f66e9bb762e68d66e48550b59c74314168ebb46199886c5c5aa0b0fbcc81b151", size = 12927319, upload-time = "2025-11-13T19:58:32.923Z" },
|
{ url = "https://files.pythonhosted.org/packages/40/d9/8bf8e1e41a311afd2abc8ad12be1b6c6c8b925506d9069b67bb5e9a04af3/ruff-0.14.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8769efc71558fecc25eb295ddec7d1030d41a51e9dcf127cbd63ec517f22d567", size = 13326939, upload-time = "2025-12-11T21:39:53.842Z" },
|
||||||
{ url = "https://files.pythonhosted.org/packages/ac/de/ee0b304d450ae007ce0cb3e455fe24fbcaaedae4ebaad6c23831c6663651/ruff-0.14.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d93be8f1fa01022337f1f8f3bcaa7ffee2d0b03f00922c45c2207954f351f465", size = 13206209, upload-time = "2025-11-13T19:58:35.952Z" },
|
{ url = "https://files.pythonhosted.org/packages/f4/56/a213fa9edb6dd849f1cfbc236206ead10913693c72a67fb7ddc1833bf95d/ruff-0.14.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:347e3bf16197e8a2de17940cd75fd6491e25c0aa7edf7d61aa03f146a1aa885a", size = 13578888, upload-time = "2025-12-11T21:39:35.988Z" },
|
||||||
{ url = "https://files.pythonhosted.org/packages/33/aa/193ca7e3a92d74f17d9d5771a765965d2cf42c86e6f0fd95b13969115723/ruff-0.14.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:c135d4b681f7401fe0e7312017e41aba9b3160861105726b76cfa14bc25aa367", size = 13953709, upload-time = "2025-11-13T19:58:39.002Z" },
|
{ url = "https://files.pythonhosted.org/packages/33/09/6a4a67ffa4abae6bf44c972a4521337ffce9cbc7808faadede754ef7a79c/ruff-0.14.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:7715d14e5bccf5b660f54516558aa94781d3eb0838f8e706fb60e3ff6eff03a8", size = 14314473, upload-time = "2025-12-11T21:39:50.78Z" },
|
||||||
 ]

 [[package]]