mirror of
https://github.com/paperless-ngx/paperless-ngx.git
synced 2026-02-03 23:22:42 -06:00
Compare commits
8 Commits
dependabot
...
dependabot
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d604ca19df | ||
|
|
fb7abf7a6e | ||
|
|
6ad2fc0356 | ||
|
|
2ec8ec96c8 | ||
|
|
276dc13e3f | ||
|
|
d0c02e7a8d | ||
|
|
e45fca475a | ||
|
|
63c0e2f72b |
@@ -28,3 +28,4 @@
|
||||
./resources
|
||||
# Other stuff
|
||||
**/*.drawio.png
|
||||
.mypy_baseline
|
||||
|
||||
44
.github/workflows/ci-backend.yml
vendored
44
.github/workflows/ci-backend.yml
vendored
@@ -99,3 +99,47 @@ jobs:
|
||||
run: |
|
||||
docker compose --file docker/compose/docker-compose.ci-test.yml logs
|
||||
docker compose --file docker/compose/docker-compose.ci-test.yml down
|
||||
typing:
|
||||
name: Check project typing
|
||||
runs-on: ubuntu-24.04
|
||||
env:
|
||||
DEFAULT_PYTHON: "3.12"
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v6.0.1
|
||||
- name: Set up Python
|
||||
id: setup-python
|
||||
uses: actions/setup-python@v6.2.0
|
||||
with:
|
||||
python-version: "${{ env.DEFAULT_PYTHON }}"
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v7.2.1
|
||||
with:
|
||||
version: ${{ env.DEFAULT_UV_VERSION }}
|
||||
enable-cache: true
|
||||
python-version: ${{ steps.setup-python.outputs.python-version }}
|
||||
- name: Install Python dependencies
|
||||
run: |
|
||||
uv sync \
|
||||
--python ${{ steps.setup-python.outputs.python-version }} \
|
||||
--group testing \
|
||||
--group typing \
|
||||
--frozen
|
||||
- name: List installed Python dependencies
|
||||
run: |
|
||||
uv pip list
|
||||
- name: Cache Mypy
|
||||
uses: actions/cache@v5.0.3
|
||||
with:
|
||||
path: .mypy_cache
|
||||
# Keyed by OS, Python version, and dependency hashes
|
||||
key: ${{ runner.os }}-mypy-py${{ env.DEFAULT_PYTHON }}-${{ hashFiles('pyproject.toml', 'uv.lock') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-mypy-py${{ env.DEFAULT_PYTHON }}-
|
||||
${{ runner.os }}-mypy-
|
||||
- name: Check typing
|
||||
run: |
|
||||
uv run mypy \
|
||||
--show-error-codes \
|
||||
--warn-unused-configs \
|
||||
src/ | uv run mypy-baseline filter
|
||||
|
||||
2499
.mypy-baseline.txt
Normal file
2499
.mypy-baseline.txt
Normal file
File diff suppressed because it is too large
Load Diff
@@ -4,7 +4,7 @@
|
||||
# correct networking for the tests
|
||||
services:
|
||||
gotenberg:
|
||||
image: docker.io/gotenberg/gotenberg:8.26
|
||||
image: docker.io/gotenberg/gotenberg:8.25
|
||||
hostname: gotenberg
|
||||
container_name: gotenberg
|
||||
network_mode: host
|
||||
|
||||
@@ -72,7 +72,7 @@ services:
|
||||
PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
|
||||
PAPERLESS_TIKA_ENDPOINT: http://tika:9998
|
||||
gotenberg:
|
||||
image: docker.io/gotenberg/gotenberg:8.26
|
||||
image: docker.io/gotenberg/gotenberg:8.25
|
||||
restart: unless-stopped
|
||||
# The gotenberg chromium route is used to convert .eml files. We do not
|
||||
# want to allow external content like tracking pixels or even javascript.
|
||||
|
||||
@@ -66,7 +66,7 @@ services:
|
||||
PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
|
||||
PAPERLESS_TIKA_ENDPOINT: http://tika:9998
|
||||
gotenberg:
|
||||
image: docker.io/gotenberg/gotenberg:8.26
|
||||
image: docker.io/gotenberg/gotenberg:8.25
|
||||
restart: unless-stopped
|
||||
# The gotenberg chromium route is used to convert .eml files. We do not
|
||||
# want to allow external content like tracking pixels or even javascript.
|
||||
|
||||
@@ -55,7 +55,7 @@ services:
|
||||
PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
|
||||
PAPERLESS_TIKA_ENDPOINT: http://tika:9998
|
||||
gotenberg:
|
||||
image: docker.io/gotenberg/gotenberg:8.26
|
||||
image: docker.io/gotenberg/gotenberg:8.25
|
||||
restart: unless-stopped
|
||||
# The gotenberg chromium route is used to convert .eml files. We do not
|
||||
# want to allow external content like tracking pixels or even javascript.
|
||||
|
||||
@@ -481,3 +481,147 @@ To get started:
|
||||
|
||||
5. The project is ready for debugging; either run the fullstack debug or the individual debug
|
||||
processes. To spin up the project without debugging, run the task **Project Start: Run all Services**
|
||||
|
||||
## Developing Date Parser Plugins
|
||||
|
||||
Paperless-ngx uses a plugin system for date parsing, allowing you to extend or replace the default date parsing behavior. Plugins are discovered using [Python entry points](https://setuptools.pypa.io/en/latest/userguide/entry_point.html).
|
||||
|
||||
### Creating a Date Parser Plugin
|
||||
|
||||
To create a custom date parser plugin, you need to:
|
||||
|
||||
1. Create a class that inherits from `DateParserPluginBase`
|
||||
2. Implement the required abstract method
|
||||
3. Register your plugin via an entry point
|
||||
|
||||
#### 1. Implementing the Parser Class
|
||||
|
||||
Your parser must extend `documents.plugins.date_parsing.DateParserPluginBase` and implement the `parse` method:
|
||||
|
||||
```python
|
||||
from collections.abc import Iterator
|
||||
import datetime
|
||||
|
||||
from documents.plugins.date_parsing import DateParserPluginBase
|
||||
|
||||
|
||||
class MyDateParserPlugin(DateParserPluginBase):
|
||||
"""
|
||||
Custom date parser implementation.
|
||||
"""
|
||||
|
||||
def parse(self, filename: str, content: str) -> Iterator[datetime.datetime]:
|
||||
"""
|
||||
Parse dates from the document's filename and content.
|
||||
|
||||
Args:
|
||||
filename: The original filename of the document
|
||||
content: The extracted text content of the document
|
||||
|
||||
Yields:
|
||||
datetime.datetime: Valid datetime objects found in the document
|
||||
"""
|
||||
# Your parsing logic here
|
||||
# Use self.config to access configuration settings
|
||||
|
||||
# Example: parse dates from filename first
|
||||
if self.config.filename_date_order:
|
||||
# Your filename parsing logic
|
||||
yield some_datetime
|
||||
|
||||
# Then parse dates from content
|
||||
# Your content parsing logic
|
||||
yield another_datetime
|
||||
```
|
||||
|
||||
#### 2. Configuration and Helper Methods
|
||||
|
||||
Your parser instance is initialized with a `DateParserConfig` object accessible via `self.config`. This provides:
|
||||
|
||||
- `languages: list[str]` - List of language codes for date parsing
|
||||
- `timezone_str: str` - Timezone string for date localization
|
||||
- `ignore_dates: set[datetime.date]` - Dates that should be filtered out
|
||||
- `reference_time: datetime.datetime` - Current time for filtering future dates
|
||||
- `filename_date_order: str | None` - Date order preference for filenames (e.g., "DMY", "MDY")
|
||||
- `content_date_order: str` - Date order preference for content
|
||||
|
||||
The base class provides two helper methods you can use:
|
||||
|
||||
```python
|
||||
def _parse_string(
|
||||
self,
|
||||
date_string: str,
|
||||
date_order: str,
|
||||
) -> datetime.datetime | None:
|
||||
"""
|
||||
Parse a single date string using dateparser with configured settings.
|
||||
"""
|
||||
|
||||
def _filter_date(
|
||||
self,
|
||||
date: datetime.datetime | None,
|
||||
) -> datetime.datetime | None:
|
||||
"""
|
||||
Validate a parsed datetime against configured rules.
|
||||
Filters out dates before 1900, future dates, and ignored dates.
|
||||
"""
|
||||
```
|
||||
|
||||
#### 3. Resource Management (Optional)
|
||||
|
||||
If your plugin needs to acquire or release resources (database connections, API clients, etc.), override the context manager methods. Paperless-ngx will always use plugins as context managers, ensuring resources can be released even in the event of errors.
|
||||
|
||||
#### 4. Registering Your Plugin
|
||||
|
||||
Register your plugin using a setuptools entry point in your package's `pyproject.toml`:
|
||||
|
||||
```toml
|
||||
[project.entry-points."paperless_ngx.date_parsers"]
|
||||
my_parser = "my_package.parsers:MyDateParserPlugin"
|
||||
```
|
||||
|
||||
The entry point name (e.g., `"my_parser"`) is used for sorting when multiple plugins are found. Paperless-ngx will use the first plugin alphabetically by name if multiple plugins are discovered.
|
||||
|
||||
### Plugin Discovery
|
||||
|
||||
Paperless-ngx automatically discovers and loads date parser plugins at runtime. The discovery process:
|
||||
|
||||
1. Queries the `paperless_ngx.date_parsers` entry point group
|
||||
2. Validates that each plugin is a subclass of `DateParserPluginBase`
|
||||
3. Sorts valid plugins alphabetically by entry point name
|
||||
4. Uses the first valid plugin, or falls back to the default `RegexDateParserPlugin` if none are found
|
||||
|
||||
If multiple plugins are installed, a warning is logged indicating which plugin was selected.
|
||||
|
||||
### Example: Simple Date Parser
|
||||
|
||||
Here's a minimal example that only looks for ISO 8601 dates:
|
||||
|
||||
```python
|
||||
import datetime
|
||||
import re
|
||||
from collections.abc import Iterator
|
||||
|
||||
from documents.plugins.date_parsing.base import DateParserPluginBase
|
||||
|
||||
|
||||
class ISODateParserPlugin(DateParserPluginBase):
|
||||
"""
|
||||
Parser that only matches ISO 8601 formatted dates (YYYY-MM-DD).
|
||||
"""
|
||||
|
||||
ISO_REGEX = re.compile(r"\b(\d{4}-\d{2}-\d{2})\b")
|
||||
|
||||
def parse(self, filename: str, content: str) -> Iterator[datetime.datetime]:
|
||||
# Combine filename and content for searching
|
||||
text = f"{filename} {content}"
|
||||
|
||||
for match in self.ISO_REGEX.finditer(text):
|
||||
date_string = match.group(1)
|
||||
# Use helper method to parse with configured timezone
|
||||
date = self._parse_string(date_string, "YMD")
|
||||
# Use helper method to validate the date
|
||||
filtered_date = self._filter_date(date)
|
||||
if filtered_date is not None:
|
||||
yield filtered_date
|
||||
```
|
||||
|
||||
@@ -562,8 +562,8 @@ you may want to adjust these settings to prevent abuse.
|
||||
|
||||
#### Workflow placeholders
|
||||
|
||||
Titles can be assigned by workflows using [Jinja templates](https://jinja.palletsprojects.com/en/3.1.x/templates/).
|
||||
This allows for complex logic to be used to generate the title, including [logical structures](https://jinja.palletsprojects.com/en/3.1.x/templates/#list-of-control-structures)
|
||||
Titles and webhook payloads can be generated by workflows using [Jinja templates](https://jinja.palletsprojects.com/en/3.1.x/templates/).
|
||||
This allows for complex logic to be used, including [logical structures](https://jinja.palletsprojects.com/en/3.1.x/templates/#list-of-control-structures)
|
||||
and [filters](https://jinja.palletsprojects.com/en/3.1.x/templates/#id11).
|
||||
The template is provided as a string.
|
||||
|
||||
@@ -586,7 +586,7 @@ applied. You can use the following placeholders in the template with any trigger
|
||||
- `{{added_time}}`: added time in HH:MM format
|
||||
- `{{original_filename}}`: original file name without extension
|
||||
- `{{filename}}`: current file name without extension
|
||||
- `{{doc_title}}`: current document title
|
||||
- `{{doc_title}}`: current document title (cannot be used in title assignment)
|
||||
|
||||
The following placeholders are only available for "added" or "updated" triggers
|
||||
|
||||
|
||||
@@ -94,7 +94,7 @@ optional-dependencies.postgres = [
|
||||
"psycopg-pool==3.3",
|
||||
]
|
||||
optional-dependencies.webserver = [
|
||||
"granian[uvloop]~=2.6.0",
|
||||
"granian[uvloop]~=2.7.0",
|
||||
]
|
||||
|
||||
[dependency-groups]
|
||||
@@ -138,7 +138,9 @@ typing = [
|
||||
"django-stubs[compatible-mypy]",
|
||||
"djangorestframework-stubs[compatible-mypy]",
|
||||
"lxml-stubs",
|
||||
"microsoft-python-type-stubs @ git+https://github.com/microsoft/python-type-stubs.git",
|
||||
"mypy",
|
||||
"mypy-baseline",
|
||||
"types-bleach",
|
||||
"types-colorama",
|
||||
"types-dateparser",
|
||||
@@ -306,6 +308,7 @@ markers = [
|
||||
"gotenberg: Tests requiring Gotenberg service",
|
||||
"tika: Tests requiring Tika service",
|
||||
"greenmail: Tests requiring Greenmail service",
|
||||
"date_parsing: Tests which cover date parsing from content or filename",
|
||||
]
|
||||
|
||||
[tool.pytest_env]
|
||||
@@ -345,3 +348,7 @@ warn_unused_ignores = true
|
||||
|
||||
[tool.django-stubs]
|
||||
django_settings_module = "paperless.settings"
|
||||
|
||||
[tool.mypy-baseline]
|
||||
baseline_path = ".mypy-baseline.txt"
|
||||
sort_baseline = true
|
||||
|
||||
@@ -5359,6 +5359,27 @@
|
||||
<context context-type="linenumber">429</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="32686762098259088" datatype="html">
|
||||
<source> One password per line. The workflow will try them in order until one succeeds. </source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.html</context>
|
||||
<context context-type="linenumber">436,438</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="3853121441237751087" datatype="html">
|
||||
<source>Passwords</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.html</context>
|
||||
<context context-type="linenumber">441</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="3653669613103848563" datatype="html">
|
||||
<source>Passwords are stored in plain text. Use with caution.</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.html</context>
|
||||
<context context-type="linenumber">445</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="4626030417479279989" datatype="html">
|
||||
<source>Consume Folder</source>
|
||||
<context-group purpose="location">
|
||||
@@ -5454,109 +5475,116 @@
|
||||
<context context-type="linenumber">140</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="4824906895380506720" datatype="html">
|
||||
<source>Password removal</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">144</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="4522609911791833187" datatype="html">
|
||||
<source>Has any of these tags</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">209</context>
|
||||
<context context-type="linenumber">213</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="4166903555074156852" datatype="html">
|
||||
<source>Has all of these tags</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">216</context>
|
||||
<context context-type="linenumber">220</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="6624363795312783141" datatype="html">
|
||||
<source>Does not have these tags</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">223</context>
|
||||
<context context-type="linenumber">227</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="7168528512669831184" datatype="html">
|
||||
<source>Has any of these correspondents</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">230</context>
|
||||
<context context-type="linenumber">234</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="5281365940563983618" datatype="html">
|
||||
<source>Has correspondent</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">238</context>
|
||||
<context context-type="linenumber">242</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="6884498632428600393" datatype="html">
|
||||
<source>Does not have correspondents</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">246</context>
|
||||
<context context-type="linenumber">250</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="4806713133917046341" datatype="html">
|
||||
<source>Has document type</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">254</context>
|
||||
<context context-type="linenumber">258</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="8801397520369995032" datatype="html">
|
||||
<source>Has any of these document types</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">262</context>
|
||||
<context context-type="linenumber">266</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="1507843981661822403" datatype="html">
|
||||
<source>Does not have document types</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">270</context>
|
||||
<context context-type="linenumber">274</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="4277260190522078330" datatype="html">
|
||||
<source>Has storage path</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">278</context>
|
||||
<context context-type="linenumber">282</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="8858580062214623097" datatype="html">
|
||||
<source>Has any of these storage paths</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">286</context>
|
||||
<context context-type="linenumber">290</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="6070943364927280151" datatype="html">
|
||||
<source>Does not have storage paths</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">294</context>
|
||||
<context context-type="linenumber">298</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="6250799006816371860" datatype="html">
|
||||
<source>Matches custom field query</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">302</context>
|
||||
<context context-type="linenumber">306</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="3138206142174978019" datatype="html">
|
||||
<source>Create new workflow</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">531</context>
|
||||
<context context-type="linenumber">535</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="5996779210524133604" datatype="html">
|
||||
<source>Edit workflow</source>
|
||||
<context-group purpose="location">
|
||||
<context context-type="sourcefile">src/app/components/common/edit-dialog/workflow-edit-dialog/workflow-edit-dialog.component.ts</context>
|
||||
<context context-type="linenumber">535</context>
|
||||
<context context-type="linenumber">539</context>
|
||||
</context-group>
|
||||
</trans-unit>
|
||||
<trans-unit id="5457837313196342910" datatype="html">
|
||||
|
||||
@@ -430,6 +430,24 @@
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
@case (WorkflowActionType.PasswordRemoval) {
|
||||
<div class="row">
|
||||
<div class="col">
|
||||
<p class="small" i18n>
|
||||
One password per line. The workflow will try them in order until one succeeds.
|
||||
</p>
|
||||
<pngx-input-textarea
|
||||
i18n-title
|
||||
title="Passwords"
|
||||
formControlName="passwords"
|
||||
rows="4"
|
||||
[error]="error?.actions?.[i]?.passwords"
|
||||
hint="Passwords are stored in plain text. Use with caution."
|
||||
i18n-hint
|
||||
></pngx-input-textarea>
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
}
|
||||
</div>
|
||||
</ng-template>
|
||||
|
||||
@@ -3,6 +3,7 @@ import { provideHttpClient, withInterceptorsFromDi } from '@angular/common/http'
|
||||
import { provideHttpClientTesting } from '@angular/common/http/testing'
|
||||
import { ComponentFixture, TestBed } from '@angular/core/testing'
|
||||
import {
|
||||
FormArray,
|
||||
FormControl,
|
||||
FormGroup,
|
||||
FormsModule,
|
||||
@@ -994,4 +995,32 @@ describe('WorkflowEditDialogComponent', () => {
|
||||
component.removeSelectedCustomField(3, formGroup)
|
||||
expect(formGroup.get('assign_custom_fields').value).toEqual([])
|
||||
})
|
||||
|
||||
it('should handle parsing of passwords from array to string and back on save', () => {
|
||||
const passwordAction: WorkflowAction = {
|
||||
id: 1,
|
||||
type: WorkflowActionType.PasswordRemoval,
|
||||
passwords: ['pass1', 'pass2'],
|
||||
}
|
||||
component.object = {
|
||||
name: 'Workflow with Passwords',
|
||||
id: 1,
|
||||
order: 1,
|
||||
enabled: true,
|
||||
triggers: [],
|
||||
actions: [passwordAction],
|
||||
}
|
||||
component.ngOnInit()
|
||||
|
||||
const formActions = component.objectForm.get('actions') as FormArray
|
||||
expect(formActions.value[0].passwords).toBe('pass1\npass2')
|
||||
formActions.at(0).get('passwords').setValue('pass1\npass2\npass3')
|
||||
component.save()
|
||||
|
||||
expect(component.objectForm.get('actions').value[0].passwords).toEqual([
|
||||
'pass1',
|
||||
'pass2',
|
||||
'pass3',
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
@@ -139,6 +139,10 @@ export const WORKFLOW_ACTION_OPTIONS = [
|
||||
id: WorkflowActionType.Webhook,
|
||||
name: $localize`Webhook`,
|
||||
},
|
||||
{
|
||||
id: WorkflowActionType.PasswordRemoval,
|
||||
name: $localize`Password removal`,
|
||||
},
|
||||
]
|
||||
|
||||
export enum TriggerFilterType {
|
||||
@@ -1202,11 +1206,25 @@ export class WorkflowEditDialogComponent
|
||||
headers: new FormControl(action.webhook?.headers),
|
||||
include_document: new FormControl(!!action.webhook?.include_document),
|
||||
}),
|
||||
passwords: new FormControl(
|
||||
this.formatPasswords(action.passwords ?? [])
|
||||
),
|
||||
}),
|
||||
{ emitEvent }
|
||||
)
|
||||
}
|
||||
|
||||
private formatPasswords(passwords: string[] = []): string {
|
||||
return passwords.join('\n')
|
||||
}
|
||||
|
||||
private parsePasswords(value: string = ''): string[] {
|
||||
return value
|
||||
.split(/[\n,]+/)
|
||||
.map((entry) => entry.trim())
|
||||
.filter((entry) => entry.length > 0)
|
||||
}
|
||||
|
||||
private updateAllTriggerActionFields(emitEvent: boolean = false) {
|
||||
this.triggerFields.clear({ emitEvent: false })
|
||||
this.object?.triggers.forEach((trigger) => {
|
||||
@@ -1331,6 +1349,7 @@ export class WorkflowEditDialogComponent
|
||||
headers: null,
|
||||
include_document: false,
|
||||
},
|
||||
passwords: [],
|
||||
}
|
||||
this.object.actions.push(action)
|
||||
this.createActionField(action)
|
||||
@@ -1367,6 +1386,7 @@ export class WorkflowEditDialogComponent
|
||||
if (action.type !== WorkflowActionType.Email) {
|
||||
action.email = null
|
||||
}
|
||||
action.passwords = this.parsePasswords(action.passwords as any)
|
||||
})
|
||||
super.save()
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ export enum WorkflowActionType {
|
||||
Removal = 2,
|
||||
Email = 3,
|
||||
Webhook = 4,
|
||||
PasswordRemoval = 5,
|
||||
}
|
||||
|
||||
export interface WorkflowActionEmail extends ObjectWithId {
|
||||
@@ -97,4 +98,6 @@ export interface WorkflowAction extends ObjectWithId {
|
||||
email?: WorkflowActionEmail
|
||||
|
||||
webhook?: WorkflowActionWebhook
|
||||
|
||||
passwords?: string[]
|
||||
}
|
||||
|
||||
@@ -33,12 +33,12 @@ from documents.models import WorkflowTrigger
|
||||
from documents.parsers import DocumentParser
|
||||
from documents.parsers import ParseError
|
||||
from documents.parsers import get_parser_class_for_mime_type
|
||||
from documents.parsers import parse_date
|
||||
from documents.permissions import set_permissions_for_object
|
||||
from documents.plugins.base import AlwaysRunPluginMixin
|
||||
from documents.plugins.base import ConsumeTaskPlugin
|
||||
from documents.plugins.base import NoCleanupPluginMixin
|
||||
from documents.plugins.base import NoSetupPluginMixin
|
||||
from documents.plugins.date_parsing import get_date_parser
|
||||
from documents.plugins.helpers import ProgressManager
|
||||
from documents.plugins.helpers import ProgressStatusOptions
|
||||
from documents.signals import document_consumption_finished
|
||||
@@ -432,7 +432,8 @@ class ConsumerPlugin(
|
||||
ProgressStatusOptions.WORKING,
|
||||
ConsumerStatusShortMessage.PARSE_DATE,
|
||||
)
|
||||
date = parse_date(self.filename, text)
|
||||
with get_date_parser() as date_parser:
|
||||
date = next(date_parser.parse(self.filename, text), None)
|
||||
archive_path = document_parser.get_archive_path()
|
||||
page_count = document_parser.get_page_count(self.working_copy, mime_type)
|
||||
|
||||
|
||||
@@ -0,0 +1,38 @@
|
||||
# Generated by Django 5.2.7 on 2025-12-29 03:56
|
||||
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0008_sharelinkbundle"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="workflowaction",
|
||||
name="passwords",
|
||||
field=models.JSONField(
|
||||
blank=True,
|
||||
help_text="Passwords to try when removing PDF protection. Separate with commas or new lines.",
|
||||
null=True,
|
||||
verbose_name="passwords",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="workflowaction",
|
||||
name="type",
|
||||
field=models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "Assignment"),
|
||||
(2, "Removal"),
|
||||
(3, "Email"),
|
||||
(4, "Webhook"),
|
||||
(5, "Password removal"),
|
||||
],
|
||||
default=1,
|
||||
verbose_name="Workflow Action Type",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1405,6 +1405,10 @@ class WorkflowAction(models.Model):
|
||||
4,
|
||||
_("Webhook"),
|
||||
)
|
||||
PASSWORD_REMOVAL = (
|
||||
5,
|
||||
_("Password removal"),
|
||||
)
|
||||
|
||||
type = models.PositiveIntegerField(
|
||||
_("Workflow Action Type"),
|
||||
@@ -1634,6 +1638,15 @@ class WorkflowAction(models.Model):
|
||||
verbose_name=_("webhook"),
|
||||
)
|
||||
|
||||
passwords = models.JSONField(
|
||||
_("passwords"),
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text=_(
|
||||
"Passwords to try when removing PDF protection. Separate with commas or new lines.",
|
||||
),
|
||||
)
|
||||
|
||||
class Meta:
|
||||
verbose_name = _("workflow action")
|
||||
verbose_name_plural = _("workflow actions")
|
||||
|
||||
@@ -9,22 +9,17 @@ import subprocess
|
||||
import tempfile
|
||||
from functools import lru_cache
|
||||
from pathlib import Path
|
||||
from re import Match
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils import timezone
|
||||
|
||||
from documents.loggers import LoggingMixin
|
||||
from documents.signals import document_consumer_declaration
|
||||
from documents.utils import copy_file_with_basic_stats
|
||||
from documents.utils import run_subprocess
|
||||
from paperless.config import OcrConfig
|
||||
from paperless.utils import ocr_to_dateparser_languages
|
||||
|
||||
if TYPE_CHECKING:
|
||||
import datetime
|
||||
from collections.abc import Iterator
|
||||
|
||||
# This regular expression will try to find dates in the document at
|
||||
# hand and will match the following formats:
|
||||
@@ -259,75 +254,6 @@ def make_thumbnail_from_pdf(in_path: Path, temp_dir: Path, logging_group=None) -
|
||||
return out_path
|
||||
|
||||
|
||||
def parse_date(filename, text) -> datetime.datetime | None:
|
||||
return next(parse_date_generator(filename, text), None)
|
||||
|
||||
|
||||
def parse_date_generator(filename, text) -> Iterator[datetime.datetime]:
|
||||
"""
|
||||
Returns the date of the document.
|
||||
"""
|
||||
|
||||
def __parser(ds: str, date_order: str) -> datetime.datetime:
|
||||
"""
|
||||
Call dateparser.parse with a particular date ordering
|
||||
"""
|
||||
import dateparser
|
||||
|
||||
ocr_config = OcrConfig()
|
||||
languages = settings.DATE_PARSER_LANGUAGES or ocr_to_dateparser_languages(
|
||||
ocr_config.language,
|
||||
)
|
||||
|
||||
return dateparser.parse(
|
||||
ds,
|
||||
settings={
|
||||
"DATE_ORDER": date_order,
|
||||
"PREFER_DAY_OF_MONTH": "first",
|
||||
"RETURN_AS_TIMEZONE_AWARE": True,
|
||||
"TIMEZONE": settings.TIME_ZONE,
|
||||
},
|
||||
locales=languages,
|
||||
)
|
||||
|
||||
def __filter(date: datetime.datetime) -> datetime.datetime | None:
|
||||
if (
|
||||
date is not None
|
||||
and date.year > 1900
|
||||
and date <= timezone.now()
|
||||
and date.date() not in settings.IGNORE_DATES
|
||||
):
|
||||
return date
|
||||
return None
|
||||
|
||||
def __process_match(
|
||||
match: Match[str],
|
||||
date_order: str,
|
||||
) -> datetime.datetime | None:
|
||||
date_string = match.group(0)
|
||||
|
||||
try:
|
||||
date = __parser(date_string, date_order)
|
||||
except Exception:
|
||||
# Skip all matches that do not parse to a proper date
|
||||
date = None
|
||||
|
||||
return __filter(date)
|
||||
|
||||
def __process_content(content: str, date_order: str) -> Iterator[datetime.datetime]:
|
||||
for m in re.finditer(DATE_REGEX, content):
|
||||
date = __process_match(m, date_order)
|
||||
if date is not None:
|
||||
yield date
|
||||
|
||||
# if filename date parsing is enabled, search there first:
|
||||
if settings.FILENAME_DATE_ORDER:
|
||||
yield from __process_content(filename, settings.FILENAME_DATE_ORDER)
|
||||
|
||||
# Iterate through all regex matches in text and try to parse the date
|
||||
yield from __process_content(text, settings.DATE_ORDER)
|
||||
|
||||
|
||||
class ParseError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
101
src/documents/plugins/date_parsing/__init__.py
Normal file
101
src/documents/plugins/date_parsing/__init__.py
Normal file
@@ -0,0 +1,101 @@
|
||||
import logging
|
||||
from functools import lru_cache
|
||||
from importlib.metadata import EntryPoint
|
||||
from importlib.metadata import entry_points
|
||||
from typing import Final
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils import timezone
|
||||
|
||||
from documents.plugins.date_parsing.base import DateParserConfig
|
||||
from documents.plugins.date_parsing.base import DateParserPluginBase
|
||||
from documents.plugins.date_parsing.regex_parser import RegexDateParserPlugin
|
||||
from paperless.config import OcrConfig
|
||||
from paperless.utils import ocr_to_dateparser_languages
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
DATE_PARSER_ENTRY_POINT_GROUP: Final = "paperless_ngx.date_parsers"
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def _discover_parser_class() -> type[DateParserPluginBase]:
|
||||
"""
|
||||
Discovers the date parser plugin class to use.
|
||||
|
||||
- If one or more plugins are found, sorts them by name and returns the first.
|
||||
- If no plugins are found, returns the default RegexDateParser.
|
||||
"""
|
||||
|
||||
eps: tuple[EntryPoint, ...]
|
||||
try:
|
||||
eps = entry_points(group=DATE_PARSER_ENTRY_POINT_GROUP)
|
||||
except Exception as e:
|
||||
# Log a warning
|
||||
logger.warning(f"Could not query entry points for date parsers: {e}")
|
||||
eps = ()
|
||||
|
||||
valid_plugins: list[EntryPoint] = []
|
||||
for ep in eps:
|
||||
try:
|
||||
plugin_class = ep.load()
|
||||
if plugin_class and issubclass(plugin_class, DateParserPluginBase):
|
||||
valid_plugins.append(ep)
|
||||
else:
|
||||
logger.warning(f"Plugin {ep.name} does not subclass DateParser.")
|
||||
except Exception as e:
|
||||
logger.error(f"Unable to load date parser plugin {ep.name}: {e}")
|
||||
|
||||
if not valid_plugins:
|
||||
return RegexDateParserPlugin
|
||||
|
||||
valid_plugins.sort(key=lambda ep: ep.name)
|
||||
|
||||
if len(valid_plugins) > 1:
|
||||
logger.warning(
|
||||
f"Multiple date parsers found: "
|
||||
f"{[ep.name for ep in valid_plugins]}. "
|
||||
f"Using the first one by name: '{valid_plugins[0].name}'.",
|
||||
)
|
||||
|
||||
return valid_plugins[0].load()
|
||||
|
||||
|
||||
def get_date_parser() -> DateParserPluginBase:
|
||||
"""
|
||||
Factory function to get an initialized date parser instance.
|
||||
|
||||
This function is responsible for:
|
||||
1. Discovering the correct parser class (plugin or default).
|
||||
2. Loading configuration from Django settings.
|
||||
3. Instantiating the parser with the configuration.
|
||||
"""
|
||||
# 1. Discover the class (this is cached)
|
||||
parser_class = _discover_parser_class()
|
||||
|
||||
# 2. Load configuration from settings
|
||||
# TODO: Get the language from the settings and/or configuration object, depending
|
||||
ocr_config = OcrConfig()
|
||||
languages = settings.DATE_PARSER_LANGUAGES or ocr_to_dateparser_languages(
|
||||
ocr_config.language,
|
||||
)
|
||||
|
||||
config = DateParserConfig(
|
||||
languages=languages,
|
||||
timezone_str=settings.TIME_ZONE,
|
||||
ignore_dates=settings.IGNORE_DATES,
|
||||
reference_time=timezone.now(),
|
||||
filename_date_order=settings.FILENAME_DATE_ORDER,
|
||||
content_date_order=settings.DATE_ORDER,
|
||||
)
|
||||
|
||||
# 3. Instantiate the discovered class with the config
|
||||
return parser_class(config=config)
|
||||
|
||||
|
||||
__all__ = [
|
||||
"DateParserConfig",
|
||||
"DateParserPluginBase",
|
||||
"RegexDateParserPlugin",
|
||||
"get_date_parser",
|
||||
]
|
||||
124
src/documents/plugins/date_parsing/base.py
Normal file
124
src/documents/plugins/date_parsing/base.py
Normal file
@@ -0,0 +1,124 @@
|
||||
import datetime
|
||||
import logging
|
||||
from abc import ABC
|
||||
from abc import abstractmethod
|
||||
from collections.abc import Iterator
|
||||
from dataclasses import dataclass
|
||||
from types import TracebackType
|
||||
|
||||
try:
|
||||
from typing import Self
|
||||
except ImportError:
|
||||
from typing_extensions import Self
|
||||
|
||||
import dateparser
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class DateParserConfig:
|
||||
"""
|
||||
Configuration for a DateParser instance.
|
||||
|
||||
This object is created by the factory and passed to the
|
||||
parser's constructor, decoupling the parser from settings.
|
||||
"""
|
||||
|
||||
languages: list[str]
|
||||
timezone_str: str
|
||||
ignore_dates: set[datetime.date]
|
||||
|
||||
# A "now" timestamp for filtering future dates.
|
||||
# Passed in by the factory.
|
||||
reference_time: datetime.datetime
|
||||
|
||||
# Settings for the default RegexDateParser
|
||||
# Other plugins should use or consider these, but it is not required
|
||||
filename_date_order: str | None
|
||||
content_date_order: str
|
||||
|
||||
|
||||
class DateParserPluginBase(ABC):
|
||||
"""
|
||||
Abstract base class for date parsing strategies.
|
||||
|
||||
Instances are configured via a DateParserConfig object.
|
||||
"""
|
||||
|
||||
def __init__(self, config: DateParserConfig):
|
||||
"""
|
||||
Initializes the parser with its configuration.
|
||||
"""
|
||||
self.config = config
|
||||
|
||||
def __enter__(self) -> Self:
|
||||
"""
|
||||
Enter the runtime context related to this object.
|
||||
|
||||
Subclasses can override this to acquire resources (connections, handles).
|
||||
"""
|
||||
return self
|
||||
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: type[BaseException] | None,
|
||||
exc_val: BaseException | None,
|
||||
exc_tb: TracebackType | None,
|
||||
) -> None:
|
||||
"""
|
||||
Exit the runtime context related to this object.
|
||||
|
||||
Subclasses can override this to release resources.
|
||||
"""
|
||||
# Default implementation does nothing.
|
||||
# Returning None implies exceptions are propagated.
|
||||
|
||||
def _parse_string(
|
||||
self,
|
||||
date_string: str,
|
||||
date_order: str,
|
||||
) -> datetime.datetime | None:
|
||||
"""
|
||||
Helper method to parse a single date string using dateparser.
|
||||
|
||||
Uses configuration from `self.config`.
|
||||
"""
|
||||
try:
|
||||
return dateparser.parse(
|
||||
date_string,
|
||||
settings={
|
||||
"DATE_ORDER": date_order,
|
||||
"PREFER_DAY_OF_MONTH": "first",
|
||||
"RETURN_AS_TIMEZONE_AWARE": True,
|
||||
"TIMEZONE": self.config.timezone_str,
|
||||
},
|
||||
locales=self.config.languages,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error while parsing date string '{date_string}': {e}")
|
||||
return None
|
||||
|
||||
def _filter_date(
|
||||
self,
|
||||
date: datetime.datetime | None,
|
||||
) -> datetime.datetime | None:
|
||||
"""
|
||||
Helper method to validate a parsed datetime object.
|
||||
|
||||
Uses configuration from `self.config`.
|
||||
"""
|
||||
if (
|
||||
date is not None
|
||||
and date.year > 1900
|
||||
and date <= self.config.reference_time
|
||||
and date.date() not in self.config.ignore_dates
|
||||
):
|
||||
return date
|
||||
return None
|
||||
|
||||
@abstractmethod
|
||||
def parse(self, filename: str, content: str) -> Iterator[datetime.datetime]:
|
||||
"""
|
||||
Parses a document's filename and content, yielding valid datetime objects.
|
||||
"""
|
||||
65
src/documents/plugins/date_parsing/regex_parser.py
Normal file
65
src/documents/plugins/date_parsing/regex_parser.py
Normal file
@@ -0,0 +1,65 @@
|
||||
import datetime
|
||||
import re
|
||||
from collections.abc import Iterator
|
||||
from re import Match
|
||||
|
||||
from documents.plugins.date_parsing.base import DateParserPluginBase
|
||||
|
||||
|
||||
class RegexDateParserPlugin(DateParserPluginBase):
|
||||
"""
|
||||
The default date parser, using a series of regular expressions.
|
||||
|
||||
It is configured entirely by the DateParserConfig object
|
||||
passed to its constructor.
|
||||
"""
|
||||
|
||||
DATE_REGEX = re.compile(
|
||||
r"(\b|(?!=([_-])))(\d{1,2})[\.\/-](\d{1,2})[\.\/-](\d{4}|\d{2})(\b|(?=([_-])))|"
|
||||
r"(\b|(?!=([_-])))(\d{4}|\d{2})[\.\/-](\d{1,2})[\.\/-](\d{1,2})(\b|(?=([_-])))|"
|
||||
r"(\b|(?!=([_-])))(\d{1,2}[\. ]+[a-zéûäëčžúřěáíóńźçŞğü]{3,9} \d{4}|[a-zéûäëčžúřěáíóńźçŞğü]{3,9} \d{1,2}, \d{4})(\b|(?=([_-])))|"
|
||||
r"(\b|(?!=([_-])))([^\W\d_]{3,9} \d{1,2}, (\d{4}))(\b|(?=([_-])))|"
|
||||
r"(\b|(?!=([_-])))([^\W\d_]{3,9} \d{4})(\b|(?=([_-])))|"
|
||||
r"(\b|(?!=([_-])))(\d{1,2}[^ 0-9]{2}[\. ]+[^ ]{3,9}[ \.\/-]\d{4})(\b|(?=([_-])))|"
|
||||
r"(\b|(?!=([_-])))(\b\d{1,2}[ \.\/-][a-zéûäëčžúřěáíóńźçŞğü]{3}[ \.\/-]\d{4})(\b|(?=([_-])))",
|
||||
re.IGNORECASE,
|
||||
)
|
||||
|
||||
def _process_match(
|
||||
self,
|
||||
match: Match[str],
|
||||
date_order: str,
|
||||
) -> datetime.datetime | None:
|
||||
"""
|
||||
Processes a single regex match using the base class helpers.
|
||||
"""
|
||||
date_string = match.group(0)
|
||||
date = self._parse_string(date_string, date_order)
|
||||
return self._filter_date(date)
|
||||
|
||||
def _process_content(
|
||||
self,
|
||||
content: str,
|
||||
date_order: str,
|
||||
) -> Iterator[datetime.datetime]:
|
||||
"""
|
||||
Finds all regex matches in content and yields valid dates.
|
||||
"""
|
||||
for m in re.finditer(self.DATE_REGEX, content):
|
||||
date = self._process_match(m, date_order)
|
||||
if date is not None:
|
||||
yield date
|
||||
|
||||
def parse(self, filename: str, content: str) -> Iterator[datetime.datetime]:
|
||||
"""
|
||||
Implementation of the abstract parse method.
|
||||
|
||||
Reads its configuration from `self.config`.
|
||||
"""
|
||||
if self.config.filename_date_order:
|
||||
yield from self._process_content(
|
||||
filename,
|
||||
self.config.filename_date_order,
|
||||
)
|
||||
|
||||
yield from self._process_content(content, self.config.content_date_order)
|
||||
@@ -2627,6 +2627,7 @@ class WorkflowActionSerializer(serializers.ModelSerializer):
|
||||
"remove_change_groups",
|
||||
"email",
|
||||
"webhook",
|
||||
"passwords",
|
||||
]
|
||||
|
||||
def validate(self, attrs):
|
||||
@@ -2683,6 +2684,23 @@ class WorkflowActionSerializer(serializers.ModelSerializer):
|
||||
"Webhook data is required for webhook actions",
|
||||
)
|
||||
|
||||
if (
|
||||
"type" in attrs
|
||||
and attrs["type"] == WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL
|
||||
):
|
||||
passwords = attrs.get("passwords")
|
||||
# ensure passwords is a non-empty list of non-empty strings
|
||||
if (
|
||||
passwords is None
|
||||
or not isinstance(passwords, list)
|
||||
or len(passwords) == 0
|
||||
or any(not isinstance(pw, str) for pw in passwords)
|
||||
or any(len(pw.strip()) == 0 for pw in passwords)
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
"Passwords are required for password removal actions",
|
||||
)
|
||||
|
||||
return attrs
|
||||
|
||||
|
||||
|
||||
@@ -48,6 +48,7 @@ from documents.permissions import get_objects_for_user_owner_aware
|
||||
from documents.templating.utils import convert_format_str_to_template_format
|
||||
from documents.workflows.actions import build_workflow_action_context
|
||||
from documents.workflows.actions import execute_email_action
|
||||
from documents.workflows.actions import execute_password_removal_action
|
||||
from documents.workflows.actions import execute_webhook_action
|
||||
from documents.workflows.mutations import apply_assignment_to_document
|
||||
from documents.workflows.mutations import apply_assignment_to_overrides
|
||||
@@ -831,6 +832,8 @@ def run_workflows(
|
||||
logging_group,
|
||||
original_file,
|
||||
)
|
||||
elif action.type == WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL:
|
||||
execute_password_removal_action(action, document, logging_group)
|
||||
|
||||
if not use_overrides:
|
||||
# limit title to 128 characters
|
||||
|
||||
0
src/documents/tests/date_parsing/__init__.py
Normal file
0
src/documents/tests/date_parsing/__init__.py
Normal file
82
src/documents/tests/date_parsing/conftest.py
Normal file
82
src/documents/tests/date_parsing/conftest.py
Normal file
@@ -0,0 +1,82 @@
|
||||
import datetime
|
||||
from collections.abc import Generator
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
import pytest_django
|
||||
|
||||
from documents.plugins.date_parsing import _discover_parser_class
|
||||
from documents.plugins.date_parsing.base import DateParserConfig
|
||||
from documents.plugins.date_parsing.regex_parser import RegexDateParserPlugin
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def base_config() -> DateParserConfig:
|
||||
"""Basic configuration for date parser testing."""
|
||||
return DateParserConfig(
|
||||
languages=["en"],
|
||||
timezone_str="UTC",
|
||||
ignore_dates=set(),
|
||||
reference_time=datetime.datetime(
|
||||
2024,
|
||||
1,
|
||||
15,
|
||||
12,
|
||||
0,
|
||||
0,
|
||||
tzinfo=datetime.timezone.utc,
|
||||
),
|
||||
filename_date_order="YMD",
|
||||
content_date_order="DMY",
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def config_with_ignore_dates() -> DateParserConfig:
|
||||
"""Configuration with dates to ignore."""
|
||||
return DateParserConfig(
|
||||
languages=["en", "de"],
|
||||
timezone_str="America/New_York",
|
||||
ignore_dates={datetime.date(2024, 1, 1), datetime.date(2024, 12, 25)},
|
||||
reference_time=datetime.datetime(
|
||||
2024,
|
||||
1,
|
||||
15,
|
||||
12,
|
||||
0,
|
||||
0,
|
||||
tzinfo=datetime.timezone.utc,
|
||||
),
|
||||
filename_date_order="DMY",
|
||||
content_date_order="MDY",
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def regex_parser(base_config: DateParserConfig) -> RegexDateParserPlugin:
|
||||
"""Instance of RegexDateParser with base config."""
|
||||
return RegexDateParserPlugin(base_config)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def clear_lru_cache() -> Generator[None, None, None]:
|
||||
"""
|
||||
Ensure the LRU cache for _discover_parser_class is cleared
|
||||
before and after any test that depends on it.
|
||||
"""
|
||||
_discover_parser_class.cache_clear()
|
||||
yield
|
||||
_discover_parser_class.cache_clear()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_date_parser_settings(settings: pytest_django.fixtures.SettingsWrapper) -> Any:
|
||||
"""
|
||||
Override Django settings for the duration of date parser tests.
|
||||
"""
|
||||
settings.DATE_PARSER_LANGUAGES = ["en", "de"]
|
||||
settings.TIME_ZONE = "UTC"
|
||||
settings.IGNORE_DATES = [datetime.date(1900, 1, 1)]
|
||||
settings.FILENAME_DATE_ORDER = "YMD"
|
||||
settings.DATE_ORDER = "DMY"
|
||||
return settings
|
||||
@@ -0,0 +1,229 @@
|
||||
import datetime
|
||||
import logging
|
||||
from collections.abc import Iterator
|
||||
from importlib.metadata import EntryPoint
|
||||
|
||||
import pytest
|
||||
import pytest_mock
|
||||
from django.utils import timezone
|
||||
|
||||
from documents.plugins.date_parsing import DATE_PARSER_ENTRY_POINT_GROUP
|
||||
from documents.plugins.date_parsing import _discover_parser_class
|
||||
from documents.plugins.date_parsing import get_date_parser
|
||||
from documents.plugins.date_parsing.base import DateParserConfig
|
||||
from documents.plugins.date_parsing.base import DateParserPluginBase
|
||||
from documents.plugins.date_parsing.regex_parser import RegexDateParserPlugin
|
||||
|
||||
|
||||
class AlphaParser(DateParserPluginBase):
|
||||
def parse(self, filename: str, content: str) -> Iterator[datetime.datetime]:
|
||||
yield timezone.now()
|
||||
|
||||
|
||||
class BetaParser(DateParserPluginBase):
|
||||
def parse(self, filename: str, content: str) -> Iterator[datetime.datetime]:
|
||||
yield timezone.now()
|
||||
|
||||
|
||||
@pytest.mark.date_parsing
|
||||
@pytest.mark.usefixtures("clear_lru_cache")
|
||||
class TestDiscoverParserClass:
|
||||
"""Tests for the _discover_parser_class() function."""
|
||||
|
||||
def test_returns_default_when_no_plugins_found(
|
||||
self,
|
||||
mocker: pytest_mock.MockerFixture,
|
||||
) -> None:
|
||||
mocker.patch(
|
||||
"documents.plugins.date_parsing.entry_points",
|
||||
return_value=(),
|
||||
)
|
||||
result = _discover_parser_class()
|
||||
assert result is RegexDateParserPlugin
|
||||
|
||||
def test_returns_default_when_entrypoint_query_fails(
|
||||
self,
|
||||
mocker: pytest_mock.MockerFixture,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
mocker.patch(
|
||||
"documents.plugins.date_parsing.entry_points",
|
||||
side_effect=RuntimeError("boom"),
|
||||
)
|
||||
result = _discover_parser_class()
|
||||
assert result is RegexDateParserPlugin
|
||||
assert "Could not query entry points" in caplog.text
|
||||
|
||||
def test_filters_out_invalid_plugins(
|
||||
self,
|
||||
mocker: pytest_mock.MockerFixture,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
fake_ep = mocker.MagicMock(spec=EntryPoint)
|
||||
fake_ep.name = "bad_plugin"
|
||||
fake_ep.load.return_value = object # not subclass of DateParser
|
||||
|
||||
mocker.patch(
|
||||
"documents.plugins.date_parsing.entry_points",
|
||||
return_value=(fake_ep,),
|
||||
)
|
||||
|
||||
result = _discover_parser_class()
|
||||
assert result is RegexDateParserPlugin
|
||||
assert "does not subclass DateParser" in caplog.text
|
||||
|
||||
def test_skips_plugins_that_fail_to_load(
|
||||
self,
|
||||
mocker: pytest_mock.MockerFixture,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
fake_ep = mocker.MagicMock(spec=EntryPoint)
|
||||
fake_ep.name = "failing_plugin"
|
||||
fake_ep.load.side_effect = ImportError("cannot import")
|
||||
|
||||
mocker.patch(
|
||||
"documents.plugins.date_parsing.entry_points",
|
||||
return_value=(fake_ep,),
|
||||
)
|
||||
|
||||
result = _discover_parser_class()
|
||||
assert result is RegexDateParserPlugin
|
||||
assert "Unable to load date parser plugin failing_plugin" in caplog.text
|
||||
|
||||
def test_returns_single_valid_plugin_without_warning(
|
||||
self,
|
||||
mocker: pytest_mock.MockerFixture,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""If exactly one valid plugin is discovered, it should be returned without logging a warning."""
|
||||
|
||||
ep = mocker.MagicMock(spec=EntryPoint)
|
||||
ep.name = "alpha"
|
||||
ep.load.return_value = AlphaParser
|
||||
|
||||
mock_entry_points = mocker.patch(
|
||||
"documents.plugins.date_parsing.entry_points",
|
||||
return_value=(ep,),
|
||||
)
|
||||
|
||||
with caplog.at_level(
|
||||
logging.WARNING,
|
||||
logger="documents.plugins.date_parsing",
|
||||
):
|
||||
result = _discover_parser_class()
|
||||
|
||||
# It should have called entry_points with the correct group
|
||||
mock_entry_points.assert_called_once_with(group=DATE_PARSER_ENTRY_POINT_GROUP)
|
||||
|
||||
# The discovered class should be exactly our AlphaParser
|
||||
assert result is AlphaParser
|
||||
|
||||
# No warnings should have been logged
|
||||
assert not any(
|
||||
"Multiple date parsers found" in record.message for record in caplog.records
|
||||
), "Unexpected warning logged when only one plugin was found"
|
||||
|
||||
def test_returns_first_valid_plugin_by_name(
|
||||
self,
|
||||
mocker: pytest_mock.MockerFixture,
|
||||
) -> None:
|
||||
ep_a = mocker.MagicMock(spec=EntryPoint)
|
||||
ep_a.name = "alpha"
|
||||
ep_a.load.return_value = AlphaParser
|
||||
|
||||
ep_b = mocker.MagicMock(spec=EntryPoint)
|
||||
ep_b.name = "beta"
|
||||
ep_b.load.return_value = BetaParser
|
||||
|
||||
mocker.patch(
|
||||
"documents.plugins.date_parsing.entry_points",
|
||||
return_value=(ep_b, ep_a),
|
||||
)
|
||||
|
||||
result = _discover_parser_class()
|
||||
assert result is AlphaParser
|
||||
|
||||
def test_logs_warning_if_multiple_plugins_found(
|
||||
self,
|
||||
mocker: pytest_mock.MockerFixture,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
ep1 = mocker.MagicMock(spec=EntryPoint)
|
||||
ep1.name = "a"
|
||||
ep1.load.return_value = AlphaParser
|
||||
|
||||
ep2 = mocker.MagicMock(spec=EntryPoint)
|
||||
ep2.name = "b"
|
||||
ep2.load.return_value = BetaParser
|
||||
|
||||
mocker.patch(
|
||||
"documents.plugins.date_parsing.entry_points",
|
||||
return_value=(ep1, ep2),
|
||||
)
|
||||
|
||||
with caplog.at_level(
|
||||
logging.WARNING,
|
||||
logger="documents.plugins.date_parsing",
|
||||
):
|
||||
result = _discover_parser_class()
|
||||
|
||||
# Should select alphabetically first plugin ("a")
|
||||
assert result is AlphaParser
|
||||
|
||||
# Should log a warning mentioning multiple parsers
|
||||
assert any(
|
||||
"Multiple date parsers found" in record.message for record in caplog.records
|
||||
), "Expected a warning about multiple date parsers"
|
||||
|
||||
def test_cache_behavior_only_runs_once(
|
||||
self,
|
||||
mocker: pytest_mock.MockerFixture,
|
||||
) -> None:
|
||||
mock_entry_points = mocker.patch(
|
||||
"documents.plugins.date_parsing.entry_points",
|
||||
return_value=(),
|
||||
)
|
||||
|
||||
# First call populates cache
|
||||
_discover_parser_class()
|
||||
# Second call should not re-invoke entry_points
|
||||
_discover_parser_class()
|
||||
mock_entry_points.assert_called_once()
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.date_parsing
|
||||
@pytest.mark.usefixtures("mock_date_parser_settings")
|
||||
class TestGetDateParser:
|
||||
"""Tests for the get_date_parser() factory function."""
|
||||
|
||||
def test_returns_instance_of_discovered_class(
|
||||
self,
|
||||
mocker: pytest_mock.MockerFixture,
|
||||
) -> None:
|
||||
mocker.patch(
|
||||
"documents.plugins.date_parsing._discover_parser_class",
|
||||
return_value=AlphaParser,
|
||||
)
|
||||
parser = get_date_parser()
|
||||
assert isinstance(parser, AlphaParser)
|
||||
assert isinstance(parser.config, DateParserConfig)
|
||||
assert parser.config.languages == ["en", "de"]
|
||||
assert parser.config.timezone_str == "UTC"
|
||||
assert parser.config.ignore_dates == [datetime.date(1900, 1, 1)]
|
||||
assert parser.config.filename_date_order == "YMD"
|
||||
assert parser.config.content_date_order == "DMY"
|
||||
# Check reference_time near now
|
||||
delta = abs((parser.config.reference_time - timezone.now()).total_seconds())
|
||||
assert delta < 2
|
||||
|
||||
def test_uses_default_regex_parser_when_no_plugins(
|
||||
self,
|
||||
mocker: pytest_mock.MockerFixture,
|
||||
) -> None:
|
||||
mocker.patch(
|
||||
"documents.plugins.date_parsing._discover_parser_class",
|
||||
return_value=RegexDateParserPlugin,
|
||||
)
|
||||
parser = get_date_parser()
|
||||
assert isinstance(parser, RegexDateParserPlugin)
|
||||
433
src/documents/tests/date_parsing/test_date_parsing.py
Normal file
433
src/documents/tests/date_parsing/test_date_parsing.py
Normal file
@@ -0,0 +1,433 @@
|
||||
import datetime
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
import pytest_mock
|
||||
|
||||
from documents.plugins.date_parsing.base import DateParserConfig
|
||||
from documents.plugins.date_parsing.regex_parser import RegexDateParserPlugin
|
||||
|
||||
|
||||
@pytest.mark.date_parsing
|
||||
class TestParseString:
|
||||
"""Tests for DateParser._parse_string method via RegexDateParser."""
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("date_string", "date_order", "expected_year"),
|
||||
[
|
||||
pytest.param("15/01/2024", "DMY", 2024, id="dmy_slash"),
|
||||
pytest.param("01/15/2024", "MDY", 2024, id="mdy_slash"),
|
||||
pytest.param("2024/01/15", "YMD", 2024, id="ymd_slash"),
|
||||
pytest.param("January 15, 2024", "DMY", 2024, id="month_name_comma"),
|
||||
pytest.param("15 Jan 2024", "DMY", 2024, id="day_abbr_month_year"),
|
||||
pytest.param("15.01.2024", "DMY", 2024, id="dmy_dot"),
|
||||
pytest.param("2024-01-15", "YMD", 2024, id="ymd_dash"),
|
||||
],
|
||||
)
|
||||
def test_parse_string_valid_formats(
|
||||
self,
|
||||
regex_parser: RegexDateParserPlugin,
|
||||
date_string: str,
|
||||
date_order: str,
|
||||
expected_year: int,
|
||||
) -> None:
|
||||
"""Should correctly parse various valid date formats."""
|
||||
result = regex_parser._parse_string(date_string, date_order)
|
||||
|
||||
assert result is not None
|
||||
assert result.year == expected_year
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"invalid_string",
|
||||
[
|
||||
pytest.param("not a date", id="plain_text"),
|
||||
pytest.param("32/13/2024", id="invalid_day_month"),
|
||||
pytest.param("", id="empty_string"),
|
||||
pytest.param("abc123xyz", id="alphanumeric_gibberish"),
|
||||
pytest.param("99/99/9999", id="out_of_range"),
|
||||
],
|
||||
)
|
||||
def test_parse_string_invalid_input(
|
||||
self,
|
||||
regex_parser: RegexDateParserPlugin,
|
||||
invalid_string: str,
|
||||
) -> None:
|
||||
"""Should return None for invalid date strings."""
|
||||
result = regex_parser._parse_string(invalid_string, "DMY")
|
||||
|
||||
assert result is None
|
||||
|
||||
def test_parse_string_handles_exceptions(
|
||||
self,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
mocker: pytest_mock.MockerFixture,
|
||||
regex_parser: RegexDateParserPlugin,
|
||||
) -> None:
|
||||
"""Should handle and log exceptions from dateparser gracefully."""
|
||||
with caplog.at_level(
|
||||
logging.ERROR,
|
||||
logger="documents.plugins.date_parsing.base",
|
||||
):
|
||||
# We still need to mock dateparser.parse to force the exception
|
||||
mocker.patch(
|
||||
"documents.plugins.date_parsing.base.dateparser.parse",
|
||||
side_effect=ValueError(
|
||||
"Parsing error: 01/01/2024",
|
||||
),
|
||||
)
|
||||
|
||||
# 1. Execute the function under test
|
||||
result = regex_parser._parse_string("01/01/2024", "DMY")
|
||||
|
||||
assert result is None
|
||||
|
||||
# Check if an error was logged
|
||||
assert len(caplog.records) == 1
|
||||
assert caplog.records[0].levelname == "ERROR"
|
||||
|
||||
# Check if the specific error message is present
|
||||
assert "Error while parsing date string" in caplog.text
|
||||
# Optional: Check for the exact exception message if it's included in the log
|
||||
assert "Parsing error: 01/01/2024" in caplog.text
|
||||
|
||||
|
||||
@pytest.mark.date_parsing
|
||||
class TestFilterDate:
|
||||
"""Tests for DateParser._filter_date method via RegexDateParser."""
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("date", "expected_output"),
|
||||
[
|
||||
# Valid Dates
|
||||
pytest.param(
|
||||
datetime.datetime(2024, 1, 10, tzinfo=datetime.timezone.utc),
|
||||
datetime.datetime(2024, 1, 10, tzinfo=datetime.timezone.utc),
|
||||
id="valid_past_date",
|
||||
),
|
||||
pytest.param(
|
||||
datetime.datetime(2024, 1, 15, 12, 0, 0, tzinfo=datetime.timezone.utc),
|
||||
datetime.datetime(2024, 1, 15, 12, 0, 0, tzinfo=datetime.timezone.utc),
|
||||
id="exactly_at_reference",
|
||||
),
|
||||
pytest.param(
|
||||
datetime.datetime(1901, 1, 1, tzinfo=datetime.timezone.utc),
|
||||
datetime.datetime(1901, 1, 1, tzinfo=datetime.timezone.utc),
|
||||
id="year_1901_valid",
|
||||
),
|
||||
# Date is > reference_time
|
||||
pytest.param(
|
||||
datetime.datetime(2024, 1, 16, tzinfo=datetime.timezone.utc),
|
||||
None,
|
||||
id="future_date_day_after",
|
||||
),
|
||||
# date.date() in ignore_dates
|
||||
pytest.param(
|
||||
datetime.datetime(2024, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc),
|
||||
None,
|
||||
id="ignored_date_midnight_jan1",
|
||||
),
|
||||
pytest.param(
|
||||
datetime.datetime(2024, 1, 1, 10, 30, 0, tzinfo=datetime.timezone.utc),
|
||||
None,
|
||||
id="ignored_date_midday_jan1",
|
||||
),
|
||||
pytest.param(
|
||||
datetime.datetime(2024, 12, 25, 15, 0, 0, tzinfo=datetime.timezone.utc),
|
||||
None,
|
||||
id="ignored_date_dec25_future",
|
||||
),
|
||||
# date.year <= 1900
|
||||
pytest.param(
|
||||
datetime.datetime(1899, 12, 31, tzinfo=datetime.timezone.utc),
|
||||
None,
|
||||
id="year_1899",
|
||||
),
|
||||
pytest.param(
|
||||
datetime.datetime(1900, 1, 1, tzinfo=datetime.timezone.utc),
|
||||
None,
|
||||
id="year_1900_boundary",
|
||||
),
|
||||
# date is None
|
||||
pytest.param(None, None, id="none_input"),
|
||||
],
|
||||
)
|
||||
def test_filter_date_validation_rules(
|
||||
self,
|
||||
config_with_ignore_dates: DateParserConfig,
|
||||
date: datetime.datetime | None,
|
||||
expected_output: datetime.datetime | None,
|
||||
) -> None:
|
||||
"""Should correctly validate dates against various rules."""
|
||||
parser = RegexDateParserPlugin(config_with_ignore_dates)
|
||||
result = parser._filter_date(date)
|
||||
assert result == expected_output
|
||||
|
||||
def test_filter_date_respects_ignore_dates(
|
||||
self,
|
||||
config_with_ignore_dates: DateParserConfig,
|
||||
) -> None:
|
||||
"""Should filter out dates in the ignore_dates set."""
|
||||
parser = RegexDateParserPlugin(config_with_ignore_dates)
|
||||
|
||||
ignored_date = datetime.datetime(
|
||||
2024,
|
||||
1,
|
||||
1,
|
||||
12,
|
||||
0,
|
||||
tzinfo=datetime.timezone.utc,
|
||||
)
|
||||
another_ignored = datetime.datetime(
|
||||
2024,
|
||||
12,
|
||||
25,
|
||||
15,
|
||||
30,
|
||||
tzinfo=datetime.timezone.utc,
|
||||
)
|
||||
allowed_date = datetime.datetime(
|
||||
2024,
|
||||
1,
|
||||
2,
|
||||
12,
|
||||
0,
|
||||
tzinfo=datetime.timezone.utc,
|
||||
)
|
||||
|
||||
assert parser._filter_date(ignored_date) is None
|
||||
assert parser._filter_date(another_ignored) is None
|
||||
assert parser._filter_date(allowed_date) == allowed_date
|
||||
|
||||
def test_filter_date_timezone_aware(
|
||||
self,
|
||||
regex_parser: RegexDateParserPlugin,
|
||||
) -> None:
|
||||
"""Should work with timezone-aware datetimes."""
|
||||
date_utc = datetime.datetime(2024, 1, 10, 12, 0, tzinfo=datetime.timezone.utc)
|
||||
|
||||
result = regex_parser._filter_date(date_utc)
|
||||
|
||||
assert result is not None
|
||||
assert result.tzinfo is not None
|
||||
|
||||
|
||||
@pytest.mark.date_parsing
|
||||
class TestRegexDateParser:
|
||||
@pytest.mark.parametrize(
|
||||
("filename", "content", "expected"),
|
||||
[
|
||||
pytest.param(
|
||||
"report-2023-12-25.txt",
|
||||
"Event recorded on 25/12/2022.",
|
||||
[
|
||||
datetime.datetime(2023, 12, 25, tzinfo=datetime.timezone.utc),
|
||||
datetime.datetime(2022, 12, 25, tzinfo=datetime.timezone.utc),
|
||||
],
|
||||
id="filename-y-m-d_and_content-d-m-y",
|
||||
),
|
||||
pytest.param(
|
||||
"img_2023.01.02.jpg",
|
||||
"Taken on 01/02/2023",
|
||||
[
|
||||
datetime.datetime(2023, 1, 2, tzinfo=datetime.timezone.utc),
|
||||
datetime.datetime(2023, 2, 1, tzinfo=datetime.timezone.utc),
|
||||
],
|
||||
id="ambiguous-dates-respect-orders",
|
||||
),
|
||||
pytest.param(
|
||||
"notes.txt",
|
||||
"bad date 99/99/9999 and 25/12/2022",
|
||||
[
|
||||
datetime.datetime(2022, 12, 25, tzinfo=datetime.timezone.utc),
|
||||
],
|
||||
id="parse-exception-skips-bad-and-yields-good",
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_parse_returns_expected_dates(
|
||||
self,
|
||||
base_config: DateParserConfig,
|
||||
mocker: pytest_mock.MockerFixture,
|
||||
filename: str,
|
||||
content: str,
|
||||
expected: list[datetime.datetime],
|
||||
) -> None:
|
||||
"""
|
||||
High-level tests that exercise RegexDateParser.parse only.
|
||||
dateparser.parse is mocked so tests are deterministic.
|
||||
"""
|
||||
parser = RegexDateParserPlugin(base_config)
|
||||
|
||||
# Patch the dateparser.parse
|
||||
target = "documents.plugins.date_parsing.base.dateparser.parse"
|
||||
|
||||
def fake_parse(
|
||||
date_string: str,
|
||||
settings: dict[str, Any] | None = None,
|
||||
locales: None = None,
|
||||
) -> datetime.datetime | None:
|
||||
date_order = settings.get("DATE_ORDER") if settings else None
|
||||
|
||||
# Filename-style YYYY-MM-DD / YYYY.MM.DD
|
||||
if (
|
||||
"2023-12-25" in date_string
|
||||
or "2023.12.25" in date_string
|
||||
or "2023-12-25" in date_string
|
||||
):
|
||||
return datetime.datetime(2023, 12, 25, tzinfo=datetime.timezone.utc)
|
||||
|
||||
# content DMY 25/12/2022
|
||||
if "25/12/2022" in date_string or "25-12-2022" in date_string:
|
||||
return datetime.datetime(2022, 12, 25, tzinfo=datetime.timezone.utc)
|
||||
|
||||
# filename YMD 2023.01.02
|
||||
if "2023.01.02" in date_string or "2023-01-02" in date_string:
|
||||
return datetime.datetime(2023, 1, 2, tzinfo=datetime.timezone.utc)
|
||||
|
||||
# ambiguous 01/02/2023 -> respect DATE_ORDER setting
|
||||
if "01/02/2023" in date_string:
|
||||
if date_order == "DMY":
|
||||
return datetime.datetime(2023, 2, 1, tzinfo=datetime.timezone.utc)
|
||||
if date_order == "YMD":
|
||||
return datetime.datetime(2023, 1, 2, tzinfo=datetime.timezone.utc)
|
||||
# fallback
|
||||
return datetime.datetime(2023, 2, 1, tzinfo=datetime.timezone.utc)
|
||||
|
||||
# simulate parse failure for malformed input
|
||||
if "99/99/9999" in date_string or "bad date" in date_string:
|
||||
raise Exception("parse failed for malformed date")
|
||||
|
||||
return None
|
||||
|
||||
mocker.patch(target, side_effect=fake_parse)
|
||||
|
||||
results = list(parser.parse(filename, content))
|
||||
|
||||
assert results == expected
|
||||
for dt in results:
|
||||
assert dt.tzinfo is not None
|
||||
|
||||
def test_parse_filters_future_and_ignored_dates(
|
||||
self,
|
||||
mocker: pytest_mock.MockerFixture,
|
||||
) -> None:
|
||||
"""
|
||||
Ensure parser filters out:
|
||||
- dates after reference_time
|
||||
- dates whose .date() are in ignore_dates
|
||||
"""
|
||||
cfg = DateParserConfig(
|
||||
languages=["en"],
|
||||
timezone_str="UTC",
|
||||
ignore_dates={datetime.date(2023, 12, 10)},
|
||||
reference_time=datetime.datetime(
|
||||
2024,
|
||||
1,
|
||||
15,
|
||||
12,
|
||||
0,
|
||||
0,
|
||||
tzinfo=datetime.timezone.utc,
|
||||
),
|
||||
filename_date_order="YMD",
|
||||
content_date_order="DMY",
|
||||
)
|
||||
parser = RegexDateParserPlugin(cfg)
|
||||
|
||||
target = "documents.plugins.date_parsing.base.dateparser.parse"
|
||||
|
||||
def fake_parse(
|
||||
date_string: str,
|
||||
settings: dict[str, Any] | None = None,
|
||||
locales: None = None,
|
||||
) -> datetime.datetime | None:
|
||||
if "10/12/2023" in date_string or "10-12-2023" in date_string:
|
||||
# ignored date
|
||||
return datetime.datetime(2023, 12, 10, tzinfo=datetime.timezone.utc)
|
||||
if "01/02/2024" in date_string or "01-02-2024" in date_string:
|
||||
# future relative to reference_time -> filtered
|
||||
return datetime.datetime(2024, 2, 1, tzinfo=datetime.timezone.utc)
|
||||
if "05/01/2023" in date_string or "05-01-2023" in date_string:
|
||||
# valid
|
||||
return datetime.datetime(2023, 1, 5, tzinfo=datetime.timezone.utc)
|
||||
return None
|
||||
|
||||
mocker.patch(target, side_effect=fake_parse)
|
||||
|
||||
content = "Ignored: 10/12/2023, Future: 01/02/2024, Keep: 05/01/2023"
|
||||
results = list(parser.parse("whatever.txt", content))
|
||||
|
||||
assert results == [datetime.datetime(2023, 1, 5, tzinfo=datetime.timezone.utc)]
|
||||
|
||||
def test_parse_handles_no_matches_and_returns_empty_list(
|
||||
self,
|
||||
base_config: DateParserConfig,
|
||||
) -> None:
|
||||
"""
|
||||
When there are no matching date-like substrings, parse should yield nothing.
|
||||
"""
|
||||
parser = RegexDateParserPlugin(base_config)
|
||||
results = list(
|
||||
parser.parse("no-dates.txt", "this has no dates whatsoever"),
|
||||
)
|
||||
assert results == []
|
||||
|
||||
def test_parse_skips_filename_when_filename_date_order_none(
|
||||
self,
|
||||
mocker: pytest_mock.MockerFixture,
|
||||
) -> None:
|
||||
"""
|
||||
When filename_date_order is None the parser must not attempt to parse the filename.
|
||||
Only dates found in the content should be passed to dateparser.parse.
|
||||
"""
|
||||
cfg = DateParserConfig(
|
||||
languages=["en"],
|
||||
timezone_str="UTC",
|
||||
ignore_dates=set(),
|
||||
reference_time=datetime.datetime(
|
||||
2024,
|
||||
1,
|
||||
15,
|
||||
12,
|
||||
0,
|
||||
0,
|
||||
tzinfo=datetime.timezone.utc,
|
||||
),
|
||||
filename_date_order=None,
|
||||
content_date_order="DMY",
|
||||
)
|
||||
parser = RegexDateParserPlugin(cfg)
|
||||
|
||||
# Patch the module's dateparser.parse so we can inspect calls
|
||||
target = "documents.plugins.date_parsing.base.dateparser.parse"
|
||||
|
||||
def fake_parse(
|
||||
date_string: str,
|
||||
settings: dict[str, Any] | None = None,
|
||||
locales: None = None,
|
||||
) -> datetime.datetime | None:
|
||||
# return distinct datetimes so we can tell which source was parsed
|
||||
if "25/12/2022" in date_string:
|
||||
return datetime.datetime(2022, 12, 25, tzinfo=datetime.timezone.utc)
|
||||
if "2023-12-25" in date_string:
|
||||
return datetime.datetime(2023, 12, 25, tzinfo=datetime.timezone.utc)
|
||||
return None
|
||||
|
||||
mock = mocker.patch(target, side_effect=fake_parse)
|
||||
|
||||
filename = "report-2023-12-25.txt"
|
||||
content = "Event recorded on 25/12/2022."
|
||||
|
||||
results = list(parser.parse(filename, content))
|
||||
|
||||
# Only the content date should have been parsed -> one call
|
||||
assert mock.call_count == 1
|
||||
|
||||
# # first call, first positional arg
|
||||
called_date_string = mock.call_args_list[0][0][0]
|
||||
assert "25/12/2022" in called_date_string
|
||||
# And the parser should have yielded the corresponding datetime
|
||||
assert results == [
|
||||
datetime.datetime(2022, 12, 25, tzinfo=datetime.timezone.utc),
|
||||
]
|
||||
@@ -1989,11 +1989,11 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
|
||||
response = self.client.get(f"/api/documents/{doc.pk}/suggestions/")
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
|
||||
@mock.patch("documents.parsers.parse_date_generator")
|
||||
@mock.patch("documents.views.get_date_parser")
|
||||
@override_settings(NUMBER_OF_SUGGESTED_DATES=0)
|
||||
def test_get_suggestions_dates_disabled(
|
||||
self,
|
||||
parse_date_generator,
|
||||
mock_get_date_parser: mock.MagicMock,
|
||||
):
|
||||
"""
|
||||
GIVEN:
|
||||
@@ -2010,7 +2010,8 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
|
||||
)
|
||||
|
||||
self.client.get(f"/api/documents/{doc.pk}/suggestions/")
|
||||
self.assertFalse(parse_date_generator.called)
|
||||
|
||||
mock_get_date_parser.assert_not_called()
|
||||
|
||||
def test_saved_views(self) -> None:
|
||||
u1 = User.objects.create_superuser("user1")
|
||||
|
||||
@@ -838,3 +838,61 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.action.refresh_from_db()
|
||||
self.assertEqual(self.action.assign_title, "Patched Title")
|
||||
|
||||
def test_password_action_passwords_field(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Nothing
|
||||
WHEN:
|
||||
- A workflow password removal action is created with passwords set
|
||||
THEN:
|
||||
- The passwords field is correctly stored and retrieved
|
||||
"""
|
||||
passwords = ["password1", "password2", "password3"]
|
||||
response = self.client.post(
|
||||
"/api/workflow_actions/",
|
||||
json.dumps(
|
||||
{
|
||||
"type": WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL,
|
||||
"passwords": passwords,
|
||||
},
|
||||
),
|
||||
content_type="application/json",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
|
||||
self.assertEqual(response.data["passwords"], passwords)
|
||||
|
||||
def test_password_action_invalid_passwords_field(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Nothing
|
||||
WHEN:
|
||||
- A workflow password removal action is created with invalid passwords field
|
||||
THEN:
|
||||
- The required validation error is raised
|
||||
"""
|
||||
for payload in [
|
||||
{"type": WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL},
|
||||
{
|
||||
"type": WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL,
|
||||
"passwords": "",
|
||||
},
|
||||
{
|
||||
"type": WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL,
|
||||
"passwords": [],
|
||||
},
|
||||
{
|
||||
"type": WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL,
|
||||
"passwords": ["", "password2"],
|
||||
},
|
||||
]:
|
||||
response = self.client.post(
|
||||
"/api/workflow_actions/",
|
||||
json.dumps(payload),
|
||||
content_type="application/json",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
self.assertIn(
|
||||
"Passwords are required",
|
||||
str(response.data["non_field_errors"][0]),
|
||||
)
|
||||
|
||||
@@ -1,538 +0,0 @@
|
||||
import datetime
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
import pytest
|
||||
from pytest_django.fixtures import SettingsWrapper
|
||||
|
||||
from documents.parsers import parse_date
|
||||
from documents.parsers import parse_date_generator
|
||||
|
||||
|
||||
@pytest.mark.django_db()
|
||||
class TestDate:
|
||||
def test_date_format_1(self) -> None:
|
||||
text = "lorem ipsum 130218 lorem ipsum"
|
||||
assert parse_date("", text) is None
|
||||
|
||||
def test_date_format_2(self) -> None:
|
||||
text = "lorem ipsum 2018 lorem ipsum"
|
||||
assert parse_date("", text) is None
|
||||
|
||||
def test_date_format_3(self) -> None:
|
||||
text = "lorem ipsum 20180213 lorem ipsum"
|
||||
assert parse_date("", text) is None
|
||||
|
||||
def test_date_format_4(self, settings_timezone: ZoneInfo) -> None:
|
||||
text = "lorem ipsum 13.02.2018 lorem ipsum"
|
||||
date = parse_date("", text)
|
||||
assert date == datetime.datetime(2018, 2, 13, 0, 0, tzinfo=settings_timezone)
|
||||
|
||||
def test_date_format_5(self, settings_timezone: ZoneInfo) -> None:
|
||||
text = "lorem ipsum 130218, 2018, 20180213 and lorem 13.02.2018 lorem ipsum"
|
||||
date = parse_date("", text)
|
||||
assert date == datetime.datetime(2018, 2, 13, 0, 0, tzinfo=settings_timezone)
|
||||
|
||||
def test_date_format_6(self) -> None:
|
||||
text = (
|
||||
"lorem ipsum\n"
|
||||
"Wohnort\n"
|
||||
"3100\n"
|
||||
"IBAN\n"
|
||||
"AT87 4534\n"
|
||||
"1234\n"
|
||||
"1234 5678\n"
|
||||
"BIC\n"
|
||||
"lorem ipsum"
|
||||
)
|
||||
assert parse_date("", text) is None
|
||||
|
||||
def test_date_format_7(
|
||||
self,
|
||||
settings: SettingsWrapper,
|
||||
settings_timezone: ZoneInfo,
|
||||
) -> None:
|
||||
settings.DATE_PARSER_LANGUAGES = ["de"]
|
||||
text = "lorem ipsum\nMärz 2019\nlorem ipsum"
|
||||
date = parse_date("", text)
|
||||
assert date == datetime.datetime(2019, 3, 1, 0, 0, tzinfo=settings_timezone)
|
||||
|
||||
def test_date_format_8(
|
||||
self,
|
||||
settings: SettingsWrapper,
|
||||
settings_timezone: ZoneInfo,
|
||||
) -> None:
|
||||
settings.DATE_PARSER_LANGUAGES = ["de"]
|
||||
text = (
|
||||
"lorem ipsum\n"
|
||||
"Wohnort\n"
|
||||
"3100\n"
|
||||
"IBAN\n"
|
||||
"AT87 4534\n"
|
||||
"1234\n"
|
||||
"1234 5678\n"
|
||||
"BIC\n"
|
||||
"lorem ipsum\n"
|
||||
"März 2020"
|
||||
)
|
||||
assert parse_date("", text) == datetime.datetime(
|
||||
2020,
|
||||
3,
|
||||
1,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
|
||||
def test_date_format_9(
|
||||
self,
|
||||
settings: SettingsWrapper,
|
||||
settings_timezone: ZoneInfo,
|
||||
) -> None:
|
||||
settings.DATE_PARSER_LANGUAGES = ["de"]
|
||||
text = "lorem ipsum\n27. Nullmonth 2020\nMärz 2020\nlorem ipsum"
|
||||
assert parse_date("", text) == datetime.datetime(
|
||||
2020,
|
||||
3,
|
||||
1,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
|
||||
def test_date_format_10(self, settings_timezone: ZoneInfo) -> None:
|
||||
text = "Customer Number Currency 22-MAR-2022 Credit Card 1934829304"
|
||||
assert parse_date("", text) == datetime.datetime(
|
||||
2022,
|
||||
3,
|
||||
22,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
|
||||
def test_date_format_11(self, settings_timezone: ZoneInfo) -> None:
|
||||
text = "Customer Number Currency 22 MAR 2022 Credit Card 1934829304"
|
||||
assert parse_date("", text) == datetime.datetime(
|
||||
2022,
|
||||
3,
|
||||
22,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
|
||||
def test_date_format_12(self, settings_timezone: ZoneInfo) -> None:
|
||||
text = "Customer Number Currency 22/MAR/2022 Credit Card 1934829304"
|
||||
assert parse_date("", text) == datetime.datetime(
|
||||
2022,
|
||||
3,
|
||||
22,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
|
||||
def test_date_format_13(self, settings_timezone: ZoneInfo) -> None:
|
||||
text = "Customer Number Currency 22.MAR.2022 Credit Card 1934829304"
|
||||
assert parse_date("", text) == datetime.datetime(
|
||||
2022,
|
||||
3,
|
||||
22,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
|
||||
def test_date_format_14(self, settings_timezone: ZoneInfo) -> None:
|
||||
text = "Customer Number Currency 22.MAR 2022 Credit Card 1934829304"
|
||||
assert parse_date("", text) == datetime.datetime(
|
||||
2022,
|
||||
3,
|
||||
22,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
|
||||
def test_date_format_15(self) -> None:
|
||||
text = "Customer Number Currency 22.MAR.22 Credit Card 1934829304"
|
||||
assert parse_date("", text) is None
|
||||
|
||||
def test_date_format_16(self) -> None:
|
||||
text = "Customer Number Currency 22.MAR,22 Credit Card 1934829304"
|
||||
assert parse_date("", text) is None
|
||||
|
||||
def test_date_format_17(self) -> None:
|
||||
text = "Customer Number Currency 22,MAR,2022 Credit Card 1934829304"
|
||||
assert parse_date("", text) is None
|
||||
|
||||
def test_date_format_18(self) -> None:
|
||||
text = "Customer Number Currency 22 MAR,2022 Credit Card 1934829304"
|
||||
assert parse_date("", text) is None
|
||||
|
||||
def test_date_format_19(self, settings_timezone: ZoneInfo) -> None:
|
||||
text = "Customer Number Currency 21st MAR 2022 Credit Card 1934829304"
|
||||
assert parse_date("", text) == datetime.datetime(
|
||||
2022,
|
||||
3,
|
||||
21,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
|
||||
def test_date_format_20(self, settings_timezone: ZoneInfo) -> None:
|
||||
text = "Customer Number Currency 22nd March 2022 Credit Card 1934829304"
|
||||
assert parse_date("", text) == datetime.datetime(
|
||||
2022,
|
||||
3,
|
||||
22,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
|
||||
def test_date_format_21(self, settings_timezone: ZoneInfo) -> None:
|
||||
text = "Customer Number Currency 2nd MAR 2022 Credit Card 1934829304"
|
||||
assert parse_date("", text) == datetime.datetime(
|
||||
2022,
|
||||
3,
|
||||
2,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
|
||||
def test_date_format_22(self, settings_timezone: ZoneInfo) -> None:
|
||||
text = "Customer Number Currency 23rd MAR 2022 Credit Card 1934829304"
|
||||
assert parse_date("", text) == datetime.datetime(
|
||||
2022,
|
||||
3,
|
||||
23,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
|
||||
def test_date_format_23(self, settings_timezone: ZoneInfo) -> None:
|
||||
text = "Customer Number Currency 24th MAR 2022 Credit Card 1934829304"
|
||||
assert parse_date("", text) == datetime.datetime(
|
||||
2022,
|
||||
3,
|
||||
24,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
|
||||
def test_date_format_24(self, settings_timezone: ZoneInfo) -> None:
|
||||
text = "Customer Number Currency 21-MAR-2022 Credit Card 1934829304"
|
||||
assert parse_date("", text) == datetime.datetime(
|
||||
2022,
|
||||
3,
|
||||
21,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
|
||||
def test_date_format_25(self, settings_timezone: ZoneInfo) -> None:
|
||||
text = "Customer Number Currency 25TH MAR 2022 Credit Card 1934829304"
|
||||
assert parse_date("", text) == datetime.datetime(
|
||||
2022,
|
||||
3,
|
||||
25,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
|
||||
def test_date_format_26(self, settings_timezone: ZoneInfo) -> None:
|
||||
text = "CHASE 0 September 25, 2019 JPMorgan Chase Bank, NA. P0 Box 182051"
|
||||
assert parse_date("", text) == datetime.datetime(
|
||||
2019,
|
||||
9,
|
||||
25,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
|
||||
def test_crazy_date_past(self) -> None:
|
||||
assert parse_date("", "01-07-0590 00:00:00") is None
|
||||
|
||||
def test_crazy_date_future(self) -> None:
|
||||
assert parse_date("", "01-07-2350 00:00:00") is None
|
||||
|
||||
def test_crazy_date_with_spaces(self) -> None:
|
||||
assert parse_date("", "20 408000l 2475") is None
|
||||
|
||||
def test_utf_month_names(
|
||||
self,
|
||||
settings: SettingsWrapper,
|
||||
settings_timezone: ZoneInfo,
|
||||
) -> None:
|
||||
settings.DATE_PARSER_LANGUAGES = ["fr", "de", "hr", "cs", "pl", "tr"]
|
||||
assert parse_date("", "13 décembre 2023") == datetime.datetime(
|
||||
2023,
|
||||
12,
|
||||
13,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
assert parse_date("", "13 août 2022") == datetime.datetime(
|
||||
2022,
|
||||
8,
|
||||
13,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
assert parse_date("", "11 März 2020") == datetime.datetime(
|
||||
2020,
|
||||
3,
|
||||
11,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
assert parse_date("", "17. ožujka 2018.") == datetime.datetime(
|
||||
2018,
|
||||
3,
|
||||
17,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
assert parse_date("", "1. veljače 2016.") == datetime.datetime(
|
||||
2016,
|
||||
2,
|
||||
1,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
assert parse_date("", "15. února 1985") == datetime.datetime(
|
||||
1985,
|
||||
2,
|
||||
15,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
assert parse_date("", "30. září 2011") == datetime.datetime(
|
||||
2011,
|
||||
9,
|
||||
30,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
assert parse_date("", "28. května 1990") == datetime.datetime(
|
||||
1990,
|
||||
5,
|
||||
28,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
assert parse_date("", "1. grudzień 1997") == datetime.datetime(
|
||||
1997,
|
||||
12,
|
||||
1,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
assert parse_date("", "17 Şubat 2024") == datetime.datetime(
|
||||
2024,
|
||||
2,
|
||||
17,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
assert parse_date("", "30 Ağustos 2012") == datetime.datetime(
|
||||
2012,
|
||||
8,
|
||||
30,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
assert parse_date("", "17 Eylül 2000") == datetime.datetime(
|
||||
2000,
|
||||
9,
|
||||
17,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
assert parse_date("", "5. október 1992") == datetime.datetime(
|
||||
1992,
|
||||
10,
|
||||
5,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
|
||||
def test_multiple_dates(self, settings_timezone: ZoneInfo) -> None:
|
||||
text = """This text has multiple dates.
|
||||
For example 02.02.2018, 22 July 2022 and December 2021.
|
||||
But not 24-12-9999 because it's in the future..."""
|
||||
dates = list(parse_date_generator("", text))
|
||||
|
||||
assert dates == [
|
||||
datetime.datetime(2018, 2, 2, 0, 0, tzinfo=settings_timezone),
|
||||
datetime.datetime(
|
||||
2022,
|
||||
7,
|
||||
22,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
),
|
||||
datetime.datetime(
|
||||
2021,
|
||||
12,
|
||||
1,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
),
|
||||
]
|
||||
|
||||
def test_filename_date_parse_valid_ymd(
|
||||
self,
|
||||
settings: SettingsWrapper,
|
||||
settings_timezone: ZoneInfo,
|
||||
) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- Date parsing from the filename is enabled
|
||||
- Filename date format is with Year Month Day (YMD)
|
||||
- Filename contains date matching the format
|
||||
|
||||
THEN:
|
||||
- Should parse the date from the filename
|
||||
"""
|
||||
settings.FILENAME_DATE_ORDER = "YMD"
|
||||
|
||||
assert parse_date(
|
||||
"/tmp/Scan-2022-04-01.pdf",
|
||||
"No date in here",
|
||||
) == datetime.datetime(2022, 4, 1, 0, 0, tzinfo=settings_timezone)
|
||||
|
||||
def test_filename_date_parse_valid_dmy(
|
||||
self,
|
||||
settings: SettingsWrapper,
|
||||
settings_timezone: ZoneInfo,
|
||||
) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- Date parsing from the filename is enabled
|
||||
- Filename date format is with Day Month Year (DMY)
|
||||
- Filename contains date matching the format
|
||||
|
||||
THEN:
|
||||
- Should parse the date from the filename
|
||||
"""
|
||||
settings.FILENAME_DATE_ORDER = "DMY"
|
||||
assert parse_date(
|
||||
"/tmp/Scan-10.01.2021.pdf",
|
||||
"No date in here",
|
||||
) == datetime.datetime(2021, 1, 10, 0, 0, tzinfo=settings_timezone)
|
||||
|
||||
def test_filename_date_parse_invalid(self, settings: SettingsWrapper) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- Date parsing from the filename is enabled
|
||||
- Filename includes no date
|
||||
- File content includes no date
|
||||
|
||||
THEN:
|
||||
- No date is parsed
|
||||
"""
|
||||
settings.FILENAME_DATE_ORDER = "YMD"
|
||||
assert parse_date("/tmp/20 408000l 2475 - test.pdf", "No date in here") is None
|
||||
|
||||
def test_filename_date_ignored_use_content(
|
||||
self,
|
||||
settings: SettingsWrapper,
|
||||
settings_timezone: ZoneInfo,
|
||||
) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- Date parsing from the filename is enabled
|
||||
- Filename date format is with Day Month Year (YMD)
|
||||
- Date order is Day Month Year (DMY, the default)
|
||||
- Filename contains date matching the format
|
||||
- Filename date is an ignored date
|
||||
- File content includes a date
|
||||
|
||||
THEN:
|
||||
- Should parse the date from the content not filename
|
||||
"""
|
||||
settings.FILENAME_DATE_ORDER = "YMD"
|
||||
settings.IGNORE_DATES = (datetime.date(2022, 4, 1),)
|
||||
assert parse_date(
|
||||
"/tmp/Scan-2022-04-01.pdf",
|
||||
"The matching date is 24.03.2022",
|
||||
) == datetime.datetime(2022, 3, 24, 0, 0, tzinfo=settings_timezone)
|
||||
|
||||
def test_ignored_dates_default_order(
|
||||
self,
|
||||
settings: SettingsWrapper,
|
||||
settings_timezone: ZoneInfo,
|
||||
) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- Ignore dates have been set
|
||||
- File content includes ignored dates
|
||||
- File content includes 1 non-ignored date
|
||||
|
||||
THEN:
|
||||
- Should parse the date non-ignored date from content
|
||||
"""
|
||||
settings.IGNORE_DATES = (datetime.date(2019, 11, 3), datetime.date(2020, 1, 17))
|
||||
text = "lorem ipsum 110319, 20200117 and lorem 13.02.2018 lorem ipsum"
|
||||
assert parse_date("", text) == datetime.datetime(
|
||||
2018,
|
||||
2,
|
||||
13,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
|
||||
def test_ignored_dates_order_ymd(
|
||||
self,
|
||||
settings: SettingsWrapper,
|
||||
settings_timezone: ZoneInfo,
|
||||
) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- Ignore dates have been set
|
||||
- Date order is Year Month Date (YMD)
|
||||
- File content includes ignored dates
|
||||
- File content includes 1 non-ignored date
|
||||
|
||||
THEN:
|
||||
- Should parse the date non-ignored date from content
|
||||
"""
|
||||
|
||||
settings.FILENAME_DATE_ORDER = "YMD"
|
||||
settings.IGNORE_DATES = (datetime.date(2019, 11, 3), datetime.date(2020, 1, 17))
|
||||
|
||||
text = "lorem ipsum 190311, 20200117 and lorem 13.02.2018 lorem ipsum"
|
||||
|
||||
assert parse_date("", text) == datetime.datetime(
|
||||
2018,
|
||||
2,
|
||||
13,
|
||||
0,
|
||||
0,
|
||||
tzinfo=settings_timezone,
|
||||
)
|
||||
@@ -2,6 +2,7 @@ import datetime
|
||||
import json
|
||||
import shutil
|
||||
import socket
|
||||
import tempfile
|
||||
from datetime import timedelta
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
@@ -60,6 +61,7 @@ from documents.tests.utils import DirectoriesMixin
|
||||
from documents.tests.utils import DummyProgressManager
|
||||
from documents.tests.utils import FileSystemAssertsMixin
|
||||
from documents.tests.utils import SampleDirMixin
|
||||
from documents.workflows.actions import execute_password_removal_action
|
||||
from paperless_mail.models import MailAccount
|
||||
from paperless_mail.models import MailRule
|
||||
|
||||
@@ -3722,6 +3724,196 @@ class TestWorkflows(
|
||||
|
||||
mock_post.assert_called_once()
|
||||
|
||||
@mock.patch("documents.bulk_edit.remove_password")
|
||||
def test_password_removal_action_attempts_multiple_passwords(
|
||||
self,
|
||||
mock_remove_password,
|
||||
):
|
||||
"""
|
||||
GIVEN:
|
||||
- Workflow password removal action
|
||||
- Multiple passwords provided
|
||||
WHEN:
|
||||
- Document updated triggering the workflow
|
||||
THEN:
|
||||
- Password removal is attempted until one succeeds
|
||||
"""
|
||||
doc = Document.objects.create(
|
||||
title="Protected",
|
||||
checksum="pw-checksum",
|
||||
)
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
|
||||
)
|
||||
action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL,
|
||||
passwords="wrong, right\n extra ",
|
||||
)
|
||||
workflow = Workflow.objects.create(name="Password workflow")
|
||||
workflow.triggers.add(trigger)
|
||||
workflow.actions.add(action)
|
||||
|
||||
mock_remove_password.side_effect = [
|
||||
ValueError("wrong password"),
|
||||
"OK",
|
||||
]
|
||||
|
||||
run_workflows(trigger.type, doc)
|
||||
|
||||
assert mock_remove_password.call_count == 2
|
||||
mock_remove_password.assert_has_calls(
|
||||
[
|
||||
mock.call(
|
||||
[doc.id],
|
||||
password="wrong",
|
||||
update_document=True,
|
||||
user=doc.owner,
|
||||
),
|
||||
mock.call(
|
||||
[doc.id],
|
||||
password="right",
|
||||
update_document=True,
|
||||
user=doc.owner,
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
@mock.patch("documents.bulk_edit.remove_password")
|
||||
def test_password_removal_action_fails_without_correct_password(
|
||||
self,
|
||||
mock_remove_password,
|
||||
):
|
||||
"""
|
||||
GIVEN:
|
||||
- Workflow password removal action
|
||||
- No correct password provided
|
||||
WHEN:
|
||||
- Document updated triggering the workflow
|
||||
THEN:
|
||||
- Password removal is attempted for all passwords and fails
|
||||
"""
|
||||
doc = Document.objects.create(
|
||||
title="Protected",
|
||||
checksum="pw-checksum-2",
|
||||
)
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
|
||||
)
|
||||
action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL,
|
||||
passwords=" \n , ",
|
||||
)
|
||||
workflow = Workflow.objects.create(name="Password workflow missing passwords")
|
||||
workflow.triggers.add(trigger)
|
||||
workflow.actions.add(action)
|
||||
|
||||
run_workflows(trigger.type, doc)
|
||||
|
||||
mock_remove_password.assert_not_called()
|
||||
|
||||
@mock.patch("documents.bulk_edit.remove_password")
|
||||
def test_password_removal_action_skips_without_passwords(
|
||||
self,
|
||||
mock_remove_password,
|
||||
):
|
||||
"""
|
||||
GIVEN:
|
||||
- Workflow password removal action with no passwords
|
||||
WHEN:
|
||||
- Workflow is run
|
||||
THEN:
|
||||
- Password removal is not attempted
|
||||
"""
|
||||
doc = Document.objects.create(
|
||||
title="Protected",
|
||||
checksum="pw-checksum-2",
|
||||
)
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
|
||||
)
|
||||
action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL,
|
||||
passwords="",
|
||||
)
|
||||
workflow = Workflow.objects.create(name="Password workflow missing passwords")
|
||||
workflow.triggers.add(trigger)
|
||||
workflow.actions.add(action)
|
||||
|
||||
run_workflows(trigger.type, doc)
|
||||
|
||||
mock_remove_password.assert_not_called()
|
||||
|
||||
@mock.patch("documents.bulk_edit.remove_password")
|
||||
def test_password_removal_consumable_document_deferred(
|
||||
self,
|
||||
mock_remove_password,
|
||||
):
|
||||
"""
|
||||
GIVEN:
|
||||
- Workflow password removal action
|
||||
- Simulated consumption trigger (a ConsumableDocument is used)
|
||||
WHEN:
|
||||
- Document consumption is finished
|
||||
THEN:
|
||||
- Password removal is attempted
|
||||
"""
|
||||
action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL,
|
||||
passwords="first, second",
|
||||
)
|
||||
|
||||
temp_dir = Path(tempfile.mkdtemp())
|
||||
original_file = temp_dir / "file.pdf"
|
||||
original_file.write_bytes(b"pdf content")
|
||||
consumable = ConsumableDocument(
|
||||
source=DocumentSource.ApiUpload,
|
||||
original_file=original_file,
|
||||
)
|
||||
|
||||
execute_password_removal_action(action, consumable, logging_group=None)
|
||||
|
||||
mock_remove_password.assert_not_called()
|
||||
|
||||
mock_remove_password.side_effect = [
|
||||
ValueError("bad password"),
|
||||
"OK",
|
||||
]
|
||||
|
||||
doc = Document.objects.create(
|
||||
checksum="pw-checksum-consumed",
|
||||
title="Protected",
|
||||
)
|
||||
|
||||
document_consumption_finished.send(
|
||||
sender=self.__class__,
|
||||
document=doc,
|
||||
)
|
||||
|
||||
assert mock_remove_password.call_count == 2
|
||||
mock_remove_password.assert_has_calls(
|
||||
[
|
||||
mock.call(
|
||||
[doc.id],
|
||||
password="first",
|
||||
update_document=True,
|
||||
user=doc.owner,
|
||||
),
|
||||
mock.call(
|
||||
[doc.id],
|
||||
password="second",
|
||||
update_document=True,
|
||||
user=doc.owner,
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
# ensure handler disconnected after first run
|
||||
document_consumption_finished.send(
|
||||
sender=self.__class__,
|
||||
document=doc,
|
||||
)
|
||||
assert mock_remove_password.call_count == 2
|
||||
|
||||
|
||||
class TestWebhookSend:
|
||||
def test_send_webhook_data_or_json(
|
||||
|
||||
@@ -148,7 +148,6 @@ from documents.models import Workflow
|
||||
from documents.models import WorkflowAction
|
||||
from documents.models import WorkflowTrigger
|
||||
from documents.parsers import get_parser_class_for_mime_type
|
||||
from documents.parsers import parse_date_generator
|
||||
from documents.permissions import AcknowledgeTasksPermissions
|
||||
from documents.permissions import PaperlessAdminPermissions
|
||||
from documents.permissions import PaperlessNotePermissions
|
||||
@@ -158,6 +157,7 @@ from documents.permissions import get_document_count_filter_for_user
|
||||
from documents.permissions import get_objects_for_user_owner_aware
|
||||
from documents.permissions import has_perms_owner_aware
|
||||
from documents.permissions import set_permissions_for_object
|
||||
from documents.plugins.date_parsing import get_date_parser
|
||||
from documents.schema import generate_object_with_permissions_schema
|
||||
from documents.serialisers import AcknowledgeTasksViewSerializer
|
||||
from documents.serialisers import BulkDownloadSerializer
|
||||
@@ -1023,16 +1023,17 @@ class DocumentViewSet(
|
||||
|
||||
dates = []
|
||||
if settings.NUMBER_OF_SUGGESTED_DATES > 0:
|
||||
gen = parse_date_generator(doc.filename, doc.content)
|
||||
dates = sorted(
|
||||
{
|
||||
i
|
||||
for i in itertools.islice(
|
||||
gen,
|
||||
settings.NUMBER_OF_SUGGESTED_DATES,
|
||||
)
|
||||
},
|
||||
)
|
||||
with get_date_parser() as date_parser:
|
||||
gen = date_parser.parse(doc.filename, doc.content)
|
||||
dates = sorted(
|
||||
{
|
||||
i
|
||||
for i in itertools.islice(
|
||||
gen,
|
||||
settings.NUMBER_OF_SUGGESTED_DATES,
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
resp_data = {
|
||||
"correspondents": [
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import logging
|
||||
import re
|
||||
from pathlib import Path
|
||||
|
||||
from django.conf import settings
|
||||
@@ -14,6 +15,7 @@ from documents.models import Document
|
||||
from documents.models import DocumentType
|
||||
from documents.models import WorkflowAction
|
||||
from documents.models import WorkflowTrigger
|
||||
from documents.signals import document_consumption_finished
|
||||
from documents.templating.workflows import parse_w_workflow_placeholders
|
||||
from documents.workflows.webhooks import send_webhook
|
||||
|
||||
@@ -265,3 +267,74 @@ def execute_webhook_action(
|
||||
f"Error occurred sending webhook: {e}",
|
||||
extra={"group": logging_group},
|
||||
)
|
||||
|
||||
|
||||
def execute_password_removal_action(
|
||||
action: WorkflowAction,
|
||||
document: Document | ConsumableDocument,
|
||||
logging_group,
|
||||
) -> None:
|
||||
"""
|
||||
Try to remove a password from a document using the configured list.
|
||||
"""
|
||||
passwords = action.passwords
|
||||
if not passwords:
|
||||
logger.warning(
|
||||
"Password removal action %s has no passwords configured",
|
||||
action.pk,
|
||||
extra={"group": logging_group},
|
||||
)
|
||||
return
|
||||
|
||||
passwords = [
|
||||
password.strip()
|
||||
for password in re.split(r"[,\n]", passwords)
|
||||
if password.strip()
|
||||
]
|
||||
|
||||
if isinstance(document, ConsumableDocument):
|
||||
# hook the consumption-finished signal to attempt password removal later
|
||||
def handler(sender, **kwargs):
|
||||
consumed_document: Document = kwargs.get("document")
|
||||
if consumed_document is not None:
|
||||
execute_password_removal_action(
|
||||
action,
|
||||
consumed_document,
|
||||
logging_group,
|
||||
)
|
||||
document_consumption_finished.disconnect(handler)
|
||||
|
||||
document_consumption_finished.connect(handler, weak=False)
|
||||
return
|
||||
|
||||
# import here to avoid circular dependency
|
||||
from documents.bulk_edit import remove_password
|
||||
|
||||
for password in passwords:
|
||||
try:
|
||||
remove_password(
|
||||
[document.id],
|
||||
password=password,
|
||||
update_document=True,
|
||||
user=document.owner,
|
||||
)
|
||||
logger.info(
|
||||
"Removed password from document %s using workflow action %s",
|
||||
document.pk,
|
||||
action.pk,
|
||||
extra={"group": logging_group},
|
||||
)
|
||||
return
|
||||
except ValueError as e:
|
||||
logger.warning(
|
||||
"Password removal failed for document %s with supplied password: %s",
|
||||
document.pk,
|
||||
e,
|
||||
extra={"group": logging_group},
|
||||
)
|
||||
|
||||
logger.error(
|
||||
"Password removal failed for document %s after trying all provided passwords",
|
||||
document.pk,
|
||||
extra={"group": logging_group},
|
||||
)
|
||||
|
||||
@@ -2,7 +2,7 @@ msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: paperless-ngx\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2026-01-31 17:12+0000\n"
|
||||
"POT-Creation-Date: 2026-02-03 20:10+0000\n"
|
||||
"PO-Revision-Date: 2022-02-17 04:17\n"
|
||||
"Last-Translator: \n"
|
||||
"Language-Team: English\n"
|
||||
@@ -89,7 +89,7 @@ msgstr ""
|
||||
msgid "Automatic"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:66 documents/models.py:444 documents/models.py:1646
|
||||
#: documents/models.py:66 documents/models.py:444 documents/models.py:1659
|
||||
#: paperless_mail/models.py:23 paperless_mail/models.py:143
|
||||
msgid "name"
|
||||
msgstr ""
|
||||
@@ -252,7 +252,7 @@ msgid "The position of this document in your physical document archive."
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:313 documents/models.py:688 documents/models.py:742
|
||||
#: documents/models.py:1689
|
||||
#: documents/models.py:1702
|
||||
msgid "document"
|
||||
msgstr ""
|
||||
|
||||
@@ -1089,183 +1089,197 @@ msgid "Webhook"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1410
|
||||
msgid "Password removal"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1414
|
||||
msgid "Workflow Action Type"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1415 documents/models.py:1648
|
||||
#: documents/models.py:1419 documents/models.py:1661
|
||||
#: paperless_mail/models.py:145
|
||||
msgid "order"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1418
|
||||
#: documents/models.py:1422
|
||||
msgid "assign title"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1422
|
||||
#: documents/models.py:1426
|
||||
msgid "Assign a document title, must be a Jinja2 template, see documentation."
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1430 paperless_mail/models.py:274
|
||||
#: documents/models.py:1434 paperless_mail/models.py:274
|
||||
msgid "assign this tag"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1439 paperless_mail/models.py:282
|
||||
#: documents/models.py:1443 paperless_mail/models.py:282
|
||||
msgid "assign this document type"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1448 paperless_mail/models.py:296
|
||||
#: documents/models.py:1452 paperless_mail/models.py:296
|
||||
msgid "assign this correspondent"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1457
|
||||
#: documents/models.py:1461
|
||||
msgid "assign this storage path"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1466
|
||||
#: documents/models.py:1470
|
||||
msgid "assign this owner"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1473
|
||||
#: documents/models.py:1477
|
||||
msgid "grant view permissions to these users"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1480
|
||||
#: documents/models.py:1484
|
||||
msgid "grant view permissions to these groups"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1487
|
||||
#: documents/models.py:1491
|
||||
msgid "grant change permissions to these users"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1494
|
||||
#: documents/models.py:1498
|
||||
msgid "grant change permissions to these groups"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1501
|
||||
#: documents/models.py:1505
|
||||
msgid "assign these custom fields"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1505
|
||||
#: documents/models.py:1509
|
||||
msgid "custom field values"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1509
|
||||
#: documents/models.py:1513
|
||||
msgid "Optional values to assign to the custom fields."
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1518
|
||||
#: documents/models.py:1522
|
||||
msgid "remove these tag(s)"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1523
|
||||
#: documents/models.py:1527
|
||||
msgid "remove all tags"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1530
|
||||
#: documents/models.py:1534
|
||||
msgid "remove these document type(s)"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1535
|
||||
#: documents/models.py:1539
|
||||
msgid "remove all document types"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1542
|
||||
#: documents/models.py:1546
|
||||
msgid "remove these correspondent(s)"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1547
|
||||
#: documents/models.py:1551
|
||||
msgid "remove all correspondents"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1554
|
||||
#: documents/models.py:1558
|
||||
msgid "remove these storage path(s)"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1559
|
||||
#: documents/models.py:1563
|
||||
msgid "remove all storage paths"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1566
|
||||
#: documents/models.py:1570
|
||||
msgid "remove these owner(s)"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1571
|
||||
#: documents/models.py:1575
|
||||
msgid "remove all owners"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1578
|
||||
#: documents/models.py:1582
|
||||
msgid "remove view permissions for these users"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1585
|
||||
#: documents/models.py:1589
|
||||
msgid "remove view permissions for these groups"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1592
|
||||
#: documents/models.py:1596
|
||||
msgid "remove change permissions for these users"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1599
|
||||
#: documents/models.py:1603
|
||||
msgid "remove change permissions for these groups"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1604
|
||||
#: documents/models.py:1608
|
||||
msgid "remove all permissions"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1611
|
||||
#: documents/models.py:1615
|
||||
msgid "remove these custom fields"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1616
|
||||
#: documents/models.py:1620
|
||||
msgid "remove all custom fields"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1625
|
||||
#: documents/models.py:1629
|
||||
msgid "email"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1634
|
||||
#: documents/models.py:1638
|
||||
msgid "webhook"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1638
|
||||
#: documents/models.py:1642
|
||||
msgid "passwords"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1646
|
||||
msgid ""
|
||||
"Passwords to try when removing PDF protection. Separate with commas or new "
|
||||
"lines."
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1651
|
||||
msgid "workflow action"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1639
|
||||
#: documents/models.py:1652
|
||||
msgid "workflow actions"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1654
|
||||
#: documents/models.py:1667
|
||||
msgid "triggers"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1661
|
||||
#: documents/models.py:1674
|
||||
msgid "actions"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1664 paperless_mail/models.py:154
|
||||
#: documents/models.py:1677 paperless_mail/models.py:154
|
||||
msgid "enabled"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1675
|
||||
#: documents/models.py:1688
|
||||
msgid "workflow"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1679
|
||||
#: documents/models.py:1692
|
||||
msgid "workflow trigger type"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1693
|
||||
#: documents/models.py:1706
|
||||
msgid "date run"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1699
|
||||
#: documents/models.py:1712
|
||||
msgid "workflow run"
|
||||
msgstr ""
|
||||
|
||||
#: documents/models.py:1700
|
||||
#: documents/models.py:1713
|
||||
msgid "workflow runs"
|
||||
msgstr ""
|
||||
|
||||
@@ -1309,7 +1323,7 @@ msgstr ""
|
||||
msgid "Duplicate document identifiers are not allowed."
|
||||
msgstr ""
|
||||
|
||||
#: documents/serialisers.py:2330 documents/views.py:2838
|
||||
#: documents/serialisers.py:2330 documents/views.py:2839
|
||||
#, python-format
|
||||
msgid "Documents not found: %(ids)s"
|
||||
msgstr ""
|
||||
@@ -1573,20 +1587,20 @@ msgstr ""
|
||||
msgid "Unable to parse URI {value}"
|
||||
msgstr ""
|
||||
|
||||
#: documents/views.py:2850
|
||||
#: documents/views.py:2851
|
||||
#, python-format
|
||||
msgid "Insufficient permissions to share document %(id)s."
|
||||
msgstr ""
|
||||
|
||||
#: documents/views.py:2893
|
||||
#: documents/views.py:2894
|
||||
msgid "Bundle is already being processed."
|
||||
msgstr ""
|
||||
|
||||
#: documents/views.py:2950
|
||||
#: documents/views.py:2951
|
||||
msgid "The share link bundle is still being prepared. Please try again later."
|
||||
msgstr ""
|
||||
|
||||
#: documents/views.py:2960
|
||||
#: documents/views.py:2961
|
||||
msgid "The share link bundle is unavailable."
|
||||
msgstr ""
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import dataclasses
|
||||
import email.contentmanager
|
||||
import random
|
||||
import time
|
||||
import uuid
|
||||
from collections import namedtuple
|
||||
@@ -148,11 +147,7 @@ class BogusMailBox(AbstractContextManager):
|
||||
|
||||
if "TO" in criteria:
|
||||
to_ = criteria[criteria.index("TO") + 1].strip('"')
|
||||
msg = []
|
||||
for m in self.messages:
|
||||
for to_addrs in m.to:
|
||||
if to_ in to_addrs:
|
||||
msg.append(m)
|
||||
msg = filter(lambda m: any(to_ in to_addr for to_addr in m.to), msg)
|
||||
|
||||
if "UNFLAGGED" in criteria:
|
||||
msg = filter(lambda m: not m.flagged, msg)
|
||||
@@ -204,7 +199,7 @@ def fake_magic_from_buffer(buffer, *, mime=False):
|
||||
|
||||
class MessageBuilder:
|
||||
def __init__(self) -> None:
|
||||
self._used_uids = set()
|
||||
self._next_uid = 1
|
||||
|
||||
def create_message(
|
||||
self,
|
||||
@@ -257,10 +252,8 @@ class MessageBuilder:
|
||||
# TODO: Unsure how to add a uid to the actual EmailMessage. This hacks it in,
|
||||
# based on how imap_tools uses regex to extract it.
|
||||
# This should be a large enough pool
|
||||
uid = random.randint(1, 10000)
|
||||
while uid in self._used_uids:
|
||||
uid = random.randint(1, 10000)
|
||||
self._used_uids.add(uid)
|
||||
uid = self._next_uid
|
||||
self._next_uid += 1
|
||||
|
||||
imap_msg._raw_uid_data = f"UID {uid}".encode()
|
||||
|
||||
|
||||
166
uv.lock
generated
166
uv.lock
generated
@@ -1305,7 +1305,7 @@ name = "exceptiongroup"
|
||||
version = "1.3.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "typing-extensions", marker = "(python_full_version < '3.13' and platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or (python_full_version < '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.12' and platform_machine == 'x86_64' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'darwin')" },
|
||||
{ name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux')" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" }
|
||||
wheels = [
|
||||
@@ -1542,83 +1542,83 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "granian"
|
||||
version = "2.6.1"
|
||||
version = "2.7.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "click", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/aa/22/93016f4f9e9115ba981f51fc17c7c369a34772f11a93043320a2a3d5c6ea/granian-2.6.1.tar.gz", hash = "sha256:d209065b12f18b6d7e78f1c16ff9444e5367dddeb41e3225c2cf024762740590", size = 115480, upload-time = "2026-01-07T11:08:55.927Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/43/75/bdea4ab49a02772a3007e667284764081d401169e96d0270d95509e3e240/granian-2.7.0.tar.gz", hash = "sha256:bee8e8a81a259e6f08613c973062df9db5f8451b521bb0259ed8f27d3e2bab23", size = 127963, upload-time = "2026-02-02T11:39:57.525Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/54/3f/9a78d70beaa2dafc54c2147dd03ce1b75a97d12b61e9319eacb6ad536e30/granian-2.6.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b8a9f6006142ed64082ec726a9de40f4eb2ebe65f842c199f254b253362b8ab4", size = 3073952, upload-time = "2026-01-07T11:06:51.277Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dd/40/52e01a382d58ba416d12e165a2ac1e3270367267ced12ddfe1dd1fb03b65/granian-2.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:811a8eec0f96c7a1c9f991a2922f7c21561ecb2c52263666e14474aeea37ff6b", size = 2827739, upload-time = "2026-01-07T11:06:52.637Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7f/65/439a866076b378611eeebc3ecb285cc606aad4046b3f3b26035cc82d8c8b/granian-2.6.1-cp310-cp310-manylinux_2_24_armv7l.whl", hash = "sha256:3b78caa06c44d73551038aba9918d03d59f4d37e2bf39623e5572800d110e121", size = 3327232, upload-time = "2026-01-07T11:06:54.813Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/7b/e9611920b7a4c3fe4881fa204322eb3944decd57ce3b38db2a3da00bb876/granian-2.6.1-cp310-cp310-manylinux_2_24_i686.whl", hash = "sha256:7cce9865bab5c2ca29d80224caad390b15c385ec23903bf0781f4cc6cee006c6", size = 3140534, upload-time = "2026-01-07T11:06:56.074Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/46/a7/3840c2a9ed7fcf1c606dc8796aa3c4e43bcc4169ba04d2f840d576d8a238/granian-2.6.1-cp310-cp310-manylinux_2_24_x86_64.whl", hash = "sha256:b6df9ffdcbd85e4151661e878127186a6a54a8ba4048cf5f883d9093ae628b99", size = 3372279, upload-time = "2026-01-07T11:06:57.483Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/e8/4b78ed5fab45e83c69f6e1ea8805dbc638e7694d7946e97495c507026e6b/granian-2.6.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:66e5e25bc901c4bd54ab0f6449318c678e49ef6dcfd4fd62f15188680ed9a24d", size = 3239371, upload-time = "2026-01-07T11:06:59.49Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/76/7d/a9e70763e9c99158edcdaca74c4b4fd4d57a46d2ea39a0ef32df4e8262f3/granian-2.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5429fb374accfa48f6c6459b3e0278f3ad58bba701127b62163086b70c076d11", size = 3309145, upload-time = "2026-01-07T11:07:01.166Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/95/07/c899ba39d1be5810e25981d7c72a53437e2d099393fad9c8e6c273690f05/granian-2.6.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:8276b24da71771bc282057f9d144452695d54a99522adcf3a02175e613929d09", size = 3492752, upload-time = "2026-01-07T11:07:03.018Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1c/99/590ad3ad2f97c0e2f32558f0cf630ab72aa7c71f94e865084f3fb89b7cc5/granian-2.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6a3248c965f6ca71dca1285c7f5e16e4209a779a218fc2f64fe2f6cbe5ace709", size = 3498818, upload-time = "2026-01-07T11:07:04.536Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/66/a3/12e30c4a16761f6db3cff71a93351678dca535c6348d3c1f65f6461c8848/granian-2.6.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:66cb53293a721bf2da651150cb5ba501d5536c3bec284dcbcb10a9f044a4b23e", size = 3073572, upload-time = "2026-01-07T11:07:07.447Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/11/c0/0582a42e5b3e3c9e34eb9060585ed6cd11807852d645c19a0a79953be571/granian-2.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fef1c4b75d3827101a103b134abf43076703b6143b788f022e821dc4180b602", size = 2827569, upload-time = "2026-01-07T11:07:08.964Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/db/b8/306dad81288330c5c1043434ac57a246a7cd3a70cd5877570efcd7888879/granian-2.6.1-cp311-cp311-manylinux_2_24_armv7l.whl", hash = "sha256:2375c346cafd2afd944a8b014f7dd882b416161ffe321c126d6d87984499396c", size = 3326925, upload-time = "2026-01-07T11:07:10.288Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ad/bb/f76654e4e5679d000335101922653c809adacaa675f861646aef95e9673c/granian-2.6.1-cp311-cp311-manylinux_2_24_i686.whl", hash = "sha256:6c0e9367956c1cdd23b41d571159e59b5530c8f44ff4c340fe69065ffd1bfe70", size = 3140557, upload-time = "2026-01-07T11:07:11.764Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1c/0d/2e6ab1ce28fbb45f8e747d33db06ea870c1eee580c584592a8ceb49c0a59/granian-2.6.1-cp311-cp311-manylinux_2_24_x86_64.whl", hash = "sha256:4eacfe0bf355a88486933e6f846c2ecc0c2b0cf020a989750765294da4216b0c", size = 3372055, upload-time = "2026-01-07T11:07:13.584Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/05/9f104225ef0ceef6770e12d476077656c7930cde84474797c4a9807a4d3d/granian-2.6.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:c6ac45432028c7799a083917cda567e377cf42dbcad45c24b66dd03b72b1e1d6", size = 3239306, upload-time = "2026-01-07T11:07:15.01Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/ec/73ead13fe326ac548fda5f85f471e16015629672e8acc3d4ccc07e9b313a/granian-2.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aaac0304c7c68e6b798d15dd96a7b6ae477ab89d519c398d470da460f7ddda0", size = 3309025, upload-time = "2026-01-07T11:07:16.445Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/fb/0f474c6d437464d440924f3261295c046365dc9514cdd898d152b5a6c0bd/granian-2.6.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:a356685207987c07fb19f76a04d7bac6da0322584ede37adb1af7a607f8c8e35", size = 3492393, upload-time = "2026-01-07T11:07:18.202Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/30/92/dbd3793e3b02d0a09422dacd456d739039eba4147d2c716e601f87287fde/granian-2.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c2928057de92ef90c2d87e3de5e34af4e50d746c5adfb035a6bbef490ec465af", size = 3498644, upload-time = "2026-01-07T11:07:19.943Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/50/d1/9d191ea0b4f01a0d2437600b32a025e687189bae072878ec161f358eb465/granian-2.6.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:801bcf7efc3fdd12a08016ed94b1a386480c9a5185eb8e017fd83db1b2d210b4", size = 3070339, upload-time = "2026-01-07T11:07:22.618Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c3/1e/be0ba55a2b21aeadeb8774721964740130fdd3dd7337d8a5ec130a0c48c0/granian-2.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:853fb869a50d742576bb4f974f321242a71a4d8eed918939397b317ab32c6a2d", size = 2819049, upload-time = "2026-01-07T11:07:23.877Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/c7/d8adb472dc71b212281a82d3ea00858809f2844a79b45e63bbb3a09921b7/granian-2.6.1-cp312-cp312-manylinux_2_24_armv7l.whl", hash = "sha256:327a6090496c1deebd9e315f973bdbfc5c927e5574588bba918bfe2127bbd578", size = 3322325, upload-time = "2026-01-07T11:07:25.304Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/de/2f/c3ce9e4f19163f35c5c57c45af2ad353abcc6091a44625caec56e065ca4a/granian-2.6.1-cp312-cp312-manylinux_2_24_i686.whl", hash = "sha256:4c91f0eefc34d809773762a9b81c1c48e20ff74c0f1be876d1132d82c0f74609", size = 3136460, upload-time = "2026-01-07T11:07:26.682Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/87/91b57eb5407a12bfe779acfa3fbb2be329aec14e6d88acf293fe910c19e5/granian-2.6.1-cp312-cp312-manylinux_2_24_x86_64.whl", hash = "sha256:c5754de57b56597d5998b7bb40aa9d0dc4e1dbeb5aea3309945126ed71b41c6d", size = 3386850, upload-time = "2026-01-07T11:07:27.989Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/43/b61a6f3bfc2f35e504e42789776a269cbdc0cdafdb10597bd6534e93ba3d/granian-2.6.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:e849d6467ebe77d0a75eb4175f7cc06b1150dbfce0259932a4270c765b4de6c4", size = 3240693, upload-time = "2026-01-07T11:07:29.52Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/1d/c40bd8dd99b855190d67127e0610f082cfbc7898dbd41f1ade015c2041f7/granian-2.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5a265867203e30d3c54d9d99783346040681ba2aaec70fcbe63de0e295e7882f", size = 3312703, upload-time = "2026-01-07T11:07:31.128Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a1/ca/589c042afc3287b36dfeed6df56074cc831a94e5217bcbd7c1af20812fe2/granian-2.6.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:03f0a1505e7862183203d7d7c1e2b29349bd63a18858ced49aec4d7aadb98fc8", size = 3483737, upload-time = "2026-01-07T11:07:32.726Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/51/72eb037bac01db9623fa5fb128739bfb5679fb90e6da2645c5a3d8a4168d/granian-2.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:703ed57ba134ab16f15d49f7d644329db1cb0f7f8114ec3f08fb8039850e308a", size = 3514745, upload-time = "2026-01-07T11:07:34.706Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/3f/40975a573dc9a80382121694d71379fffab568012f411038043ed454cdd0/granian-2.6.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:8af4c75ffa2c8c77a3f5558b5ff71a9d97a57e08387ef954f560f2412a0b3db9", size = 3069408, upload-time = "2026-01-07T11:07:38.4Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a0/2f/64b0d58344eedfb7f27c48d7a40e840cd714a8898bcaf3289cecad02f030/granian-2.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b473cdaf3f19ddc16e511b2c2d1e98b9ce7c13fd105e9095ecb268a6a5286a32", size = 2818749, upload-time = "2026-01-07T11:07:39.946Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/1e/e33ae736adbef0633307f13092490021688df33885c9a320b50b83dbc076/granian-2.6.1-cp313-cp313-manylinux_2_24_armv7l.whl", hash = "sha256:b8ca2ac7261bcb8e57a35f8e7202aa891807420d51e3e61fd0913088d379e0fd", size = 3321824, upload-time = "2026-01-07T11:07:41.379Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/59/3c/ef25189d251d14c81b11514e8d0a9a3cd8f9a467df423fb14362d95c7d6a/granian-2.6.1-cp313-cp313-manylinux_2_24_i686.whl", hash = "sha256:1eca9cfcf05dc54ffb21b14797ed7718707f7d26e7c5274722212e491eb8a4a6", size = 3136201, upload-time = "2026-01-07T11:07:42.703Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4f/31/a5621235cd26e7cb78d57ce9a3baeb837885dc7ebf5c2c907f2bf319002a/granian-2.6.1-cp313-cp313-manylinux_2_24_x86_64.whl", hash = "sha256:7722373629ab423835eb25015223f59788aa077036ea9ac3a4bddce43b0eb9c9", size = 3386378, upload-time = "2026-01-07T11:07:44.289Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/ce/134a494be1c4d8a602cc03298e3f961d66e4a2b97c974403ffce50c09965/granian-2.6.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:7de143cf76934cfc63cc8cf296af69f750e4e3799ec0700d5da8254202aad12a", size = 3240009, upload-time = "2026-01-07T11:07:46.18Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/f5/48f2fff5effee50dce30d726874936d582e83a690ccdc87cc2ca15b9accf/granian-2.6.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7b0e7d6ee92962544a16039e1d5f36c5f43cd0a538541e7a3e5337147c701539", size = 3312533, upload-time = "2026-01-07T11:07:48.176Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/90/0590100351bf2b99ff95b0ac34c8c14f61c13f8417c16b93860fe8b1619a/granian-2.6.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:d9ca799472c72cb378192d49004abe98e387c1719378b01d7dc85ab293fa680e", size = 3482213, upload-time = "2026-01-07T11:07:49.471Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/12/0f8f1367ebc4bbd3e17bed842ad21cf9691d828b8c89029dfcf9f1448fcd/granian-2.6.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:616c3213d2ffe638e49a3578185cbe9d0009635949e7ac083275942a2cbbee0c", size = 3513942, upload-time = "2026-01-07T11:07:51.011Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/64/ec/49ada0ed6861db9ba127c26058cc1a8451af891922cb2673b9e88e847cf6/granian-2.6.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:fd3151933d867352b6e240b0e57d97b96cd6e0fa010c9e3503f4cb83e6815f6b", size = 3030683, upload-time = "2026-01-07T11:07:53.803Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/71/7e/4c8b9eb66555e95798b8cf5e18be910519daf6cdf3c8cac90333ffe8e031/granian-2.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:1d11bb744878fa73197a1af99b324b6ccd9368f2c73b82a6c4cfcc696c14dcde", size = 2772412, upload-time = "2026-01-07T11:07:55.605Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/15/20/013495365505da26d45721b670114855a8969f1d3784ecdc60a124330075/granian-2.6.1-cp313-cp313t-manylinux_2_24_armv7l.whl", hash = "sha256:fe1103a49cdb75bbac47005f0a70353aa575b6ac052e9dc932b39b644358c43a", size = 3317657, upload-time = "2026-01-07T11:07:57.472Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/63/1e/4070bb26876c12b1304da9d27d6309e3dc53bbdf5928d732ba9a87fe561a/granian-2.6.1-cp313-cp313t-manylinux_2_24_i686.whl", hash = "sha256:e965d85634e02fbf97960e4ba8ef5290d37c094ad089a89fb19b68958888297a", size = 2969120, upload-time = "2026-01-07T11:07:58.778Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/08/3e/dd29f04737a554acd3309372c8d2c6901a99cac3f9fcdee73bbe5a9bf0aa/granian-2.6.1-cp313-cp313t-manylinux_2_24_x86_64.whl", hash = "sha256:570c509cf608d77f0a32d66a51c9e4e9aba064a0a388776c41398392cc5e58d3", size = 3263555, upload-time = "2026-01-07T11:08:00.133Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f2/b6/ad4c439a8a20bebbed5066d051150ed0ad32160aee89c0cbdcf49fb1b092/granian-2.6.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:3a105aa2f9b6dba037f81bc36b49a61c1648b60a82020e5c34394ce0940cdaef", size = 3112547, upload-time = "2026-01-07T11:08:01.453Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b8/92/58b5c7ca48540232034f2fa8be839e2ec997ec723489ff704a02c5a19945/granian-2.6.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:a4d90804f8d7c61e01741d9ccd400257763670f0e52a3cb59829fe2465b2b4a1", size = 3304759, upload-time = "2026-01-07T11:08:02.967Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a6/e0/fdcf7d91937acf6bfbe6c459820ffc847840d0375624daf1fcd3e2acea50/granian-2.6.1-cp313-cp313t-musllinux_1_1_armv7l.whl", hash = "sha256:647a818814c6c2a4bcd5757509596d9d5a0e10fbe96d52acb4c992f56134ae27", size = 3479270, upload-time = "2026-01-07T11:08:04.337Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a8/f3/d40f9e8699c9bb6ebf923e6baee6c9f90b9ff997c075173f67ffdee9aefc/granian-2.6.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:20639cec0106f047147c6b873bce6aaa275fb71456810ce3c0124f35b149e408", size = 3509699, upload-time = "2026-01-07T11:08:06.215Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/55/49/2a3f993a2ee5e9876811a9d5fc41f47582da9a6873b02f214271e68f539b/granian-2.6.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:021ed3abb6805be450136f73ddc15283fe83714135d4481f5c3363c12109ef43", size = 3067322, upload-time = "2026-01-07T11:08:09.136Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/04/86/ad139301cc61c97ef8d8ec9a96e796350f108fa434c088ea14faa5e43e81/granian-2.6.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:df10c8fb56e00fd51aaccb639a34d067ec43cfbf1241d57d93ac67f0dbebd33d", size = 2818936, upload-time = "2026-01-07T11:08:11.091Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/a7/5223956aed93671dd83094cb4883e6431701e07a7641382b8cde6f4328d3/granian-2.6.1-cp314-cp314-manylinux_2_24_armv7l.whl", hash = "sha256:8f0eaacf8c6f1be67b38022d9839b35040e5a23df6117a08be5fc661ca404200", size = 3319401, upload-time = "2026-01-07T11:08:12.817Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/ab/cbd1e9d9d0419b8d55043c821ea5b431d9cd43b35928c4c9f2034f9abf34/granian-2.6.1-cp314-cp314-manylinux_2_24_i686.whl", hash = "sha256:2f987bdb76a78a78dee56e9b4e44862471dcdbc3549152e35c64b19ba09c4dd0", size = 3132180, upload-time = "2026-01-07T11:08:14.215Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c4/cb/4e4aa44e696c10a701c1366b9882561d6ba5caa2e3c00e3716a57da40c18/granian-2.6.1-cp314-cp314-manylinux_2_24_x86_64.whl", hash = "sha256:ffb814d4f9df8f04759c4c0fc1c903b607d363496e9d5fcb29596ab7f82bfae0", size = 3383751, upload-time = "2026-01-07T11:08:15.775Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/72/10/f37497b41c8f2a49fbc7f17dafdb979c84084ea127e27bfe92fe28dcd229/granian-2.6.1-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:450d45418116766cce8d2ef138eeea59e74d15da3759ae87af9732a8896ca3ef", size = 3239746, upload-time = "2026-01-07T11:08:17.75Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/33/c5/842eb481cba2dc49374ffabb5a0ffeb8e61017da10a5084764369b1ac452/granian-2.6.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:6e85ca2e6c83b5d70801068fbdca8acf733e12e35cee782ee94554f417971aff", size = 3311896, upload-time = "2026-01-07T11:08:19.3Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9b/6e/db253ae3ab220a394ffcd0cc7c3d7752ba20757bf37270af8f7b6813cd59/granian-2.6.1-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:aa4d187fd52e2873ef714c1d8aee097d88c141ac850f5bf04dbd37b513a3d67b", size = 3480487, upload-time = "2026-01-07T11:08:21.037Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4d/1d/0a78570d2c1fcccd5df93ff18e2cf00a708a75b9e935c038d973554f5f87/granian-2.6.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:53e2380f2b522082d5587c5dc05ad80ae0463dc751655d7cfb19d7ed3dbc48d6", size = 3509926, upload-time = "2026-01-07T11:08:23.067Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/ed/f1130abe166f5a9bb4c032bae94b4cbc1b04179703a98906493e3b329974/granian-2.6.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:997a2a45b14e583504b920abfbcaf605d3ee9a4e7e0d3599b41f2091b388bd81", size = 3029240, upload-time = "2026-01-07T11:08:26.17Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cb/40/f5ec53011f3d91c14d6be604b02df930069cdb2dddbeb6aabda0ce265fc4/granian-2.6.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ee9ac64109dd54ff6773a82e9dc302843079edd7d9fcca4a72d9286918ae2c03", size = 2771824, upload-time = "2026-01-07T11:08:28.267Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/82/98/a75d6d3b47d0a44e7af0ee7733ea295c01fb0070a636cb93290a6075da35/granian-2.6.1-cp314-cp314t-manylinux_2_24_armv7l.whl", hash = "sha256:4a76bbfcfea1b5114bda720fd9dbc11e584e513c351c2ec399cd3e2dcb1f8fd2", size = 3313753, upload-time = "2026-01-07T11:08:30.556Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/33/c37f0c5dd649ff97bed0b70d36b8f02fd488d926c30b272f6d09618ccde7/granian-2.6.1-cp314-cp314t-manylinux_2_24_i686.whl", hash = "sha256:5f3c6bab1e443b2277923b0f836141584b0f56b6db74904d4436d6031118a3fa", size = 2966697, upload-time = "2026-01-07T11:08:32.52Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/71/92dd02f5b53d28478906fc374a91337e65efeae04be9f392bdcb4a313906/granian-2.6.1-cp314-cp314t-manylinux_2_24_x86_64.whl", hash = "sha256:bbc0cac57a8719ae0d96ee250c539872becdd941795ab4590dca49cba240c809", size = 3261860, upload-time = "2026-01-07T11:08:33.946Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/ca/abab95b8b3541a5e540a3b89a0b2805cdd1b064c9083cd4ada71c7d1ac76/granian-2.6.1-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:dac537c663ec9b2f1f8bd3c0c6fd0c3f749d91b4f35f918df9ea1357528360dd", size = 3111002, upload-time = "2026-01-07T11:08:35.355Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8e/3f/b872cca9f39b98aaeccf95f4c5b10fca21562c623bb3f7115de4506e9e1b/granian-2.6.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:9d6a3ac7234708fb2cce1fec6dd0941458c45e26b98b377b4a08d715fe9a9bd2", size = 3303389, upload-time = "2026-01-07T11:08:36.767Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/39/2224e99dbd2c61aed585550f5a624b653a5c90b6ea7821c7a1e1c187d4cb/granian-2.6.1-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:75bb8af9306aaceade90ec91cd9f5d4bdb5537b221b31e5a358b1eadb57279ad", size = 3475366, upload-time = "2026-01-07T11:08:38.29Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/72/01/5077972a90cee7ef28c03a02b35c15d111416b25ccf8f8fda8df9972b6ce/granian-2.6.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:b7c1a0f24bd079bab8f6550fbb277e09c187dd5fe15e24ea3f2ace78453e5b7b", size = 3507600, upload-time = "2026-01-07T11:08:39.691Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/eb/8965002085f93cc1a9887414f43aed0d23025a7d4a4965c27d23d2d9c3c6/granian-2.6.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3c14a8708066e396a56673cc23acff8174fff613c433f1da0746c903341e4c22", size = 3077208, upload-time = "2026-01-07T11:08:43.752Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/8b/5e08ad10d2cfde71cc438bc3887f168f7845e195f67656d726c36bfbfa0f/granian-2.6.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:4e3041bc47b6add143e3944c5bb0d14cd94b5b9722812319d73c24d24816b039", size = 2813151, upload-time = "2026-01-07T11:08:45.094Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/82/1b/dcb6c44a059b0a6571da1d4abe329a30c7e40c49e7a108e963a7b8c61a4c/granian-2.6.1-pp311-pypy311_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:9635f707119a8bdc22ebfd70b50944a72a7e791905b544ac64938f4e87a8060f", size = 3357183, upload-time = "2026-01-07T11:08:46.952Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a3/18/b8e976b0bec47edb5d469a3c4e44d8cad3383ffb6b8202eba35249a23845/granian-2.6.1-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:566941333bed75345583c4ff0a72783812c68c5f87df3f9974e847bfcfb78d3e", size = 3233117, upload-time = "2026-01-07T11:08:48.354Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/e7/939eb934e4de6faa3b60448bf233610aec39e6186b0da212179cedce3baf/granian-2.6.1-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4a85fa1f42fd54feda9c70a7ef349259da6c5d81f9d5918633c677b7be8238ba", size = 3306125, upload-time = "2026-01-07T11:08:49.848Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0f/30/b99516161444c2528b9ab9e2633060c226f7f6ddf2d24464fb0d3b3f86ce/granian-2.6.1-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:7787cfc9b79a298289ff728d26937eac95482fcb468ef912d9178e707889c276", size = 3485546, upload-time = "2026-01-07T11:08:51.479Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/33/e8/30b0db6729767eac2249856b563a9f71a04b7eb8ce44321b7472834dcb19/granian-2.6.1-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d3fd613953ea080f911d66bbebd46c8c4b3e66fbb293f78b13c86fb3ec0202ae", size = 3497427, upload-time = "2026-01-07T11:08:53.065Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f2/c5/0c4469c7eb6d42e8ad3706e8f918fdbed1e79f4b2061749ed89568b84746/granian-2.7.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:442ced696212b1964579a89bb40bd504822ad87326f95488d8117fff0c21cba9", size = 4581109, upload-time = "2026-02-02T11:37:55.991Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4f/dd/7954ec98c82e0b3e954a9b5263fcd77258b502bf0e021f2c5e8a84da87b6/granian-2.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c07ebe3f8b7284264475ac4ee58839c0f3f1d3f292b40f8ea50363bf8277ef5", size = 4210308, upload-time = "2026-02-02T11:37:57.987Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/70/e8/62a48b7f0092f465cd24c71e7c990c55554095c6cad5c97975009e5ef223/granian-2.7.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c7817854489e944e8d0cdfe4695b5605a3b042ae169fc43505c6f6c5b05d8c3a", size = 5129886, upload-time = "2026-02-02T11:37:59.189Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/09/28/43d8fcfad80ddb38a9311538de93f5225a5e28db8b7fc31146199f6d8cdd/granian-2.7.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d9f4dc1e575d15ea8c0f433dc4190efa69d259bf59a136f009b28de223ef177", size = 4576645, upload-time = "2026-02-02T11:38:00.456Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a4/66/5409d0049b0878775e50b8f4e1bc71cb1780f895632e5c7d08c558b3fec2/granian-2.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:960908d8a2fb3cc5472fdd3de52a342d6fd76a12146e2447b38706bb01c31f76", size = 4975287, upload-time = "2026-02-02T11:38:02.136Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/58/eb/a6c1992d2902db13cca3637758b5e1064e70ee2342f6d8b66b80ba805f1f/granian-2.7.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:b974e28265e2001539446f456973e6dc8aaeee8c61d31233d0a55d34c2f5482c", size = 4827993, upload-time = "2026-02-02T11:38:05.036Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/8a/0dbafb77124c3534048879fd7466f880fcff3606e63ebdac94633a3694c3/granian-2.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a83a107ce9bcb9609fa3e808f39e3d940cd3933f6c54d7b6c37053767a862b9a", size = 4939190, upload-time = "2026-02-02T11:38:06.426Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8c/c5/ec1910ca216429e091dd39d07d9053592a38a0e75d25bdcf79cf77fc8d79/granian-2.7.0-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:aa4876903c4c80b26c23231ff71437b1c7489504c4e1c18642ab6898926e2e38", size = 5292735, upload-time = "2026-02-02T11:38:09.455Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a0/0f/778a5cb91502f1d813d135b0a0c99a6b7d46cdb00af0b9d7e3b8c1b557c5/granian-2.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:be9638b47c9554104eebc99ef9313d016f37ee6e37e03d1d65f1fa495880856d", size = 5087382, upload-time = "2026-02-02T11:38:11.272Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/28/a3ee3f2220c0b9045f8caa2a2cb7484618961b7500f88594349a7889d391/granian-2.7.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:e76afb483d7f42a0b911bdb447d282f70ad7a96caabd4c99cdc300117c5f8977", size = 4580966, upload-time = "2026-02-02T11:38:14.077Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1b/60/b53da9c255f6853a5516d0f8a3e7325c24123f0f7e77856558c49810f4ce/granian-2.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:628523302274f95ca967f295a9aa7bc4ade5e1eced42afc60d06dfe20f2da07a", size = 4210344, upload-time = "2026-02-02T11:38:15.34Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/bb/c3380106565bc99edfb90baafa1a8081a4334709ce0200d207ddda36275e/granian-2.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a62560b64a17e1cbae61038285d5fa8a32613ada9a46f05047dc607ea7d38f23", size = 5130258, upload-time = "2026-02-02T11:38:17.175Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/8f/2c3348d6d33807e3b818ac07366b5251e811ce2548fbe82e0b55982d8a13/granian-2.7.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47b8e0e9497d24466d6511443cc18f22f18405aab5a7e2fece1dd38206af88c4", size = 4576496, upload-time = "2026-02-02T11:38:18.577Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f6/71/d1d146170a23f3523d8629b47f849b30ba0d513eb519188ce5d7bfd1b916/granian-2.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc6039c61a07b2d36462c487b66b131ae3fd862bdc8fb81d6e5c206c1a2b683c", size = 4975062, upload-time = "2026-02-02T11:38:20.084Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/16/f9/f3acbf8c41cd10ff81109bd9078d3228f23e52bab8673763c65739a87e30/granian-2.7.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:f3b0442beb11b035ee09959726f44b3730d0b55688110defd1d9a9a6c7486955", size = 4827755, upload-time = "2026-02-02T11:38:21.817Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/f8/503135b89539feea2be495b47858c22409ba77ffcb71920ae0727c674189/granian-2.7.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:741d0b58a5133cc5902b3129a8a4c55143f0f8769a80e7aa80caadc64c9f1d8b", size = 4939033, upload-time = "2026-02-02T11:38:23.033Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/99/90/aaabe2c1162d07a6af55532b6f616199aa237805ef1d732fa78d9883d217/granian-2.7.0-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:02a6fe6a19f290b70bc23feeb3809511becdaff2263b0469f02c28772af97652", size = 5292980, upload-time = "2026-02-02T11:38:24.823Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/aa/d1eb7342676893ab0ec1e66cceca4450bec3f29c488db2a92af5b4211d4d/granian-2.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8239b1a661271428c3e358e4bdcaaaf877a432cc593e93fc6b5a612ae521b06a", size = 5087230, upload-time = "2026-02-02T11:38:26.09Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/1a/b6d7840bfd9cd9bed627b138e6e8e49d1961997adba30ee39ad75d07ed58/granian-2.7.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:d9c42562dcbf52848d0a9d0db58f8f2e790586eb0c363b8ad1b30fe0bd362117", size = 4572728, upload-time = "2026-02-02T11:38:30.143Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/15/93/f8f7224d9eaaaf4dbf493035a85287fa2e27c17e5f7aacc01821d8aa66b4/granian-2.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3421bd5c90430073e1f3f88fc63bc8d0a8ee547a9a5c06d577a281f384160bd", size = 4195034, upload-time = "2026-02-02T11:38:32.007Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/db/66843a35e1b6345da2a1c71839fb9aa7eb0f17d380fbf4cb5c7e06eb6f85/granian-2.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b8057dc81772932e208f2327b5e347459eb78896118e27af9845801e267cec5", size = 5123768, upload-time = "2026-02-02T11:38:33.449Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/ce/631c5c1f7a4e6b8c98ec857b3e6795fe64e474b6f48df388ac701a21f3fe/granian-2.7.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e5e70f438b1a4787d76566770e98bf7732407efa02802f38f10c960247107d7", size = 4562424, upload-time = "2026-02-02T11:38:34.815Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/28/41/19bdfa3719e22c4dcf6fa1a53323551a37aa58a4ca7a768db6a0ba714ab0/granian-2.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:213dd224a47c7bfcbb91718c7eeb56d6067825a28dcae50f537964e2dafb729a", size = 5006002, upload-time = "2026-02-02T11:38:36.76Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/5b/3b40f489e2449eb58df93ad38f42d1a6c2910502a4bc8017c047e16d637c/granian-2.7.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:bb5be27c0265268d43bab9a878ac27a20b4288843ffc9fda1009b8226673f629", size = 4825073, upload-time = "2026-02-02T11:38:37.998Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/04/92/b6de6f8c4146409efb58aee75277b810d54de03a1687d33f1f3f1feb3395/granian-2.7.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a6ff95aede82903c06eb560a32b10e9235fdafc4568c8fe7dcac28d62be5ffa2", size = 4928628, upload-time = "2026-02-02T11:38:39.481Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/39/21/d8a191dcfbf8422b868ab847829670075ba3e4325611e0a9fd2dc909a142/granian-2.7.0-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e44f0c1676b27582df26d47cf466fedebd72f520edc2025f125c83ff58af77f9", size = 5282898, upload-time = "2026-02-02T11:38:40.815Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/46/2746f1a4f0f093576fb64b63c3f022f254c6d2c4cc66d37dd881608397ce/granian-2.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9241b72f95ceb57e2bbce55e0f61c250c1c02e9d2f8531b027dd3dc204209fdd", size = 5118453, upload-time = "2026-02-02T11:38:42.716Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/df/b68626242fb4913df0968ee5662f5a394857b3d6fc4ee17c94be69664491/granian-2.7.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:bc61451791c8963232e4921c6805e7c2e366635e1e658267b1854889116ff6d7", size = 4572200, upload-time = "2026-02-02T11:38:46.194Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c0/15/2fe28bca0751d9dc46e5c7e9e4b0c4fd1a55e3e8ba062f28292322ee160b/granian-2.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e274a0d6a01c475b9135212106ca5b69f5ec2f67f4ca6ce812d185d80255cdf5", size = 4195415, upload-time = "2026-02-02T11:38:47.78Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/07/2a/d4dc40e58a55835cac5296f5090cc3ce2d43332ad486bbf78b3a00e46199/granian-2.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:34bd28075adae3453c596ee20089e0288379e3fdf1cec8bafff89bb175ea0eb4", size = 5122981, upload-time = "2026-02-02T11:38:49.55Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bd/fe/8c79837df620dc0eca6a8b799505910cbba2d85d92ccc58d1c549f7027be/granian-2.7.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f526583b72cf9e6ca9a4849c781ed546f44005f0ad4b5c7eb1090e1ebec209bf", size = 4561440, upload-time = "2026-02-02T11:38:50.799Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4f/e7/d7abfaa9829ff50cddc27919bd3ce5a335402ebbbaa650e96fe579136674/granian-2.7.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ac07d5314e03e667210349dfc76124d69726731007c24716e21a2554cc15ca", size = 5005076, upload-time = "2026-02-02T11:38:52.157Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/45/108afaa0636c93b6a8ff12810787e4a1ea27fffe59f12ca0de7c784b119a/granian-2.7.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:f6812e342c41ca80e1b34fb6c9a7e51a4bbd14f59025bd1bb59d45a39e02b8d5", size = 4825142, upload-time = "2026-02-02T11:38:53.506Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/eb/cedf4675b1047490f819ce8bd1ee1ea74b6c772ae9d9dd1c117ae690a3eb/granian-2.7.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a4099ba59885123405699a5313757556ff106f90336dccdf4ceda76f32657d0", size = 4927830, upload-time = "2026-02-02T11:38:54.92Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/b5/2d7a2e03ba29a6915ad41502e2870899b9eb54861e3d06ad8470c5e70b41/granian-2.7.0-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:c487731fbae86808410e88c587eb4071213812c5f52570b7981bf07a1b84be25", size = 5282142, upload-time = "2026-02-02T11:38:56.445Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a9/e7/c851b2e2351727186b4bc4a35df832e2e97e4f77b8a93dfdb6daa098cf9e/granian-2.7.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ca4877ebf8873488ba72a299206621bd0c6febb8f091f3da62117c1fe344501f", size = 5117907, upload-time = "2026-02-02T11:38:57.852Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/2f/c9bcd4aa36d3092fe88a623e60aa89bd4ff16836803a633b8b454946a845/granian-2.7.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:e1df8e4669b4fb69b373b2ab40a10a8c511eeb41838d65adb375d1c0e4e7454c", size = 4493110, upload-time = "2026-02-02T11:39:01.294Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/b4/02d11870255920d35f8eab390e509d3688fe0018011bb606aa00057b778f/granian-2.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:6331ed9d3eb06cfba737dfb8efa3f0a8b4d4312a5af91c0a67bfbaa078b62eb4", size = 4122388, upload-time = "2026-02-02T11:39:02.509Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/50/dfad5a414a2e3e14c30cd0d54cef1dab4874a67c1e6f8b1124d9998ed8b2/granian-2.7.0-cp313-cp313t-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:093e1c277eddba00eaa94ca82ff7a9ab57b0554cd7013e5b2f3468635dbe520d", size = 4379344, upload-time = "2026-02-02T11:39:04.489Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6e/53/ef086af03ef31aa3c1dbff2da5928a9b5dd1f48d8ebee18dd6628951ae9e/granian-2.7.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8e8e317bdc9ca9905d0b20f665f8fe31080c7f13d90675439113932bb3272c24", size = 5069172, upload-time = "2026-02-02T11:39:05.757Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c3/57/117864ea46c6cbcbeff733a4da736e814b06d6634beeb201b9db176bd6be/granian-2.7.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:391e8589265178fd7f444b6711b6dda157a6b66059a15bf1033ffceeaf26918c", size = 4848246, upload-time = "2026-02-02T11:39:07.048Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/60/da/2d45b7b6638a77362228d6770a61fa2bc3feae6c52a80993c230f344b197/granian-2.7.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:49b6873f4a8ee7a1ea627ff98d67ecdd644cfc18aab475b2e15f651dbcbe4140", size = 4669023, upload-time = "2026-02-02T11:39:09.612Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/69/49e54eb6ed67ccf471c19d4c65f64197dd5a416d501620519e28ea92c82e/granian-2.7.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:39778147c7527de0bcda12cd9c38863d4e6a80d3a8a96ddeb6fe2d1342f337db", size = 4896002, upload-time = "2026-02-02T11:39:10.996Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/f1/a864a78029265d06a6fd61c760c8facf032be0d345deca5081718cbb006f/granian-2.7.0-cp313-cp313t-musllinux_1_1_armv7l.whl", hash = "sha256:8135d0a4574dc5a0acf3a815fc6cad5bbe9075ef86df2c091ec34fbd21639c1c", size = 5239945, upload-time = "2026-02-02T11:39:12.726Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/26/33/feef40e4570b771d815c1ddd1008ccc9c0e81ce5a015deded6788e919f18/granian-2.7.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:47df2d9e50f22fa820b34fd38ceeeedc0b97994fa164425fa30e746759db8a44", size = 5078968, upload-time = "2026-02-02T11:39:14.048Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b9/6a/b8d58474bbcbca450f030fd41b65c94ae0afb5e8f58c39fbea2df4efee2b/granian-2.7.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:23c6531b75c94c7b533812aed4f40dc93008c406cfa5629ec93397cd0f6770cb", size = 4569780, upload-time = "2026-02-02T11:39:16.671Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/dc/a8b11425ebdf6cb58e1084fdb7759d853ca7f0b00376e4bb66300322f5d3/granian-2.7.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e4939b86f2b7918202ce56cb01c2efe20a393c742d41640b444e82c8b444b614", size = 4195285, upload-time = "2026-02-02T11:39:18.596Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/b5/6cc0b94f997d93f4b1510b2d953f07a7f1d16a143d60b53e0e50b887fa12/granian-2.7.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38fa10adf3c4d50e31a08401e6701ee2488613d905bb316cad456e5ebad5aa81", size = 5121311, upload-time = "2026-02-02T11:39:20.092Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/f9/df3d862874cf4b233f97253bb78991ae4f31179a5581beaa41a2100e3bce/granian-2.7.0-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b366a9fd713a20321e668768b122b7b0140bfaeb3cb0557b6cb11dce827a4fb", size = 4557737, upload-time = "2026-02-02T11:39:21.992Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/7f/e3063368345f39188afe5baa1ab62fdd951097656cd83bec3964f91f6e66/granian-2.7.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a916413e0dcd5c6eaf7f7413a6d899f7ba53a988d08e3b3c7ab2e0b5fa687559", size = 5004108, upload-time = "2026-02-02T11:39:23.306Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/eb/892bcc0cfc44ed791795bab251e0b6ed767397182bac134d9f0fcecc552e/granian-2.7.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:e315adf24162294d35ca4bed66c8f66ac15a0696f2cb462e729122d148f6d958", size = 4823143, upload-time = "2026-02-02T11:39:24.696Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/e0/ff8528bf620b6da7833171f6d30bfe4b4b1d6e7d155b634bd17590e0c4b4/granian-2.7.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:486f8785e716f76f96534aaba25acd5dee1a8398725ffd2a55f0833689c75933", size = 4926328, upload-time = "2026-02-02T11:39:26.111Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/f7/fb0a761d39245295660703a42e9448f3c04ce1f26b2f62e044d179167880/granian-2.7.0-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:0e5e2c1c6ff1501e3675e5237096b90b767f506bb0ef88594310b7b9eaa95532", size = 5281190, upload-time = "2026-02-02T11:39:27.68Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/d8/860e7e96ea109c6db431c8284040d265758bded35f9ce2de05f3969d7c0c/granian-2.7.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:d4418b417f9c2162b4fa9ec41ec34ed3e8ed891463bb058873034222be53542f", size = 5117989, upload-time = "2026-02-02T11:39:29.008Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fb/9a/500ab01ae273870e8fc056956cc49716707b4a0e76fb2b5993258e1494f7/granian-2.7.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:b4367c088c00bdc38a8a495282070010914931edb4c488499f290c91018d9e80", size = 4492656, upload-time = "2026-02-02T11:39:31.614Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/26/86dc5a6fff60ee0cc38c2fcd1a0d4cebd52e6764a9f752a20458001ca57e/granian-2.7.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c8f3df224284ed1ff673f61de652337d7721100bf4cfc336b2047005b0edb2e0", size = 4122201, upload-time = "2026-02-02T11:39:33.162Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0f/60/887dc5a099135ff449adcdea9a2aa38f39673baf99de9acb78077b701432/granian-2.7.0-cp314-cp314t-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6682c08b0d82ad75f8e9d1571254630133e1563c49f0600c2e2dc26cec743ae7", size = 4377489, upload-time = "2026-02-02T11:39:34.532Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/6b/68c12f8c4c1f1c109bf55d66beeb37a817fd908af5d5d9b48afcbdc3e623/granian-2.7.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d6ccc3bdc2248775b6bd292d7d37a1bff79eb1aaf931f3a217ea9fb9a6fe7ca4", size = 5067294, upload-time = "2026-02-02T11:39:35.84Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/4f/be4f9c129f5f80f52654f257abe91f647defec020fa134b3600013b7219d/granian-2.7.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5431272a4d6f49a200aeb7b01010a3785b93b9bd8cd813d98ed29c8e9ba1c476", size = 4848356, upload-time = "2026-02-02T11:39:37.443Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d7/aa/f6efcfb435f370a6f3626bd5837465bfb71950f6b3cb3c74e54b176c72e2/granian-2.7.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:790b150255576775672f26dbcbd6eb05f70260dd661b91ce462f6f3846db9501", size = 4669022, upload-time = "2026-02-02T11:39:38.782Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/36/e86050c476046ef1f0aae0eb86d098fa787abfc8887a131c82baccc7565e/granian-2.7.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:ce9be999273c181e4b65efbbd82a5bc6f223f1db3463660514d1dc229c8ba760", size = 4895567, upload-time = "2026-02-02T11:39:40.144Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/5e/25283ff7fc12fcf42ae8a5687243119739cf4b0bf5ccb1c32d11d37987b1/granian-2.7.0-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:319b34f18ed3162354513acb5a9e8cee720ac166cd88fe05f0f057703eb47e4f", size = 5238652, upload-time = "2026-02-02T11:39:41.648Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/60/06148781120e086c7437aa9513198025ea1eb847cb2e244d5e2b9801782e/granian-2.7.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:b01bed8ad748840e7ab49373f642076f3bc459e39937a4ce11c5be03e67cdfd9", size = 5079018, upload-time = "2026-02-02T11:39:43.309Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0f/0b/39ebf1b791bbd4049239ecfee8f072321211879e5617a023921961be1d55/granian-2.7.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:24a1f6a894bea95ef0e603bebacbccd19c319c0da493bb4fde8b94b8629f3dc8", size = 4581648, upload-time = "2026-02-02T11:39:45.991Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/cd/4642192520478bba4cd547124d92607c958a0786864ebe378f3008b40048/granian-2.7.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:c2799497ac896cffea85512983c5d9eb4ae51ebacd7a9a5fd3d2ac81f1755fac", size = 4214257, upload-time = "2026-02-02T11:39:47.507Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/3f/615f93753c3b682219fe546196fc9eb3a045d846e57883312c97de4d785a/granian-2.7.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b66a15d004136e641706e0e5522b3509151e2027a0677cf4fa97d049d9ddfa41", size = 4979656, upload-time = "2026-02-02T11:39:48.838Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6e/68/1f2c36a964f93bfe8d6189431b8425acc591b735e47d8898b2e70c478398/granian-2.7.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:de5a6fa93d2138ba2372d20d97b87c1af75fa16a59a93841745326825c3ddf83", size = 4844448, upload-time = "2026-02-02T11:39:50.5Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/df/23/d8c83fe6a6656026c734c2ea771cbcdec6f0010e749f8ab0db1bfc8a3dfe/granian-2.7.0-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:aacda2ad46724490c4cd811b8dcadff2260603a3e95ca0d8c33552d791a3c6ac", size = 4930755, upload-time = "2026-02-02T11:39:51.866Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/e5/2a86ee18544185e72fc50b50985b6bfb4504f7835875d2636f573e100071/granian-2.7.0-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:7efb5ebdb308ed1685a80cded6ea51447753e8afe92c21fc3abf9a06a9eb5d2e", size = 5295728, upload-time = "2026-02-02T11:39:53.364Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/bd/0d47d17769601c56d876b289456f27799611571227b99ad300e221600bbd/granian-2.7.0-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ae96b75420d01d9a7dbe1bd84f1898b2b0ade6883db59bfe2b233d7c28c6b0df", size = 5095149, upload-time = "2026-02-02T11:39:54.767Z" },
|
||||
]
|
||||
|
||||
[package.optional-dependencies]
|
||||
@@ -2584,6 +2584,11 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/19/04f9b178c2d8a15b076c8b5140708fa6ffc5601fb6f1e975537072df5b2a/mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307", size = 6354, upload-time = "2021-02-05T18:55:29.583Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "microsoft-python-type-stubs"
|
||||
version = "0"
|
||||
source = { git = "https://github.com/microsoft/python-type-stubs.git#692c37c3969d22612b295ddf7e7af5907204a386" }
|
||||
|
||||
[[package]]
|
||||
name = "mkdocs"
|
||||
version = "1.6.1"
|
||||
@@ -2875,6 +2880,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/f4/4ce9a05ce5ded1de3ec1c1d96cf9f9504a04e54ce0ed55cfa38619a32b8d/mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247", size = 2471239, upload-time = "2025-12-15T05:03:07.248Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mypy-baseline"
|
||||
version = "0.7.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/65/2a/03288dab6d5f24d187ba0c223f6b3035d9a29de3dd31a3e105a0d4f1b5da/mypy_baseline-0.7.3.tar.gz", hash = "sha256:325f0695310eb8f5c0f10fa7af36ee1b3785a9d26b886a61c07b4a8eddb28d29", size = 319108, upload-time = "2025-05-30T08:43:00.629Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/1f/93/7780302b206a8e8e767ce763ef06159725d1323acbe55e46a1cd1ffd109d/mypy_baseline-0.7.3-py3-none-any.whl", hash = "sha256:bd7fa899e687d75af2e3f392a9d6d1790e65dae3d31fe12525cc14f26d866b74", size = 17868, upload-time = "2025-05-30T08:42:58.262Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mypy-extensions"
|
||||
version = "1.1.0"
|
||||
@@ -3293,7 +3307,9 @@ typing = [
|
||||
{ name = "django-stubs", extra = ["compatible-mypy"], marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "djangorestframework-stubs", extra = ["compatible-mypy"], marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "lxml-stubs", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "microsoft-python-type-stubs", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "mypy", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "mypy-baseline", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "types-bleach", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "types-colorama", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "types-dateparser", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
@@ -3338,7 +3354,7 @@ requires-dist = [
|
||||
{ name = "filelock", specifier = "~=3.20.0" },
|
||||
{ name = "flower", specifier = "~=2.0.1" },
|
||||
{ name = "gotenberg-client", specifier = "~=0.13.1" },
|
||||
{ name = "granian", extras = ["uvloop"], marker = "extra == 'webserver'", specifier = "~=2.6.0" },
|
||||
{ name = "granian", extras = ["uvloop"], marker = "extra == 'webserver'", specifier = "~=2.7.0" },
|
||||
{ name = "httpx-oauth", specifier = "~=0.16" },
|
||||
{ name = "imap-tools", specifier = "~=1.11.0" },
|
||||
{ name = "jinja2", specifier = "~=3.1.5" },
|
||||
@@ -3433,7 +3449,9 @@ typing = [
|
||||
{ name = "django-stubs", extras = ["compatible-mypy"] },
|
||||
{ name = "djangorestframework-stubs", extras = ["compatible-mypy"] },
|
||||
{ name = "lxml-stubs" },
|
||||
{ name = "microsoft-python-type-stubs", git = "https://github.com/microsoft/python-type-stubs.git" },
|
||||
{ name = "mypy" },
|
||||
{ name = "mypy-baseline" },
|
||||
{ name = "types-bleach" },
|
||||
{ name = "types-colorama" },
|
||||
{ name = "types-dateparser" },
|
||||
|
||||
Reference in New Issue
Block a user