Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2026-01-10 21:34:20 -06:00)

Compare commits: feature-tr ... feature-si (4 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | a3c5981865 |  |
|  | 0a61d1b52b |  |
|  | eb7f8dfe76 |  |
|  | 91c4d0edfd |  |
@@ -49,7 +49,6 @@ dependencies = [
   "gotenberg-client~=0.12.0",
   "httpx-oauth~=0.16",
   "imap-tools~=1.11.0",
-  "inotifyrecursive~=0.3",
   "jinja2~=3.1.5",
   "langdetect~=1.0.9",
   "nltk~=3.9.1",
@@ -69,7 +68,7 @@ dependencies = [
   "setproctitle~=1.3.4",
   "tika-client~=0.10.0",
   "tqdm~=4.67.1",
-  "watchdog~=6.0",
+  "watchfiles>=1.1.1",
   "whitenoise~=6.9",
   "whoosh-reloaded>=2.7.5",
   "zxing-cpp~=2.3.0",
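
This dependency swap trades watchdog's Observer/event-handler pair (and the inotifyrecursive fallback) for watchfiles, which exposes a single blocking generator over native notifications or polling. A minimal sketch of the new model, with an illustrative ./consume path:

# Illustrative sketch of the watchfiles API that replaces watchdog here.
from watchfiles import Change, watch

for changes in watch("./consume", recursive=True):
    # Each batch is a set of (Change, path) tuples.
    for change, path in changes:
        if change in (Change.added, Change.modified):
            print(f"candidate for consumption: {path}")
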
@@ -414,9 +414,6 @@ describe('WorkflowEditDialogComponent', () => {
      return newFilter
    }

    const correspondentAny = addFilterOfType(TriggerFilterType.CorrespondentAny)
    correspondentAny.get('values').setValue([11])

    const correspondentIs = addFilterOfType(TriggerFilterType.CorrespondentIs)
    correspondentIs.get('values').setValue(1)

@@ -426,18 +423,12 @@ describe('WorkflowEditDialogComponent', () => {
    const documentTypeIs = addFilterOfType(TriggerFilterType.DocumentTypeIs)
    documentTypeIs.get('values').setValue(1)

    const documentTypeAny = addFilterOfType(TriggerFilterType.DocumentTypeAny)
    documentTypeAny.get('values').setValue([12])

    const documentTypeNot = addFilterOfType(TriggerFilterType.DocumentTypeNot)
    documentTypeNot.get('values').setValue([1])

    const storagePathIs = addFilterOfType(TriggerFilterType.StoragePathIs)
    storagePathIs.get('values').setValue(1)

    const storagePathAny = addFilterOfType(TriggerFilterType.StoragePathAny)
    storagePathAny.get('values').setValue([13])

    const storagePathNot = addFilterOfType(TriggerFilterType.StoragePathNot)
    storagePathNot.get('values').setValue([1])

@@ -452,13 +443,10 @@ describe('WorkflowEditDialogComponent', () => {
    expect(formValues.triggers[0].filter_has_tags).toEqual([1])
    expect(formValues.triggers[0].filter_has_all_tags).toEqual([2, 3])
    expect(formValues.triggers[0].filter_has_not_tags).toEqual([4])
    expect(formValues.triggers[0].filter_has_any_correspondents).toEqual([11])
    expect(formValues.triggers[0].filter_has_correspondent).toEqual(1)
    expect(formValues.triggers[0].filter_has_not_correspondents).toEqual([1])
    expect(formValues.triggers[0].filter_has_any_document_types).toEqual([12])
    expect(formValues.triggers[0].filter_has_document_type).toEqual(1)
    expect(formValues.triggers[0].filter_has_not_document_types).toEqual([1])
    expect(formValues.triggers[0].filter_has_any_storage_paths).toEqual([13])
    expect(formValues.triggers[0].filter_has_storage_path).toEqual(1)
    expect(formValues.triggers[0].filter_has_not_storage_paths).toEqual([1])
    expect(formValues.triggers[0].filter_custom_field_query).toEqual(

@@ -521,22 +509,16 @@ describe('WorkflowEditDialogComponent', () => {

    setFilter(TriggerFilterType.TagsAll, 11)
    setFilter(TriggerFilterType.TagsNone, 12)
    setFilter(TriggerFilterType.CorrespondentAny, 16)
    setFilter(TriggerFilterType.CorrespondentNot, 13)
    setFilter(TriggerFilterType.DocumentTypeAny, 17)
    setFilter(TriggerFilterType.DocumentTypeNot, 14)
    setFilter(TriggerFilterType.StoragePathAny, 18)
    setFilter(TriggerFilterType.StoragePathNot, 15)

    const formValues = component['getFormValues']()

    expect(formValues.triggers[0].filter_has_all_tags).toEqual([11])
    expect(formValues.triggers[0].filter_has_not_tags).toEqual([12])
    expect(formValues.triggers[0].filter_has_any_correspondents).toEqual([16])
    expect(formValues.triggers[0].filter_has_not_correspondents).toEqual([13])
    expect(formValues.triggers[0].filter_has_any_document_types).toEqual([17])
    expect(formValues.triggers[0].filter_has_not_document_types).toEqual([14])
    expect(formValues.triggers[0].filter_has_any_storage_paths).toEqual([18])
    expect(formValues.triggers[0].filter_has_not_storage_paths).toEqual([15])
  })

@@ -660,11 +642,8 @@ describe('WorkflowEditDialogComponent', () => {
    filter_has_tags: [],
    filter_has_all_tags: [],
    filter_has_not_tags: [],
    filter_has_any_correspondents: [],
    filter_has_not_correspondents: [],
    filter_has_any_document_types: [],
    filter_has_not_document_types: [],
    filter_has_any_storage_paths: [],
    filter_has_not_storage_paths: [],
    filter_has_correspondent: null,
    filter_has_document_type: null,

@@ -722,14 +701,11 @@ describe('WorkflowEditDialogComponent', () => {
    trigger.filter_has_tags = [1]
    trigger.filter_has_all_tags = [2, 3]
    trigger.filter_has_not_tags = [4]
    trigger.filter_has_any_correspondents = [10] as any
    trigger.filter_has_correspondent = 5 as any
    trigger.filter_has_not_correspondents = [6] as any
    trigger.filter_has_document_type = 7 as any
    trigger.filter_has_any_document_types = [11] as any
    trigger.filter_has_not_document_types = [8] as any
    trigger.filter_has_storage_path = 9 as any
    trigger.filter_has_any_storage_paths = [12] as any
    trigger.filter_has_not_storage_paths = [10] as any
    trigger.filter_custom_field_query = JSON.stringify([
      'AND',

@@ -740,8 +716,8 @@ describe('WorkflowEditDialogComponent', () => {
    component.ngOnInit()
    const triggerGroup = component.triggerFields.at(0) as FormGroup
    const filters = component.getFiltersFormArray(triggerGroup)
    expect(filters.length).toBe(13)
    const customFieldFilter = filters.at(12) as FormGroup
    expect(filters.length).toBe(10)
    const customFieldFilter = filters.at(9) as FormGroup
    expect(customFieldFilter.get('type').value).toBe(
      TriggerFilterType.CustomFieldQuery
    )

@@ -750,27 +726,12 @@ describe('WorkflowEditDialogComponent', () => {
  })

  it('should expose select metadata helpers', () => {
    expect(component.isSelectMultiple(TriggerFilterType.CorrespondentAny)).toBe(
      true
    )
    expect(component.isSelectMultiple(TriggerFilterType.CorrespondentNot)).toBe(
      true
    )
    expect(component.isSelectMultiple(TriggerFilterType.CorrespondentIs)).toBe(
      false
    )
    expect(component.isSelectMultiple(TriggerFilterType.DocumentTypeAny)).toBe(
      true
    )
    expect(component.isSelectMultiple(TriggerFilterType.DocumentTypeIs)).toBe(
      false
    )
    expect(component.isSelectMultiple(TriggerFilterType.StoragePathAny)).toBe(
      true
    )
    expect(component.isSelectMultiple(TriggerFilterType.StoragePathIs)).toBe(
      false
    )

    component.correspondents = [{ id: 1, name: 'C1' } as any]
    component.documentTypes = [{ id: 2, name: 'DT' } as any]

@@ -782,15 +743,9 @@ describe('WorkflowEditDialogComponent', () => {
    expect(
      component.getFilterSelectItems(TriggerFilterType.DocumentTypeIs)
    ).toEqual(component.documentTypes)
    expect(
      component.getFilterSelectItems(TriggerFilterType.DocumentTypeAny)
    ).toEqual(component.documentTypes)
    expect(
      component.getFilterSelectItems(TriggerFilterType.StoragePathIs)
    ).toEqual(component.storagePaths)
    expect(
      component.getFilterSelectItems(TriggerFilterType.StoragePathAny)
    ).toEqual(component.storagePaths)
    expect(component.getFilterSelectItems(TriggerFilterType.TagsAll)).toEqual(
      []
    )
@@ -145,13 +145,10 @@ export enum TriggerFilterType {
  TagsAny = 'tags_any',
  TagsAll = 'tags_all',
  TagsNone = 'tags_none',
  CorrespondentAny = 'correspondent_any',
  CorrespondentIs = 'correspondent_is',
  CorrespondentNot = 'correspondent_not',
  DocumentTypeAny = 'document_type_any',
  DocumentTypeIs = 'document_type_is',
  DocumentTypeNot = 'document_type_not',
  StoragePathAny = 'storage_path_any',
  StoragePathIs = 'storage_path_is',
  StoragePathNot = 'storage_path_not',
  CustomFieldQuery = 'custom_field_query',

@@ -175,11 +172,8 @@ type TriggerFilterAggregate = {
  filter_has_tags: number[]
  filter_has_all_tags: number[]
  filter_has_not_tags: number[]
  filter_has_any_correspondents: number[]
  filter_has_not_correspondents: number[]
  filter_has_any_document_types: number[]
  filter_has_not_document_types: number[]
  filter_has_any_storage_paths: number[]
  filter_has_not_storage_paths: number[]
  filter_has_correspondent: number | null
  filter_has_document_type: number | null

@@ -225,14 +219,6 @@ const TRIGGER_FILTER_DEFINITIONS: TriggerFilterDefinition[] = [
    allowMultipleEntries: false,
    allowMultipleValues: true,
  },
  {
    id: TriggerFilterType.CorrespondentAny,
    name: $localize`Has any of these correspondents`,
    inputType: 'select',
    allowMultipleEntries: false,
    allowMultipleValues: true,
    selectItems: 'correspondents',
  },
  {
    id: TriggerFilterType.CorrespondentIs,
    name: $localize`Has correspondent`,

@@ -257,14 +243,6 @@ const TRIGGER_FILTER_DEFINITIONS: TriggerFilterDefinition[] = [
    allowMultipleValues: false,
    selectItems: 'documentTypes',
  },
  {
    id: TriggerFilterType.DocumentTypeAny,
    name: $localize`Has any of these document types`,
    inputType: 'select',
    allowMultipleEntries: false,
    allowMultipleValues: true,
    selectItems: 'documentTypes',
  },
  {
    id: TriggerFilterType.DocumentTypeNot,
    name: $localize`Does not have document types`,

@@ -281,14 +259,6 @@ const TRIGGER_FILTER_DEFINITIONS: TriggerFilterDefinition[] = [
    allowMultipleValues: false,
    selectItems: 'storagePaths',
  },
  {
    id: TriggerFilterType.StoragePathAny,
    name: $localize`Has any of these storage paths`,
    inputType: 'select',
    allowMultipleEntries: false,
    allowMultipleValues: true,
    selectItems: 'storagePaths',
  },
  {
    id: TriggerFilterType.StoragePathNot,
    name: $localize`Does not have storage paths`,

@@ -336,15 +306,6 @@ const FILTER_HANDLERS: Record<TriggerFilterType, FilterHandler> = {
    extract: (trigger) => trigger.filter_has_not_tags,
    hasValue: (value) => Array.isArray(value) && value.length > 0,
  },
  [TriggerFilterType.CorrespondentAny]: {
    apply: (aggregate, values) => {
      aggregate.filter_has_any_correspondents = Array.isArray(values)
        ? [...values]
        : [values]
    },
    extract: (trigger) => trigger.filter_has_any_correspondents,
    hasValue: (value) => Array.isArray(value) && value.length > 0,
  },
  [TriggerFilterType.CorrespondentIs]: {
    apply: (aggregate, values) => {
      aggregate.filter_has_correspondent = Array.isArray(values)

@@ -372,15 +333,6 @@ const FILTER_HANDLERS: Record<TriggerFilterType, FilterHandler> = {
    extract: (trigger) => trigger.filter_has_document_type,
    hasValue: (value) => value !== null && value !== undefined,
  },
  [TriggerFilterType.DocumentTypeAny]: {
    apply: (aggregate, values) => {
      aggregate.filter_has_any_document_types = Array.isArray(values)
        ? [...values]
        : [values]
    },
    extract: (trigger) => trigger.filter_has_any_document_types,
    hasValue: (value) => Array.isArray(value) && value.length > 0,
  },
  [TriggerFilterType.DocumentTypeNot]: {
    apply: (aggregate, values) => {
      aggregate.filter_has_not_document_types = Array.isArray(values)

@@ -399,15 +351,6 @@ const FILTER_HANDLERS: Record<TriggerFilterType, FilterHandler> = {
    extract: (trigger) => trigger.filter_has_storage_path,
    hasValue: (value) => value !== null && value !== undefined,
  },
  [TriggerFilterType.StoragePathAny]: {
    apply: (aggregate, values) => {
      aggregate.filter_has_any_storage_paths = Array.isArray(values)
        ? [...values]
        : [values]
    },
    extract: (trigger) => trigger.filter_has_any_storage_paths,
    hasValue: (value) => Array.isArray(value) && value.length > 0,
  },
  [TriggerFilterType.StoragePathNot]: {
    apply: (aggregate, values) => {
      aggregate.filter_has_not_storage_paths = Array.isArray(values)

@@ -699,11 +642,8 @@ export class WorkflowEditDialogComponent
      filter_has_tags: [],
      filter_has_all_tags: [],
      filter_has_not_tags: [],
      filter_has_any_correspondents: [],
      filter_has_not_correspondents: [],
      filter_has_any_document_types: [],
      filter_has_not_document_types: [],
      filter_has_any_storage_paths: [],
      filter_has_not_storage_paths: [],
      filter_has_correspondent: null,
      filter_has_document_type: null,

@@ -730,16 +670,10 @@ export class WorkflowEditDialogComponent
    trigger.filter_has_tags = aggregate.filter_has_tags
    trigger.filter_has_all_tags = aggregate.filter_has_all_tags
    trigger.filter_has_not_tags = aggregate.filter_has_not_tags
    trigger.filter_has_any_correspondents =
      aggregate.filter_has_any_correspondents
    trigger.filter_has_not_correspondents =
      aggregate.filter_has_not_correspondents
    trigger.filter_has_any_document_types =
      aggregate.filter_has_any_document_types
    trigger.filter_has_not_document_types =
      aggregate.filter_has_not_document_types
    trigger.filter_has_any_storage_paths =
      aggregate.filter_has_any_storage_paths
    trigger.filter_has_not_storage_paths =
      aggregate.filter_has_not_storage_paths
    trigger.filter_has_correspondent =

@@ -922,11 +856,8 @@ export class WorkflowEditDialogComponent
      case TriggerFilterType.TagsAny:
      case TriggerFilterType.TagsAll:
      case TriggerFilterType.TagsNone:
      case TriggerFilterType.CorrespondentAny:
      case TriggerFilterType.CorrespondentNot:
      case TriggerFilterType.DocumentTypeAny:
      case TriggerFilterType.DocumentTypeNot:
      case TriggerFilterType.StoragePathAny:
      case TriggerFilterType.StoragePathNot:
        return true
      default:

@@ -1248,11 +1179,8 @@ export class WorkflowEditDialogComponent
      filter_has_tags: [],
      filter_has_all_tags: [],
      filter_has_not_tags: [],
      filter_has_any_correspondents: [],
      filter_has_not_correspondents: [],
      filter_has_any_document_types: [],
      filter_has_not_document_types: [],
      filter_has_any_storage_paths: [],
      filter_has_not_storage_paths: [],
      filter_custom_field_query: null,
      filter_has_correspondent: null,
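
The FILTER_HANDLERS table above pairs each filter type with apply/extract/hasValue callbacks so form values and trigger fields convert in both directions without a long switch statement. The same registry idea, as a Python sketch (all names below are illustrative, not from the codebase):

# Illustrative sketch of the apply/extract/hasValue registry pattern.
from dataclasses import dataclass, field
from typing import Any, Callable

@dataclass
class Trigger:
    filter_has_not_tags: list[int] = field(default_factory=list)

HANDLERS: dict[str, dict[str, Callable[..., Any]]] = {
    "tags_none": {
        "apply": lambda agg, values: agg.update(filter_has_not_tags=list(values)),
        "extract": lambda trigger: trigger.filter_has_not_tags,
        "has_value": lambda value: isinstance(value, list) and len(value) > 0,
    },
}

aggregate: dict[str, Any] = {}
HANDLERS["tags_none"]["apply"](aggregate, [4])  # form value -> aggregate
trigger = Trigger(filter_has_not_tags=aggregate["filter_has_not_tags"])
assert HANDLERS["tags_none"]["has_value"](HANDLERS["tags_none"]["extract"](trigger))
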
@@ -44,16 +44,10 @@ export interface WorkflowTrigger extends ObjectWithId {

  filter_has_not_tags?: number[] // Tag.id[]

  filter_has_any_correspondents?: number[] // Correspondent.id[]

  filter_has_not_correspondents?: number[] // Correspondent.id[]

  filter_has_any_document_types?: number[] // DocumentType.id[]

  filter_has_not_document_types?: number[] // DocumentType.id[]

  filter_has_any_storage_paths?: number[] // StoragePath.id[]

  filter_has_not_storage_paths?: number[] // StoragePath.id[]

  filter_custom_field_query?: string
@@ -1,135 +1,362 @@
"""
Document consumer management command.

Watches a consumption directory for new documents and queues them for processing.
Uses watchfiles for efficient file system monitoring with support for both
native OS notifications and polling fallback.
"""

from __future__ import annotations

import logging
import os
from concurrent.futures import ThreadPoolExecutor
from fnmatch import filter
import re
from dataclasses import dataclass
from pathlib import Path
from pathlib import PurePath
from threading import Event
from time import monotonic
from time import sleep
from typing import TYPE_CHECKING
from typing import Final

from django import db
from django.conf import settings
from django.core.management.base import BaseCommand
from django.core.management.base import CommandError
from watchdog.events import FileSystemEventHandler
from watchdog.observers.polling import PollingObserver
from watchfiles import Change
from watchfiles import DefaultFilter
from watchfiles import watch

from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
from documents.data_models import DocumentSource
from documents.models import Tag
from documents.parsers import is_file_ext_supported
from documents.parsers import get_supported_file_extensions
from documents.tasks import consume_file

try:
    from inotifyrecursive import INotify
    from inotifyrecursive import flags
except ImportError:  # pragma: no cover
    INotify = flags = None
if TYPE_CHECKING:
    from collections.abc import Iterator


logger = logging.getLogger("paperless.management.consumer")


def _tags_from_path(filepath: Path) -> list[int]:
@dataclass
class TrackedFile:
    """Represents a file being tracked for stability."""

    path: Path
    last_event_time: float
    last_mtime: float | None = None
    last_size: int | None = None

    def update_stats(self) -> bool:
        """
        Update file stats. Returns True if file exists and stats were updated.
        """
        try:
            stat = self.path.stat()
            self.last_mtime = stat.st_mtime
            self.last_size = stat.st_size
            return True
        except (FileNotFoundError, PermissionError, OSError):
            return False

    def is_unchanged(self) -> bool:
        """
        Check if file stats match the previously recorded values.
        Returns False if file doesn't exist or stats changed.
        """
        try:
            stat = self.path.stat()
            return stat.st_mtime == self.last_mtime and stat.st_size == self.last_size
        except (FileNotFoundError, PermissionError, OSError):
            return False

class FileStabilityTracker:
    """
    Walk up the directory tree from filepath to CONSUMPTION_DIR
    Tracks file events and determines when files are stable for consumption.

    A file is considered stable when:
    1. No new events have been received for it within the stability delay
    2. Its size and modification time haven't changed
    3. It still exists as a regular file

    This handles various edge cases:
    - Network copies that write in chunks
    - Scanners that open/close files multiple times
    - Temporary files that get renamed
    - Files that are deleted before becoming stable
    """

    def __init__(self, stability_delay: float = 1.0) -> None:
        """
        Initialize the tracker.

        Args:
            stability_delay: Time in seconds a file must remain unchanged
                before being considered stable.
        """
        self.stability_delay = stability_delay
        self._tracked: dict[Path, TrackedFile] = {}

    def track(self, path: Path, change: Change) -> None:
        """
        Register a file event.

        Args:
            path: The file path that changed.
            change: The type of change (added, modified, deleted).
        """
        path = path.resolve()

        match change:
            case Change.deleted:
                self._tracked.pop(path, None)
                logger.debug(f"Stopped tracking deleted file: {path}")
            case Change.added | Change.modified:
                current_time = monotonic()
                if path in self._tracked:
                    tracked = self._tracked[path]
                    tracked.last_event_time = current_time
                    tracked.update_stats()
                    logger.debug(f"Updated tracking for: {path}")
                else:
                    tracked = TrackedFile(path=path, last_event_time=current_time)
                    if tracked.update_stats():
                        self._tracked[path] = tracked
                        logger.debug(f"Started tracking: {path}")
                    else:
                        logger.debug(f"Could not stat file, not tracking: {path}")

    def get_stable_files(self) -> Iterator[Path]:
        """
        Yield files that have been stable for the configured delay.

        Files are removed from tracking once yielded or determined to be invalid.
        """
        current_time = monotonic()
        to_remove: list[Path] = []
        to_yield: list[Path] = []

        for path, tracked in list(self._tracked.items()):
            time_since_event = current_time - tracked.last_event_time

            if time_since_event < self.stability_delay:
                continue

            # File has waited long enough, verify it's unchanged
            if not tracked.is_unchanged():
                # Stats changed or file gone - update and wait again
                if tracked.update_stats():
                    tracked.last_event_time = current_time
                    logger.debug(f"File changed during stability check: {path}")
                else:
                    # File no longer exists, remove from tracking
                    to_remove.append(path)
                    logger.debug(f"File disappeared during stability check: {path}")
                continue

            # File is stable - verify it's a regular file
            try:
                if path.is_file():
                    to_yield.append(path)
                    logger.info(f"File is stable: {path}")
                else:
                    # Not a regular file (directory, symlink, etc.)
                    to_remove.append(path)
                    logger.debug(f"Path is not a regular file: {path}")
            except (PermissionError, OSError) as e:
                logger.warning(f"Cannot access {path}: {e}")
                to_remove.append(path)

        # Remove files that are no longer valid
        for path in to_remove:
            self._tracked.pop(path, None)

        # Remove and yield stable files
        for path in to_yield:
            self._tracked.pop(path, None)
            yield path

    def has_pending_files(self) -> bool:
        """Check if there are files waiting for stability check."""
        return len(self._tracked) > 0

    def clear(self) -> None:
        """Clear all tracked files."""
        self._tracked.clear()

    @property
    def pending_count(self) -> int:
        """Number of files being tracked."""
        return len(self._tracked)

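
FileStabilityTracker is driven by two calls: track() for every event and get_stable_files() once the delay may have elapsed. A minimal driver sketch, assuming consume/scan.pdf exists on disk (track() silently drops paths it cannot stat):

# Illustrative driver for FileStabilityTracker, not part of the diff.
from pathlib import Path
from time import sleep

from watchfiles import Change

tracker = FileStabilityTracker(stability_delay=1.0)
tracker.track(Path("consume/scan.pdf"), Change.added)

while tracker.has_pending_files():
    sleep(tracker.stability_delay)  # give the file its quiet period
    for stable in tracker.get_stable_files():
        print(f"stable, ready to consume: {stable}")
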
class ConsumerFilter(DefaultFilter):
    """
    Custom filter for the document consumer.

    Filters files based on:
    - Supported file extensions
    - User-configured ignore patterns (regex)
    - Default ignore patterns for common system files
    """

    # Default regex patterns to ignore (matched against filename only)
    DEFAULT_IGNORE_PATTERNS: Final[frozenset[str]] = frozenset(
        {
            r"^\.DS_Store$",
            r"^\.DS_STORE$",
            r"^\._.*",
            r"^desktop\.ini$",
            r"^Thumbs\.db$",
        },
    )

    # Directories to always ignore (matched by name via DefaultFilter)
    DEFAULT_IGNORE_DIRS: Final[tuple[str, ...]] = (
        ".stfolder",
        ".stversions",
        ".localized",
        "@eaDir",
        ".Spotlight-V100",
        ".Trashes",
        "__MACOSX",
    )

    def __init__(
        self,
        *,
        supported_extensions: frozenset[str] | None = None,
        ignore_patterns: list[str] | None = None,
        consumption_dir: Path | None = None,
    ) -> None:
        """
        Initialize the consumer filter.

        Args:
            supported_extensions: Set of supported file extensions (e.g., {".pdf", ".png"}).
                If None, uses get_supported_file_extensions().
            ignore_patterns: Additional regex patterns to ignore (matched against filename).
            consumption_dir: Base consumption directory (unused, kept for API compatibility).
        """
        # Combine default and user patterns
        all_patterns = set(self.DEFAULT_IGNORE_PATTERNS)
        if ignore_patterns:
            all_patterns.update(ignore_patterns)

        # Compile all patterns
        self._ignore_regexes: list[re.Pattern[str]] = [
            re.compile(pattern) for pattern in all_patterns
        ]

        # Get supported extensions
        if supported_extensions is None:
            supported_extensions = frozenset(get_supported_file_extensions())
        self._supported_extensions = supported_extensions

        # Call parent with directory ignore list
        # DefaultFilter.ignore_dirs matches directory names, not full paths
        super().__init__(
            ignore_dirs=self.DEFAULT_IGNORE_DIRS,
            ignore_entity_patterns=None,
            ignore_paths=None,
        )

    def __call__(self, change: Change, path: str) -> bool:
        """
        Filter function for watchfiles.

        Returns True if the path should be watched, False to ignore.
        """
        # Let parent filter handle directory ignoring and basic checks
        if not super().__call__(change, path):
            return False

        path_obj = Path(path)

        # For directories, parent filter already handled ignore_dirs
        if path_obj.is_dir():
            return True

        # For files, check extension
        if not self._has_supported_extension(path_obj):
            return False

        # Check filename against ignore patterns
        return not self._matches_ignore_pattern(path_obj.name)

    def _has_supported_extension(self, path: Path) -> bool:
        """Check if the file has a supported extension."""
        suffix = path.suffix.lower()
        return suffix in self._supported_extensions

    def _matches_ignore_pattern(self, filename: str) -> bool:
        """Check if the filename matches any ignore pattern."""
        for regex in self._ignore_regexes:
            if regex.match(filename):
                logger.debug(
                    f"Filename {filename} matched ignore pattern {regex.pattern}",
                )
                return True
        return False

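
Because ConsumerFilter implements __call__(change, path), it can be probed directly or handed to watchfiles as watch_filter. A small sketch of its decisions, assuming .pdf and .png are the supported extensions (supplied explicitly here so no Django setup is needed):

# Illustrative probe of ConsumerFilter, not part of the diff.
from watchfiles import Change

f = ConsumerFilter(supported_extensions=frozenset({".pdf", ".png"}))

assert f(Change.added, "/consume/invoice.pdf") is True
assert f(Change.added, "/consume/notes.txt") is False  # unsupported extension
assert f(Change.added, "/consume/.DS_Store") is False  # no extension, ignored dotfile
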
def _tags_from_path(filepath: Path, consumption_dir: Path) -> list[int]:
    """
    Walk up the directory tree from filepath to consumption_dir
    and get or create Tag IDs for every directory.

    Returns set of Tag models
    Returns list of Tag primary keys.
    """
    db.close_old_connections()
    tag_ids = set()
    path_parts = filepath.relative_to(settings.CONSUMPTION_DIR).parent.parts
    tag_ids: set[int] = set()
    path_parts = filepath.relative_to(consumption_dir).parent.parts

    for part in path_parts:
        tag_ids.add(
            Tag.objects.get_or_create(name__iexact=part, defaults={"name": part})[0].pk,
        tag, _ = Tag.objects.get_or_create(
            name__iexact=part,
            defaults={"name": part},
        )
        tag_ids.add(tag.pk)

    return list(tag_ids)

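
With CONSUMER_SUBDIRS_AS_TAGS enabled, each directory between the consumption root and the file becomes a tag, matched case-insensitively and created on first use. A sketch of the path arithmetic (paths are illustrative):

# Illustrative: how subdirectory names turn into tag candidates.
from pathlib import Path

parts = Path("/consume/invoices/2025/scan.pdf").relative_to("/consume").parent.parts
assert parts == ("invoices", "2025")
# For each part, _tags_from_path does the equivalent of:
#   tag, _ = Tag.objects.get_or_create(name__iexact=part, defaults={"name": part})
# so an existing tag named "Invoices" is reused for the part "invoices".
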
def _is_ignored(filepath: Path) -> bool:
def _consume_file(
    filepath: Path,
    consumption_dir: Path,
    *,
    subdirs_as_tags: bool,
) -> None:
    """
    Checks if the given file should be ignored, based on configured
    patterns.
    Queue a file for consumption.

    Returns True if the file is ignored, False otherwise
    Args:
        filepath: Path to the file to consume.
        consumption_dir: Base consumption directory.
        subdirs_as_tags: Whether to create tags from subdirectory names.
    """
    # Trim out the consume directory, leaving only filename and it's
    # path relative to the consume directory
    filepath_relative = PurePath(filepath).relative_to(settings.CONSUMPTION_DIR)

    # March through the components of the path, including directories and the filename
    # looking for anything matching
    # foo/bar/baz/file.pdf -> (foo, bar, baz, file.pdf)
    parts = []
    for part in filepath_relative.parts:
        # If the part is not the name (ie, it's a dir)
        # Need to append the trailing slash or fnmatch doesn't match
        # fnmatch("dir", "dir/*") == False
        # fnmatch("dir/", "dir/*") == True
        if part != filepath_relative.name:
            part = part + "/"
        parts.append(part)

    for pattern in settings.CONSUMER_IGNORE_PATTERNS:
        if len(filter(parts, pattern)):
            return True

    return False


def _consume(filepath: Path) -> None:
    # Check permissions early
    # Verify file still exists and is accessible
    try:
        filepath.stat()
    except (PermissionError, OSError):
        logger.warning(f"Not consuming file {filepath}: Permission denied.")
        if not filepath.is_file():
            logger.debug(f"Not consuming {filepath}: not a file or doesn't exist")
            return
    except (PermissionError, OSError) as e:
        logger.warning(f"Not consuming {filepath}: {e}")
        return

    if filepath.is_dir() or _is_ignored(filepath):
        return

    if not filepath.is_file():
        logger.debug(f"Not consuming file {filepath}: File has moved.")
        return

    if not is_file_ext_supported(filepath.suffix):
        logger.warning(f"Not consuming file {filepath}: Unknown file extension.")
        return

    # Total wait time: up to 500ms
    os_error_retry_count: Final[int] = 50
    os_error_retry_wait: Final[float] = 0.01

    read_try_count = 0
    file_open_ok = False
    os_error_str = None

    while (read_try_count < os_error_retry_count) and not file_open_ok:
        # Get tags from path if configured
        tag_ids: list[int] | None = None
        if subdirs_as_tags:
            try:
                with filepath.open("rb"):
                    file_open_ok = True
            except OSError as e:
                read_try_count += 1
                os_error_str = str(e)
                sleep(os_error_retry_wait)
                tag_ids = _tags_from_path(filepath, consumption_dir)
            except Exception:
                logger.exception(f"Error creating tags from path for {filepath}")

    if read_try_count >= os_error_retry_count:
        logger.warning(f"Not consuming file {filepath}: OS reports {os_error_str}")
        return

    tag_ids = None
    # Queue for consumption
    try:
        if settings.CONSUMER_SUBDIRS_AS_TAGS:
            tag_ids = _tags_from_path(filepath)
    except Exception:
        logger.exception("Error creating tags from path")

    try:
        logger.info(f"Adding {filepath} to the task queue.")
        logger.info(f"Adding {filepath} to the task queue")
        consume_file.delay(
            ConsumableDocument(
                source=DocumentSource.ConsumeFolder,
@@ -138,228 +365,206 @@ def _consume(filepath: Path) -> None:
            DocumentMetadataOverrides(tag_ids=tag_ids),
        )
    except Exception:
        # Catch all so that the consumer won't crash.
        # This is also what the test case is listening for to check for
        # errors.
        logger.exception("Error while consuming document")

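
The trailing-slash comments in _is_ignored describe a real fnmatch quirk: a directory component only matches a dir/* pattern once the slash is appended. A quick self-contained demonstration:

# Illustrative demo of the fnmatch behavior noted above, not part of the diff.
from fnmatch import fnmatch

assert fnmatch("dir", "dir/*") is False   # bare directory name does not match
assert fnmatch("dir/", "dir/*") is True   # trailing slash makes it match
assert fnmatch("dir/file.pdf", "dir/*") is True
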
def _consume_wait_unmodified(file: Path) -> None:
    """
    Waits for the given file to appear unmodified based on file size
    and modification time. Will wait a configured number of seconds
    and retry a configured number of times before either consuming or
    giving up
    """
    if _is_ignored(file):
        return

    logger.debug(f"Waiting for file {file} to remain unmodified")
    mtime = -1
    size = -1
    current_try = 0
    while current_try < settings.CONSUMER_POLLING_RETRY_COUNT:
        try:
            stat_data = file.stat()
            new_mtime = stat_data.st_mtime
            new_size = stat_data.st_size
        except FileNotFoundError:
            logger.debug(
                f"File {file} moved while waiting for it to remain unmodified.",
            )
            return
        if new_mtime == mtime and new_size == size:
            _consume(file)
            return
        mtime = new_mtime
        size = new_size
        sleep(settings.CONSUMER_POLLING_DELAY)
        current_try += 1

    logger.error(f"Timeout while waiting on file {file} to remain unmodified.")


class Handler(FileSystemEventHandler):
    def __init__(self, pool: ThreadPoolExecutor) -> None:
        super().__init__()
        self._pool = pool

    def on_created(self, event):
        self._pool.submit(_consume_wait_unmodified, Path(event.src_path))

    def on_moved(self, event):
        self._pool.submit(_consume_wait_unmodified, Path(event.dest_path))
        logger.exception(f"Error while queuing document {filepath}")

class Command(BaseCommand):
    """
    On every iteration of an infinite loop, consume what we can from the
    consumption directory.
    Watch a consumption directory and queue new documents for processing.

    Uses watchfiles for efficient file system monitoring. Supports both
    native OS notifications (inotify on Linux, FSEvents on macOS) and
    polling for network filesystems.
    """

    # This is here primarily for the tests and is irrelevant in production.
    stop_flag = Event()
    # Also only for testing, configures in one place the timeout used before checking
    # the stop flag
    testing_timeout_s: Final[float] = 0.5
    testing_timeout_ms: Final[float] = testing_timeout_s * 1000.0
    help = "Watch the consumption directory for new documents"

    def add_arguments(self, parser):
    # For testing - allows tests to stop the consumer
    stop_flag: Event = Event()

    # Testing timeout in seconds
    testing_timeout_s: Final[float] = 0.5

    def add_arguments(self, parser) -> None:
        parser.add_argument(
            "directory",
            default=settings.CONSUMPTION_DIR,
            default=None,
            nargs="?",
            help="The consumption directory.",
            help="The consumption directory (defaults to CONSUMPTION_DIR setting)",
        )
        parser.add_argument(
            "--oneshot",
            action="store_true",
            help="Process existing files and exit without watching",
        )
        parser.add_argument("--oneshot", action="store_true", help="Run only once.")

        # Only use during unit testing, will configure a timeout
        # Leaving it unset or false and the consumer will exit when it
        # receives SIGINT
        parser.add_argument(
            "--testing",
            action="store_true",
            help="Flag used only for unit testing",
            help="Enable testing mode with shorter timeouts",
            default=False,
        )

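
Given the arguments above, the command can be driven from tests or scripts via Django's call_command; the command name document_consumer is assumed here from paperless-ngx convention:

# Illustrative invocations; the command name "document_consumer" is an assumption.
from django.core.management import call_command

call_command("document_consumer", "--oneshot")           # process existing files, exit
call_command("document_consumer", "/paperless/consume")  # watch an explicit directory
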
    def handle(self, *args, **options):
        directory = options["directory"]
        recursive = settings.CONSUMER_RECURSIVE

    def handle(self, *args, **options) -> None:
        # Resolve consumption directory
        directory = options.get("directory")
        if not directory:
            raise CommandError("CONSUMPTION_DIR does not appear to be set.")
            directory = getattr(settings, "CONSUMPTION_DIR", None)
        if not directory:
            raise CommandError("CONSUMPTION_DIR is not configured")

        directory = Path(directory).resolve()

        if not directory.is_dir():
            raise CommandError(f"Consumption directory {directory} does not exist")
        if not directory.exists():
            raise CommandError(f"Consumption directory does not exist: {directory}")

        # Consumer will need this
        if not directory.is_dir():
            raise CommandError(f"Consumption path is not a directory: {directory}")

        # Ensure scratch directory exists
        settings.SCRATCH_DIR.mkdir(parents=True, exist_ok=True)

        if recursive:
            for dirpath, _, filenames in os.walk(directory):
                for filename in filenames:
                    filepath = Path(dirpath) / filename
                    _consume(filepath)
        else:
            for filepath in directory.iterdir():
                _consume(filepath)
        # Get settings
        recursive: bool = getattr(settings, "CONSUMER_RECURSIVE", False)
        subdirs_as_tags: bool = getattr(settings, "CONSUMER_SUBDIRS_AS_TAGS", False)
        polling_interval: float = getattr(settings, "CONSUMER_POLLING_INTERVAL", 0)
        stability_delay: float = getattr(settings, "CONSUMER_STABILITY_DELAY", 1.0)
        ignore_patterns: list[str] = getattr(settings, "CONSUMER_IGNORE_PATTERNS", [])
        is_testing: bool = options.get("testing", False)
        is_oneshot: bool = options.get("oneshot", False)

        if options["oneshot"]:
        # Create filter
        consumer_filter = ConsumerFilter(
            ignore_patterns=ignore_patterns,
            consumption_dir=directory,
        )

        # Process existing files
        self._process_existing_files(
            directory=directory,
            recursive=recursive,
            subdirs_as_tags=subdirs_as_tags,
            consumer_filter=consumer_filter,
        )

        if is_oneshot:
            logger.info("Oneshot mode: processed existing files, exiting")
            return

        if settings.CONSUMER_POLLING == 0 and INotify:
            self.handle_inotify(directory, recursive, is_testing=options["testing"])
        # Start watching
        self._watch_directory(
            directory=directory,
            recursive=recursive,
            subdirs_as_tags=subdirs_as_tags,
            consumer_filter=consumer_filter,
            polling_interval=polling_interval,
            stability_delay=stability_delay,
            is_testing=is_testing,
        )

        logger.debug("Consumer exiting")

    def _process_existing_files(
        self,
        *,
        directory: Path,
        recursive: bool,
        subdirs_as_tags: bool,
        consumer_filter: ConsumerFilter,
    ) -> None:
        """Process any existing files in the consumption directory."""
        logger.info(f"Processing existing files in {directory}")

        glob_pattern = "**/*" if recursive else "*"

        for filepath in directory.glob(glob_pattern):
            # Use filter to check if file should be processed
            if not filepath.is_file():
                continue

            if not consumer_filter(Change.added, str(filepath)):
                continue

            _consume_file(
                filepath=filepath,
                consumption_dir=directory,
                subdirs_as_tags=subdirs_as_tags,
            )

    def _watch_directory(
        self,
        *,
        directory: Path,
        recursive: bool,
        subdirs_as_tags: bool,
        consumer_filter: ConsumerFilter,
        polling_interval: float,
        stability_delay: float,
        is_testing: bool,
    ) -> None:
        """Watch directory for changes and process stable files."""
        use_polling = polling_interval > 0
        poll_delay_ms = int(polling_interval * 1000) if use_polling else 0

        if use_polling:
            logger.info(
                f"Watching {directory} using polling (interval: {polling_interval}s)",
            )
        else:
            if INotify is None and settings.CONSUMER_POLLING == 0:  # pragma: no cover
                logger.warning("Using polling as INotify import failed")
                self.handle_polling(directory, recursive, is_testing=options["testing"])
            logger.info(f"Watching {directory} using native file system events")

        logger.debug("Consumer exiting.")
        # Create stability tracker
        tracker = FileStabilityTracker(stability_delay=stability_delay)

    def handle_polling(self, directory, recursive, *, is_testing: bool):
        logger.info(f"Polling directory for changes: {directory}")
        # Calculate timeouts
        stability_timeout_ms = int(stability_delay * 1000)
        testing_timeout_ms = int(self.testing_timeout_s * 1000)

        timeout = None
        if is_testing:
            timeout = self.testing_timeout_s
            logger.debug(f"Configuring timeout to {timeout}s")
        # Start with no timeout (wait indefinitely for first event)
        # unless in testing mode
        timeout_ms = testing_timeout_ms if is_testing else 0

        polling_interval = settings.CONSUMER_POLLING
        if polling_interval == 0:  # pragma: no cover
            # Only happens if INotify failed to import
            logger.warning("Using polling of 10s, consider setting this")
            polling_interval = 10
        self.stop_flag.clear()

        with ThreadPoolExecutor(max_workers=4) as pool:
            observer = PollingObserver(timeout=polling_interval)
            observer.schedule(Handler(pool), directory, recursive=recursive)
            observer.start()
        while not self.stop_flag.is_set():
            try:
                while observer.is_alive():
                    observer.join(timeout)
                    if self.stop_flag.is_set():
                        observer.stop()
                for changes in watch(
                    directory,
                    watch_filter=consumer_filter,
                    rust_timeout=timeout_ms,
                    yield_on_timeout=True,
                    force_polling=use_polling,
                    poll_delay_ms=poll_delay_ms,
                    recursive=recursive,
                    stop_event=self.stop_flag,
                ):
                    # Process each change
                    for change_type, path in changes:
                        path = Path(path).resolve()
                        logger.debug(f"Event: {change_type.name} for {path}")
                        tracker.track(path, change_type)

                    # Check for stable files
                    for stable_path in tracker.get_stable_files():
                        _consume_file(
                            filepath=stable_path,
                            consumption_dir=directory,
                            subdirs_as_tags=subdirs_as_tags,
                        )

                    # Exit watch loop to reconfigure timeout
                    break

                # Determine next timeout
                if tracker.has_pending_files():
                    # Check pending files at stability interval
                    timeout_ms = stability_timeout_ms
                elif is_testing:
                    # In testing, use short timeout to check stop flag
                    timeout_ms = testing_timeout_ms
                else:
                    # No pending files, wait indefinitely
                    timeout_ms = 0

            except KeyboardInterrupt:
                observer.stop()
                observer.join()

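
The watch() loop above drives an adaptive timeout: rust_timeout=0 blocks indefinitely while nothing is tracked, then drops to the stability interval while files await their quiet period, and yield_on_timeout=True makes the generator yield an empty batch on timeout so the stability pass still runs. A stripped-down sketch of that control flow, reusing FileStabilityTracker from above (directory and interval are illustrative):

# Illustrative reduction of _watch_directory's timeout handling.
from pathlib import Path

from watchfiles import watch

tracker = FileStabilityTracker(stability_delay=1.0)
timeout_ms = 0  # 0 = block until the first event arrives

while True:
    for changes in watch(
        "./consume",
        rust_timeout=timeout_ms,
        yield_on_timeout=True,  # empty batch on timeout, so the check below runs
    ):
        for change, path in changes:
            tracker.track(Path(path), change)
        for stable in tracker.get_stable_files():
            print(f"queue for consumption: {stable}")
        break  # leave watch() so the timeout can be recomputed

    timeout_ms = 1000 if tracker.has_pending_files() else 0
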
    def handle_inotify(self, directory, recursive, *, is_testing: bool):
        logger.info(f"Using inotify to watch directory for changes: {directory}")

        timeout_ms = None
        if is_testing:
            timeout_ms = self.testing_timeout_ms
            logger.debug(f"Configuring timeout to {timeout_ms}ms")

        inotify = INotify()
        inotify_flags = flags.CLOSE_WRITE | flags.MOVED_TO | flags.MODIFY
        if recursive:
            inotify.add_watch_recursive(directory, inotify_flags)
        else:
            inotify.add_watch(directory, inotify_flags)

        inotify_debounce_secs: Final[float] = settings.CONSUMER_INOTIFY_DELAY
        inotify_debounce_ms: Final[int] = inotify_debounce_secs * 1000

        finished = False

        notified_files = {}

        try:
            while not finished:
                try:
                    for event in inotify.read(timeout=timeout_ms):
                        path = inotify.get_path(event.wd) if recursive else directory
                        filepath = Path(path) / event.name
                        if flags.MODIFY in flags.from_mask(event.mask):
                            notified_files.pop(filepath, None)
                        else:
                            notified_files[filepath] = monotonic()

                    # Check the files against the timeout
                    still_waiting = {}
                    # last_event_time is time of the last inotify event for this file
                    for filepath, last_event_time in notified_files.items():
                        # Current time - last time over the configured timeout
                        waited_long_enough = (
                            monotonic() - last_event_time
                        ) > inotify_debounce_secs

                        # Also make sure the file exists still, some scanners might write a
                        # temporary file first
                        try:
                            file_still_exists = filepath.exists() and filepath.is_file()
                        except (PermissionError, OSError):  # pragma: no cover
                            # If we can't check, let it fail in the _consume function
                            file_still_exists = True
                            continue

                        if waited_long_enough and file_still_exists:
                            _consume(filepath)
                        elif file_still_exists:
                            still_waiting[filepath] = last_event_time

                    # These files are still waiting to hit the timeout
                    notified_files = still_waiting

                    # If files are waiting, need to exit read() to check them
                    # Otherwise, go back to infinite sleep time, but only if not testing
                    if len(notified_files) > 0:
                        timeout_ms = inotify_debounce_ms
                    elif is_testing:
                        timeout_ms = self.testing_timeout_ms
                    else:
                        timeout_ms = None

                    if self.stop_flag.is_set():
                        logger.debug("Finishing because event is set")
                        finished = True

                except KeyboardInterrupt:
                    logger.info("Received SIGINT, stopping inotify")
                    finished = True
        finally:
            inotify.close()
            logger.info("Received interrupt, stopping consumer")
            self.stop_flag.set()

@@ -403,18 +403,6 @@ def existing_document_matches_workflow(
            f"Document tags {list(document.tags.all())} include excluded tags {list(trigger_has_not_tags_qs)}",
        )

    allowed_correspondent_ids = set(
        trigger.filter_has_any_correspondents.values_list("id", flat=True),
    )
    if (
        allowed_correspondent_ids
        and document.correspondent_id not in allowed_correspondent_ids
    ):
        return (
            False,
            f"Document correspondent {document.correspondent} is not one of {list(trigger.filter_has_any_correspondents.all())}",
        )

    # Document correspondent vs trigger has_correspondent
    if (
        trigger.filter_has_correspondent_id is not None

@@ -436,17 +424,6 @@ def existing_document_matches_workflow(
            f"Document correspondent {document.correspondent} is excluded by {list(trigger.filter_has_not_correspondents.all())}",
        )

    allowed_document_type_ids = set(
        trigger.filter_has_any_document_types.values_list("id", flat=True),
    )
    if allowed_document_type_ids and (
        document.document_type_id not in allowed_document_type_ids
    ):
        return (
            False,
            f"Document doc type {document.document_type} is not one of {list(trigger.filter_has_any_document_types.all())}",
        )

    # Document document_type vs trigger has_document_type
    if (
        trigger.filter_has_document_type_id is not None

@@ -468,17 +445,6 @@ def existing_document_matches_workflow(
            f"Document doc type {document.document_type} is excluded by {list(trigger.filter_has_not_document_types.all())}",
        )

    allowed_storage_path_ids = set(
        trigger.filter_has_any_storage_paths.values_list("id", flat=True),
    )
    if allowed_storage_path_ids and (
        document.storage_path_id not in allowed_storage_path_ids
    ):
        return (
            False,
            f"Document storage path {document.storage_path} is not one of {list(trigger.filter_has_any_storage_paths.all())}",
        )

    # Document storage_path vs trigger has_storage_path
    if (
        trigger.filter_has_storage_path_id is not None

@@ -566,10 +532,6 @@ def prefilter_documents_by_workflowtrigger(

    # Correspondent, DocumentType, etc. filtering

    if trigger.filter_has_any_correspondents.exists():
        documents = documents.filter(
            correspondent__in=trigger.filter_has_any_correspondents.all(),
        )
    if trigger.filter_has_correspondent is not None:
        documents = documents.filter(
            correspondent=trigger.filter_has_correspondent,

@@ -579,10 +541,6 @@ def prefilter_documents_by_workflowtrigger(
            correspondent__in=trigger.filter_has_not_correspondents.all(),
        )

    if trigger.filter_has_any_document_types.exists():
        documents = documents.filter(
            document_type__in=trigger.filter_has_any_document_types.all(),
        )
    if trigger.filter_has_document_type is not None:
        documents = documents.filter(
            document_type=trigger.filter_has_document_type,

@@ -592,10 +550,6 @@ def prefilter_documents_by_workflowtrigger(
            document_type__in=trigger.filter_has_not_document_types.all(),
        )

    if trigger.filter_has_any_storage_paths.exists():
        documents = documents.filter(
            storage_path__in=trigger.filter_has_any_storage_paths.all(),
        )
    if trigger.filter_has_storage_path is not None:
        documents = documents.filter(
            storage_path=trigger.filter_has_storage_path,

@@ -650,11 +604,8 @@ def document_matches_workflow(
            "filter_has_tags",
            "filter_has_all_tags",
            "filter_has_not_tags",
            "filter_has_any_document_types",
            "filter_has_not_document_types",
            "filter_has_any_correspondents",
            "filter_has_not_correspondents",
            "filter_has_any_storage_paths",
            "filter_has_not_storage_paths",
        )
    )

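
Each removed block follows the same shape: collect the allowed ids with values_list("id", flat=True), and reject only when the allow-list is non-empty and the document's id is missing, so an empty filter means "no constraint". The predicate in isolation (names illustrative):

# Illustrative: empty allow-list means "no constraint", non-empty must match.
def allowed(document_value_id, allowed_ids: set) -> bool:
    return not allowed_ids or document_value_id in allowed_ids

assert allowed(5, set()) is True    # no filter configured
assert allowed(5, {5, 9}) is True   # id present in allow-list
assert allowed(7, {5, 9}) is False  # id absent -> trigger does not match
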
@@ -1,43 +0,0 @@
# Generated by Django 5.2.7 on 2025-12-17 22:25

from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1074_workflowrun_deleted_at_workflowrun_restored_at_and_more"),
    ]

    operations = [
        migrations.AddField(
            model_name="workflowtrigger",
            name="filter_has_any_correspondents",
            field=models.ManyToManyField(
                blank=True,
                related_name="workflowtriggers_has_any_correspondent",
                to="documents.correspondent",
                verbose_name="has one of these correspondents",
            ),
        ),
        migrations.AddField(
            model_name="workflowtrigger",
            name="filter_has_any_document_types",
            field=models.ManyToManyField(
                blank=True,
                related_name="workflowtriggers_has_any_document_type",
                to="documents.documenttype",
                verbose_name="has one of these document types",
            ),
        ),
        migrations.AddField(
            model_name="workflowtrigger",
            name="filter_has_any_storage_paths",
            field=models.ManyToManyField(
                blank=True,
                related_name="workflowtriggers_has_any_storage_path",
                to="documents.storagepath",
                verbose_name="has one of these storage paths",
            ),
        ),
    ]
@@ -1087,13 +1087,6 @@ class WorkflowTrigger(models.Model):
        verbose_name=_("has this document type"),
    )

    filter_has_any_document_types = models.ManyToManyField(
        DocumentType,
        blank=True,
        related_name="workflowtriggers_has_any_document_type",
        verbose_name=_("has one of these document types"),
    )

    filter_has_not_document_types = models.ManyToManyField(
        DocumentType,
        blank=True,

@@ -1116,13 +1109,6 @@ class WorkflowTrigger(models.Model):
        verbose_name=_("does not have these correspondent(s)"),
    )

    filter_has_any_correspondents = models.ManyToManyField(
        Correspondent,
        blank=True,
        related_name="workflowtriggers_has_any_correspondent",
        verbose_name=_("has one of these correspondents"),
    )

    filter_has_storage_path = models.ForeignKey(
        StoragePath,
        null=True,

@@ -1131,13 +1117,6 @@ class WorkflowTrigger(models.Model):
        verbose_name=_("has this storage path"),
    )

    filter_has_any_storage_paths = models.ManyToManyField(
        StoragePath,
        blank=True,
        related_name="workflowtriggers_has_any_storage_path",
        verbose_name=_("has one of these storage paths"),
    )

    filter_has_not_storage_paths = models.ManyToManyField(
        StoragePath,
        blank=True,

@@ -2295,11 +2295,8 @@ class WorkflowTriggerSerializer(serializers.ModelSerializer):
            "filter_has_all_tags",
            "filter_has_not_tags",
            "filter_custom_field_query",
            "filter_has_any_correspondents",
            "filter_has_not_correspondents",
            "filter_has_any_document_types",
            "filter_has_not_document_types",
            "filter_has_any_storage_paths",
            "filter_has_not_storage_paths",
            "filter_has_correspondent",
            "filter_has_document_type",

@@ -2537,26 +2534,14 @@ class WorkflowSerializer(serializers.ModelSerializer):
            filter_has_tags = trigger.pop("filter_has_tags", None)
            filter_has_all_tags = trigger.pop("filter_has_all_tags", None)
            filter_has_not_tags = trigger.pop("filter_has_not_tags", None)
            filter_has_any_correspondents = trigger.pop(
                "filter_has_any_correspondents",
                None,
            )
            filter_has_not_correspondents = trigger.pop(
                "filter_has_not_correspondents",
                None,
            )
            filter_has_any_document_types = trigger.pop(
                "filter_has_any_document_types",
                None,
            )
            filter_has_not_document_types = trigger.pop(
                "filter_has_not_document_types",
                None,
            )
            filter_has_any_storage_paths = trigger.pop(
                "filter_has_any_storage_paths",
                None,
            )
            filter_has_not_storage_paths = trigger.pop(
                "filter_has_not_storage_paths",
                None,

@@ -2573,26 +2558,14 @@ class WorkflowSerializer(serializers.ModelSerializer):
            trigger_instance.filter_has_all_tags.set(filter_has_all_tags)
            if filter_has_not_tags is not None:
                trigger_instance.filter_has_not_tags.set(filter_has_not_tags)
            if filter_has_any_correspondents is not None:
                trigger_instance.filter_has_any_correspondents.set(
                    filter_has_any_correspondents,
                )
            if filter_has_not_correspondents is not None:
                trigger_instance.filter_has_not_correspondents.set(
                    filter_has_not_correspondents,
                )
            if filter_has_any_document_types is not None:
                trigger_instance.filter_has_any_document_types.set(
                    filter_has_any_document_types,
                )
            if filter_has_not_document_types is not None:
                trigger_instance.filter_has_not_document_types.set(
                    filter_has_not_document_types,
                )
            if filter_has_any_storage_paths is not None:
                trigger_instance.filter_has_any_storage_paths.set(
                    filter_has_any_storage_paths,
                )
            if filter_has_not_storage_paths is not None:
                trigger_instance.filter_has_not_storage_paths.set(
                    filter_has_not_storage_paths,

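
The serializer code above shows the standard Django pattern for writable many-to-many fields: pop the list out of the validated data before the instance exists, then call .set() afterwards, because the through-table rows need a saved primary key. A condensed sketch (function name illustrative):

# Illustrative: M2M values are popped before create() and set() after.
def create_trigger(validated: dict):
    any_correspondents = validated.pop("filter_has_any_correspondents", None)
    instance = WorkflowTrigger.objects.create(**validated)
    if any_correspondents is not None:
        instance.filter_has_any_correspondents.set(any_correspondents)
    return instance
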
@@ -186,11 +186,8 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
|
||||
"filter_has_tags": [self.t1.id],
|
||||
"filter_has_all_tags": [self.t2.id],
|
||||
"filter_has_not_tags": [self.t3.id],
|
||||
"filter_has_any_correspondents": [self.c.id],
|
||||
"filter_has_not_correspondents": [self.c2.id],
|
||||
"filter_has_any_document_types": [self.dt.id],
|
||||
"filter_has_not_document_types": [self.dt2.id],
|
||||
"filter_has_any_storage_paths": [self.sp.id],
|
||||
"filter_has_not_storage_paths": [self.sp2.id],
|
||||
"filter_custom_field_query": json.dumps(
|
||||
[
|
||||
@@ -251,26 +248,14 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
|
||||
set(trigger.filter_has_not_tags.values_list("id", flat=True)),
|
||||
{self.t3.id},
|
||||
)
|
||||
self.assertSetEqual(
|
||||
set(trigger.filter_has_any_correspondents.values_list("id", flat=True)),
|
||||
{self.c.id},
|
||||
)
|
||||
self.assertSetEqual(
|
||||
set(trigger.filter_has_not_correspondents.values_list("id", flat=True)),
|
||||
{self.c2.id},
|
||||
)
|
||||
self.assertSetEqual(
|
||||
set(trigger.filter_has_any_document_types.values_list("id", flat=True)),
|
||||
{self.dt.id},
|
||||
)
|
||||
self.assertSetEqual(
|
||||
set(trigger.filter_has_not_document_types.values_list("id", flat=True)),
|
||||
{self.dt2.id},
|
||||
)
|
||||
self.assertSetEqual(
|
||||
set(trigger.filter_has_any_storage_paths.values_list("id", flat=True)),
|
||||
{self.sp.id},
|
||||
)
|
||||
self.assertSetEqual(
|
||||
set(trigger.filter_has_not_storage_paths.values_list("id", flat=True)),
|
||||
{self.sp2.id},
|
||||
@@ -434,11 +419,8 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
                "filter_has_tags": [self.t1.id],
                "filter_has_all_tags": [self.t2.id],
                "filter_has_not_tags": [self.t3.id],
                "filter_has_any_correspondents": [self.c.id],
                "filter_has_not_correspondents": [self.c2.id],
                "filter_has_any_document_types": [self.dt.id],
                "filter_has_not_document_types": [self.dt2.id],
                "filter_has_any_storage_paths": [self.sp.id],
                "filter_has_not_storage_paths": [self.sp2.id],
                "filter_custom_field_query": json.dumps(
                    ["AND", [[self.cf1.id, "exact", "value"]]],
@@ -468,26 +450,14 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
            workflow.triggers.first().filter_has_not_tags.first(),
            self.t3,
        )
        self.assertEqual(
            workflow.triggers.first().filter_has_any_correspondents.first(),
            self.c,
        )
        self.assertEqual(
            workflow.triggers.first().filter_has_not_correspondents.first(),
            self.c2,
        )
        self.assertEqual(
            workflow.triggers.first().filter_has_any_document_types.first(),
            self.dt,
        )
        self.assertEqual(
            workflow.triggers.first().filter_has_not_document_types.first(),
            self.dt2,
        )
        self.assertEqual(
            workflow.triggers.first().filter_has_any_storage_paths.first(),
            self.sp,
        )
        self.assertEqual(
            workflow.triggers.first().filter_has_not_storage_paths.first(),
            self.sp2,
File diff suppressed because it is too large
@@ -1276,76 +1276,6 @@ class TestWorkflows(
        )
        self.assertIn(expected_str, cm.output[1])

    def test_document_added_any_filters(self):
        trigger = WorkflowTrigger.objects.create(
            type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
        )
        trigger.filter_has_any_correspondents.set([self.c])
        trigger.filter_has_any_document_types.set([self.dt])
        trigger.filter_has_any_storage_paths.set([self.sp])

        matching_doc = Document.objects.create(
            title="sample test",
            correspondent=self.c,
            document_type=self.dt,
            storage_path=self.sp,
            original_filename="sample.pdf",
            checksum="checksum-any-match",
        )

        matched, reason = existing_document_matches_workflow(matching_doc, trigger)
        self.assertTrue(matched)
        self.assertIsNone(reason)

        wrong_correspondent = Document.objects.create(
            title="wrong correspondent",
            correspondent=self.c2,
            document_type=self.dt,
            storage_path=self.sp,
            original_filename="sample2.pdf",
        )
        matched, reason = existing_document_matches_workflow(
            wrong_correspondent,
            trigger,
        )
        self.assertFalse(matched)
        self.assertIn("correspondent", reason)

        other_document_type = DocumentType.objects.create(name="Other")
        wrong_document_type = Document.objects.create(
            title="wrong doc type",
            correspondent=self.c,
            document_type=other_document_type,
            storage_path=self.sp,
            original_filename="sample3.pdf",
            checksum="checksum-wrong-doc-type",
        )
        matched, reason = existing_document_matches_workflow(
            wrong_document_type,
            trigger,
        )
        self.assertFalse(matched)
        self.assertIn("doc type", reason)

        other_storage_path = StoragePath.objects.create(
            name="Other path",
            path="/other/",
        )
        wrong_storage_path = Document.objects.create(
            title="wrong storage",
            correspondent=self.c,
            document_type=self.dt,
            storage_path=other_storage_path,
            original_filename="sample4.pdf",
            checksum="checksum-wrong-storage-path",
        )
        matched, reason = existing_document_matches_workflow(
            wrong_storage_path,
            trigger,
        )
        self.assertFalse(matched)
        self.assertIn("storage path", reason)

    def test_document_added_custom_field_query_no_match(self):
        trigger = WorkflowTrigger.objects.create(
            type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
@@ -1454,39 +1384,6 @@
        self.assertIn(doc1, filtered)
        self.assertNotIn(doc2, filtered)

    def test_prefilter_documents_any_filters(self):
        trigger = WorkflowTrigger.objects.create(
            type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
        )
        trigger.filter_has_any_correspondents.set([self.c])
        trigger.filter_has_any_document_types.set([self.dt])
        trigger.filter_has_any_storage_paths.set([self.sp])

        allowed_document = Document.objects.create(
            title="allowed",
            correspondent=self.c,
            document_type=self.dt,
            storage_path=self.sp,
            original_filename="doc-allowed.pdf",
            checksum="checksum-any-allowed",
        )
        blocked_document = Document.objects.create(
            title="blocked",
            correspondent=self.c2,
            document_type=self.dt,
            storage_path=self.sp,
            original_filename="doc-blocked.pdf",
            checksum="checksum-any-blocked",
        )

        filtered = prefilter_documents_by_workflowtrigger(
            Document.objects.all(),
            trigger,
        )

        self.assertIn(allowed_document, filtered)
        self.assertNotIn(blocked_document, filtered)

    def test_consumption_trigger_requires_filter_configuration(self):
        serializer = WorkflowTriggerSerializer(
            data={
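The removed tests exercised both halves of one contract: existing_document_matches_workflow returns a (matched, reason) tuple for a single document, while prefilter_documents_by_workflowtrigger narrows a whole queryset before those per-document checks run. A rough, hypothetical sketch of the shape of that contract; the real helpers in paperless-ngx differ in naming and lookup details:

    # Hypothetical single-document check returning (matched, reason),
    # shaped after the assertions in the removed tests.
    def matches_any_filters(doc, any_correspondents, any_doc_types, any_storage_paths):
        if any_correspondents and doc.correspondent not in any_correspondents:
            return False, "Document correspondent is not in the allowed list"
        if any_doc_types and doc.document_type not in any_doc_types:
            return False, "Document doc type is not in the allowed list"
        if any_storage_paths and doc.storage_path not in any_storage_paths:
            return False, "Document storage path is not in the allowed list"
        return True, None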
@@ -1033,7 +1033,7 @@ CONSUMER_IGNORE_PATTERNS = list(
    json.loads(
        os.getenv(
            "PAPERLESS_CONSUMER_IGNORE_PATTERNS",
            json.dumps(IGNORABLE_FILES),
            json.dumps([]),
        ),
    ),
)
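Only the fallback changes in this hunk: PAPERLESS_CONSUMER_IGNORE_PATTERNS is still read from the environment as a JSON array, but the default moves from the built-in IGNORABLE_FILES list to an empty list. Overriding it works the same way on either side of the change; the pattern values below are examples only:

    # Same parsing shape as the setting above. In a shell:
    #   export PAPERLESS_CONSUMER_IGNORE_PATTERNS='[".DS_Store/*", "*.tmp"]'
    import json
    import os

    patterns = list(
        json.loads(
            os.getenv("PAPERLESS_CONSUMER_IGNORE_PATTERNS", json.dumps([])),
        ),
    )
    print(patterns)  # [] unless the variable is set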
uv.lock (generated): 115 lines changed
@@ -1458,26 +1458,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
]

[[package]]
name = "inotify-simple"
version = "2.0.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e3/5c/bfe40e15d684bc30b0073aa97c39be410a5fbef3d33cad6f0bf2012571e0/inotify_simple-2.0.1.tar.gz", hash = "sha256:f010bbbd8283bd71a9f4eb2de94765804ede24bd47320b0e6ef4136e541cdc2c", size = 7101, upload-time = "2025-08-25T06:28:20.998Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e3/86/8be1ac7e90f80b413e81f1e235148e8db771218886a2353392f02da01be3/inotify_simple-2.0.1-py3-none-any.whl", hash = "sha256:e5da495f2064889f8e68b67f9358b0d102e03b783c2d42e5b8e132ab859a5d8a", size = 7449, upload-time = "2025-08-25T06:28:19.919Z" },
]

[[package]]
name = "inotifyrecursive"
version = "0.3.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "inotify-simple", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/19/3a/9ed038cb750a3ba8090869cf3ad50f5628077a936d911aee14ca83e40f6a/inotifyrecursive-0.3.5.tar.gz", hash = "sha256:a2c450b317693e4538416f90eb1d7858506dafe6b8b885037bd2dd9ae2dafa1e", size = 4576, upload-time = "2020-11-20T12:38:48.035Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c7/fc/4e5a141c3f7c7bed550ac1f69e599e92b6be449dd4677ec09f325cad0955/inotifyrecursive-0.3.5-py3-none-any.whl", hash = "sha256:7e5f4a2e1dc2bef0efa3b5f6b339c41fb4599055a2b54909d020e9e932cc8d2f", size = 8009, upload-time = "2020-11-20T12:38:46.981Z" },
]

[[package]]
name = "isodate"
@@ -2186,7 +2166,6 @@ dependencies = [
{ name = "gotenberg-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "httpx-oauth", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "imap-tools", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "inotifyrecursive", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "jinja2", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "langdetect", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "nltk", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -2206,7 +2185,7 @@ dependencies = [
{ name = "setproctitle", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "tika-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "watchdog", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "watchfiles", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "whitenoise", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "whoosh-reloaded", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "zxing-cpp", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version != '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version != '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or sys_platform == 'darwin'" },
@@ -2325,7 +2304,6 @@ requires-dist = [
{ name = "granian", extras = ["uvloop"], marker = "extra == 'webserver'", specifier = "~=2.5.1" },
{ name = "httpx-oauth", specifier = "~=0.16" },
{ name = "imap-tools", specifier = "~=1.11.0" },
{ name = "inotifyrecursive", specifier = "~=0.3" },
{ name = "jinja2", specifier = "~=3.1.5" },
{ name = "langdetect", specifier = "~=1.0.9" },
{ name = "mysqlclient", marker = "extra == 'mariadb'", specifier = "~=2.2.7" },
@@ -2351,7 +2329,7 @@ requires-dist = [
{ name = "setproctitle", specifier = "~=1.3.4" },
{ name = "tika-client", specifier = "~=0.10.0" },
{ name = "tqdm", specifier = "~=4.67.1" },
{ name = "watchdog", specifier = "~=6.0" },
{ name = "watchfiles", specifier = ">=1.1.1" },
{ name = "whitenoise", specifier = "~=6.9" },
{ name = "whoosh-reloaded", specifier = ">=2.7.5" },
{ name = "zxing-cpp", marker = "(python_full_version != '3.12.*' and platform_machine == 'aarch64') or (python_full_version != '3.12.*' and platform_machine == 'x86_64') or (platform_machine != 'aarch64' and platform_machine != 'x86_64') or sys_platform != 'linux'", specifier = "~=2.3.0" },
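The dependency swap above trades watchdog (plus the inotify helper packages) for watchfiles, whose core API is a blocking generator that yields batches of filesystem changes. A minimal sketch of consuming that API; the watched directory is only an example:

    # watchfiles.watch() yields a set of (Change, path) tuples per batch
    # of events; it watches recursively by default.
    from watchfiles import Change, watch

    for changes in watch("/usr/src/paperless/consume"):  # example path
        for change, path in changes:
            if change in (Change.added, Change.modified):
                print(f"would queue {path} for consumption")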
@@ -4327,6 +4305,95 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078, upload-time = "2024-11-01T14:07:07.547Z" },
]

[[package]]
name = "watchfiles"
version = "1.1.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440, upload-time = "2025-10-14T15:06:21.08Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a7/1a/206e8cf2dd86fddf939165a57b4df61607a1e0add2785f170a3f616b7d9f/watchfiles-1.1.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:eef58232d32daf2ac67f42dea51a2c80f0d03379075d44a587051e63cc2e368c", size = 407318, upload-time = "2025-10-14T15:04:18.753Z" },
{ url = "https://files.pythonhosted.org/packages/b3/0f/abaf5262b9c496b5dad4ed3c0e799cbecb1f8ea512ecb6ddd46646a9fca3/watchfiles-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:03fa0f5237118a0c5e496185cafa92878568b652a2e9a9382a5151b1a0380a43", size = 394478, upload-time = "2025-10-14T15:04:20.297Z" },
{ url = "https://files.pythonhosted.org/packages/b1/04/9cc0ba88697b34b755371f5ace8d3a4d9a15719c07bdc7bd13d7d8c6a341/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca65483439f9c791897f7db49202301deb6e15fe9f8fe2fed555bf986d10c31", size = 449894, upload-time = "2025-10-14T15:04:21.527Z" },
{ url = "https://files.pythonhosted.org/packages/d2/9c/eda4615863cd8621e89aed4df680d8c3ec3da6a4cf1da113c17decd87c7f/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f0ab1c1af0cb38e3f598244c17919fb1a84d1629cc08355b0074b6d7f53138ac", size = 459065, upload-time = "2025-10-14T15:04:22.795Z" },
{ url = "https://files.pythonhosted.org/packages/84/13/f28b3f340157d03cbc8197629bc109d1098764abe1e60874622a0be5c112/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bc570d6c01c206c46deb6e935a260be44f186a2f05179f52f7fcd2be086a94d", size = 488377, upload-time = "2025-10-14T15:04:24.138Z" },
{ url = "https://files.pythonhosted.org/packages/86/93/cfa597fa9389e122488f7ffdbd6db505b3b915ca7435ecd7542e855898c2/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e84087b432b6ac94778de547e08611266f1f8ffad28c0ee4c82e028b0fc5966d", size = 595837, upload-time = "2025-10-14T15:04:25.057Z" },
{ url = "https://files.pythonhosted.org/packages/57/1e/68c1ed5652b48d89fc24d6af905d88ee4f82fa8bc491e2666004e307ded1/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:620bae625f4cb18427b1bb1a2d9426dc0dd5a5ba74c7c2cdb9de405f7b129863", size = 473456, upload-time = "2025-10-14T15:04:26.497Z" },
{ url = "https://files.pythonhosted.org/packages/d5/dc/1a680b7458ffa3b14bb64878112aefc8f2e4f73c5af763cbf0bd43100658/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:544364b2b51a9b0c7000a4b4b02f90e9423d97fbbf7e06689236443ebcad81ab", size = 455614, upload-time = "2025-10-14T15:04:27.539Z" },
{ url = "https://files.pythonhosted.org/packages/61/a5/3d782a666512e01eaa6541a72ebac1d3aae191ff4a31274a66b8dd85760c/watchfiles-1.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bbe1ef33d45bc71cf21364df962af171f96ecaeca06bd9e3d0b583efb12aec82", size = 630690, upload-time = "2025-10-14T15:04:28.495Z" },
{ url = "https://files.pythonhosted.org/packages/9b/73/bb5f38590e34687b2a9c47a244aa4dd50c56a825969c92c9c5fc7387cea1/watchfiles-1.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1a0bb430adb19ef49389e1ad368450193a90038b5b752f4ac089ec6942c4dff4", size = 622459, upload-time = "2025-10-14T15:04:29.491Z" },
{ url = "https://files.pythonhosted.org/packages/1f/f8/2c5f479fb531ce2f0564eda479faecf253d886b1ab3630a39b7bf7362d46/watchfiles-1.1.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f57b396167a2565a4e8b5e56a5a1c537571733992b226f4f1197d79e94cf0ae5", size = 406529, upload-time = "2025-10-14T15:04:32.899Z" },
{ url = "https://files.pythonhosted.org/packages/fe/cd/f515660b1f32f65df671ddf6f85bfaca621aee177712874dc30a97397977/watchfiles-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:421e29339983e1bebc281fab40d812742268ad057db4aee8c4d2bce0af43b741", size = 394384, upload-time = "2025-10-14T15:04:33.761Z" },
{ url = "https://files.pythonhosted.org/packages/7b/c3/28b7dc99733eab43fca2d10f55c86e03bd6ab11ca31b802abac26b23d161/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e43d39a741e972bab5d8100b5cdacf69db64e34eb19b6e9af162bccf63c5cc6", size = 448789, upload-time = "2025-10-14T15:04:34.679Z" },
{ url = "https://files.pythonhosted.org/packages/4a/24/33e71113b320030011c8e4316ccca04194bf0cbbaeee207f00cbc7d6b9f5/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f537afb3276d12814082a2e9b242bdcf416c2e8fd9f799a737990a1dbe906e5b", size = 460521, upload-time = "2025-10-14T15:04:35.963Z" },
{ url = "https://files.pythonhosted.org/packages/f4/c3/3c9a55f255aa57b91579ae9e98c88704955fa9dac3e5614fb378291155df/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2cd9e04277e756a2e2d2543d65d1e2166d6fd4c9b183f8808634fda23f17b14", size = 488722, upload-time = "2025-10-14T15:04:37.091Z" },
{ url = "https://files.pythonhosted.org/packages/49/36/506447b73eb46c120169dc1717fe2eff07c234bb3232a7200b5f5bd816e9/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3f58818dc0b07f7d9aa7fe9eb1037aecb9700e63e1f6acfed13e9fef648f5d", size = 596088, upload-time = "2025-10-14T15:04:38.39Z" },
{ url = "https://files.pythonhosted.org/packages/82/ab/5f39e752a9838ec4d52e9b87c1e80f1ee3ccdbe92e183c15b6577ab9de16/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb9f66367023ae783551042d31b1d7fd422e8289eedd91f26754a66f44d5cff", size = 472923, upload-time = "2025-10-14T15:04:39.666Z" },
{ url = "https://files.pythonhosted.org/packages/af/b9/a419292f05e302dea372fa7e6fda5178a92998411f8581b9830d28fb9edb/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebfd0861a83e6c3d1110b78ad54704486555246e542be3e2bb94195eabb2606", size = 456080, upload-time = "2025-10-14T15:04:40.643Z" },
{ url = "https://files.pythonhosted.org/packages/b0/c3/d5932fd62bde1a30c36e10c409dc5d54506726f08cb3e1d8d0ba5e2bc8db/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fac835b4ab3c6487b5dbad78c4b3724e26bcc468e886f8ba8cc4306f68f6701", size = 629432, upload-time = "2025-10-14T15:04:41.789Z" },
{ url = "https://files.pythonhosted.org/packages/f7/77/16bddd9779fafb795f1a94319dc965209c5641db5bf1edbbccace6d1b3c0/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:399600947b170270e80134ac854e21b3ccdefa11a9529a3decc1327088180f10", size = 623046, upload-time = "2025-10-14T15:04:42.718Z" },
{ url = "https://files.pythonhosted.org/packages/74/d5/f039e7e3c639d9b1d09b07ea412a6806d38123f0508e5f9b48a87b0a76cc/watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d", size = 404745, upload-time = "2025-10-14T15:04:46.731Z" },
{ url = "https://files.pythonhosted.org/packages/a5/96/a881a13aa1349827490dab2d363c8039527060cfcc2c92cc6d13d1b1049e/watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610", size = 391769, upload-time = "2025-10-14T15:04:48.003Z" },
{ url = "https://files.pythonhosted.org/packages/4b/5b/d3b460364aeb8da471c1989238ea0e56bec24b6042a68046adf3d9ddb01c/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af", size = 449374, upload-time = "2025-10-14T15:04:49.179Z" },
{ url = "https://files.pythonhosted.org/packages/b9/44/5769cb62d4ed055cb17417c0a109a92f007114a4e07f30812a73a4efdb11/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6", size = 459485, upload-time = "2025-10-14T15:04:50.155Z" },
{ url = "https://files.pythonhosted.org/packages/19/0c/286b6301ded2eccd4ffd0041a1b726afda999926cf720aab63adb68a1e36/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce", size = 488813, upload-time = "2025-10-14T15:04:51.059Z" },
{ url = "https://files.pythonhosted.org/packages/c7/2b/8530ed41112dd4a22f4dcfdb5ccf6a1baad1ff6eed8dc5a5f09e7e8c41c7/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa", size = 594816, upload-time = "2025-10-14T15:04:52.031Z" },
{ url = "https://files.pythonhosted.org/packages/ce/d2/f5f9fb49489f184f18470d4f99f4e862a4b3e9ac2865688eb2099e3d837a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb", size = 475186, upload-time = "2025-10-14T15:04:53.064Z" },
{ url = "https://files.pythonhosted.org/packages/cf/68/5707da262a119fb06fbe214d82dd1fe4a6f4af32d2d14de368d0349eb52a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803", size = 456812, upload-time = "2025-10-14T15:04:55.174Z" },
{ url = "https://files.pythonhosted.org/packages/66/ab/3cbb8756323e8f9b6f9acb9ef4ec26d42b2109bce830cc1f3468df20511d/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94", size = 630196, upload-time = "2025-10-14T15:04:56.22Z" },
{ url = "https://files.pythonhosted.org/packages/78/46/7152ec29b8335f80167928944a94955015a345440f524d2dfe63fc2f437b/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43", size = 622657, upload-time = "2025-10-14T15:04:57.521Z" },
{ url = "https://files.pythonhosted.org/packages/bb/f4/f750b29225fe77139f7ae5de89d4949f5a99f934c65a1f1c0b248f26f747/watchfiles-1.1.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:130e4876309e8686a5e37dba7d5e9bc77e6ed908266996ca26572437a5271e18", size = 404321, upload-time = "2025-10-14T15:05:02.063Z" },
{ url = "https://files.pythonhosted.org/packages/2b/f9/f07a295cde762644aa4c4bb0f88921d2d141af45e735b965fb2e87858328/watchfiles-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f3bde70f157f84ece3765b42b4a52c6ac1a50334903c6eaf765362f6ccca88a", size = 391783, upload-time = "2025-10-14T15:05:03.052Z" },
{ url = "https://files.pythonhosted.org/packages/bc/11/fc2502457e0bea39a5c958d86d2cb69e407a4d00b85735ca724bfa6e0d1a/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e0b1fe858430fc0251737ef3824c54027bedb8c37c38114488b8e131cf8219", size = 449279, upload-time = "2025-10-14T15:05:04.004Z" },
{ url = "https://files.pythonhosted.org/packages/e3/1f/d66bc15ea0b728df3ed96a539c777acfcad0eb78555ad9efcaa1274688f0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f27db948078f3823a6bb3b465180db8ebecf26dd5dae6f6180bd87383b6b4428", size = 459405, upload-time = "2025-10-14T15:05:04.942Z" },
{ url = "https://files.pythonhosted.org/packages/be/90/9f4a65c0aec3ccf032703e6db02d89a157462fbb2cf20dd415128251cac0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059098c3a429f62fc98e8ec62b982230ef2c8df68c79e826e37b895bc359a9c0", size = 488976, upload-time = "2025-10-14T15:05:05.905Z" },
{ url = "https://files.pythonhosted.org/packages/37/57/ee347af605d867f712be7029bb94c8c071732a4b44792e3176fa3c612d39/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfb5862016acc9b869bb57284e6cb35fdf8e22fe59f7548858e2f971d045f150", size = 595506, upload-time = "2025-10-14T15:05:06.906Z" },
{ url = "https://files.pythonhosted.org/packages/a8/78/cc5ab0b86c122047f75e8fc471c67a04dee395daf847d3e59381996c8707/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:319b27255aacd9923b8a276bb14d21a5f7ff82564c744235fc5eae58d95422ae", size = 474936, upload-time = "2025-10-14T15:05:07.906Z" },
{ url = "https://files.pythonhosted.org/packages/62/da/def65b170a3815af7bd40a3e7010bf6ab53089ef1b75d05dd5385b87cf08/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c755367e51db90e75b19454b680903631d41f9e3607fbd941d296a020c2d752d", size = 456147, upload-time = "2025-10-14T15:05:09.138Z" },
{ url = "https://files.pythonhosted.org/packages/57/99/da6573ba71166e82d288d4df0839128004c67d2778d3b566c138695f5c0b/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c22c776292a23bfc7237a98f791b9ad3144b02116ff10d820829ce62dff46d0b", size = 630007, upload-time = "2025-10-14T15:05:10.117Z" },
{ url = "https://files.pythonhosted.org/packages/a8/51/7439c4dd39511368849eb1e53279cd3454b4a4dbace80bab88feeb83c6b5/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3a476189be23c3686bc2f4321dd501cb329c0a0469e77b7b534ee10129ae6374", size = 622280, upload-time = "2025-10-14T15:05:11.146Z" },
{ url = "https://files.pythonhosted.org/packages/79/42/e0a7d749626f1e28c7108a99fb9bf524b501bbbeb9b261ceecde644d5a07/watchfiles-1.1.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:563b116874a9a7ce6f96f87cd0b94f7faf92d08d0021e837796f0a14318ef8da", size = 403389, upload-time = "2025-10-14T15:05:15.777Z" },
{ url = "https://files.pythonhosted.org/packages/15/49/08732f90ce0fbbc13913f9f215c689cfc9ced345fb1bcd8829a50007cc8d/watchfiles-1.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3ad9fe1dae4ab4212d8c91e80b832425e24f421703b5a42ef2e4a1e215aff051", size = 389964, upload-time = "2025-10-14T15:05:16.85Z" },
{ url = "https://files.pythonhosted.org/packages/27/0d/7c315d4bd5f2538910491a0393c56bf70d333d51bc5b34bee8e68e8cea19/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce70f96a46b894b36eba678f153f052967a0d06d5b5a19b336ab0dbbd029f73e", size = 448114, upload-time = "2025-10-14T15:05:17.876Z" },
{ url = "https://files.pythonhosted.org/packages/c3/24/9e096de47a4d11bc4df41e9d1e61776393eac4cb6eb11b3e23315b78b2cc/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cb467c999c2eff23a6417e58d75e5828716f42ed8289fe6b77a7e5a91036ca70", size = 460264, upload-time = "2025-10-14T15:05:18.962Z" },
{ url = "https://files.pythonhosted.org/packages/cc/0f/e8dea6375f1d3ba5fcb0b3583e2b493e77379834c74fd5a22d66d85d6540/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:836398932192dae4146c8f6f737d74baeac8b70ce14831a239bdb1ca882fc261", size = 487877, upload-time = "2025-10-14T15:05:20.094Z" },
{ url = "https://files.pythonhosted.org/packages/ac/5b/df24cfc6424a12deb41503b64d42fbea6b8cb357ec62ca84a5a3476f654a/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:743185e7372b7bc7c389e1badcc606931a827112fbbd37f14c537320fca08620", size = 595176, upload-time = "2025-10-14T15:05:21.134Z" },
{ url = "https://files.pythonhosted.org/packages/8f/b5/853b6757f7347de4e9b37e8cc3289283fb983cba1ab4d2d7144694871d9c/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afaeff7696e0ad9f02cbb8f56365ff4686ab205fcf9c4c5b6fdfaaa16549dd04", size = 473577, upload-time = "2025-10-14T15:05:22.306Z" },
{ url = "https://files.pythonhosted.org/packages/e1/f7/0a4467be0a56e80447c8529c9fce5b38eab4f513cb3d9bf82e7392a5696b/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7eb7da0eb23aa2ba036d4f616d46906013a68caf61b7fdbe42fc8b25132e77", size = 455425, upload-time = "2025-10-14T15:05:23.348Z" },
{ url = "https://files.pythonhosted.org/packages/8e/e0/82583485ea00137ddf69bc84a2db88bd92ab4a6e3c405e5fb878ead8d0e7/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:831a62658609f0e5c64178211c942ace999517f5770fe9436be4c2faeba0c0ef", size = 628826, upload-time = "2025-10-14T15:05:24.398Z" },
{ url = "https://files.pythonhosted.org/packages/28/9a/a785356fccf9fae84c0cc90570f11702ae9571036fb25932f1242c82191c/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:f9a2ae5c91cecc9edd47e041a930490c31c3afb1f5e6d71de3dc671bfaca02bf", size = 622208, upload-time = "2025-10-14T15:05:25.45Z" },
{ url = "https://files.pythonhosted.org/packages/c3/f4/0872229324ef69b2c3edec35e84bd57a1289e7d3fe74588048ed8947a323/watchfiles-1.1.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:d1715143123baeeaeadec0528bb7441103979a1d5f6fd0e1f915383fea7ea6d5", size = 404315, upload-time = "2025-10-14T15:05:26.501Z" },
{ url = "https://files.pythonhosted.org/packages/7b/22/16d5331eaed1cb107b873f6ae1b69e9ced582fcf0c59a50cd84f403b1c32/watchfiles-1.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:39574d6370c4579d7f5d0ad940ce5b20db0e4117444e39b6d8f99db5676c52fd", size = 390869, upload-time = "2025-10-14T15:05:27.649Z" },
{ url = "https://files.pythonhosted.org/packages/b2/7e/5643bfff5acb6539b18483128fdc0ef2cccc94a5b8fbda130c823e8ed636/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7365b92c2e69ee952902e8f70f3ba6360d0d596d9299d55d7d386df84b6941fb", size = 449919, upload-time = "2025-10-14T15:05:28.701Z" },
{ url = "https://files.pythonhosted.org/packages/51/2e/c410993ba5025a9f9357c376f48976ef0e1b1aefb73b97a5ae01a5972755/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfff9740c69c0e4ed32416f013f3c45e2ae42ccedd1167ef2d805c000b6c71a5", size = 460845, upload-time = "2025-10-14T15:05:30.064Z" },
{ url = "https://files.pythonhosted.org/packages/8e/a4/2df3b404469122e8680f0fcd06079317e48db58a2da2950fb45020947734/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b27cf2eb1dda37b2089e3907d8ea92922b673c0c427886d4edc6b94d8dfe5db3", size = 489027, upload-time = "2025-10-14T15:05:31.064Z" },
{ url = "https://files.pythonhosted.org/packages/ea/84/4587ba5b1f267167ee715b7f66e6382cca6938e0a4b870adad93e44747e6/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526e86aced14a65a5b0ec50827c745597c782ff46b571dbfe46192ab9e0b3c33", size = 595615, upload-time = "2025-10-14T15:05:32.074Z" },
{ url = "https://files.pythonhosted.org/packages/6a/0f/c6988c91d06e93cd0bb3d4a808bcf32375ca1904609835c3031799e3ecae/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04e78dd0b6352db95507fd8cb46f39d185cf8c74e4cf1e4fbad1d3df96faf510", size = 474836, upload-time = "2025-10-14T15:05:33.209Z" },
{ url = "https://files.pythonhosted.org/packages/b4/36/ded8aebea91919485b7bbabbd14f5f359326cb5ec218cd67074d1e426d74/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c85794a4cfa094714fb9c08d4a218375b2b95b8ed1666e8677c349906246c05", size = 455099, upload-time = "2025-10-14T15:05:34.189Z" },
{ url = "https://files.pythonhosted.org/packages/98/e0/8c9bdba88af756a2fce230dd365fab2baf927ba42cd47521ee7498fd5211/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:74d5012b7630714b66be7b7b7a78855ef7ad58e8650c73afc4c076a1f480a8d6", size = 630626, upload-time = "2025-10-14T15:05:35.216Z" },
{ url = "https://files.pythonhosted.org/packages/2a/84/a95db05354bf2d19e438520d92a8ca475e578c647f78f53197f5a2f17aaf/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:8fbe85cb3201c7d380d3d0b90e63d520f15d6afe217165d7f98c9c649654db81", size = 622519, upload-time = "2025-10-14T15:05:36.259Z" },
{ url = "https://files.pythonhosted.org/packages/47/a8/e3af2184707c29f0f14b1963c0aace6529f9d1b8582d5b99f31bbf42f59e/watchfiles-1.1.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:88863fbbc1a7312972f1c511f202eb30866370ebb8493aef2812b9ff28156a21", size = 403820, upload-time = "2025-10-14T15:05:40.932Z" },
{ url = "https://files.pythonhosted.org/packages/c0/ec/e47e307c2f4bd75f9f9e8afbe3876679b18e1bcec449beca132a1c5ffb2d/watchfiles-1.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:55c7475190662e202c08c6c0f4d9e345a29367438cf8e8037f3155e10a88d5a5", size = 390510, upload-time = "2025-10-14T15:05:41.945Z" },
{ url = "https://files.pythonhosted.org/packages/d5/a0/ad235642118090f66e7b2f18fd5c42082418404a79205cdfca50b6309c13/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f53fa183d53a1d7a8852277c92b967ae99c2d4dcee2bfacff8868e6e30b15f7", size = 448408, upload-time = "2025-10-14T15:05:43.385Z" },
{ url = "https://files.pythonhosted.org/packages/df/85/97fa10fd5ff3332ae17e7e40e20784e419e28521549780869f1413742e9d/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6aae418a8b323732fa89721d86f39ec8f092fc2af67f4217a2b07fd3e93c6101", size = 458968, upload-time = "2025-10-14T15:05:44.404Z" },
{ url = "https://files.pythonhosted.org/packages/47/c2/9059c2e8966ea5ce678166617a7f75ecba6164375f3b288e50a40dc6d489/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f096076119da54a6080e8920cbdaac3dbee667eb91dcc5e5b78840b87415bd44", size = 488096, upload-time = "2025-10-14T15:05:45.398Z" },
{ url = "https://files.pythonhosted.org/packages/94/44/d90a9ec8ac309bc26db808a13e7bfc0e4e78b6fc051078a554e132e80160/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00485f441d183717038ed2e887a7c868154f216877653121068107b227a2f64c", size = 596040, upload-time = "2025-10-14T15:05:46.502Z" },
{ url = "https://files.pythonhosted.org/packages/95/68/4e3479b20ca305cfc561db3ed207a8a1c745ee32bf24f2026a129d0ddb6e/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a55f3e9e493158d7bfdb60a1165035f1cf7d320914e7b7ea83fe22c6023b58fc", size = 473847, upload-time = "2025-10-14T15:05:47.484Z" },
{ url = "https://files.pythonhosted.org/packages/4f/55/2af26693fd15165c4ff7857e38330e1b61ab8c37d15dc79118cdba115b7a/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c", size = 455072, upload-time = "2025-10-14T15:05:48.928Z" },
{ url = "https://files.pythonhosted.org/packages/66/1d/d0d200b10c9311ec25d2273f8aad8c3ef7cc7ea11808022501811208a750/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099", size = 629104, upload-time = "2025-10-14T15:05:49.908Z" },
{ url = "https://files.pythonhosted.org/packages/e3/bd/fa9bb053192491b3867ba07d2343d9f2252e00811567d30ae8d0f78136fe/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01", size = 622112, upload-time = "2025-10-14T15:05:50.941Z" },
{ url = "https://files.pythonhosted.org/packages/ba/4c/a888c91e2e326872fa4705095d64acd8aa2fb9c1f7b9bd0588f33850516c/watchfiles-1.1.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:17ef139237dfced9da49fb7f2232c86ca9421f666d78c264c7ffca6601d154c3", size = 409611, upload-time = "2025-10-14T15:06:05.809Z" },
{ url = "https://files.pythonhosted.org/packages/1e/c7/5420d1943c8e3ce1a21c0a9330bcf7edafb6aa65d26b21dbb3267c9e8112/watchfiles-1.1.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:672b8adf25b1a0d35c96b5888b7b18699d27d4194bac8beeae75be4b7a3fc9b2", size = 396889, upload-time = "2025-10-14T15:06:07.035Z" },
{ url = "https://files.pythonhosted.org/packages/0c/e5/0072cef3804ce8d3aaddbfe7788aadff6b3d3f98a286fdbee9fd74ca59a7/watchfiles-1.1.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77a13aea58bc2b90173bc69f2a90de8e282648939a00a602e1dc4ee23e26b66d", size = 451616, upload-time = "2025-10-14T15:06:08.072Z" },
{ url = "https://files.pythonhosted.org/packages/83/4e/b87b71cbdfad81ad7e83358b3e447fedd281b880a03d64a760fe0a11fc2e/watchfiles-1.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b495de0bb386df6a12b18335a0285dda90260f51bdb505503c02bcd1ce27a8b", size = 458413, upload-time = "2025-10-14T15:06:09.209Z" },
{ url = "https://files.pythonhosted.org/packages/d3/8e/e500f8b0b77be4ff753ac94dc06b33d8f0d839377fee1b78e8c8d8f031bf/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db476ab59b6765134de1d4fe96a1a9c96ddf091683599be0f26147ea1b2e4b88", size = 408250, upload-time = "2025-10-14T15:06:10.264Z" },
{ url = "https://files.pythonhosted.org/packages/bd/95/615e72cd27b85b61eec764a5ca51bd94d40b5adea5ff47567d9ebc4d275a/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89eef07eee5e9d1fda06e38822ad167a044153457e6fd997f8a858ab7564a336", size = 396117, upload-time = "2025-10-14T15:06:11.28Z" },
{ url = "https://files.pythonhosted.org/packages/c9/81/e7fe958ce8a7fb5c73cc9fb07f5aeaf755e6aa72498c57d760af760c91f8/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce19e06cbda693e9e7686358af9cd6f5d61312ab8b00488bc36f5aabbaf77e24", size = 450493, upload-time = "2025-10-14T15:06:12.321Z" },
{ url = "https://files.pythonhosted.org/packages/6e/d4/ed38dd3b1767193de971e694aa544356e63353c33a85d948166b5ff58b9e/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e6f39af2eab0118338902798b5aa6664f46ff66bc0280de76fca67a7f262a49", size = 457546, upload-time = "2025-10-14T15:06:13.372Z" },
]

[[package]]
name = "wcwidth"
version = "0.2.14"