diff --git a/pyproject.toml b/pyproject.toml index fb47e55f1..b790fb26a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,7 +49,6 @@ dependencies = [ "gotenberg-client~=0.12.0", "httpx-oauth~=0.16", "imap-tools~=1.11.0", - "inotifyrecursive~=0.3", "jinja2~=3.1.5", "langdetect~=1.0.9", "nltk~=3.9.1", @@ -69,7 +68,7 @@ dependencies = [ "setproctitle~=1.3.4", "tika-client~=0.10.0", "tqdm~=4.67.1", - "watchdog~=6.0", + "watchfiles>=1.1.1", "whitenoise~=6.9", "whoosh-reloaded>=2.7.5", "zxing-cpp~=2.3.0", diff --git a/src/documents/management/commands/document_consumer.py b/src/documents/management/commands/document_consumer.py index 97027e02d..11905a9b8 100644 --- a/src/documents/management/commands/document_consumer.py +++ b/src/documents/management/commands/document_consumer.py @@ -1,135 +1,362 @@ +""" +Document consumer management command. + +Watches a consumption directory for new documents and queues them for processing. +Uses watchfiles for efficient file system monitoring with support for both +native OS notifications and polling fallback. +""" + +from __future__ import annotations + import logging -import os -from concurrent.futures import ThreadPoolExecutor -from fnmatch import filter +import re +from dataclasses import dataclass from pathlib import Path -from pathlib import PurePath from threading import Event from time import monotonic -from time import sleep +from typing import TYPE_CHECKING from typing import Final from django import db from django.conf import settings from django.core.management.base import BaseCommand from django.core.management.base import CommandError -from watchdog.events import FileSystemEventHandler -from watchdog.observers.polling import PollingObserver +from watchfiles import Change +from watchfiles import DefaultFilter +from watchfiles import watch from documents.data_models import ConsumableDocument from documents.data_models import DocumentMetadataOverrides from documents.data_models import DocumentSource from documents.models import Tag -from documents.parsers import is_file_ext_supported +from documents.parsers import get_supported_file_extensions from documents.tasks import consume_file -try: - from inotifyrecursive import INotify - from inotifyrecursive import flags -except ImportError: # pragma: no cover - INotify = flags = None +if TYPE_CHECKING: + from collections.abc import Iterator + logger = logging.getLogger("paperless.management.consumer") -def _tags_from_path(filepath: Path) -> list[int]: +@dataclass +class TrackedFile: + """Represents a file being tracked for stability.""" + + path: Path + last_event_time: float + last_mtime: float | None = None + last_size: int | None = None + + def update_stats(self) -> bool: + """ + Update file stats. Returns True if file exists and stats were updated. + """ + try: + stat = self.path.stat() + self.last_mtime = stat.st_mtime + self.last_size = stat.st_size + return True + except (FileNotFoundError, PermissionError, OSError): + return False + + def is_unchanged(self) -> bool: + """ + Check if file stats match the previously recorded values. + Returns False if file doesn't exist or stats changed. + """ + try: + stat = self.path.stat() + return stat.st_mtime == self.last_mtime and stat.st_size == self.last_size + except (FileNotFoundError, PermissionError, OSError): + return False + + +class FileStabilityTracker: """ - Walk up the directory tree from filepath to CONSUMPTION_DIR + Tracks file events and determines when files are stable for consumption. + + A file is considered stable when: + 1. 
No new events have been received for it within the stability delay + 2. Its size and modification time haven't changed + 3. It still exists as a regular file + + This handles various edge cases: + - Network copies that write in chunks + - Scanners that open/close files multiple times + - Temporary files that get renamed + - Files that are deleted before becoming stable + """ + + def __init__(self, stability_delay: float = 1.0) -> None: + """ + Initialize the tracker. + + Args: + stability_delay: Time in seconds a file must remain unchanged + before being considered stable. + """ + self.stability_delay = stability_delay + self._tracked: dict[Path, TrackedFile] = {} + + def track(self, path: Path, change: Change) -> None: + """ + Register a file event. + + Args: + path: The file path that changed. + change: The type of change (added, modified, deleted). + """ + path = path.resolve() + + match change: + case Change.deleted: + self._tracked.pop(path, None) + logger.debug(f"Stopped tracking deleted file: {path}") + case Change.added | Change.modified: + current_time = monotonic() + if path in self._tracked: + tracked = self._tracked[path] + tracked.last_event_time = current_time + tracked.update_stats() + logger.debug(f"Updated tracking for: {path}") + else: + tracked = TrackedFile(path=path, last_event_time=current_time) + if tracked.update_stats(): + self._tracked[path] = tracked + logger.debug(f"Started tracking: {path}") + else: + logger.debug(f"Could not stat file, not tracking: {path}") + + def get_stable_files(self) -> Iterator[Path]: + """ + Yield files that have been stable for the configured delay. + + Files are removed from tracking once yielded or determined to be invalid. + """ + current_time = monotonic() + to_remove: list[Path] = [] + to_yield: list[Path] = [] + + for path, tracked in list(self._tracked.items()): + time_since_event = current_time - tracked.last_event_time + + if time_since_event < self.stability_delay: + continue + + # File has waited long enough, verify it's unchanged + if not tracked.is_unchanged(): + # Stats changed or file gone - update and wait again + if tracked.update_stats(): + tracked.last_event_time = current_time + logger.debug(f"File changed during stability check: {path}") + else: + # File no longer exists, remove from tracking + to_remove.append(path) + logger.debug(f"File disappeared during stability check: {path}") + continue + + # File is stable - verify it's a regular file + try: + if path.is_file(): + to_yield.append(path) + logger.info(f"File is stable: {path}") + else: + # Not a regular file (directory, symlink, etc.) + to_remove.append(path) + logger.debug(f"Path is not a regular file: {path}") + except (PermissionError, OSError) as e: + logger.warning(f"Cannot access {path}: {e}") + to_remove.append(path) + + # Remove files that are no longer valid + for path in to_remove: + self._tracked.pop(path, None) + + # Remove and yield stable files + for path in to_yield: + self._tracked.pop(path, None) + yield path + + def has_pending_files(self) -> bool: + """Check if there are files waiting for stability check.""" + return len(self._tracked) > 0 + + def clear(self) -> None: + """Clear all tracked files.""" + self._tracked.clear() + + @property + def pending_count(self) -> int: + """Number of files being tracked.""" + return len(self._tracked) + + +class ConsumerFilter(DefaultFilter): + """ + Custom filter for the document consumer. 
+ + Filters files based on: + - Supported file extensions + - User-configured ignore patterns (regex) + - Default ignore patterns for common system files + """ + + # Default regex patterns to ignore (matched against filename only) + DEFAULT_IGNORE_PATTERNS: Final[frozenset[str]] = frozenset( + { + r"^\.DS_Store$", + r"^\.DS_STORE$", + r"^\._.*", + r"^desktop\.ini$", + r"^Thumbs\.db$", + }, + ) + + # Directories to always ignore (matched by name via DefaultFilter) + DEFAULT_IGNORE_DIRS: Final[tuple[str, ...]] = ( + ".stfolder", + ".stversions", + ".localized", + "@eaDir", + ".Spotlight-V100", + ".Trashes", + "__MACOSX", + ) + + def __init__( + self, + *, + supported_extensions: frozenset[str] | None = None, + ignore_patterns: list[str] | None = None, + consumption_dir: Path | None = None, + ) -> None: + """ + Initialize the consumer filter. + + Args: + supported_extensions: Set of supported file extensions (e.g., {".pdf", ".png"}). + If None, uses get_supported_file_extensions(). + ignore_patterns: Additional regex patterns to ignore (matched against filename). + consumption_dir: Base consumption directory (unused, kept for API compatibility). + """ + # Combine default and user patterns + all_patterns = set(self.DEFAULT_IGNORE_PATTERNS) + if ignore_patterns: + all_patterns.update(ignore_patterns) + + # Compile all patterns + self._ignore_regexes: list[re.Pattern[str]] = [ + re.compile(pattern) for pattern in all_patterns + ] + + # Get supported extensions + if supported_extensions is None: + supported_extensions = frozenset(get_supported_file_extensions()) + self._supported_extensions = supported_extensions + + # Call parent with directory ignore list + # DefaultFilter.ignore_dirs matches directory names, not full paths + super().__init__( + ignore_dirs=self.DEFAULT_IGNORE_DIRS, + ignore_entity_patterns=None, + ignore_paths=None, + ) + + def __call__(self, change: Change, path: str) -> bool: + """ + Filter function for watchfiles. + + Returns True if the path should be watched, False to ignore. + """ + # Let parent filter handle directory ignoring and basic checks + if not super().__call__(change, path): + return False + + path_obj = Path(path) + + # For directories, parent filter already handled ignore_dirs + if path_obj.is_dir(): + return True + + # For files, check extension + if not self._has_supported_extension(path_obj): + return False + + # Check filename against ignore patterns + return not self._matches_ignore_pattern(path_obj.name) + + def _has_supported_extension(self, path: Path) -> bool: + """Check if the file has a supported extension.""" + suffix = path.suffix.lower() + return suffix in self._supported_extensions + + def _matches_ignore_pattern(self, filename: str) -> bool: + """Check if the filename matches any ignore pattern.""" + for regex in self._ignore_regexes: + if regex.search(filename): + logger.debug( + f"Filename {filename} matched ignore pattern {regex.pattern}", + ) + return True + return False + + +def _tags_from_path(filepath: Path, consumption_dir: Path) -> list[int]: + """ + Walk up the directory tree from filepath to consumption_dir and get or create Tag IDs for every directory. - Returns set of Tag models + Returns list of Tag primary keys. 
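+ Tag name matching is case-insensitive; an existing tag whose name differs only in case is reused rather than duplicated.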
""" db.close_old_connections() - tag_ids = set() - path_parts = filepath.relative_to(settings.CONSUMPTION_DIR).parent.parts + tag_ids: set[int] = set() + path_parts = filepath.relative_to(consumption_dir).parent.parts + for part in path_parts: - tag_ids.add( - Tag.objects.get_or_create(name__iexact=part, defaults={"name": part})[0].pk, + tag, _ = Tag.objects.get_or_create( + name__iexact=part, + defaults={"name": part}, ) + tag_ids.add(tag.pk) return list(tag_ids) -def _is_ignored(filepath: Path) -> bool: +def _consume_file( + filepath: Path, + consumption_dir: Path, + *, + subdirs_as_tags: bool, +) -> None: """ - Checks if the given file should be ignored, based on configured - patterns. + Queue a file for consumption. - Returns True if the file is ignored, False otherwise + Args: + filepath: Path to the file to consume. + consumption_dir: Base consumption directory. + subdirs_as_tags: Whether to create tags from subdirectory names. """ - # Trim out the consume directory, leaving only filename and it's - # path relative to the consume directory - filepath_relative = PurePath(filepath).relative_to(settings.CONSUMPTION_DIR) - - # March through the components of the path, including directories and the filename - # looking for anything matching - # foo/bar/baz/file.pdf -> (foo, bar, baz, file.pdf) - parts = [] - for part in filepath_relative.parts: - # If the part is not the name (ie, it's a dir) - # Need to append the trailing slash or fnmatch doesn't match - # fnmatch("dir", "dir/*") == False - # fnmatch("dir/", "dir/*") == True - if part != filepath_relative.name: - part = part + "/" - parts.append(part) - - for pattern in settings.CONSUMER_IGNORE_PATTERNS: - if len(filter(parts, pattern)): - return True - - return False - - -def _consume(filepath: Path) -> None: - # Check permissions early + # Verify file still exists and is accessible try: - filepath.stat() - except (PermissionError, OSError): - logger.warning(f"Not consuming file {filepath}: Permission denied.") + if not filepath.is_file(): + logger.debug(f"Not consuming {filepath}: not a file or doesn't exist") + return + except (PermissionError, OSError) as e: + logger.warning(f"Not consuming {filepath}: {e}") return - if filepath.is_dir() or _is_ignored(filepath): - return - - if not filepath.is_file(): - logger.debug(f"Not consuming file {filepath}: File has moved.") - return - - if not is_file_ext_supported(filepath.suffix): - logger.warning(f"Not consuming file {filepath}: Unknown file extension.") - return - - # Total wait time: up to 500ms - os_error_retry_count: Final[int] = 50 - os_error_retry_wait: Final[float] = 0.01 - - read_try_count = 0 - file_open_ok = False - os_error_str = None - - while (read_try_count < os_error_retry_count) and not file_open_ok: + # Get tags from path if configured + tag_ids: list[int] | None = None + if subdirs_as_tags: try: - with filepath.open("rb"): - file_open_ok = True - except OSError as e: - read_try_count += 1 - os_error_str = str(e) - sleep(os_error_retry_wait) + tag_ids = _tags_from_path(filepath, consumption_dir) + except Exception: + logger.exception(f"Error creating tags from path for {filepath}") - if read_try_count >= os_error_retry_count: - logger.warning(f"Not consuming file {filepath}: OS reports {os_error_str}") - return - - tag_ids = None + # Queue for consumption try: - if settings.CONSUMER_SUBDIRS_AS_TAGS: - tag_ids = _tags_from_path(filepath) - except Exception: - logger.exception("Error creating tags from path") - - try: - logger.info(f"Adding {filepath} to the task queue.") 
+ logger.info(f"Adding {filepath} to the task queue") consume_file.delay( ConsumableDocument( source=DocumentSource.ConsumeFolder, @@ -138,228 +365,206 @@ def _consume(filepath: Path) -> None: DocumentMetadataOverrides(tag_ids=tag_ids), ) except Exception: - # Catch all so that the consumer won't crash. - # This is also what the test case is listening for to check for - # errors. - logger.exception("Error while consuming document") - - -def _consume_wait_unmodified(file: Path) -> None: - """ - Waits for the given file to appear unmodified based on file size - and modification time. Will wait a configured number of seconds - and retry a configured number of times before either consuming or - giving up - """ - if _is_ignored(file): - return - - logger.debug(f"Waiting for file {file} to remain unmodified") - mtime = -1 - size = -1 - current_try = 0 - while current_try < settings.CONSUMER_POLLING_RETRY_COUNT: - try: - stat_data = file.stat() - new_mtime = stat_data.st_mtime - new_size = stat_data.st_size - except FileNotFoundError: - logger.debug( - f"File {file} moved while waiting for it to remain unmodified.", - ) - return - if new_mtime == mtime and new_size == size: - _consume(file) - return - mtime = new_mtime - size = new_size - sleep(settings.CONSUMER_POLLING_DELAY) - current_try += 1 - - logger.error(f"Timeout while waiting on file {file} to remain unmodified.") - - -class Handler(FileSystemEventHandler): - def __init__(self, pool: ThreadPoolExecutor) -> None: - super().__init__() - self._pool = pool - - def on_created(self, event): - self._pool.submit(_consume_wait_unmodified, Path(event.src_path)) - - def on_moved(self, event): - self._pool.submit(_consume_wait_unmodified, Path(event.dest_path)) + logger.exception(f"Error while queuing document {filepath}") class Command(BaseCommand): """ - On every iteration of an infinite loop, consume what we can from the - consumption directory. + Watch a consumption directory and queue new documents for processing. + + Uses watchfiles for efficient file system monitoring. Supports both + native OS notifications (inotify on Linux, FSEvents on macOS) and + polling for network filesystems. """ - # This is here primarily for the tests and is irrelevant in production. 
- stop_flag = Event() - # Also only for testing, configures in one place the timeout used before checking - # the stop flag - testing_timeout_s: Final[float] = 0.5 - testing_timeout_ms: Final[float] = testing_timeout_s * 1000.0 + help = "Watch the consumption directory for new documents" - def add_arguments(self, parser): + # For testing - allows tests to stop the consumer + stop_flag: Event = Event() + + # Testing timeout in seconds + testing_timeout_s: Final[float] = 0.5 + + def add_arguments(self, parser) -> None: parser.add_argument( "directory", - default=settings.CONSUMPTION_DIR, + default=None, nargs="?", - help="The consumption directory.", + help="The consumption directory (defaults to CONSUMPTION_DIR setting)", + ) + parser.add_argument( + "--oneshot", + action="store_true", + help="Process existing files and exit without watching", ) - parser.add_argument("--oneshot", action="store_true", help="Run only once.") - - # Only use during unit testing, will configure a timeout - # Leaving it unset or false and the consumer will exit when it - # receives SIGINT parser.add_argument( "--testing", action="store_true", - help="Flag used only for unit testing", + help="Enable testing mode with shorter timeouts", default=False, ) - def handle(self, *args, **options): - directory = options["directory"] - recursive = settings.CONSUMER_RECURSIVE - + def handle(self, *args, **options) -> None: + # Resolve consumption directory + directory = options.get("directory") if not directory: - raise CommandError("CONSUMPTION_DIR does not appear to be set.") + directory = getattr(settings, "CONSUMPTION_DIR", None) + if not directory: + raise CommandError("CONSUMPTION_DIR is not configured") directory = Path(directory).resolve() - if not directory.is_dir(): - raise CommandError(f"Consumption directory {directory} does not exist") + if not directory.exists(): + raise CommandError(f"Consumption directory does not exist: {directory}") - # Consumer will need this + if not directory.is_dir(): + raise CommandError(f"Consumption path is not a directory: {directory}") + + # Ensure scratch directory exists settings.SCRATCH_DIR.mkdir(parents=True, exist_ok=True) - if recursive: - for dirpath, _, filenames in os.walk(directory): - for filename in filenames: - filepath = Path(dirpath) / filename - _consume(filepath) - else: - for filepath in directory.iterdir(): - _consume(filepath) + # Get settings + recursive: bool = getattr(settings, "CONSUMER_RECURSIVE", False) + subdirs_as_tags: bool = getattr(settings, "CONSUMER_SUBDIRS_AS_TAGS", False) + polling_interval: float = getattr(settings, "CONSUMER_POLLING_INTERVAL", 0) + stability_delay: float = getattr(settings, "CONSUMER_STABILITY_DELAY", 1.0) + ignore_patterns: list[str] = getattr(settings, "CONSUMER_IGNORE_PATTERNS", []) + is_testing: bool = options.get("testing", False) + is_oneshot: bool = options.get("oneshot", False) - if options["oneshot"]: + # Create filter + consumer_filter = ConsumerFilter( + ignore_patterns=ignore_patterns, + consumption_dir=directory, + ) + + # Process existing files + self._process_existing_files( + directory=directory, + recursive=recursive, + subdirs_as_tags=subdirs_as_tags, + consumer_filter=consumer_filter, + ) + + if is_oneshot: + logger.info("Oneshot mode: processed existing files, exiting") return - if settings.CONSUMER_POLLING == 0 and INotify: - self.handle_inotify(directory, recursive, is_testing=options["testing"]) + # Start watching + self._watch_directory( + directory=directory, + recursive=recursive, + 
subdirs_as_tags=subdirs_as_tags, + consumer_filter=consumer_filter, + polling_interval=polling_interval, + stability_delay=stability_delay, + is_testing=is_testing, + ) + + logger.debug("Consumer exiting") + + def _process_existing_files( + self, + *, + directory: Path, + recursive: bool, + subdirs_as_tags: bool, + consumer_filter: ConsumerFilter, + ) -> None: + """Process any existing files in the consumption directory.""" + logger.info(f"Processing existing files in {directory}") + + glob_pattern = "**/*" if recursive else "*" + + for filepath in directory.glob(glob_pattern): + # Use filter to check if file should be processed + if not filepath.is_file(): + continue + + if not consumer_filter(Change.added, str(filepath)): + continue + + _consume_file( + filepath=filepath, + consumption_dir=directory, + subdirs_as_tags=subdirs_as_tags, + ) + + def _watch_directory( + self, + *, + directory: Path, + recursive: bool, + subdirs_as_tags: bool, + consumer_filter: ConsumerFilter, + polling_interval: float, + stability_delay: float, + is_testing: bool, + ) -> None: + """Watch directory for changes and process stable files.""" + use_polling = polling_interval > 0 + poll_delay_ms = int(polling_interval * 1000) if use_polling else 0 + + if use_polling: + logger.info( + f"Watching {directory} using polling (interval: {polling_interval}s)", + ) else: - if INotify is None and settings.CONSUMER_POLLING == 0: # pragma: no cover - logger.warning("Using polling as INotify import failed") - self.handle_polling(directory, recursive, is_testing=options["testing"]) + logger.info(f"Watching {directory} using native file system events") - logger.debug("Consumer exiting.") + # Create stability tracker + tracker = FileStabilityTracker(stability_delay=stability_delay) - def handle_polling(self, directory, recursive, *, is_testing: bool): - logger.info(f"Polling directory for changes: {directory}") + # Calculate timeouts + stability_timeout_ms = int(stability_delay * 1000) + testing_timeout_ms = int(self.testing_timeout_s * 1000) - timeout = None - if is_testing: - timeout = self.testing_timeout_s - logger.debug(f"Configuring timeout to {timeout}s") + # Start with no timeout (wait indefinitely for first event) + # unless in testing mode + timeout_ms = testing_timeout_ms if is_testing else 0 - polling_interval = settings.CONSUMER_POLLING - if polling_interval == 0: # pragma: no cover - # Only happens if INotify failed to import - logger.warning("Using polling of 10s, consider setting this") - polling_interval = 10 + self.stop_flag.clear() - with ThreadPoolExecutor(max_workers=4) as pool: - observer = PollingObserver(timeout=polling_interval) - observer.schedule(Handler(pool), directory, recursive=recursive) - observer.start() + while not self.stop_flag.is_set(): try: - while observer.is_alive(): - observer.join(timeout) - if self.stop_flag.is_set(): - observer.stop() + for changes in watch( + directory, + watch_filter=consumer_filter, + rust_timeout=timeout_ms, + yield_on_timeout=True, + force_polling=use_polling, + poll_delay_ms=poll_delay_ms, + recursive=recursive, + stop_event=self.stop_flag, + ): + # Process each change + for change_type, path in changes: + path = Path(path).resolve() + logger.debug(f"Event: {change_type.name} for {path}") + tracker.track(path, change_type) + + # Check for stable files + for stable_path in tracker.get_stable_files(): + _consume_file( + filepath=stable_path, + consumption_dir=directory, + subdirs_as_tags=subdirs_as_tags, + ) + + # Exit watch loop to reconfigure timeout + break + 
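+ # Each pass of the while-loop calls watch() again with the + # rust_timeout recomputed below, so the consumer alternates between + # waiting for new events and re-checking tracked files for stability.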
+ # Determine next timeout + if tracker.has_pending_files(): + # Check pending files at stability interval + timeout_ms = stability_timeout_ms + elif is_testing: + # In testing, use short timeout to check stop flag + timeout_ms = testing_timeout_ms + else: + # No pending files, wait indefinitely + timeout_ms = 0 + except KeyboardInterrupt: - observer.stop() - observer.join() - - def handle_inotify(self, directory, recursive, *, is_testing: bool): - logger.info(f"Using inotify to watch directory for changes: {directory}") - - timeout_ms = None - if is_testing: - timeout_ms = self.testing_timeout_ms - logger.debug(f"Configuring timeout to {timeout_ms}ms") - - inotify = INotify() - inotify_flags = flags.CLOSE_WRITE | flags.MOVED_TO | flags.MODIFY - if recursive: - inotify.add_watch_recursive(directory, inotify_flags) - else: - inotify.add_watch(directory, inotify_flags) - - inotify_debounce_secs: Final[float] = settings.CONSUMER_INOTIFY_DELAY - inotify_debounce_ms: Final[int] = inotify_debounce_secs * 1000 - - finished = False - - notified_files = {} - - try: - while not finished: - try: - for event in inotify.read(timeout=timeout_ms): - path = inotify.get_path(event.wd) if recursive else directory - filepath = Path(path) / event.name - if flags.MODIFY in flags.from_mask(event.mask): - notified_files.pop(filepath, None) - else: - notified_files[filepath] = monotonic() - - # Check the files against the timeout - still_waiting = {} - # last_event_time is time of the last inotify event for this file - for filepath, last_event_time in notified_files.items(): - # Current time - last time over the configured timeout - waited_long_enough = ( - monotonic() - last_event_time - ) > inotify_debounce_secs - - # Also make sure the file exists still, some scanners might write a - # temporary file first - try: - file_still_exists = filepath.exists() and filepath.is_file() - except (PermissionError, OSError): # pragma: no cover - # If we can't check, let it fail in the _consume function - file_still_exists = True - continue - - if waited_long_enough and file_still_exists: - _consume(filepath) - elif file_still_exists: - still_waiting[filepath] = last_event_time - - # These files are still waiting to hit the timeout - notified_files = still_waiting - - # If files are waiting, need to exit read() to check them - # Otherwise, go back to infinite sleep time, but only if not testing - if len(notified_files) > 0: - timeout_ms = inotify_debounce_ms - elif is_testing: - timeout_ms = self.testing_timeout_ms - else: - timeout_ms = None - - if self.stop_flag.is_set(): - logger.debug("Finishing because event is set") - finished = True - - except KeyboardInterrupt: - logger.info("Received SIGINT, stopping inotify") - finished = True - finally: - inotify.close() + logger.info("Received interrupt, stopping consumer") + self.stop_flag.set() diff --git a/src/documents/tests/test_management_consumer.py b/src/documents/tests/test_management_consumer.py index 38b9eadda..91792ed1b 100644 --- a/src/documents/tests/test_management_consumer.py +++ b/src/documents/tests/test_management_consumer.py @@ -1,438 +1,1156 @@ -import filecmp +""" +Tests for the document consumer management command. 
+ +Tests are organized into classes by component: +- TestFileStabilityTracker: Unit tests for FileStabilityTracker +- TestConsumerFilter: Unit tests for ConsumerFilter +- TestConsumeFile: Unit tests for the _consume_file function +- TestTagsFromPath: Unit tests for _tags_from_path +- TestCommandValidation: Tests for command argument validation +- TestCommandOneshot: Tests for oneshot mode +- TestCommandWatch: Integration tests for the watch loop +""" + +from __future__ import annotations + +import re import shutil from pathlib import Path from threading import Thread +from time import monotonic from time import sleep from unittest import mock -from django.conf import settings +import pytest from django.core.management import CommandError -from django.core.management import call_command -from django.test import TransactionTestCase from django.test import override_settings +from watchfiles import Change -from documents.consumer import ConsumerError from documents.data_models import ConsumableDocument -from documents.management.commands import document_consumer +from documents.data_models import DocumentSource +from documents.management.commands.document_consumer import Command +from documents.management.commands.document_consumer import ConsumerFilter +from documents.management.commands.document_consumer import FileStabilityTracker +from documents.management.commands.document_consumer import TrackedFile +from documents.management.commands.document_consumer import _consume_file +from documents.management.commands.document_consumer import _tags_from_path from documents.models import Tag -from documents.tests.utils import DirectoriesMixin -from documents.tests.utils import DocumentConsumeDelayMixin + +# -- Fixtures -- + + +@pytest.fixture +def stability_tracker() -> FileStabilityTracker: + """Create a FileStabilityTracker with a short delay for testing.""" + return FileStabilityTracker(stability_delay=0.1) + + +@pytest.fixture +def temp_file(tmp_path: Path) -> Path: + """Create a temporary file for testing.""" + file_path = tmp_path / "test_file.pdf" + file_path.write_bytes(b"test content") + return file_path + + +@pytest.fixture +def consumption_dir(tmp_path: Path) -> Path: + """Create a temporary consumption directory for testing.""" + consume_dir = tmp_path / "consume" + consume_dir.mkdir() + return consume_dir + + +@pytest.fixture +def scratch_dir(tmp_path: Path) -> Path: + """Create a temporary scratch directory for testing.""" + scratch = tmp_path / "scratch" + scratch.mkdir() + return scratch + + +@pytest.fixture +def sample_pdf(tmp_path: Path) -> Path: + """Create a sample PDF file.""" + # Use a minimal valid-ish PDF header + pdf_content = b"%PDF-1.4\n%test\n1 0 obj\n<<>>\nendobj\ntrailer\n<<>>\n%%EOF" + pdf_path = tmp_path / "sample.pdf" + pdf_path.write_bytes(pdf_content) + return pdf_path + + +@pytest.fixture +def consumer_filter() -> ConsumerFilter: + """Create a ConsumerFilter for testing.""" + return ConsumerFilter( + supported_extensions=frozenset({".pdf", ".png", ".jpg"}), + ignore_patterns=[r"^custom_ignore.*"], + ) + + +@pytest.fixture +def mock_consume_file_delay(): + """Mock the consume_file.delay celery task.""" + with mock.patch( + "documents.management.commands.document_consumer.consume_file", + ) as mock_task: + mock_task.delay = mock.MagicMock() + yield mock_task + + +# -- TrackedFile Tests -- + + +class TestTrackedFile: + """Tests for the TrackedFile dataclass.""" + + def test_update_stats_existing_file(self, temp_file: Path) -> None: + """Test update_stats succeeds 
for existing file.""" + tracked = TrackedFile(path=temp_file, last_event_time=monotonic()) + assert tracked.update_stats() is True + assert tracked.last_mtime is not None + assert tracked.last_size is not None + assert tracked.last_size == len(b"test content") + + def test_update_stats_nonexistent_file(self, tmp_path: Path) -> None: + """Test update_stats fails for nonexistent file.""" + tracked = TrackedFile( + path=tmp_path / "nonexistent.pdf", + last_event_time=monotonic(), + ) + assert tracked.update_stats() is False + assert tracked.last_mtime is None + assert tracked.last_size is None + + def test_is_unchanged_same_stats(self, temp_file: Path) -> None: + """Test is_unchanged returns True when stats haven't changed.""" + tracked = TrackedFile(path=temp_file, last_event_time=monotonic()) + tracked.update_stats() + assert tracked.is_unchanged() is True + + def test_is_unchanged_modified_file(self, temp_file: Path) -> None: + """Test is_unchanged returns False when file is modified.""" + tracked = TrackedFile(path=temp_file, last_event_time=monotonic()) + tracked.update_stats() + + # Modify the file + temp_file.write_bytes(b"modified content that is longer") + + assert tracked.is_unchanged() is False + + def test_is_unchanged_deleted_file(self, temp_file: Path) -> None: + """Test is_unchanged returns False when file is deleted.""" + tracked = TrackedFile(path=temp_file, last_event_time=monotonic()) + tracked.update_stats() + temp_file.unlink() + assert tracked.is_unchanged() is False + + +# -- FileStabilityTracker Tests -- + + +class TestFileStabilityTracker: + """Tests for the FileStabilityTracker class.""" + + def test_track_new_file( + self, + stability_tracker: FileStabilityTracker, + temp_file: Path, + ) -> None: + """Test tracking a new file adds it to pending.""" + stability_tracker.track(temp_file, Change.added) + assert stability_tracker.pending_count == 1 + assert stability_tracker.has_pending_files() is True + + def test_track_modified_file( + self, + stability_tracker: FileStabilityTracker, + temp_file: Path, + ) -> None: + """Test tracking a modified file updates its event time.""" + stability_tracker.track(temp_file, Change.added) + sleep(0.05) + stability_tracker.track(temp_file, Change.modified) + + # File should still be pending, not yet stable + assert stability_tracker.pending_count == 1 + + def test_track_deleted_file( + self, + stability_tracker: FileStabilityTracker, + temp_file: Path, + ) -> None: + """Test tracking a deleted file removes it from pending.""" + stability_tracker.track(temp_file, Change.added) + assert stability_tracker.pending_count == 1 + + stability_tracker.track(temp_file, Change.deleted) + assert stability_tracker.pending_count == 0 + assert stability_tracker.has_pending_files() is False + + def test_track_nonexistent_file( + self, + stability_tracker: FileStabilityTracker, + tmp_path: Path, + ) -> None: + """Test tracking a nonexistent file doesn't add it.""" + nonexistent = tmp_path / "nonexistent.pdf" + stability_tracker.track(nonexistent, Change.added) + assert stability_tracker.pending_count == 0 + + def test_get_stable_files_before_delay( + self, + stability_tracker: FileStabilityTracker, + temp_file: Path, + ) -> None: + """Test get_stable_files returns nothing before delay expires.""" + stability_tracker.track(temp_file, Change.added) + stable = list(stability_tracker.get_stable_files()) + assert len(stable) == 0 + assert stability_tracker.pending_count == 1 + + def test_get_stable_files_after_delay( + self, + stability_tracker: 
FileStabilityTracker, + temp_file: Path, + ) -> None: + """Test get_stable_files returns file after delay expires.""" + stability_tracker.track(temp_file, Change.added) + sleep(0.15) # Wait longer than stability_delay (0.1s) + + stable = list(stability_tracker.get_stable_files()) + assert len(stable) == 1 + assert stable[0] == temp_file + assert stability_tracker.pending_count == 0 + + def test_get_stable_files_modified_during_check( + self, + stability_tracker: FileStabilityTracker, + temp_file: Path, + ) -> None: + """Test file is not returned if modified during stability check.""" + stability_tracker.track(temp_file, Change.added) + sleep(0.12) + + # Modify file just before checking + temp_file.write_bytes(b"modified content") + + stable = list(stability_tracker.get_stable_files()) + assert len(stable) == 0 + # File should be re-tracked with new event time + assert stability_tracker.pending_count == 1 + + def test_get_stable_files_deleted_during_check( + self, + temp_file: Path, + ) -> None: + """Test deleted file is not returned during stability check.""" + tracker = FileStabilityTracker(stability_delay=0.1) + tracker.track(temp_file, Change.added) + sleep(0.12) + + # Delete file just before checking + temp_file.unlink() + + stable = list(tracker.get_stable_files()) + assert len(stable) == 0 + assert tracker.pending_count == 0 + + def test_multiple_files_tracking( + self, + stability_tracker: FileStabilityTracker, + tmp_path: Path, + ) -> None: + """Test tracking multiple files independently.""" + file1 = tmp_path / "file1.pdf" + file2 = tmp_path / "file2.pdf" + file1.write_bytes(b"content1") + file2.write_bytes(b"content2") + + stability_tracker.track(file1, Change.added) + sleep(0.05) + stability_tracker.track(file2, Change.added) + + assert stability_tracker.pending_count == 2 + + # Wait for file1 to be stable (but not file2) + sleep(0.06) + stable = list(stability_tracker.get_stable_files()) + assert len(stable) == 1 + assert stable[0] == file1 + + # Now wait for file2 + sleep(0.06) + stable = list(stability_tracker.get_stable_files()) + assert len(stable) == 1 + assert stable[0] == file2 + + def test_clear( + self, + stability_tracker: FileStabilityTracker, + temp_file: Path, + ) -> None: + """Test clear removes all tracked files.""" + stability_tracker.track(temp_file, Change.added) + assert stability_tracker.pending_count == 1 + + stability_tracker.clear() + assert stability_tracker.pending_count == 0 + assert stability_tracker.has_pending_files() is False + + def test_track_resolves_path( + self, + stability_tracker: FileStabilityTracker, + temp_file: Path, + ) -> None: + """Test that tracking resolves paths consistently.""" + # Track with relative-looking path + stability_tracker.track(temp_file, Change.added) + + # Track again with resolved path - should update, not add + stability_tracker.track(temp_file.resolve(), Change.modified) + + assert stability_tracker.pending_count == 1 + + +# -- ConsumerFilter Tests -- + + +class TestConsumerFilter: + """Tests for the ConsumerFilter class.""" + + def test_accepts_supported_extension( + self, + consumer_filter: ConsumerFilter, + tmp_path: Path, + ) -> None: + """Test filter accepts files with supported extensions.""" + test_file = tmp_path / "document.pdf" + test_file.touch() + assert consumer_filter(Change.added, str(test_file)) is True + + def test_rejects_unsupported_extension( + self, + consumer_filter: ConsumerFilter, + tmp_path: Path, + ) -> None: + """Test filter rejects files with unsupported extensions.""" + test_file = 
tmp_path / "document.xyz" + test_file.touch() + assert consumer_filter(Change.added, str(test_file)) is False + + def test_rejects_no_extension( + self, + consumer_filter: ConsumerFilter, + tmp_path: Path, + ) -> None: + """Test filter rejects files without extensions.""" + test_file = tmp_path / "document" + test_file.touch() + assert consumer_filter(Change.added, str(test_file)) is False + + def test_case_insensitive_extension( + self, + consumer_filter: ConsumerFilter, + tmp_path: Path, + ) -> None: + """Test filter handles extensions case-insensitively.""" + test_file = tmp_path / "document.PDF" + test_file.touch() + assert consumer_filter(Change.added, str(test_file)) is True + + def test_rejects_ds_store( + self, + consumer_filter: ConsumerFilter, + tmp_path: Path, + ) -> None: + """Test filter rejects .DS_Store files.""" + test_file = tmp_path / ".DS_Store" + test_file.touch() + assert consumer_filter(Change.added, str(test_file)) is False + + def test_rejects_macos_resource_fork( + self, + consumer_filter: ConsumerFilter, + tmp_path: Path, + ) -> None: + """Test filter rejects macOS resource fork files (._*).""" + test_file = tmp_path / "._document.pdf" + test_file.touch() + assert consumer_filter(Change.added, str(test_file)) is False + + def test_rejects_syncthing_folder( + self, + consumer_filter: ConsumerFilter, + tmp_path: Path, + ) -> None: + """Test filter rejects .stfolder directory via ignore_dirs.""" + stfolder = tmp_path / ".stfolder" + stfolder.mkdir() + # DefaultFilter ignores directories by name + assert consumer_filter(Change.added, str(stfolder)) is False + + def test_rejects_syncthing_versions( + self, + consumer_filter: ConsumerFilter, + tmp_path: Path, + ) -> None: + """Test filter rejects .stversions directory via ignore_dirs.""" + stversions = tmp_path / ".stversions" + stversions.mkdir() + assert consumer_filter(Change.added, str(stversions)) is False + + def test_rejects_synology_eadir( + self, + consumer_filter: ConsumerFilter, + tmp_path: Path, + ) -> None: + """Test filter rejects Synology @eaDir directory via ignore_dirs.""" + eadir = tmp_path / "@eaDir" + eadir.mkdir() + assert consumer_filter(Change.added, str(eadir)) is False + + def test_rejects_thumbs_db( + self, + consumer_filter: ConsumerFilter, + tmp_path: Path, + ) -> None: + """Test filter rejects Thumbs.db.""" + test_file = tmp_path / "Thumbs.db" + test_file.touch() + assert consumer_filter(Change.added, str(test_file)) is False + + def test_rejects_desktop_ini( + self, + consumer_filter: ConsumerFilter, + tmp_path: Path, + ) -> None: + """Test filter rejects desktop.ini.""" + test_file = tmp_path / "desktop.ini" + test_file.touch() + assert consumer_filter(Change.added, str(test_file)) is False + + def test_custom_ignore_pattern( + self, + consumer_filter: ConsumerFilter, + tmp_path: Path, + ) -> None: + """Test filter respects custom ignore patterns.""" + test_file = tmp_path / "custom_ignore_this.pdf" + test_file.touch() + assert consumer_filter(Change.added, str(test_file)) is False + + def test_accepts_similar_to_ignored( + self, + consumer_filter: ConsumerFilter, + tmp_path: Path, + ) -> None: + """Test filter accepts files similar to but not matching ignore patterns.""" + test_file = tmp_path / "stfolder.pdf" + test_file.touch() + assert consumer_filter(Change.added, str(test_file)) is True + + def test_default_patterns_are_regex(self) -> None: + """Test that default patterns are valid regex.""" + for pattern in ConsumerFilter.DEFAULT_IGNORE_PATTERNS: + # Should not raise + 
re.compile(pattern) + + +class TestConsumerFilterWithoutExtensions: + """Tests for ConsumerFilter edge cases.""" + + def test_filter_works_with_default_extensions(self, tmp_path: Path) -> None: + """Test filter works when using default extensions.""" + # This would use get_supported_file_extensions() in real usage + filter_obj = ConsumerFilter( + supported_extensions=frozenset({".pdf"}), + ) + test_file = tmp_path / "document.pdf" + test_file.touch() + assert filter_obj(Change.added, str(test_file)) is True + + def test_ignores_patterns_by_filename(self, tmp_path: Path) -> None: + """Test filter ignores patterns matched against filename only.""" + filter_obj = ConsumerFilter( + supported_extensions=frozenset({".pdf"}), + ) + test_file = tmp_path / ".DS_Store" + test_file.touch() + assert filter_obj(Change.added, str(test_file)) is False + + +# -- _consume_file Tests -- + + +class TestConsumeFile: + """Tests for the _consume_file function.""" + + @pytest.mark.django_db + def test_consume_queues_file( + self, + consumption_dir: Path, + sample_pdf: Path, + mock_consume_file_delay, + ) -> None: + """Test _consume_file queues a valid file.""" + target = consumption_dir / "document.pdf" + shutil.copy(sample_pdf, target) + + _consume_file( + filepath=target, + consumption_dir=consumption_dir, + subdirs_as_tags=False, + ) + + mock_consume_file_delay.delay.assert_called_once() + call_args = mock_consume_file_delay.delay.call_args + consumable_doc = call_args[0][0] + assert isinstance(consumable_doc, ConsumableDocument) + assert consumable_doc.original_file == target + assert consumable_doc.source == DocumentSource.ConsumeFolder + + @pytest.mark.django_db + def test_consume_nonexistent_file( + self, + consumption_dir: Path, + mock_consume_file_delay, + ) -> None: + """Test _consume_file handles nonexistent files gracefully.""" + _consume_file( + filepath=consumption_dir / "nonexistent.pdf", + consumption_dir=consumption_dir, + subdirs_as_tags=False, + ) + + mock_consume_file_delay.delay.assert_not_called() + + @pytest.mark.django_db + def test_consume_directory( + self, + consumption_dir: Path, + mock_consume_file_delay, + ) -> None: + """Test _consume_file ignores directories.""" + subdir = consumption_dir / "subdir" + subdir.mkdir() + + _consume_file( + filepath=subdir, + consumption_dir=consumption_dir, + subdirs_as_tags=False, + ) + + mock_consume_file_delay.delay.assert_not_called() + + @pytest.mark.django_db + def test_consume_with_permission_error( + self, + consumption_dir: Path, + sample_pdf: Path, + mock_consume_file_delay, + ) -> None: + """Test _consume_file handles permission errors.""" + target = consumption_dir / "document.pdf" + shutil.copy(sample_pdf, target) + + with mock.patch.object(Path, "is_file", side_effect=PermissionError("denied")): + _consume_file( + filepath=target, + consumption_dir=consumption_dir, + subdirs_as_tags=False, + ) + + mock_consume_file_delay.delay.assert_not_called() + + +# -- _tags_from_path Tests -- + + +class TestTagsFromPath: + """Tests for the _tags_from_path function.""" + + @pytest.mark.django_db + def test_creates_tags_from_subdirectories(self, consumption_dir: Path) -> None: + """Test tags are created for each subdirectory.""" + subdir = consumption_dir / "Invoice" / "2024" + subdir.mkdir(parents=True) + target = subdir / "document.pdf" + target.touch() + + tag_ids = _tags_from_path(target, consumption_dir) + + assert len(tag_ids) == 2 + assert Tag.objects.filter(name="Invoice").exists() + assert Tag.objects.filter(name="2024").exists() + + 
@pytest.mark.django_db + def test_reuses_existing_tags(self, consumption_dir: Path) -> None: + """Test existing tags are reused (case-insensitive).""" + existing_tag = Tag.objects.create(name="existing") + + subdir = consumption_dir / "EXISTING" + subdir.mkdir(parents=True) + target = subdir / "document.pdf" + target.touch() + + tag_ids = _tags_from_path(target, consumption_dir) + + assert len(tag_ids) == 1 + assert existing_tag.pk in tag_ids + # Should not create a duplicate + assert Tag.objects.filter(name__iexact="existing").count() == 1 + + @pytest.mark.django_db + def test_no_tags_for_root_file(self, consumption_dir: Path) -> None: + """Test no tags created for files directly in consumption dir.""" + target = consumption_dir / "document.pdf" + target.touch() + + tag_ids = _tags_from_path(target, consumption_dir) + + assert len(tag_ids) == 0 + + +# -- Command Validation Tests -- + + +class TestCommandValidation: + """Tests for command argument validation.""" + + def test_raises_for_missing_consumption_dir(self) -> None: + """Test command raises error when directory is not provided and setting is unset.""" + with override_settings(CONSUMPTION_DIR=None): + with pytest.raises(CommandError, match="not configured"): + cmd = Command() + cmd.handle(directory=None, oneshot=True, testing=False) + + def test_raises_for_nonexistent_directory(self, tmp_path: Path) -> None: + """Test command raises error for nonexistent directory.""" + nonexistent = tmp_path / "nonexistent" + + with pytest.raises(CommandError, match="does not exist"): + cmd = Command() + cmd.handle(directory=str(nonexistent), oneshot=True, testing=False) + + def test_raises_for_file_instead_of_directory( + self, + sample_pdf: Path, + ) -> None: + """Test command raises error when path is a file, not directory.""" + with pytest.raises(CommandError, match="not a directory"): + cmd = Command() + cmd.handle(directory=str(sample_pdf), oneshot=True, testing=False) + + +# -- Command Oneshot Tests -- + + +class TestCommandOneshot: + """Tests for oneshot mode.""" + + @pytest.mark.django_db + def test_processes_existing_files( + self, + consumption_dir: Path, + scratch_dir: Path, + sample_pdf: Path, + mock_consume_file_delay, + ) -> None: + """Test oneshot mode processes existing files.""" + target = consumption_dir / "document.pdf" + shutil.copy(sample_pdf, target) + + with ( + override_settings(SCRATCH_DIR=scratch_dir), + mock.patch( + "documents.management.commands.document_consumer.get_supported_file_extensions", + return_value={".pdf"}, + ), + ): + cmd = Command() + cmd.handle(directory=str(consumption_dir), oneshot=True, testing=False) + + mock_consume_file_delay.delay.assert_called_once() + + @pytest.mark.django_db + def test_processes_recursive( + self, + consumption_dir: Path, + scratch_dir: Path, + sample_pdf: Path, + mock_consume_file_delay, + ) -> None: + """Test oneshot mode processes files recursively.""" + subdir = consumption_dir / "subdir" + subdir.mkdir() + target = subdir / "document.pdf" + shutil.copy(sample_pdf, target) + + with ( + override_settings(SCRATCH_DIR=scratch_dir, CONSUMER_RECURSIVE=True), + mock.patch( + "documents.management.commands.document_consumer.get_supported_file_extensions", + return_value={".pdf"}, + ), + ): + cmd = Command() + cmd.handle(directory=str(consumption_dir), oneshot=True, testing=False) + + mock_consume_file_delay.delay.assert_called_once() + + @pytest.mark.django_db + def test_ignores_unsupported_extensions( + self, + consumption_dir: Path, + scratch_dir: Path, + 
mock_consume_file_delay, + ) -> None: + """Test oneshot mode ignores unsupported file extensions.""" + target = consumption_dir / "document.xyz" + target.write_bytes(b"content") + + with ( + override_settings(SCRATCH_DIR=scratch_dir), + mock.patch( + "documents.management.commands.document_consumer.get_supported_file_extensions", + return_value={".pdf"}, + ), + ): + cmd = Command() + cmd.handle(directory=str(consumption_dir), oneshot=True, testing=False) + + mock_consume_file_delay.delay.assert_not_called() + + +# -- Command Watch Tests -- class ConsumerThread(Thread): - def __init__(self): + """Thread wrapper for running the consumer command.""" + + def __init__( + self, + consumption_dir: Path, + scratch_dir: Path, + *, + recursive: bool = False, + subdirs_as_tags: bool = False, + polling_interval: float = 0, + stability_delay: float = 0.1, + ) -> None: super().__init__() - self.cmd = document_consumer.Command() + self.consumption_dir = consumption_dir + self.scratch_dir = scratch_dir + self.recursive = recursive + self.subdirs_as_tags = subdirs_as_tags + self.polling_interval = polling_interval + self.stability_delay = stability_delay + self.cmd = Command() self.cmd.stop_flag.clear() + self.daemon = True + self.exception: Exception | None = None def run(self) -> None: - self.cmd.handle(directory=settings.CONSUMPTION_DIR, oneshot=False, testing=True) + try: + # Apply settings overrides within the thread + with override_settings( + SCRATCH_DIR=self.scratch_dir, + CONSUMER_RECURSIVE=self.recursive, + CONSUMER_SUBDIRS_AS_TAGS=self.subdirs_as_tags, + CONSUMER_POLLING_INTERVAL=self.polling_interval, + CONSUMER_STABILITY_DELAY=self.stability_delay, + CONSUMER_IGNORE_PATTERNS=[], + ): + self.cmd.handle( + directory=str(self.consumption_dir), + oneshot=False, + testing=True, + ) + except Exception as e: + self.exception = e - def stop(self): - # Consumer checks this every second. + def stop(self) -> None: self.cmd.stop_flag.set() -def chunked(size, source): - for i in range(0, len(source), size): - yield source[i : i + size] +class TestCommandWatch: + """Integration tests for the watch loop.""" - -class ConsumerThreadMixin(DocumentConsumeDelayMixin): - """ - Provides a thread which runs the consumer management command at setUp - and stops it at tearDown - """ - - sample_file: Path = ( - Path(__file__).parent / Path("samples") / Path("simple.pdf") - ).resolve() - - def setUp(self) -> None: - super().setUp() - self.t = None - - def t_start(self): - self.t = ConsumerThread() - self.t.start() - # give the consumer some time to do initial work - sleep(1) - - def tearDown(self) -> None: - if self.t: - # set the stop flag - self.t.stop() - # wait for the consumer to exit. - self.t.join() - self.t = None - - super().tearDown() - - def wait_for_task_mock_call(self, expected_call_count=1): - n = 0 - while n < 50: - if self.consume_file_mock.call_count >= expected_call_count: - # give task_mock some time to finish and raise errors - sleep(1) - return - n += 1 - sleep(0.1) - - # A bogus async_task that will simply check the file for - # completeness and raise an exception otherwise. 
- def bogus_task( + @pytest.mark.django_db + def test_detects_new_file( self, - input_doc: ConsumableDocument, - overrides=None, - ): - eq = filecmp.cmp(input_doc.original_file, self.sample_file, shallow=False) - if not eq: - print("Consumed an INVALID file.") # noqa: T201 - raise ConsumerError("Incomplete File READ FAILED") - else: - print("Consumed a perfectly valid file.") # noqa: T201 + consumption_dir: Path, + scratch_dir: Path, + sample_pdf: Path, + mock_consume_file_delay, + ) -> None: + """Test watch mode detects and consumes new files.""" + with mock.patch( + "documents.management.commands.document_consumer.get_supported_file_extensions", + return_value={".pdf"}, + ): + thread = ConsumerThread(consumption_dir, scratch_dir) + thread.start() - def slow_write_file(self, target, *, incomplete=False): - with Path(self.sample_file).open("rb") as f: - pdf_bytes = f.read() + # Give thread time to start watching + sleep(0.5) - if incomplete: - pdf_bytes = pdf_bytes[: len(pdf_bytes) - 100] + # Copy file + target = consumption_dir / "document.pdf" + shutil.copy(sample_pdf, target) - with Path(target).open("wb") as f: - # this will take 2 seconds, since the file is about 20k. - print("Start writing file.") # noqa: T201 - for b in chunked(1000, pdf_bytes): - f.write(b) - sleep(0.1) - print("file completed.") # noqa: T201 + # Wait for stability delay + processing + sleep(0.5) + thread.stop() + thread.join(timeout=2.0) -@override_settings( - CONSUMER_INOTIFY_DELAY=0.01, -) -class TestConsumer(DirectoriesMixin, ConsumerThreadMixin, TransactionTestCase): - def test_consume_file(self): - self.t_start() + if thread.exception: + raise thread.exception - f = Path(self.dirs.consumption_dir) / "my_file.pdf" - shutil.copy(self.sample_file, f) + mock_consume_file_delay.delay.assert_called() - self.wait_for_task_mock_call() + @pytest.mark.django_db + def test_detects_moved_file( + self, + consumption_dir: Path, + scratch_dir: Path, + sample_pdf: Path, + mock_consume_file_delay, + ) -> None: + """Test watch mode detects moved/renamed files.""" + # Create temp file outside consumption dir + temp_location = scratch_dir / "temp.pdf" + shutil.copy(sample_pdf, temp_location) - self.consume_file_mock.assert_called_once() + with mock.patch( + "documents.management.commands.document_consumer.get_supported_file_extensions", + return_value={".pdf"}, + ): + thread = ConsumerThread(consumption_dir, scratch_dir) + thread.start() - input_doc, _ = self.get_last_consume_delay_call_args() + sleep(0.5) - self.assertEqual(input_doc.original_file, f) + # Move file into consumption dir + target = consumption_dir / "document.pdf" + shutil.move(temp_location, target) - def test_consume_file_invalid_ext(self): - self.t_start() + sleep(0.5) - f = Path(self.dirs.consumption_dir) / "my_file.wow" - shutil.copy(self.sample_file, f) + thread.stop() + thread.join(timeout=2.0) - self.wait_for_task_mock_call() + if thread.exception: + raise thread.exception - self.consume_file_mock.assert_not_called() + mock_consume_file_delay.delay.assert_called() - def test_consume_existing_file(self): - f = Path(self.dirs.consumption_dir) / "my_file.pdf" - shutil.copy(self.sample_file, f) + @pytest.mark.django_db + def test_handles_slow_write( + self, + consumption_dir: Path, + scratch_dir: Path, + sample_pdf: Path, + mock_consume_file_delay, + ) -> None: + """Test watch mode waits for slow writes to complete.""" + pdf_bytes = sample_pdf.read_bytes() - self.t_start() - self.consume_file_mock.assert_called_once() - - input_doc, _ = 
self.get_last_consume_delay_call_args() - - self.assertEqual(input_doc.original_file, f) - - @mock.patch("documents.management.commands.document_consumer.logger.error") - def test_slow_write_pdf(self, error_logger): - self.consume_file_mock.side_effect = self.bogus_task - - self.t_start() - - fname = Path(self.dirs.consumption_dir) / "my_file.pdf" - - self.slow_write_file(fname) - - self.wait_for_task_mock_call() - - error_logger.assert_not_called() - - self.consume_file_mock.assert_called_once() - - input_doc, _ = self.get_last_consume_delay_call_args() - - self.assertEqual(input_doc.original_file, fname) - - @mock.patch("documents.management.commands.document_consumer.logger.error") - def test_slow_write_and_move(self, error_logger): - self.consume_file_mock.side_effect = self.bogus_task - - self.t_start() - - fname = Path(self.dirs.consumption_dir) / "my_file.~df" - fname2 = Path(self.dirs.consumption_dir) / "my_file.pdf" - - self.slow_write_file(fname) - shutil.move(fname, fname2) - - self.wait_for_task_mock_call() - - self.consume_file_mock.assert_called_once() - - input_doc, _ = self.get_last_consume_delay_call_args() - - self.assertEqual(input_doc.original_file, fname2) - - error_logger.assert_not_called() - - @mock.patch("documents.management.commands.document_consumer.logger.error") - def test_slow_write_incomplete(self, error_logger): - self.consume_file_mock.side_effect = self.bogus_task - - self.t_start() - - fname = Path(self.dirs.consumption_dir) / "my_file.pdf" - self.slow_write_file(fname, incomplete=True) - - self.wait_for_task_mock_call() - - self.consume_file_mock.assert_called_once() - - input_doc, _ = self.get_last_consume_delay_call_args() - - self.assertEqual(input_doc.original_file, fname) - - # assert that we have an error logged with this invalid file. 
- error_logger.assert_called_once() - - @mock.patch("documents.management.commands.document_consumer.logger.warning") - def test_permission_error_on_prechecks(self, warning_logger): - filepath = Path(self.dirs.consumption_dir) / "selinux.txt" - filepath.touch() - - original_stat = Path.stat - - def raising_stat(self, *args, **kwargs): - if self == filepath: - raise PermissionError("Permission denied") - return original_stat(self, *args, **kwargs) - - with mock.patch("pathlib.Path.stat", new=raising_stat): - document_consumer._consume(filepath) - - warning_logger.assert_called_once() - (args, _) = warning_logger.call_args - self.assertIn("Permission denied", args[0]) - self.consume_file_mock.assert_not_called() - - @override_settings(CONSUMPTION_DIR="does_not_exist") - def test_consumption_directory_invalid(self): - self.assertRaises(CommandError, call_command, "document_consumer", "--oneshot") - - @override_settings(CONSUMPTION_DIR="") - def test_consumption_directory_unset(self): - self.assertRaises(CommandError, call_command, "document_consumer", "--oneshot") - - def test_mac_write(self): - self.consume_file_mock.side_effect = self.bogus_task - - self.t_start() - - shutil.copy( - self.sample_file, - Path(self.dirs.consumption_dir) / ".DS_STORE", - ) - shutil.copy( - self.sample_file, - Path(self.dirs.consumption_dir) / "my_file.pdf", - ) - shutil.copy( - self.sample_file, - Path(self.dirs.consumption_dir) / "._my_file.pdf", - ) - shutil.copy( - self.sample_file, - Path(self.dirs.consumption_dir) / "my_second_file.pdf", - ) - shutil.copy( - self.sample_file, - Path(self.dirs.consumption_dir) / "._my_second_file.pdf", - ) - - sleep(5) - - self.wait_for_task_mock_call(expected_call_count=2) - - self.assertEqual(2, self.consume_file_mock.call_count) - - consumed_files = [] - for input_doc, _ in self.get_all_consume_delay_call_args(): - consumed_files.append(input_doc.original_file.name) - - self.assertCountEqual(consumed_files, ["my_file.pdf", "my_second_file.pdf"]) - - def test_is_ignored(self): - test_paths = [ - { - "path": str(Path(self.dirs.consumption_dir) / "foo.pdf"), - "ignore": False, - }, - { - "path": str( - Path(self.dirs.consumption_dir) / "foo" / "bar.pdf", - ), - "ignore": False, - }, - { - "path": str(Path(self.dirs.consumption_dir) / ".DS_STORE"), - "ignore": True, - }, - { - "path": str(Path(self.dirs.consumption_dir) / ".DS_Store"), - "ignore": True, - }, - { - "path": str( - Path(self.dirs.consumption_dir) / ".stfolder" / "foo.pdf", - ), - "ignore": True, - }, - { - "path": str(Path(self.dirs.consumption_dir) / ".stfolder.pdf"), - "ignore": False, - }, - { - "path": str( - Path(self.dirs.consumption_dir) / ".stversions" / "foo.pdf", - ), - "ignore": True, - }, - { - "path": str( - Path(self.dirs.consumption_dir) / ".stversions.pdf", - ), - "ignore": False, - }, - { - "path": str(Path(self.dirs.consumption_dir) / "._foo.pdf"), - "ignore": True, - }, - { - "path": str(Path(self.dirs.consumption_dir) / "my_foo.pdf"), - "ignore": False, - }, - { - "path": str( - Path(self.dirs.consumption_dir) / "._foo" / "bar.pdf", - ), - "ignore": True, - }, - { - "path": str( - Path(self.dirs.consumption_dir) - / "@eaDir" - / "SYNO@.fileindexdb" - / "_1jk.fnm", - ), - "ignore": True, - }, - ] - for test_setup in test_paths: - filepath = test_setup["path"] - expected_ignored_result = test_setup["ignore"] - self.assertEqual( - expected_ignored_result, - document_consumer._is_ignored(filepath), - f'_is_ignored("{filepath}") != {expected_ignored_result}', + with mock.patch( + 
"documents.management.commands.document_consumer.get_supported_file_extensions", + return_value={".pdf"}, + ): + thread = ConsumerThread( + consumption_dir, + scratch_dir, + stability_delay=0.2, ) + thread.start() - @mock.patch("documents.management.commands.document_consumer.Path.open") - def test_consume_file_busy(self, open_mock): - # Calling this mock always raises this - open_mock.side_effect = OSError + sleep(0.5) - self.t_start() + # Simulate slow write + target = consumption_dir / "document.pdf" + with target.open("wb") as f: + for i in range(0, len(pdf_bytes), 100): + f.write(pdf_bytes[i : i + 100]) + f.flush() + sleep(0.05) - f = Path(self.dirs.consumption_dir) / "my_file.pdf" - shutil.copy(self.sample_file, f) + # Wait for stability + sleep(0.5) - self.wait_for_task_mock_call() + thread.stop() + thread.join(timeout=2.0) - self.consume_file_mock.assert_not_called() + if thread.exception: + raise thread.exception + + mock_consume_file_delay.delay.assert_called() + + @pytest.mark.django_db + def test_ignores_macos_files( + self, + consumption_dir: Path, + scratch_dir: Path, + sample_pdf: Path, + mock_consume_file_delay, + ) -> None: + """Test watch mode ignores macOS system files.""" + with mock.patch( + "documents.management.commands.document_consumer.get_supported_file_extensions", + return_value={".pdf"}, + ): + thread = ConsumerThread(consumption_dir, scratch_dir) + thread.start() + + sleep(0.5) + + # Create macOS files + (consumption_dir / ".DS_Store").write_bytes(b"test") + (consumption_dir / "._document.pdf").write_bytes(b"test") + + # Also create a valid file to confirm filtering works + shutil.copy(sample_pdf, consumption_dir / "valid.pdf") + + sleep(0.5) + + thread.stop() + thread.join(timeout=2.0) + + if thread.exception: + raise thread.exception + + # Should only consume the valid file + assert mock_consume_file_delay.delay.call_count == 1 + call_args = mock_consume_file_delay.delay.call_args[0][0] + assert call_args.original_file.name == "valid.pdf" + + @pytest.mark.django_db + def test_stop_flag_stops_consumer( + self, + consumption_dir: Path, + scratch_dir: Path, + mock_consume_file_delay, + ) -> None: + """Test stop flag properly stops the consumer.""" + with mock.patch( + "documents.management.commands.document_consumer.get_supported_file_extensions", + return_value={".pdf"}, + ): + thread = ConsumerThread(consumption_dir, scratch_dir) + thread.start() + + sleep(0.3) + assert thread.is_alive() + + thread.stop() + thread.join(timeout=2.0) + + assert not thread.is_alive() -@override_settings( - CONSUMER_POLLING=1, - # please leave the delay here and down below - # see https://github.com/paperless-ngx/paperless-ngx/pull/66 - CONSUMER_POLLING_DELAY=3, - CONSUMER_POLLING_RETRY_COUNT=20, -) -class TestConsumerPolling(TestConsumer): - # just do all the tests with polling - pass +class TestCommandWatchPolling: + """Tests for polling mode.""" + + @pytest.mark.django_db + def test_polling_mode_works( + self, + consumption_dir: Path, + scratch_dir: Path, + sample_pdf: Path, + mock_consume_file_delay, + ) -> None: + """Test polling mode detects files.""" + with mock.patch( + "documents.management.commands.document_consumer.get_supported_file_extensions", + return_value={".pdf"}, + ): + thread = ConsumerThread( + consumption_dir, + scratch_dir, + polling_interval=0.5, # Enable polling + ) + thread.start() + + sleep(0.5) + + target = consumption_dir / "document.pdf" + shutil.copy(sample_pdf, target) + + # Polling needs more time + sleep(1.5) + + thread.stop() + 
thread.join(timeout=2.0) + + if thread.exception: + raise thread.exception + + mock_consume_file_delay.delay.assert_called() -@override_settings(CONSUMER_INOTIFY_DELAY=0.01, CONSUMER_RECURSIVE=True) -class TestConsumerRecursive(TestConsumer): - # just do all the tests with recursive - pass +class TestCommandWatchRecursive: + """Tests for recursive watching.""" + + @pytest.mark.django_db + def test_recursive_detects_nested_files( + self, + consumption_dir: Path, + scratch_dir: Path, + sample_pdf: Path, + mock_consume_file_delay, + ) -> None: + """Test recursive mode detects files in subdirectories.""" + subdir = consumption_dir / "level1" / "level2" + subdir.mkdir(parents=True) + + with mock.patch( + "documents.management.commands.document_consumer.get_supported_file_extensions", + return_value={".pdf"}, + ): + thread = ConsumerThread( + consumption_dir, + scratch_dir, + recursive=True, + ) + thread.start() + + sleep(0.5) + + target = subdir / "document.pdf" + shutil.copy(sample_pdf, target) + + sleep(0.5) + + thread.stop() + thread.join(timeout=2.0) + + if thread.exception: + raise thread.exception + + mock_consume_file_delay.delay.assert_called() + + @pytest.mark.django_db + def test_subdirs_as_tags( + self, + consumption_dir: Path, + scratch_dir: Path, + sample_pdf: Path, + mock_consume_file_delay, + ) -> None: + """Test subdirs_as_tags creates tags from directory names.""" + subdir = consumption_dir / "Invoices" / "2024" + subdir.mkdir(parents=True) + + with mock.patch( + "documents.management.commands.document_consumer.get_supported_file_extensions", + return_value={".pdf"}, + ): + thread = ConsumerThread( + consumption_dir, + scratch_dir, + recursive=True, + subdirs_as_tags=True, + ) + thread.start() + + sleep(0.5) + + target = subdir / "document.pdf" + shutil.copy(sample_pdf, target) + + sleep(0.5) + + thread.stop() + thread.join(timeout=2.0) + + if thread.exception: + raise thread.exception + + mock_consume_file_delay.delay.assert_called() + # Check tags were passed + call_args = mock_consume_file_delay.delay.call_args + overrides = call_args[0][1] + assert overrides.tag_ids is not None + assert len(overrides.tag_ids) == 2 -@override_settings( - CONSUMER_RECURSIVE=True, - CONSUMER_POLLING=1, - CONSUMER_POLLING_DELAY=3, - CONSUMER_POLLING_RETRY_COUNT=20, -) -class TestConsumerRecursivePolling(TestConsumer): - # just do all the tests with polling and recursive - pass +class TestCommandWatchEdgeCases: + """Tests for edge cases and error handling.""" + @pytest.mark.django_db + def test_handles_deleted_before_stable( + self, + consumption_dir: Path, + scratch_dir: Path, + sample_pdf: Path, + mock_consume_file_delay, + ) -> None: + """Test handles files deleted before becoming stable.""" + with mock.patch( + "documents.management.commands.document_consumer.get_supported_file_extensions", + return_value={".pdf"}, + ): + thread = ConsumerThread( + consumption_dir, + scratch_dir, + stability_delay=0.3, # Longer delay + ) + thread.start() -class TestConsumerTags(DirectoriesMixin, ConsumerThreadMixin, TransactionTestCase): - @override_settings(CONSUMER_RECURSIVE=True, CONSUMER_SUBDIRS_AS_TAGS=True) - def test_consume_file_with_path_tags(self): - tag_names = ("existingTag", "Space Tag") - # Create a Tag prior to consuming a file using it in path - tag_ids = [ - Tag.objects.create(name="existingtag").pk, - ] + sleep(0.3) - self.t_start() + # Create and quickly delete + target = consumption_dir / "document.pdf" + shutil.copy(sample_pdf, target) + sleep(0.1) # Before stability delay + 
target.unlink() - path = Path(self.dirs.consumption_dir) / "/".join(tag_names) - path.mkdir(parents=True, exist_ok=True) - f = path / "my_file.pdf" - # Wait at least inotify read_delay for recursive watchers - # to be created for the new directories - sleep(1) - shutil.copy(self.sample_file, f) + sleep(0.5) - self.wait_for_task_mock_call() + thread.stop() + thread.join(timeout=2.0) - self.consume_file_mock.assert_called_once() + if thread.exception: + raise thread.exception - # Add the pk of the Tag created by _consume() - tag_ids.append(Tag.objects.get(name=tag_names[1]).pk) + # Should not have consumed the deleted file + mock_consume_file_delay.delay.assert_not_called() - input_doc, overrides = self.get_last_consume_delay_call_args() + @pytest.mark.django_db + def test_handles_task_exception( + self, + consumption_dir: Path, + scratch_dir: Path, + sample_pdf: Path, + ) -> None: + """Test handles exceptions from consume task gracefully.""" + with ( + mock.patch( + "documents.management.commands.document_consumer.consume_file", + ) as mock_task, + mock.patch( + "documents.management.commands.document_consumer.get_supported_file_extensions", + return_value={".pdf"}, + ), + ): + mock_task.delay.side_effect = Exception("Task error") - self.assertEqual(input_doc.original_file, f) + thread = ConsumerThread(consumption_dir, scratch_dir) + thread.start() - # assertCountEqual has a bad name, but test that the first - # sequence contains the same elements as second, regardless of - # their order. - self.assertCountEqual(overrides.tag_ids, tag_ids) + sleep(0.3) - @override_settings( - CONSUMER_POLLING=1, - CONSUMER_POLLING_DELAY=3, - CONSUMER_POLLING_RETRY_COUNT=20, - ) - def test_consume_file_with_path_tags_polling(self): - self.test_consume_file_with_path_tags() + target = consumption_dir / "document.pdf" + shutil.copy(sample_pdf, target) + + sleep(0.5) + + # Consumer should still be running despite the exception + assert thread.is_alive() + + thread.stop() + thread.join(timeout=2.0) diff --git a/uv.lock b/uv.lock index c621b203d..b84bf567f 100644 --- a/uv.lock +++ b/uv.lock @@ -1458,26 +1458,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, ] -[[package]] -name = "inotify-simple" -version = "2.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e3/5c/bfe40e15d684bc30b0073aa97c39be410a5fbef3d33cad6f0bf2012571e0/inotify_simple-2.0.1.tar.gz", hash = "sha256:f010bbbd8283bd71a9f4eb2de94765804ede24bd47320b0e6ef4136e541cdc2c", size = 7101, upload-time = "2025-08-25T06:28:20.998Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/86/8be1ac7e90f80b413e81f1e235148e8db771218886a2353392f02da01be3/inotify_simple-2.0.1-py3-none-any.whl", hash = "sha256:e5da495f2064889f8e68b67f9358b0d102e03b783c2d42e5b8e132ab859a5d8a", size = 7449, upload-time = "2025-08-25T06:28:19.919Z" }, -] - -[[package]] -name = "inotifyrecursive" -version = "0.3.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "inotify-simple", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/19/3a/9ed038cb750a3ba8090869cf3ad50f5628077a936d911aee14ca83e40f6a/inotifyrecursive-0.3.5.tar.gz", hash = 
"sha256:a2c450b317693e4538416f90eb1d7858506dafe6b8b885037bd2dd9ae2dafa1e", size = 4576, upload-time = "2020-11-20T12:38:48.035Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/fc/4e5a141c3f7c7bed550ac1f69e599e92b6be449dd4677ec09f325cad0955/inotifyrecursive-0.3.5-py3-none-any.whl", hash = "sha256:7e5f4a2e1dc2bef0efa3b5f6b339c41fb4599055a2b54909d020e9e932cc8d2f", size = 8009, upload-time = "2020-11-20T12:38:46.981Z" }, -] [[package]] name = "isodate" @@ -2186,7 +2166,6 @@ dependencies = [ { name = "gotenberg-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "httpx-oauth", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "imap-tools", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, - { name = "inotifyrecursive", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "jinja2", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "langdetect", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "nltk", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, @@ -2206,7 +2185,7 @@ dependencies = [ { name = "setproctitle", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "tika-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, - { name = "watchdog", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, + { name = "watchfiles", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "whitenoise", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "whoosh-reloaded", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "zxing-cpp", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version != '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version != '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or sys_platform == 'darwin'" }, @@ -2325,7 +2304,6 @@ requires-dist = [ { name = "granian", extras = ["uvloop"], marker = "extra == 'webserver'", specifier = "~=2.5.1" }, { name = "httpx-oauth", specifier = "~=0.16" }, { name = "imap-tools", specifier = "~=1.11.0" }, - { name = "inotifyrecursive", specifier = "~=0.3" }, { name = "jinja2", specifier = "~=3.1.5" }, { name = "langdetect", specifier = "~=1.0.9" }, { name = "mysqlclient", marker = "extra == 'mariadb'", specifier = "~=2.2.7" }, @@ -2351,7 +2329,7 @@ requires-dist = [ { name = "setproctitle", specifier = "~=1.3.4" }, { name = "tika-client", specifier = "~=0.10.0" }, { name = "tqdm", specifier = "~=4.67.1" }, - { name = "watchdog", specifier = "~=6.0" }, + { name = "watchfiles", specifier = ">=1.1.1" }, { name = "whitenoise", specifier = "~=6.9" }, { name = "whoosh-reloaded", specifier = ">=2.7.5" }, { name = "zxing-cpp", marker = "(python_full_version != '3.12.*' and platform_machine == 'aarch64') or (python_full_version != '3.12.*' and platform_machine == 'x86_64') or (platform_machine != 'aarch64' and platform_machine != 'x86_64') or sys_platform != 'linux'", specifier = "~=2.3.0" }, @@ -4327,6 +4305,95 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078, upload-time = "2024-11-01T14:07:07.547Z" }, ] +[[package]] +name = "watchfiles" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440, upload-time = "2025-10-14T15:06:21.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/1a/206e8cf2dd86fddf939165a57b4df61607a1e0add2785f170a3f616b7d9f/watchfiles-1.1.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:eef58232d32daf2ac67f42dea51a2c80f0d03379075d44a587051e63cc2e368c", size = 407318, upload-time = "2025-10-14T15:04:18.753Z" }, + { url = "https://files.pythonhosted.org/packages/b3/0f/abaf5262b9c496b5dad4ed3c0e799cbecb1f8ea512ecb6ddd46646a9fca3/watchfiles-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:03fa0f5237118a0c5e496185cafa92878568b652a2e9a9382a5151b1a0380a43", size = 394478, upload-time = "2025-10-14T15:04:20.297Z" }, + { url = "https://files.pythonhosted.org/packages/b1/04/9cc0ba88697b34b755371f5ace8d3a4d9a15719c07bdc7bd13d7d8c6a341/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca65483439f9c791897f7db49202301deb6e15fe9f8fe2fed555bf986d10c31", size = 449894, upload-time = "2025-10-14T15:04:21.527Z" }, + { url = "https://files.pythonhosted.org/packages/d2/9c/eda4615863cd8621e89aed4df680d8c3ec3da6a4cf1da113c17decd87c7f/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f0ab1c1af0cb38e3f598244c17919fb1a84d1629cc08355b0074b6d7f53138ac", size = 459065, upload-time = "2025-10-14T15:04:22.795Z" }, + { url = "https://files.pythonhosted.org/packages/84/13/f28b3f340157d03cbc8197629bc109d1098764abe1e60874622a0be5c112/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bc570d6c01c206c46deb6e935a260be44f186a2f05179f52f7fcd2be086a94d", size = 488377, upload-time = "2025-10-14T15:04:24.138Z" }, + { url = "https://files.pythonhosted.org/packages/86/93/cfa597fa9389e122488f7ffdbd6db505b3b915ca7435ecd7542e855898c2/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e84087b432b6ac94778de547e08611266f1f8ffad28c0ee4c82e028b0fc5966d", size = 595837, upload-time = "2025-10-14T15:04:25.057Z" }, + { url = "https://files.pythonhosted.org/packages/57/1e/68c1ed5652b48d89fc24d6af905d88ee4f82fa8bc491e2666004e307ded1/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:620bae625f4cb18427b1bb1a2d9426dc0dd5a5ba74c7c2cdb9de405f7b129863", size = 473456, upload-time = "2025-10-14T15:04:26.497Z" }, + { url = "https://files.pythonhosted.org/packages/d5/dc/1a680b7458ffa3b14bb64878112aefc8f2e4f73c5af763cbf0bd43100658/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:544364b2b51a9b0c7000a4b4b02f90e9423d97fbbf7e06689236443ebcad81ab", size = 455614, upload-time = "2025-10-14T15:04:27.539Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/a5/3d782a666512e01eaa6541a72ebac1d3aae191ff4a31274a66b8dd85760c/watchfiles-1.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bbe1ef33d45bc71cf21364df962af171f96ecaeca06bd9e3d0b583efb12aec82", size = 630690, upload-time = "2025-10-14T15:04:28.495Z" }, + { url = "https://files.pythonhosted.org/packages/9b/73/bb5f38590e34687b2a9c47a244aa4dd50c56a825969c92c9c5fc7387cea1/watchfiles-1.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1a0bb430adb19ef49389e1ad368450193a90038b5b752f4ac089ec6942c4dff4", size = 622459, upload-time = "2025-10-14T15:04:29.491Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f8/2c5f479fb531ce2f0564eda479faecf253d886b1ab3630a39b7bf7362d46/watchfiles-1.1.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f57b396167a2565a4e8b5e56a5a1c537571733992b226f4f1197d79e94cf0ae5", size = 406529, upload-time = "2025-10-14T15:04:32.899Z" }, + { url = "https://files.pythonhosted.org/packages/fe/cd/f515660b1f32f65df671ddf6f85bfaca621aee177712874dc30a97397977/watchfiles-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:421e29339983e1bebc281fab40d812742268ad057db4aee8c4d2bce0af43b741", size = 394384, upload-time = "2025-10-14T15:04:33.761Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c3/28b7dc99733eab43fca2d10f55c86e03bd6ab11ca31b802abac26b23d161/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e43d39a741e972bab5d8100b5cdacf69db64e34eb19b6e9af162bccf63c5cc6", size = 448789, upload-time = "2025-10-14T15:04:34.679Z" }, + { url = "https://files.pythonhosted.org/packages/4a/24/33e71113b320030011c8e4316ccca04194bf0cbbaeee207f00cbc7d6b9f5/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f537afb3276d12814082a2e9b242bdcf416c2e8fd9f799a737990a1dbe906e5b", size = 460521, upload-time = "2025-10-14T15:04:35.963Z" }, + { url = "https://files.pythonhosted.org/packages/f4/c3/3c9a55f255aa57b91579ae9e98c88704955fa9dac3e5614fb378291155df/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2cd9e04277e756a2e2d2543d65d1e2166d6fd4c9b183f8808634fda23f17b14", size = 488722, upload-time = "2025-10-14T15:04:37.091Z" }, + { url = "https://files.pythonhosted.org/packages/49/36/506447b73eb46c120169dc1717fe2eff07c234bb3232a7200b5f5bd816e9/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3f58818dc0b07f7d9aa7fe9eb1037aecb9700e63e1f6acfed13e9fef648f5d", size = 596088, upload-time = "2025-10-14T15:04:38.39Z" }, + { url = "https://files.pythonhosted.org/packages/82/ab/5f39e752a9838ec4d52e9b87c1e80f1ee3ccdbe92e183c15b6577ab9de16/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb9f66367023ae783551042d31b1d7fd422e8289eedd91f26754a66f44d5cff", size = 472923, upload-time = "2025-10-14T15:04:39.666Z" }, + { url = "https://files.pythonhosted.org/packages/af/b9/a419292f05e302dea372fa7e6fda5178a92998411f8581b9830d28fb9edb/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebfd0861a83e6c3d1110b78ad54704486555246e542be3e2bb94195eabb2606", size = 456080, upload-time = "2025-10-14T15:04:40.643Z" }, + { url = "https://files.pythonhosted.org/packages/b0/c3/d5932fd62bde1a30c36e10c409dc5d54506726f08cb3e1d8d0ba5e2bc8db/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fac835b4ab3c6487b5dbad78c4b3724e26bcc468e886f8ba8cc4306f68f6701", size = 629432, upload-time = 
"2025-10-14T15:04:41.789Z" }, + { url = "https://files.pythonhosted.org/packages/f7/77/16bddd9779fafb795f1a94319dc965209c5641db5bf1edbbccace6d1b3c0/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:399600947b170270e80134ac854e21b3ccdefa11a9529a3decc1327088180f10", size = 623046, upload-time = "2025-10-14T15:04:42.718Z" }, + { url = "https://files.pythonhosted.org/packages/74/d5/f039e7e3c639d9b1d09b07ea412a6806d38123f0508e5f9b48a87b0a76cc/watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d", size = 404745, upload-time = "2025-10-14T15:04:46.731Z" }, + { url = "https://files.pythonhosted.org/packages/a5/96/a881a13aa1349827490dab2d363c8039527060cfcc2c92cc6d13d1b1049e/watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610", size = 391769, upload-time = "2025-10-14T15:04:48.003Z" }, + { url = "https://files.pythonhosted.org/packages/4b/5b/d3b460364aeb8da471c1989238ea0e56bec24b6042a68046adf3d9ddb01c/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af", size = 449374, upload-time = "2025-10-14T15:04:49.179Z" }, + { url = "https://files.pythonhosted.org/packages/b9/44/5769cb62d4ed055cb17417c0a109a92f007114a4e07f30812a73a4efdb11/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6", size = 459485, upload-time = "2025-10-14T15:04:50.155Z" }, + { url = "https://files.pythonhosted.org/packages/19/0c/286b6301ded2eccd4ffd0041a1b726afda999926cf720aab63adb68a1e36/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce", size = 488813, upload-time = "2025-10-14T15:04:51.059Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2b/8530ed41112dd4a22f4dcfdb5ccf6a1baad1ff6eed8dc5a5f09e7e8c41c7/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa", size = 594816, upload-time = "2025-10-14T15:04:52.031Z" }, + { url = "https://files.pythonhosted.org/packages/ce/d2/f5f9fb49489f184f18470d4f99f4e862a4b3e9ac2865688eb2099e3d837a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb", size = 475186, upload-time = "2025-10-14T15:04:53.064Z" }, + { url = "https://files.pythonhosted.org/packages/cf/68/5707da262a119fb06fbe214d82dd1fe4a6f4af32d2d14de368d0349eb52a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803", size = 456812, upload-time = "2025-10-14T15:04:55.174Z" }, + { url = "https://files.pythonhosted.org/packages/66/ab/3cbb8756323e8f9b6f9acb9ef4ec26d42b2109bce830cc1f3468df20511d/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94", size = 630196, upload-time = "2025-10-14T15:04:56.22Z" }, + { url = "https://files.pythonhosted.org/packages/78/46/7152ec29b8335f80167928944a94955015a345440f524d2dfe63fc2f437b/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43", size = 622657, upload-time = "2025-10-14T15:04:57.521Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/f750b29225fe77139f7ae5de89d4949f5a99f934c65a1f1c0b248f26f747/watchfiles-1.1.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:130e4876309e8686a5e37dba7d5e9bc77e6ed908266996ca26572437a5271e18", size = 404321, upload-time = "2025-10-14T15:05:02.063Z" }, + { url = "https://files.pythonhosted.org/packages/2b/f9/f07a295cde762644aa4c4bb0f88921d2d141af45e735b965fb2e87858328/watchfiles-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f3bde70f157f84ece3765b42b4a52c6ac1a50334903c6eaf765362f6ccca88a", size = 391783, upload-time = "2025-10-14T15:05:03.052Z" }, + { url = "https://files.pythonhosted.org/packages/bc/11/fc2502457e0bea39a5c958d86d2cb69e407a4d00b85735ca724bfa6e0d1a/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e0b1fe858430fc0251737ef3824c54027bedb8c37c38114488b8e131cf8219", size = 449279, upload-time = "2025-10-14T15:05:04.004Z" }, + { url = "https://files.pythonhosted.org/packages/e3/1f/d66bc15ea0b728df3ed96a539c777acfcad0eb78555ad9efcaa1274688f0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f27db948078f3823a6bb3b465180db8ebecf26dd5dae6f6180bd87383b6b4428", size = 459405, upload-time = "2025-10-14T15:05:04.942Z" }, + { url = "https://files.pythonhosted.org/packages/be/90/9f4a65c0aec3ccf032703e6db02d89a157462fbb2cf20dd415128251cac0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059098c3a429f62fc98e8ec62b982230ef2c8df68c79e826e37b895bc359a9c0", size = 488976, upload-time = "2025-10-14T15:05:05.905Z" }, + { url = "https://files.pythonhosted.org/packages/37/57/ee347af605d867f712be7029bb94c8c071732a4b44792e3176fa3c612d39/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfb5862016acc9b869bb57284e6cb35fdf8e22fe59f7548858e2f971d045f150", size = 595506, upload-time = "2025-10-14T15:05:06.906Z" }, + { url = "https://files.pythonhosted.org/packages/a8/78/cc5ab0b86c122047f75e8fc471c67a04dee395daf847d3e59381996c8707/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:319b27255aacd9923b8a276bb14d21a5f7ff82564c744235fc5eae58d95422ae", size = 474936, upload-time = "2025-10-14T15:05:07.906Z" }, + { url = "https://files.pythonhosted.org/packages/62/da/def65b170a3815af7bd40a3e7010bf6ab53089ef1b75d05dd5385b87cf08/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c755367e51db90e75b19454b680903631d41f9e3607fbd941d296a020c2d752d", size = 456147, upload-time = "2025-10-14T15:05:09.138Z" }, + { url = "https://files.pythonhosted.org/packages/57/99/da6573ba71166e82d288d4df0839128004c67d2778d3b566c138695f5c0b/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c22c776292a23bfc7237a98f791b9ad3144b02116ff10d820829ce62dff46d0b", size = 630007, upload-time = "2025-10-14T15:05:10.117Z" }, + { url = "https://files.pythonhosted.org/packages/a8/51/7439c4dd39511368849eb1e53279cd3454b4a4dbace80bab88feeb83c6b5/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3a476189be23c3686bc2f4321dd501cb329c0a0469e77b7b534ee10129ae6374", size = 622280, upload-time = "2025-10-14T15:05:11.146Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/42/e0a7d749626f1e28c7108a99fb9bf524b501bbbeb9b261ceecde644d5a07/watchfiles-1.1.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:563b116874a9a7ce6f96f87cd0b94f7faf92d08d0021e837796f0a14318ef8da", size = 403389, upload-time = "2025-10-14T15:05:15.777Z" }, + { url = "https://files.pythonhosted.org/packages/15/49/08732f90ce0fbbc13913f9f215c689cfc9ced345fb1bcd8829a50007cc8d/watchfiles-1.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3ad9fe1dae4ab4212d8c91e80b832425e24f421703b5a42ef2e4a1e215aff051", size = 389964, upload-time = "2025-10-14T15:05:16.85Z" }, + { url = "https://files.pythonhosted.org/packages/27/0d/7c315d4bd5f2538910491a0393c56bf70d333d51bc5b34bee8e68e8cea19/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce70f96a46b894b36eba678f153f052967a0d06d5b5a19b336ab0dbbd029f73e", size = 448114, upload-time = "2025-10-14T15:05:17.876Z" }, + { url = "https://files.pythonhosted.org/packages/c3/24/9e096de47a4d11bc4df41e9d1e61776393eac4cb6eb11b3e23315b78b2cc/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cb467c999c2eff23a6417e58d75e5828716f42ed8289fe6b77a7e5a91036ca70", size = 460264, upload-time = "2025-10-14T15:05:18.962Z" }, + { url = "https://files.pythonhosted.org/packages/cc/0f/e8dea6375f1d3ba5fcb0b3583e2b493e77379834c74fd5a22d66d85d6540/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:836398932192dae4146c8f6f737d74baeac8b70ce14831a239bdb1ca882fc261", size = 487877, upload-time = "2025-10-14T15:05:20.094Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5b/df24cfc6424a12deb41503b64d42fbea6b8cb357ec62ca84a5a3476f654a/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:743185e7372b7bc7c389e1badcc606931a827112fbbd37f14c537320fca08620", size = 595176, upload-time = "2025-10-14T15:05:21.134Z" }, + { url = "https://files.pythonhosted.org/packages/8f/b5/853b6757f7347de4e9b37e8cc3289283fb983cba1ab4d2d7144694871d9c/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afaeff7696e0ad9f02cbb8f56365ff4686ab205fcf9c4c5b6fdfaaa16549dd04", size = 473577, upload-time = "2025-10-14T15:05:22.306Z" }, + { url = "https://files.pythonhosted.org/packages/e1/f7/0a4467be0a56e80447c8529c9fce5b38eab4f513cb3d9bf82e7392a5696b/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7eb7da0eb23aa2ba036d4f616d46906013a68caf61b7fdbe42fc8b25132e77", size = 455425, upload-time = "2025-10-14T15:05:23.348Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e0/82583485ea00137ddf69bc84a2db88bd92ab4a6e3c405e5fb878ead8d0e7/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:831a62658609f0e5c64178211c942ace999517f5770fe9436be4c2faeba0c0ef", size = 628826, upload-time = "2025-10-14T15:05:24.398Z" }, + { url = "https://files.pythonhosted.org/packages/28/9a/a785356fccf9fae84c0cc90570f11702ae9571036fb25932f1242c82191c/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:f9a2ae5c91cecc9edd47e041a930490c31c3afb1f5e6d71de3dc671bfaca02bf", size = 622208, upload-time = "2025-10-14T15:05:25.45Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f4/0872229324ef69b2c3edec35e84bd57a1289e7d3fe74588048ed8947a323/watchfiles-1.1.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:d1715143123baeeaeadec0528bb7441103979a1d5f6fd0e1f915383fea7ea6d5", size = 404315, 
upload-time = "2025-10-14T15:05:26.501Z" }, + { url = "https://files.pythonhosted.org/packages/7b/22/16d5331eaed1cb107b873f6ae1b69e9ced582fcf0c59a50cd84f403b1c32/watchfiles-1.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:39574d6370c4579d7f5d0ad940ce5b20db0e4117444e39b6d8f99db5676c52fd", size = 390869, upload-time = "2025-10-14T15:05:27.649Z" }, + { url = "https://files.pythonhosted.org/packages/b2/7e/5643bfff5acb6539b18483128fdc0ef2cccc94a5b8fbda130c823e8ed636/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7365b92c2e69ee952902e8f70f3ba6360d0d596d9299d55d7d386df84b6941fb", size = 449919, upload-time = "2025-10-14T15:05:28.701Z" }, + { url = "https://files.pythonhosted.org/packages/51/2e/c410993ba5025a9f9357c376f48976ef0e1b1aefb73b97a5ae01a5972755/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfff9740c69c0e4ed32416f013f3c45e2ae42ccedd1167ef2d805c000b6c71a5", size = 460845, upload-time = "2025-10-14T15:05:30.064Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a4/2df3b404469122e8680f0fcd06079317e48db58a2da2950fb45020947734/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b27cf2eb1dda37b2089e3907d8ea92922b673c0c427886d4edc6b94d8dfe5db3", size = 489027, upload-time = "2025-10-14T15:05:31.064Z" }, + { url = "https://files.pythonhosted.org/packages/ea/84/4587ba5b1f267167ee715b7f66e6382cca6938e0a4b870adad93e44747e6/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526e86aced14a65a5b0ec50827c745597c782ff46b571dbfe46192ab9e0b3c33", size = 595615, upload-time = "2025-10-14T15:05:32.074Z" }, + { url = "https://files.pythonhosted.org/packages/6a/0f/c6988c91d06e93cd0bb3d4a808bcf32375ca1904609835c3031799e3ecae/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04e78dd0b6352db95507fd8cb46f39d185cf8c74e4cf1e4fbad1d3df96faf510", size = 474836, upload-time = "2025-10-14T15:05:33.209Z" }, + { url = "https://files.pythonhosted.org/packages/b4/36/ded8aebea91919485b7bbabbd14f5f359326cb5ec218cd67074d1e426d74/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c85794a4cfa094714fb9c08d4a218375b2b95b8ed1666e8677c349906246c05", size = 455099, upload-time = "2025-10-14T15:05:34.189Z" }, + { url = "https://files.pythonhosted.org/packages/98/e0/8c9bdba88af756a2fce230dd365fab2baf927ba42cd47521ee7498fd5211/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:74d5012b7630714b66be7b7b7a78855ef7ad58e8650c73afc4c076a1f480a8d6", size = 630626, upload-time = "2025-10-14T15:05:35.216Z" }, + { url = "https://files.pythonhosted.org/packages/2a/84/a95db05354bf2d19e438520d92a8ca475e578c647f78f53197f5a2f17aaf/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:8fbe85cb3201c7d380d3d0b90e63d520f15d6afe217165d7f98c9c649654db81", size = 622519, upload-time = "2025-10-14T15:05:36.259Z" }, + { url = "https://files.pythonhosted.org/packages/47/a8/e3af2184707c29f0f14b1963c0aace6529f9d1b8582d5b99f31bbf42f59e/watchfiles-1.1.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:88863fbbc1a7312972f1c511f202eb30866370ebb8493aef2812b9ff28156a21", size = 403820, upload-time = "2025-10-14T15:05:40.932Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/e47e307c2f4bd75f9f9e8afbe3876679b18e1bcec449beca132a1c5ffb2d/watchfiles-1.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = 
"sha256:55c7475190662e202c08c6c0f4d9e345a29367438cf8e8037f3155e10a88d5a5", size = 390510, upload-time = "2025-10-14T15:05:41.945Z" }, + { url = "https://files.pythonhosted.org/packages/d5/a0/ad235642118090f66e7b2f18fd5c42082418404a79205cdfca50b6309c13/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f53fa183d53a1d7a8852277c92b967ae99c2d4dcee2bfacff8868e6e30b15f7", size = 448408, upload-time = "2025-10-14T15:05:43.385Z" }, + { url = "https://files.pythonhosted.org/packages/df/85/97fa10fd5ff3332ae17e7e40e20784e419e28521549780869f1413742e9d/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6aae418a8b323732fa89721d86f39ec8f092fc2af67f4217a2b07fd3e93c6101", size = 458968, upload-time = "2025-10-14T15:05:44.404Z" }, + { url = "https://files.pythonhosted.org/packages/47/c2/9059c2e8966ea5ce678166617a7f75ecba6164375f3b288e50a40dc6d489/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f096076119da54a6080e8920cbdaac3dbee667eb91dcc5e5b78840b87415bd44", size = 488096, upload-time = "2025-10-14T15:05:45.398Z" }, + { url = "https://files.pythonhosted.org/packages/94/44/d90a9ec8ac309bc26db808a13e7bfc0e4e78b6fc051078a554e132e80160/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00485f441d183717038ed2e887a7c868154f216877653121068107b227a2f64c", size = 596040, upload-time = "2025-10-14T15:05:46.502Z" }, + { url = "https://files.pythonhosted.org/packages/95/68/4e3479b20ca305cfc561db3ed207a8a1c745ee32bf24f2026a129d0ddb6e/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a55f3e9e493158d7bfdb60a1165035f1cf7d320914e7b7ea83fe22c6023b58fc", size = 473847, upload-time = "2025-10-14T15:05:47.484Z" }, + { url = "https://files.pythonhosted.org/packages/4f/55/2af26693fd15165c4ff7857e38330e1b61ab8c37d15dc79118cdba115b7a/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c", size = 455072, upload-time = "2025-10-14T15:05:48.928Z" }, + { url = "https://files.pythonhosted.org/packages/66/1d/d0d200b10c9311ec25d2273f8aad8c3ef7cc7ea11808022501811208a750/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099", size = 629104, upload-time = "2025-10-14T15:05:49.908Z" }, + { url = "https://files.pythonhosted.org/packages/e3/bd/fa9bb053192491b3867ba07d2343d9f2252e00811567d30ae8d0f78136fe/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01", size = 622112, upload-time = "2025-10-14T15:05:50.941Z" }, + { url = "https://files.pythonhosted.org/packages/ba/4c/a888c91e2e326872fa4705095d64acd8aa2fb9c1f7b9bd0588f33850516c/watchfiles-1.1.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:17ef139237dfced9da49fb7f2232c86ca9421f666d78c264c7ffca6601d154c3", size = 409611, upload-time = "2025-10-14T15:06:05.809Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c7/5420d1943c8e3ce1a21c0a9330bcf7edafb6aa65d26b21dbb3267c9e8112/watchfiles-1.1.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:672b8adf25b1a0d35c96b5888b7b18699d27d4194bac8beeae75be4b7a3fc9b2", size = 396889, upload-time = "2025-10-14T15:06:07.035Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/e5/0072cef3804ce8d3aaddbfe7788aadff6b3d3f98a286fdbee9fd74ca59a7/watchfiles-1.1.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77a13aea58bc2b90173bc69f2a90de8e282648939a00a602e1dc4ee23e26b66d", size = 451616, upload-time = "2025-10-14T15:06:08.072Z" }, + { url = "https://files.pythonhosted.org/packages/83/4e/b87b71cbdfad81ad7e83358b3e447fedd281b880a03d64a760fe0a11fc2e/watchfiles-1.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b495de0bb386df6a12b18335a0285dda90260f51bdb505503c02bcd1ce27a8b", size = 458413, upload-time = "2025-10-14T15:06:09.209Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8e/e500f8b0b77be4ff753ac94dc06b33d8f0d839377fee1b78e8c8d8f031bf/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db476ab59b6765134de1d4fe96a1a9c96ddf091683599be0f26147ea1b2e4b88", size = 408250, upload-time = "2025-10-14T15:06:10.264Z" }, + { url = "https://files.pythonhosted.org/packages/bd/95/615e72cd27b85b61eec764a5ca51bd94d40b5adea5ff47567d9ebc4d275a/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89eef07eee5e9d1fda06e38822ad167a044153457e6fd997f8a858ab7564a336", size = 396117, upload-time = "2025-10-14T15:06:11.28Z" }, + { url = "https://files.pythonhosted.org/packages/c9/81/e7fe958ce8a7fb5c73cc9fb07f5aeaf755e6aa72498c57d760af760c91f8/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce19e06cbda693e9e7686358af9cd6f5d61312ab8b00488bc36f5aabbaf77e24", size = 450493, upload-time = "2025-10-14T15:06:12.321Z" }, + { url = "https://files.pythonhosted.org/packages/6e/d4/ed38dd3b1767193de971e694aa544356e63353c33a85d948166b5ff58b9e/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e6f39af2eab0118338902798b5aa6664f46ff66bc0280de76fca67a7f262a49", size = 457546, upload-time = "2025-10-14T15:06:13.372Z" }, +] + [[package]] name = "wcwidth" version = "0.2.14"