From ddd2428d9c2865e62c16316caef531b7164415de Mon Sep 17 00:00:00 2001
From: shamoon <4887959+shamoon@users.noreply.github.com>
Date: Mon, 21 Apr 2025 13:04:01 -0700
Subject: [PATCH] Invalidate llm suggestion cache on doc save

---
 src/documents/caching.py          | 15 ++++++++++++++-
 src/documents/signals/handlers.py | 10 ++++++++++
 2 files changed, 24 insertions(+), 1 deletion(-)

diff --git a/src/documents/caching.py b/src/documents/caching.py
index bde21fd92..48ff9ebe7 100644
--- a/src/documents/caching.py
+++ b/src/documents/caching.py
@@ -122,7 +122,7 @@ def get_llm_suggestion_cache(
     doc_key = get_suggestion_cache_key(document_id)
     data: SuggestionCacheData = cache.get(doc_key)
 
-    if data and data.classifier_version == 1000 and data.classifier_hash == backend:
+    if data and data.classifier_hash == backend:
         return data
 
     return None
@@ -152,6 +152,19 @@ def set_llm_suggestions_cache(
     )
 
 
+def invalidate_llm_suggestions_cache(
+    document_id: int,
+) -> None:
+    """
+    Invalidate the LLM suggestions cache for a specific document.
+    """
+    doc_key = get_suggestion_cache_key(document_id)
+    data: SuggestionCacheData = cache.get(doc_key)
+
+    if data:
+        cache.delete(doc_key)
+
+
 def get_metadata_cache_key(document_id: int) -> str:
     """
     Returns the basic key for a document's metadata
diff --git a/src/documents/signals/handlers.py b/src/documents/signals/handlers.py
index 673ecba52..43ba1f099 100644
--- a/src/documents/signals/handlers.py
+++ b/src/documents/signals/handlers.py
@@ -25,6 +25,7 @@ from guardian.shortcuts import remove_perm
 
 from documents import matching
 from documents.caching import clear_document_caches
+from documents.caching import invalidate_llm_suggestions_cache
 from documents.file_handling import create_source_path_directory
 from documents.file_handling import delete_empty_directories
 from documents.file_handling import generate_unique_filename
@@ -524,6 +525,15 @@ def update_filename_and_move_files(
     )
 
 
+@receiver(models.signals.post_save, sender=Document)
+def update_llm_suggestions_cache(sender, instance, **kwargs):
+    """
+    Invalidate the LLM suggestions cache when a document is saved.
+    """
+    # Invalidate the cache for the document
+    invalidate_llm_suggestions_cache(instance.pk)
+
+
 # should be disabled in /src/documents/management/commands/document_importer.py handle
 @receiver(models.signals.post_save, sender=CustomField)
 def check_paths_and_prune_custom_fields(sender, instance: CustomField, **kwargs):