Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2025-12-31 13:58:04 -06:00)
Compare commits
2 commits: feature-ac ... de12023311
| Author | SHA1 | Date |
|---|---|---|
| | de12023311 | |
| | 60ebdc0ad6 | |
@@ -17,8 +17,6 @@ from guardian.shortcuts import get_users_with_perms
 from whoosh import classify
 from whoosh import highlight
 from whoosh import query
-from whoosh.analysis import CharsetFilter
-from whoosh.analysis import StemmingAnalyzer
 from whoosh.fields import BOOLEAN
 from whoosh.fields import DATETIME
 from whoosh.fields import KEYWORD
@@ -38,7 +36,6 @@ from whoosh.qparser.dateparse import DateParserPlugin
 from whoosh.qparser.dateparse import English
 from whoosh.qparser.plugins import FieldsPlugin
 from whoosh.scoring import TF_IDF
-from whoosh.support.charset import accent_map
 from whoosh.util.times import timespan
 from whoosh.writing import AsyncWriter
 
@@ -57,13 +54,10 @@ logger = logging.getLogger("paperless.index")
 
 
 def get_schema() -> Schema:
-    # add accent-folding filter to a stemming analyzer:
-    af_analyzer = StemmingAnalyzer() | CharsetFilter(accent_map)
-
     return Schema(
         id=NUMERIC(stored=True, unique=True),
-        title=TEXT(sortable=True, analyzer=af_analyzer),
-        content=TEXT(analyzer=af_analyzer),
+        title=TEXT(sortable=True),
+        content=TEXT(),
         asn=NUMERIC(sortable=True, signed=False),
         correspondent=TEXT(sortable=True),
         correspondent_id=NUMERIC(),
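The dropped schema fields relied on an accent-folding, stemming analyzer. As a rough illustration of what that analyzer did, here is a minimal sketch built from the same whoosh pieces the removed imports name; the commented result is my expectation, not output quoted from the source:

```python
from whoosh.analysis import CharsetFilter, StemmingAnalyzer
from whoosh.support.charset import accent_map

# Stemming analyzer with an accent-folding filter appended, as in the removed code.
af_analyzer = StemmingAnalyzer() | CharsetFilter(accent_map)

# Both spellings should normalize to the same token ("cafe"), so a search for
# "cafe" would also match documents containing "café".
print([token.text for token in af_analyzer("Café cafe")])
```

With the plain TEXT() fields the diff switches to, title and content are indexed by whoosh's default analyzer, so stemming and accent folding no longer apply at index time.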
@@ -1021,7 +1021,7 @@ def run_workflows(
 
         if action.remove_all_custom_fields:
             if not use_overrides:
-                CustomFieldInstance.objects.filter(document=document).delete()
+                CustomFieldInstance.objects.filter(document=document).hard_delete()
             else:
                 overrides.custom_fields = None
         elif action.remove_custom_fields.exists():
@@ -1029,7 +1029,7 @@ def run_workflows(
                 CustomFieldInstance.objects.filter(
                     field__in=action.remove_custom_fields.all(),
                     document=document,
-                ).delete()
+                ).hard_delete()
             elif overrides.custom_fields:
                 for field in action.remove_custom_fields.filter(
                     pk__in=overrides.custom_fields.keys(),
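Switching from .delete() to .hard_delete() only makes sense on a soft-delete queryset, where the default delete() merely flags rows. A minimal sketch of that pattern, assuming a Django soft-delete manager; the class and field names below are illustrative, not paperless-ngx's actual implementation:

```python
# Hypothetical sketch of a soft-delete queryset exposing hard_delete();
# names are illustrative, not taken from paperless-ngx.
from django.db import models
from django.utils import timezone


class SoftDeleteQuerySet(models.QuerySet):
    def delete(self):
        # "Soft" delete: flag the rows instead of removing them.
        return super().update(deleted_at=timezone.now())

    def hard_delete(self):
        # Permanently remove the rows from the database.
        return super().delete()


class SoftDeleteModel(models.Model):
    deleted_at = models.DateTimeField(null=True, blank=True)

    objects = SoftDeleteQuerySet.as_manager()

    class Meta:
        abstract = True
```

If CustomFieldInstance uses a manager along these lines, the workflow now wipes the matching rows outright rather than leaving soft-deleted rows behind.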
@@ -557,7 +557,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
 
         response = self.client.get("/api/search/autocomplete/?term=app")
         self.assertEqual(response.status_code, status.HTTP_200_OK)
-        self.assertEqual(response.data, [b"appl", b"applebaum", b"appletini"])
+        self.assertEqual(response.data, [b"apples", b"applebaum", b"appletini"])
 
         d3.owner = u2
@@ -566,7 +566,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
 
         response = self.client.get("/api/search/autocomplete/?term=app")
         self.assertEqual(response.status_code, status.HTTP_200_OK)
-        self.assertEqual(response.data, [b"appl", b"applebaum"])
+        self.assertEqual(response.data, [b"apples", b"applebaum"])
 
         assign_perm("view_document", u1, d3)
@@ -575,7 +575,7 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
 
         response = self.client.get("/api/search/autocomplete/?term=app")
         self.assertEqual(response.status_code, status.HTTP_200_OK)
-        self.assertEqual(response.data, [b"appl", b"applebaum", b"appletini"])
+        self.assertEqual(response.data, [b"apples", b"applebaum", b"appletini"])
 
     def test_search_autocomplete_field_name_match(self):
         """
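The changed expectations follow from dropping the stemming analyzer: with stemming, "apples" is indexed under its Porter stem "appl"; without it, the literal term "apples" is what autocomplete returns. A quick check with whoosh's stemming analyzer; the commented list is my expectation, not output asserted by the source:

```python
from whoosh.analysis import StemmingAnalyzer

# With the stemming analyzer, "apples" is reduced to its stem "appl",
# which is what the old autocomplete expectations contained.
print([token.text for token in StemmingAnalyzer()("apples applebaum appletini")])
# expected: ['appl', 'applebaum', 'appletini']
```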
@@ -363,7 +363,7 @@ urlpatterns = [
 
 
 websocket_urlpatterns = [
-    path(settings.BASE_URL.lstrip("/") + "ws/status/", StatusConsumer.as_asgi()),
+    path("ws/status/", StatusConsumer.as_asgi()),
 ]
 
 # Text in each page's <h1> (and above login form).
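The two versions of the websocket route differ only in whether the configured base URL is prepended, which matters when the instance is served under a URL subpath. A small illustration of the route string the prefixed variant builds; the BASE_URL value here is an assumed example, not a value from the repository:

```python
# Hypothetical subpath deployment.
BASE_URL = "/paperless/"

# The prefixed variant registers the status consumer at this route:
print(BASE_URL.lstrip("/") + "ws/status/")  # "paperless/ws/status/"
```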