Mirror of https://github.com/paperless-ngx/paperless-ngx.git, synced 2025-07-30 18:27:45 -05:00
Runs the pre-commit hooks over all the Python files
@@ -1,21 +1,30 @@
 import logging
+import math
 import os
 from contextlib import contextmanager
 
-import math
 from dateutil.parser import isoparse
 from django.conf import settings
-from whoosh import highlight, classify, query
-from whoosh.fields import Schema, TEXT, NUMERIC, KEYWORD, DATETIME, BOOLEAN
-from documents.models import Document
+from whoosh import classify
+from whoosh import highlight
+from whoosh import query
+from whoosh.fields import BOOLEAN
+from whoosh.fields import DATETIME
+from whoosh.fields import KEYWORD
+from whoosh.fields import NUMERIC
+from whoosh.fields import Schema
+from whoosh.fields import TEXT
 from whoosh.highlight import HtmlFormatter
-from whoosh.index import create_in, exists_in, open_dir
+from whoosh.index import create_in
+from whoosh.index import exists_in
+from whoosh.index import open_dir
 from whoosh.qparser import MultifieldParser
 from whoosh.qparser.dateparse import DateParserPlugin
-from whoosh.searching import ResultsPage, Searcher
+from whoosh.searching import ResultsPage
+from whoosh.searching import Searcher
 from whoosh.writing import AsyncWriter
 
+from documents.models import Document
+
 logger = logging.getLogger("paperless.index")
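Most of this first hunk is mechanical: the file (documents/index.py, judging by the logger name and imports) had imports naming several things per statement, and they are split to one name per line, with import math moved into the stdlib group and the first-party Document import moved into its own group. This is the style enforced by import-rewriting hooks such as reorder-python-imports; the repo's actual hook list is not visible in this diff, so treat that attribution as an assumption. The payoff is diff hygiene:

# Before: adding or removing one name rewrites the whole line
from whoosh.index import create_in, exists_in, open_dir

# After: each name owns a line, so a future change touches exactly one line
from whoosh.index import create_in
from whoosh.index import exists_in
from whoosh.index import open_dir
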
@@ -45,7 +54,7 @@ def open_index(recreate=False):
         if exists_in(settings.INDEX_DIR) and not recreate:
             return open_dir(settings.INDEX_DIR, schema=get_schema())
     except Exception:
-        logger.exception(f"Error while opening the index, recreating.")
+        logger.exception("Error while opening the index, recreating.")
 
     if not os.path.isdir(settings.INDEX_DIR):
         os.makedirs(settings.INDEX_DIR, exist_ok=True)
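The one change here drops the f-prefix from a string that contains no placeholders. pyflakes reports that pattern as F541, which makes a flake8 hook the likely trigger, though the diff itself does not say which hook complained. A self-contained illustration:

import logging

logger = logging.getLogger("paperless.index")

# F541: an f-string with no placeholders; the f-prefix does nothing.
logger.exception(f"Error while opening the index, recreating.")
# Equivalent, and lint-clean:
logger.exception("Error while opening the index, recreating.")
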
@@ -138,11 +147,11 @@ class DelayedQuery:
             criterias.append(query.Term("has_type", v == "false"))
         elif k == "created__date__lt":
             criterias.append(
-                query.DateRange("created", start=None, end=isoparse(v))
+                query.DateRange("created", start=None, end=isoparse(v)),
             )
         elif k == "created__date__gt":
             criterias.append(
-                query.DateRange("created", start=isoparse(v), end=None)
+                query.DateRange("created", start=isoparse(v), end=None),
             )
         elif k == "added__date__gt":
             criterias.append(query.DateRange("added", start=isoparse(v), end=None))
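Both edits in this hunk add only a trailing comma after the final argument of a call already split across lines; the add-trailing-comma pre-commit hook does exactly this, though that is an inference rather than something the diff states. For context, the values being appended are Whoosh date-range filters; a minimal sketch of the same construction, with the field name and date as illustrative values:

from dateutil.parser import isoparse
from whoosh import query

# "created" strictly before the given ISO-8601 date; an open start means no lower bound.
created_before = query.DateRange("created", start=None, end=isoparse("2021-01-01"))
# "created" after the given date; an open end means no upper bound.
created_after = query.DateRange("created", start=isoparse("2021-01-01"), end=None)
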
@@ -220,7 +229,7 @@ class DelayedQuery:
                         hit[1],
                     ),
                     page.results.top_n,
-                )
+                ),
             )
 
         self.saved_results[item.start] = page

@@ -240,7 +249,7 @@ class DelayedFullTextQuery(DelayedQuery):
 
         corrected = self.searcher.correct_query(q, q_str)
         if corrected.query != q:
-            corrected_query = corrected.string
+            corrected.query = corrected.string
 
         return q, None
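This is the patch's one behavioral fix rather than a pure reformat: the old line bound the corrected query string to a local name that was never read (pyflakes F841), so the correction was computed and then discarded; the new line stores it on the Correction object itself. A minimal sketch of Whoosh's spelling-correction API, where the index directory, field name, and query text are assumptions for illustration:

from whoosh import index, qparser

ix = index.open_dir("indexdir")  # a hypothetical existing index
with ix.searcher() as searcher:
    parser = qparser.QueryParser("content", ix.schema)
    q = parser.parse("documnt")  # misspelled on purpose
    # correct_query() returns a Correction: .query is the corrected Query
    # object, .string the corrected query text.
    corrected = searcher.correct_query(q, "documnt")
    if corrected.query != q:
        print("Did you mean:", corrected.string)
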
@@ -252,10 +261,14 @@ class DelayedMoreLikeThisQuery(DelayedQuery):
 
         docnum = self.searcher.document_number(id=more_like_doc_id)
         kts = self.searcher.key_terms_from_text(
-            "content", content, numterms=20, model=classify.Bo1Model, normalize=False
+            "content",
+            content,
+            numterms=20,
+            model=classify.Bo1Model,
+            normalize=False,
         )
         q = query.Or(
-            [query.Term("content", word, boost=weight) for word, weight in kts]
+            [query.Term("content", word, boost=weight) for word, weight in kts],
         )
         mask = {docnum}
 
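Formatting aside, this hunk shows the "more like this" strategy in full: extract the most characteristic terms of a document's text with the Bo1 divergence-from-randomness model, OR those terms together with each boosted by its score, and mask out the source document so it cannot match itself. A condensed sketch of the same approach as a standalone function (the searcher, document text, and docnum are parameters here rather than the class state used above):

from whoosh import classify, query

def more_like(searcher, content, docnum):
    # Top 20 characteristic terms of the text, as (term, weight) pairs.
    kts = searcher.key_terms_from_text(
        "content",
        content,
        numterms=20,
        model=classify.Bo1Model,
        normalize=False,
    )
    # OR the terms, boosting each by its weight; mask excludes the
    # source document's own docnum from the results.
    q = query.Or(
        [query.Term("content", word, boost=weight) for word, weight in kts],
    )
    return searcher.search(q, mask={docnum})
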
@@ -266,7 +279,9 @@ def autocomplete(ix, term, limit=10):
     with ix.reader() as reader:
         terms = []
         for (score, t) in reader.most_distinctive_terms(
-            "content", number=limit, prefix=term.lower()
+            "content",
+            number=limit,
+            prefix=term.lower(),
         ):
             terms.append(t)
         return terms
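Reflowed, the autocomplete function reads in one piece: it walks the index terms of the "content" field that start with the typed prefix, ranked by tf-idf distinctiveness, and returns up to limit of them. A hypothetical usage, assuming the open_index() helper from earlier in this file and an already-populated index; the prefix is illustrative:

ix = open_index()
suggestions = autocomplete(ix, "invoi", limit=5)  # e.g. ["invoice", ...]
print(suggestions)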