Mirror of https://github.com/paperless-ngx/paperless-ngx.git, synced 2025-07-28 18:24:38 -05:00.
Upgrades black to v23, upgrades ruff
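This commit applies two mechanical changes across the codebase. First, Black v23's stable style removes blank lines at the beginning of class and function bodies, which accounts for the bulk of the deletions below. Second, the upgraded formatter/linter run drops redundant parentheses around tuple targets in for loops (visible in the autocomplete and FileInfo.from_filename hunks). A minimal before/after sketch, adapted from lines in the diff below (the most_distinctive_terms call is simplified here for illustration):

    # Before (Black 22 style): a blank line was allowed at the start of a block.
    class CorrespondentAdmin(GuardedModelAdmin):

        list_display = ("name", "match", "matching_algorithm")

    # After (Black 23 stable style): empty lines at the beginning of a block are removed.
    class CorrespondentAdmin(GuardedModelAdmin):
        list_display = ("name", "match", "matching_algorithm")

    # Before: redundant parentheses around the unpacked loop target.
    for (score, t) in reader.most_distinctive_terms("content"):
        ...

    # After: the parentheses are dropped; the tuple unpacking is unchanged.
    for score, t in reader.most_distinctive_terms("content"):
        ...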
@@ -13,28 +13,24 @@ from .models import Tag

class CorrespondentAdmin(GuardedModelAdmin):
    list_display = ("name", "match", "matching_algorithm")
    list_filter = ("matching_algorithm",)
    list_editable = ("match", "matching_algorithm")


class TagAdmin(GuardedModelAdmin):
    list_display = ("name", "color", "match", "matching_algorithm")
    list_filter = ("color", "matching_algorithm")
    list_editable = ("color", "match", "matching_algorithm")


class DocumentTypeAdmin(GuardedModelAdmin):
    list_display = ("name", "match", "matching_algorithm")
    list_filter = ("matching_algorithm",)
    list_editable = ("match", "matching_algorithm")


class DocumentAdmin(GuardedModelAdmin):
    search_fields = ("correspondent__name", "title", "content", "tags__name")
    readonly_fields = (
        "added",

@@ -99,7 +95,6 @@ class RuleInline(admin.TabularInline):

class SavedViewAdmin(GuardedModelAdmin):
    list_display = ("name", "owner")

    inlines = [RuleInline]

@@ -116,7 +111,6 @@ class StoragePathAdmin(GuardedModelAdmin):

class TaskAdmin(admin.ModelAdmin):
    list_display = ("task_id", "task_file_name", "task_name", "date_done", "status")
    list_filter = ("status", "date_done", "task_file_name", "task_name")
    search_fields = ("task_name", "task_id", "status")

@@ -133,7 +127,6 @@ class TaskAdmin(admin.ModelAdmin):

class NotesAdmin(GuardedModelAdmin):
    list_display = ("user", "created", "note", "document")
    list_filter = ("created", "user")
    list_display_links = ("created",)
@@ -3,7 +3,6 @@ from django.utils.translation import gettext_lazy as _

class DocumentsConfig(AppConfig):
    name = "documents"

    verbose_name = _("Documents")
@@ -55,7 +55,6 @@ def set_document_type(doc_ids, document_type):

def add_tag(doc_ids, tag):
    qs = Document.objects.filter(Q(id__in=doc_ids) & ~Q(tags__id=tag))
    affected_docs = [doc.id for doc in qs]

@@ -71,7 +70,6 @@ def add_tag(doc_ids, tag):

def remove_tag(doc_ids, tag):
    qs = Document.objects.filter(Q(id__in=doc_ids) & Q(tags__id=tag))
    affected_docs = [doc.id for doc in qs]

@@ -123,7 +121,6 @@ def delete(doc_ids):

def redo_ocr(doc_ids):
    for document_id in doc_ids:
        update_document_archive_file.delay(
            document_id=document_id,

@@ -133,7 +130,6 @@ def redo_ocr(doc_ids):

def set_permissions(doc_ids, set_permissions, owner=None):
    qs = Document.objects.filter(id__in=doc_ids)

    qs.update(owner=owner)
@@ -23,7 +23,6 @@ def changed_password_check(app_configs, **kwargs):
        return []  # No documents table yet

    if encrypted_doc:
        if not settings.PASSPHRASE:
            return [
                Error(

@@ -53,7 +52,6 @@ def changed_password_check(app_configs, **kwargs):

@register()
def parser_check(app_configs, **kwargs):
    parsers = []
    for response in document_consumer_declaration.send(None):
        parsers.append(response[1])
@@ -60,7 +60,6 @@ def load_classifier() -> Optional["DocumentClassifier"]:

class DocumentClassifier:
    # v7 - Updated scikit-learn package version
    # v8 - Added storage path classifier
    # v9 - Changed from hashing to time/ids for re-train check

@@ -141,7 +140,6 @@ class DocumentClassifier:
        target_file_temp.rename(target_file)

    def train(self):
        # Get non-inbox documents
        docs_queryset = Document.objects.exclude(
            tags__is_inbox_tag=True,

@@ -160,7 +158,6 @@ class DocumentClassifier:
        logger.debug("Gathering data from database...")
        hasher = sha256()
        for doc in docs_queryset:
            y = -1
            dt = doc.document_type
            if dt and dt.matching_algorithm == MatchingModel.MATCH_AUTO:

@@ -335,7 +332,6 @@ class DocumentClassifier:
        # If the NLTK language is supported, do further processing
        if settings.NLTK_LANGUAGE is not None and settings.NLTK_ENABLED:
            import nltk
            from nltk.corpus import stopwords
            from nltk.stem import SnowballStemmer
@@ -60,7 +60,6 @@ MESSAGE_FINISHED = "finished"

class Consumer(LoggingMixin):
    logging_name = "paperless.consumer"

    def _send_progress(

@@ -426,7 +425,6 @@ class Consumer(LoggingMixin):
        # in the system. This will be a transaction and reasonably fast.
        try:
            with transaction.atomic():
                # store the document.
                document = self._store(text=text, date=date, mime_type=mime_type)

@@ -520,7 +518,6 @@ class Consumer(LoggingMixin):
        date: Optional[datetime.datetime],
        mime_type: str,
    ) -> Document:
        # If someone gave us the original filename, use it instead of doc.
        file_info = FileInfo.from_filename(self.filename)
@@ -82,7 +82,6 @@ class TitleContentFilter(Filter):

class DocumentFilterSet(FilterSet):
    is_tagged = BooleanFilter(
        label="Is tagged",
        field_name="tags",
@@ -331,7 +331,7 @@ class DelayedMoreLikeThisQuery(DelayedQuery):
def autocomplete(ix, term, limit=10):
    with ix.reader() as reader:
        terms = []
-        for (score, t) in reader.most_distinctive_terms(
+        for score, t in reader.most_distinctive_terms(
            "content",
            number=limit,
            prefix=term.lower(),
@@ -3,7 +3,6 @@ import uuid

class LoggingMixin:
    logging_group = None

    logging_name = None
@@ -9,14 +9,12 @@ from paperless.db import GnuPG

class Command(BaseCommand):
    help = (
        "This is how you migrate your stored documents from an encrypted "
        "state to an unencrypted one (or vice-versa)"
    )

    def add_arguments(self, parser):
        parser.add_argument(
            "--passphrase",
            help="If PAPERLESS_PASSPHRASE isn't set already, you need to "

@@ -24,7 +22,6 @@ class Command(BaseCommand):
        )

    def handle(self, *args, **options):
        try:
            print(
                "\n\nWARNING: This script is going to work directly on your "

@@ -49,13 +46,11 @@ class Command(BaseCommand):

    @staticmethod
    def __gpg_to_unencrypted(passphrase):
        encrypted_files = Document.objects.filter(
            storage_type=Document.STORAGE_TYPE_GPG,
        )

        for document in encrypted_files:
            print(f"Decrypting {document}".encode())

            old_paths = [document.source_path, document.thumbnail_path]
@@ -14,7 +14,6 @@ logger = logging.getLogger("paperless.management.archiver")
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
|
||||
help = """
|
||||
Using the current classification model, assigns correspondents, tags
|
||||
and document types to all documents, effectively allowing you to
|
||||
@@ -51,7 +50,6 @@ class Command(BaseCommand):
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
|
||||
os.makedirs(settings.SCRATCH_DIR, exist_ok=True)
|
||||
|
||||
overwrite = options["overwrite"]
|
||||
@@ -74,7 +72,6 @@ class Command(BaseCommand):
|
||||
db.connections.close_all()
|
||||
|
||||
try:
|
||||
|
||||
logging.getLogger().handlers[0].level = logging.ERROR
|
||||
with multiprocessing.Pool(processes=settings.TASK_WORKERS) as pool:
|
||||
list(
|
||||
|
@@ -4,7 +4,6 @@ from documents.tasks import train_classifier

class Command(BaseCommand):
    help = """
        Trains the classifier on your data and saves the resulting models to a
        file. The document consumer will then automatically use this new model.
@@ -40,7 +40,6 @@ from paperless_mail.models import MailRule

class Command(BaseCommand):
    help = """
        Decrypt and rename all files in our collection into a given target
        directory. And include a manifest file containing document data for

@@ -144,7 +143,6 @@ class Command(BaseCommand):
        self.no_thumbnail = False

    def handle(self, *args, **options):
        self.target = Path(options["target"]).resolve()
        self.split_manifest = options["split_manifest"]
        self.compare_checksums = options["compare_checksums"]
@@ -36,7 +36,6 @@ def disable_signal(sig, receiver, sender):

class Command(BaseCommand):
    help = """
        Using a manifest.json file, load the data from there, and import the
        documents it refers to.

@@ -61,7 +60,6 @@ class Command(BaseCommand):
        self.version = None

    def handle(self, *args, **options):
        logging.getLogger().handlers[0].level = logging.ERROR

        self.source = Path(options["source"]).resolve()

@@ -163,7 +161,6 @@ class Command(BaseCommand):
        """
        self.stdout.write("Checking the manifest")
        for record in self.manifest:
            if record["model"] != "documents.document":
                continue

@@ -205,7 +202,6 @@ class Command(BaseCommand):
            ) from e

    def _import_files_from_manifest(self, progress_bar_disable):
        os.makedirs(settings.ORIGINALS_DIR, exist_ok=True)
        os.makedirs(settings.THUMBNAIL_DIR, exist_ok=True)
        os.makedirs(settings.ARCHIVE_DIR, exist_ok=True)

@@ -217,7 +213,6 @@ class Command(BaseCommand):
        )

        for record in tqdm.tqdm(manifest_documents, disable=progress_bar_disable):
            document = Document.objects.get(pk=record["pk"])

            doc_file = record[EXPORTER_FILE_NAME]
@@ -6,7 +6,6 @@ from documents.tasks import index_reindex

class Command(BaseCommand):
    help = "Manages the document index."

    def add_arguments(self, parser):
@@ -8,7 +8,6 @@ from documents.models import Document

class Command(BaseCommand):
    help = """
        This will rename all documents to match the latest filename format.
    """.replace(

@@ -25,7 +24,6 @@ class Command(BaseCommand):
        )

    def handle(self, *args, **options):
        logging.getLogger().handlers[0].level = logging.ERROR

        for document in tqdm.tqdm(
@@ -14,7 +14,6 @@ logger = logging.getLogger("paperless.management.retagger")
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
|
||||
help = """
|
||||
Using the current classification model, assigns correspondents, tags
|
||||
and document types to all documents, effectively allowing you to
|
||||
@@ -78,7 +77,6 @@ class Command(BaseCommand):
|
||||
classifier = load_classifier()
|
||||
|
||||
for document in tqdm.tqdm(documents, disable=options["no_progress_bar"]):
|
||||
|
||||
if options["correspondent"]:
|
||||
set_correspondent(
|
||||
sender=None,
|
||||
|
@@ -4,7 +4,6 @@ from documents.sanity_checker import check_sanity

class Command(BaseCommand):
    help = """
        This command checks your document archive for issues.
    """.replace(

@@ -21,7 +20,6 @@ class Command(BaseCommand):
        )

    def handle(self, *args, **options):
        messages = check_sanity(progress=not options["no_progress_bar"])

        messages.log_messages()
@@ -21,7 +21,6 @@ def _process_document(doc_in):
        return

    try:
        thumb = parser.get_thumbnail(
            document.source_path,
            document.mime_type,

@@ -34,7 +33,6 @@ def _process_document(doc_in):

class Command(BaseCommand):
    help = """
        This will regenerate the thumbnails for all documents.
    """.replace(
@@ -8,7 +8,6 @@ logger = logging.getLogger("paperless.management.superuser")
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
|
||||
help = """
|
||||
Creates a Django superuser:
|
||||
User named: admin
|
||||
@@ -24,7 +23,6 @@ class Command(BaseCommand):
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
|
||||
username = os.getenv("PAPERLESS_ADMIN_USER", "admin")
|
||||
mail = os.getenv("PAPERLESS_ADMIN_MAIL", "root@localhost")
|
||||
password = os.getenv("PAPERLESS_ADMIN_PASSWORD")
|
||||
|
@@ -6,7 +6,6 @@ from django.db import models

class Migration(migrations.Migration):
    initial = True

    dependencies = []

@@ -6,7 +6,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0001_initial"),
    ]

@@ -9,7 +9,6 @@ DOCUMENT_SENDER_MAP = {}

def move_sender_strings_to_sender_model(apps, schema_editor):
    sender_model = apps.get_model("documents", "Sender")
    document_model = apps.get_model("documents", "Document")

@@ -6,7 +6,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0003_sender"),
    ]

@@ -4,7 +4,6 @@ from django.db import migrations

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0004_auto_20160114_1844"),
    ]

@@ -5,7 +5,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0005_auto_20160123_0313"),
    ]

@@ -5,7 +5,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0006_auto_20160123_0430"),
    ]

@@ -5,7 +5,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0007_auto_20160126_2114"),
    ]

@@ -5,7 +5,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0008_document_file_type"),
    ]

@@ -5,7 +5,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0009_auto_20160214_0040"),
    ]

@@ -34,7 +34,6 @@ class GnuPG:

def move_documents_and_create_thumbnails(apps, schema_editor):
    os.makedirs(
        os.path.join(settings.MEDIA_ROOT, "documents", "originals"),
        exist_ok=True,

@@ -67,7 +66,6 @@ def move_documents_and_create_thumbnails(apps, schema_editor):
        pass

    for f in sorted(documents):
        if not f.endswith("gpg"):
            continue

@@ -6,7 +6,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0012_auto_20160305_0040"),
    ]

@@ -75,7 +75,6 @@ class Document:

def set_checksums(apps, schema_editor):
    document_model = apps.get_model("documents", "Document")

    if not document_model.objects.all().exists():

@@ -95,7 +94,6 @@ def set_checksums(apps, schema_editor):
    sums = {}
    for d in document_model.objects.all():
        document = Document(d)

        print(

@@ -5,7 +5,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0014_document_checksum"),
    ]

@@ -6,7 +6,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0015_add_insensitive_to_match"),
    ]

@@ -5,7 +5,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0016_auto_20170325_1558"),
    ]

@@ -6,7 +6,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0017_auto_20170512_0507"),
    ]

@@ -5,7 +5,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0020_document_added"),
    ]

@@ -20,7 +20,6 @@ def re_slug_all_the_things(apps, schema_editor):

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0021_document_storage_type"),
    ]

@@ -19,7 +19,6 @@ def set_filename(apps, schema_editor):

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0022_auto_20181007_1420"),
    ]

@@ -15,7 +15,6 @@ def logs_set_default_group(apps, schema_editor):

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0023_document_current_filename"),
    ]

@@ -4,7 +4,6 @@ from django.db import migrations

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1000_update_paperless_all"),
    ]

@@ -5,7 +5,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1001_auto_20201109_1636"),
    ]

@@ -30,7 +30,6 @@ def add_mime_types(apps, schema_editor):
    for d in documents:
        f = open(source_path(d), "rb")
        if d.storage_type == STORAGE_TYPE_GPG:
            data = GnuPG.decrypted(f)
        else:
            data = f.read(1024)

@@ -51,7 +50,6 @@ def add_file_extensions(apps, schema_editor):

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1002_auto_20201111_1105"),
    ]

@@ -5,7 +5,6 @@ from django.db.migrations import RunPython

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1003_mime_types"),
    ]

@@ -5,7 +5,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1004_sanity_check_schedule"),
    ]

@@ -4,7 +4,6 @@ from django.db import migrations

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1005_checksums"),
    ]

@@ -7,7 +7,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("documents", "1006_auto_20201208_2209"),

@@ -5,7 +5,6 @@ from django.db import migrations

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1007_savedview_savedviewfilterrule"),
    ]

@@ -4,7 +4,6 @@ from django.db import migrations

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1008_auto_20201216_1736"),
    ]

@@ -5,7 +5,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1009_auto_20201216_2005"),
    ]

@@ -8,7 +8,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("documents", "1010_auto_20210101_2159"),

@@ -254,7 +254,6 @@ def move_old_to_new_locations(apps, schema_editor):
    )

    for doc in Document.objects.filter(archive_checksum__isnull=False):
        if doc.id in affected_document_ids:
            old_path = archive_path_old(doc)
            # remove affected archive versions

@@ -304,7 +303,6 @@ def move_new_to_old_locations(apps, schema_editor):

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1011_auto_20210101_2340"),
    ]

@@ -47,7 +47,6 @@ def reverse(apps, schema_editor):

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1012_fix_archive_files"),
    ]

@@ -5,7 +5,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1013_migrate_tag_colour"),
    ]

@@ -18,7 +18,6 @@ def remove_null_characters(apps, schema_editor):

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1014_auto_20210228_1614"),
    ]

@@ -5,7 +5,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1015_remove_null_characters"),
    ]

@@ -5,7 +5,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1016_auto_20210317_1351"),
    ]

@@ -5,7 +5,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1017_alter_savedviewfilterrule_rule_type"),
    ]

@@ -6,7 +6,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1018_alter_savedviewfilterrule_value"),
    ]

@@ -7,7 +7,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("documents", "1018_alter_savedviewfilterrule_value"),

@@ -4,7 +4,6 @@ from django.db import migrations

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1019_storagepath_document_storage_path"),
        ("documents", "1019_uisettings"),

@@ -17,7 +17,6 @@ logger = logging.getLogger("paperless.migrations")
def _do_convert(work_package):
    existing_thumbnail, converted_thumbnail = work_package
    try:
        logger.info(f"Converting thumbnail: {existing_thumbnail}")

        # Run actual conversion

@@ -51,7 +50,6 @@ def _convert_thumbnails_to_webp(apps, schema_editor):
    start = time.time()

    with tempfile.TemporaryDirectory() as tempdir:
        work_packages = []

        for file in Path(settings.THUMBNAIL_DIR).glob("*.png"):

@@ -73,7 +71,6 @@ def _convert_thumbnails_to_webp(apps, schema_editor):
            )

        if len(work_packages):
            logger.info(
                "\n\n"
                " This is a one-time only migration to convert thumbnails for all of your\n"

@@ -95,7 +92,6 @@ def _convert_thumbnails_to_webp(apps, schema_editor):

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1020_merge_20220518_1839"),
    ]

@@ -6,7 +6,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1021_webp_thumbnail_conversion"),
    ]

@@ -5,7 +5,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1023_add_comments"),
    ]

@@ -5,7 +5,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1024_document_original_filename"),
    ]

@@ -6,7 +6,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("django_celery_results", "0011_taskresult_periodic_task_name"),
        ("documents", "1025_alter_savedviewfilterrule_rule_type"),

@@ -6,7 +6,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1026_transition_to_celery"),
    ]

@@ -4,7 +4,6 @@ from django.db import migrations

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1027_remove_paperlesstask_attempted_task_and_more"),
    ]

@@ -6,7 +6,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1028_remove_paperlesstask_task_args_and_more"),
    ]

@@ -5,7 +5,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1029_alter_document_archive_serial_number"),
    ]

@@ -7,7 +7,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("documents", "1030_alter_paperlesstask_task_file_name"),

@@ -5,7 +5,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1031_remove_savedview_user_correspondent_owner_and_more"),
    ]

@@ -5,7 +5,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1032_alter_correspondent_matching_algorithm_and_more"),
    ]

@@ -5,7 +5,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1033_alter_documenttype_options_alter_tag_options_and_more"),
    ]

@@ -7,7 +7,6 @@ from django.db import models

class Migration(migrations.Migration):
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("documents", "1034_alter_savedviewfilterrule_rule_type"),
@@ -38,7 +38,6 @@ class ModelWithOwner(models.Model):

class MatchingModel(ModelWithOwner):
    MATCH_NONE = 0
    MATCH_ANY = 1
    MATCH_ALL = 2

@@ -95,7 +94,6 @@ class Correspondent(MatchingModel):

class Tag(MatchingModel):
    color = models.CharField(_("color"), max_length=7, default="#a6cee3")

    is_inbox_tag = models.BooleanField(

@@ -130,7 +128,6 @@ class StoragePath(MatchingModel):

class Document(ModelWithOwner):
    STORAGE_TYPE_UNENCRYPTED = "unencrypted"
    STORAGE_TYPE_GPG = "gpg"
    STORAGE_TYPES = (

@@ -280,7 +277,6 @@ class Document(ModelWithOwner):
        verbose_name_plural = _("documents")

    def __str__(self) -> str:
        # Convert UTC database time to local time
        created = datetime.date.isoformat(timezone.localdate(self.created))

@@ -365,7 +361,6 @@ class Document(ModelWithOwner):

class Log(models.Model):
    LEVELS = (
        (logging.DEBUG, _("debug")),
        (logging.INFO, _("information")),

@@ -397,7 +392,6 @@
class SavedView(ModelWithOwner):
    class Meta:
        ordering = ("name",)
        verbose_name = _("saved view")
        verbose_name_plural = _("saved views")

@@ -481,7 +475,6 @@ class SavedViewFilterRule(models.Model):
# the filename, if possible, as a higher priority than either document filename or
# content parsing
class FileInfo:
    REGEXES = OrderedDict(
        [
            (

@@ -503,7 +496,6 @@ class FileInfo:
        tags=(),
        extension=None,
    ):
        self.created = created
        self.title = title
        self.extension = extension

@@ -530,7 +522,7 @@ class FileInfo:
    def from_filename(cls, filename) -> "FileInfo":
        # Mutate filename in-place before parsing its components
        # by applying at most one of the configured transformations.
-        for (pattern, repl) in settings.FILENAME_PARSE_TRANSFORMS:
+        for pattern, repl in settings.FILENAME_PARSE_TRANSFORMS:
            (filename, count) = pattern.subn(repl, filename)
            if count:
                break

@@ -564,7 +556,6 @@ class FileInfo:

# Extending User Model Using a One-To-One Link
class UiSettings(models.Model):
    user = models.OneToOneField(
        User,
        on_delete=models.CASCADE,
@@ -140,7 +140,6 @@ def run_convert(
    extra=None,
    logging_group=None,
) -> None:
    environment = os.environ.copy()
    if settings.CONVERT_MEMORY_LIMIT:
        environment["MAGICK_MEMORY_LIMIT"] = settings.CONVERT_MEMORY_LIMIT
@@ -2,6 +2,7 @@ from django.contrib.auth.models import Group
from django.contrib.auth.models import Permission
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
+from guardian.core import ObjectPermissionChecker
from guardian.models import GroupObjectPermission
from guardian.shortcuts import assign_perm
from guardian.shortcuts import get_objects_for_user

@@ -9,7 +10,6 @@ from guardian.shortcuts import get_users_with_perms
from guardian.shortcuts import remove_perm
from rest_framework.permissions import BasePermission
from rest_framework.permissions import DjangoObjectPermissions
-from guardian.core import ObjectPermissionChecker


class PaperlessObjectPermissions(DjangoObjectPermissions):
@@ -33,7 +33,6 @@ class SanityCheckMessages:
        if len(self._messages) == 0:
            logger.info("Sanity checker detected no issues.")
        else:
            # Query once
            all_docs = Document.objects.all()
@@ -58,7 +58,6 @@ class DynamicFieldsModelSerializer(serializers.ModelSerializer):

class MatchingModelSerializer(serializers.ModelSerializer):
    document_count = serializers.IntegerField(read_only=True)

    def get_slug(self, obj):

@@ -221,7 +220,6 @@ class OwnedObjectSerializer(serializers.ModelSerializer, SetPermissionsMixin):

class CorrespondentSerializer(MatchingModelSerializer, OwnedObjectSerializer):
    last_correspondence = serializers.DateTimeField(read_only=True)

    class Meta:

@@ -259,7 +257,6 @@ class DocumentTypeSerializer(MatchingModelSerializer, OwnedObjectSerializer):

class ColorField(serializers.Field):
    COLOURS = (
        (1, "#a6cee3"),
        (2, "#1f78b4"),

@@ -290,7 +287,6 @@ class ColorField(serializers.Field):

class TagSerializerVersion1(MatchingModelSerializer, OwnedObjectSerializer):
    colour = ColorField(source="color", default="#a6cee3")

    class Meta:

@@ -373,7 +369,6 @@ class StoragePathField(serializers.PrimaryKeyRelatedField):

class DocumentSerializer(OwnedObjectSerializer, DynamicFieldsModelSerializer):
    correspondent = CorrespondentField(allow_null=True)
    tags = TagsField(many=True)
    document_type = DocumentTypeField(allow_null=True)

@@ -454,7 +449,6 @@ class SavedViewFilterRuleSerializer(serializers.ModelSerializer):

class SavedViewSerializer(OwnedObjectSerializer):
    filter_rules = SavedViewFilterRuleSerializer(many=True)

    class Meta:

@@ -500,7 +494,6 @@ class SavedViewSerializer(OwnedObjectSerializer):

class DocumentListSerializer(serializers.Serializer):
    documents = serializers.ListField(
        required=True,
        label="Documents",

@@ -525,7 +518,6 @@ class DocumentListSerializer(serializers.Serializer):

class BulkEditSerializer(DocumentListSerializer, SetPermissionsMixin):
    method = serializers.ChoiceField(
        choices=[
            "set_correspondent",

@@ -651,7 +643,6 @@ class BulkEditSerializer(DocumentListSerializer, SetPermissionsMixin):
            self._validate_owner(parameters["owner"])

    def validate(self, attrs):
        method = attrs["method"]
        parameters = attrs["parameters"]

@@ -672,7 +663,6 @@ class BulkEditSerializer(DocumentListSerializer, SetPermissionsMixin):

class PostDocumentSerializer(serializers.Serializer):
    created = serializers.DateTimeField(
        label="Created",
        allow_null=True,

@@ -754,7 +744,6 @@ class PostDocumentSerializer(serializers.Serializer):

class BulkDownloadSerializer(DocumentListSerializer):
    content = serializers.ChoiceField(
        choices=["archive", "originals", "both"],
        default="archive",

@@ -905,7 +894,6 @@ class TasksViewSerializer(serializers.ModelSerializer):

class AcknowledgeTasksViewSerializer(serializers.Serializer):
    tasks = serializers.ListField(
        required=True,
        label="Tasks",
@@ -175,7 +175,6 @@ def set_tags(
    color=False,
    **kwargs,
):
    if replace:
        Document.tags.through.objects.filter(document=document).exclude(
            Q(tag__is_inbox_tag=True),

@@ -376,7 +375,6 @@ def validate_move(instance, old_path, new_path):
@receiver(models.signals.m2m_changed, sender=Document.tags.through)
@receiver(models.signals.post_save, sender=Document)
def update_filename_and_move_files(sender, instance: Document, **kwargs):
    if not instance.filename:
        # Can't update the filename if there is no filename to begin with
        # This happens when the consumer creates a new document.

@@ -390,7 +388,6 @@ def update_filename_and_move_files(sender, instance: Document, **kwargs):
    with FileLock(settings.MEDIA_LOCK):
        try:
            # If this was waiting for the lock, the filename or archive_filename
            # of this document may have been updated. This happens if multiple updates
            # get queued from the UI for the same document

@@ -407,7 +404,6 @@ def update_filename_and_move_files(sender, instance: Document, **kwargs):
            old_archive_path = instance.archive_path

            if instance.has_archive_version:
                instance.archive_filename = generate_unique_filename(
                    instance,
                    archive_filename=True,

@@ -487,7 +483,6 @@ def update_filename_and_move_files(sender, instance: Document, **kwargs):

def set_log_entry(sender, document=None, logging_group=None, **kwargs):
    ct = ContentType.objects.get(model="document")
    user = User.objects.get(username="consumer")
@@ -65,7 +65,6 @@ def train_classifier():
        and not Correspondent.objects.filter(matching_algorithm=Tag.MATCH_AUTO).exists()
        and not StoragePath.objects.filter(matching_algorithm=Tag.MATCH_AUTO).exists()
    ):
        return

    classifier = load_classifier()

@@ -91,7 +90,6 @@ def consume_file(
    input_doc: ConsumableDocument,
    overrides: Optional[DocumentMetadataOverrides] = None,
):
    # Default no overrides
    if overrides is None:
        overrides = DocumentMetadataOverrides()

@@ -117,7 +115,6 @@ def consume_file(
    )

    if document_list:
        # If the file is an upload, it's in the scratch directory
        # Move it to consume directory to be picked up
        # Otherwise, use the current parent to keep possible tags
@@ -55,7 +55,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        self.client.force_authenticate(user=self.user)

    def testDocuments(self):
        response = self.client.get("/api/documents/").data

        self.assertEqual(response["count"], 0)

@@ -171,7 +170,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        self.assertEqual(len(results[0]), 0)

    def test_document_actions(self):
        _, filename = tempfile.mkstemp(dir=self.dirs.originals_dir)

        content = b"This is a test"

@@ -270,7 +268,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):

    @override_settings(FILENAME_FORMAT="")
    def test_download_with_archive(self):
        content = b"This is a test"
        content_archive = b"This is the same test but archived"

@@ -312,7 +309,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        self.assertEqual(response.content, content)

    def test_document_actions_not_existing_file(self):
        doc = Document.objects.create(
            title="none",
            filename=os.path.basename("asd"),

@@ -329,7 +325,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_document_filters(self):
        doc1 = Document.objects.create(
            title="none1",
            checksum="A",

@@ -427,7 +422,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        self.assertEqual(len(results), 0)

    def test_documents_title_content_filter(self):
        doc1 = Document.objects.create(
            title="title A",
            content="content A",

@@ -1101,7 +1095,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        )

    def test_statistics(self):
        doc1 = Document.objects.create(
            title="none1",
            checksum="A",

@@ -1149,7 +1142,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        self.assertEqual(response.data["inbox_tag"], None)

    def test_upload(self):
        self.consume_file_mock.return_value = celery.result.AsyncResult(
            id=str(uuid.uuid4()),
        )

@@ -1177,7 +1169,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        self.assertIsNone(overrides.tag_ids)

    def test_upload_empty_metadata(self):
        self.consume_file_mock.return_value = celery.result.AsyncResult(
            id=str(uuid.uuid4()),
        )

@@ -1205,7 +1196,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        self.assertIsNone(overrides.tag_ids)

    def test_upload_invalid_form(self):
        self.consume_file_mock.return_value = celery.result.AsyncResult(
            id=str(uuid.uuid4()),
        )

@@ -1222,7 +1212,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        self.consume_file_mock.assert_not_called()

    def test_upload_invalid_file(self):
        self.consume_file_mock.return_value = celery.result.AsyncResult(
            id=str(uuid.uuid4()),
        )

@@ -1239,7 +1228,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        self.consume_file_mock.assert_not_called()

    def test_upload_with_title(self):
        self.consume_file_mock.return_value = celery.result.AsyncResult(
            id=str(uuid.uuid4()),
        )

@@ -1264,7 +1252,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        self.assertIsNone(overrides.tag_ids)

    def test_upload_with_correspondent(self):
        self.consume_file_mock.return_value = celery.result.AsyncResult(
            id=str(uuid.uuid4()),
        )

@@ -1290,7 +1277,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        self.assertIsNone(overrides.tag_ids)

    def test_upload_with_invalid_correspondent(self):
        self.consume_file_mock.return_value = celery.result.AsyncResult(
            id=str(uuid.uuid4()),
        )

@@ -1308,7 +1294,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        self.consume_file_mock.assert_not_called()

    def test_upload_with_document_type(self):
        self.consume_file_mock.return_value = celery.result.AsyncResult(
            id=str(uuid.uuid4()),
        )

@@ -1334,7 +1319,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        self.assertIsNone(overrides.tag_ids)

    def test_upload_with_invalid_document_type(self):
        self.consume_file_mock.return_value = celery.result.AsyncResult(
            id=str(uuid.uuid4()),
        )

@@ -1352,7 +1336,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        self.consume_file_mock.assert_not_called()

    def test_upload_with_tags(self):
        self.consume_file_mock.return_value = celery.result.AsyncResult(
            id=str(uuid.uuid4()),
        )

@@ -1379,7 +1362,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        self.assertIsNone(overrides.title)

    def test_upload_with_invalid_tags(self):
        self.consume_file_mock.return_value = celery.result.AsyncResult(
            id=str(uuid.uuid4()),
        )

@@ -1399,7 +1381,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        self.consume_file_mock.assert_not_called()

    def test_upload_with_created(self):
        self.consume_file_mock.return_value = celery.result.AsyncResult(
            id=str(uuid.uuid4()),
        )

@@ -1431,7 +1412,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        self.assertEqual(overrides.created, created)

    def test_upload_with_asn(self):
        self.consume_file_mock.return_value = celery.result.AsyncResult(
            id=str(uuid.uuid4()),
        )

@@ -1655,7 +1635,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        )

    def test_create_update_patch(self):
        User.objects.create_user("user1")

        view = {

@@ -2134,7 +2113,6 @@ class TestDocumentApiV2(DirectoriesMixin, APITestCase):

class TestApiUiSettings(DirectoriesMixin, APITestCase):
    ENDPOINT = "/api/ui_settings/"

    def setUp(self):

@@ -2930,7 +2908,6 @@ class TestBulkEdit(DirectoriesMixin, APITestCase):

class TestBulkDownload(DirectoriesMixin, APITestCase):
    ENDPOINT = "/api/documents/bulk_download/"

    def setUp(self):

@@ -3252,7 +3229,6 @@ class TestBulkDownload(DirectoriesMixin, APITestCase):

class TestApiAuth(DirectoriesMixin, APITestCase):
    def test_auth_required(self):
        d = Document.objects.create(title="Test")

        self.assertEqual(

@@ -3317,7 +3293,6 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
        )

    def test_api_version_no_auth(self):
        response = self.client.get("/api/")
        self.assertNotIn("X-Api-Version", response)
        self.assertNotIn("X-Version", response)

@@ -3430,7 +3405,6 @@ class TestApiRemoteVersion(DirectoriesMixin, APITestCase):

    @mock.patch("urllib.request.urlopen")
    def test_remote_version_enabled_no_update_prefix(self, urlopen_mock):
        cm = MagicMock()
        cm.getcode.return_value = status.HTTP_200_OK
        cm.read.return_value = json.dumps({"tag_name": "ngx-1.6.0"}).encode()

@@ -3450,7 +3424,6 @@ class TestApiRemoteVersion(DirectoriesMixin, APITestCase):

    @mock.patch("urllib.request.urlopen")
    def test_remote_version_enabled_no_update_no_prefix(self, urlopen_mock):
        cm = MagicMock()
        cm.getcode.return_value = status.HTTP_200_OK
        cm.read.return_value = json.dumps(

@@ -3472,7 +3445,6 @@ class TestApiRemoteVersion(DirectoriesMixin, APITestCase):

    @mock.patch("urllib.request.urlopen")
    def test_remote_version_enabled_update(self, urlopen_mock):
        new_version = (
            version.__version__[0],
            version.__version__[1],

@@ -3501,7 +3473,6 @@ class TestApiRemoteVersion(DirectoriesMixin, APITestCase):

    @mock.patch("urllib.request.urlopen")
    def test_remote_version_bad_json(self, urlopen_mock):
        cm = MagicMock()
        cm.getcode.return_value = status.HTTP_200_OK
        cm.read.return_value = b'{ "blah":'

@@ -3521,7 +3492,6 @@ class TestApiRemoteVersion(DirectoriesMixin, APITestCase):

    @mock.patch("urllib.request.urlopen")
    def test_remote_version_exception(self, urlopen_mock):
        cm = MagicMock()
        cm.getcode.return_value = status.HTTP_200_OK
        cm.read.side_effect = urllib.error.URLError("an error")
@@ -36,7 +36,6 @@ class TestDocumentChecks(TestCase):
    @mock.patch("paperless.db.GnuPG.decrypted")
    @mock.patch("documents.models.Document.source_file")
    def test_encrypted_decrypt_fails(self, mock_decrypted, mock_source_file):
        mock_decrypted.return_value = None
        mock_source_file.return_value = b""

@@ -61,7 +60,6 @@ class TestDocumentChecks(TestCase):
        )

    def test_parser_check(self):
        self.assertEqual(parser_check(None), [])

        with mock.patch("documents.checks.document_consumer_declaration.send") as m:
@@ -326,7 +326,6 @@ class TestClassifier(DirectoriesMixin, TestCase):
        classifier2.load()

    def testSaveClassifier(self):
        self.generate_train_and_save()

        new_classifier = DocumentClassifier()

@@ -336,7 +335,6 @@ class TestClassifier(DirectoriesMixin, TestCase):
        self.assertFalse(new_classifier.train())

    def test_load_and_classify(self):
        self.generate_train_and_save()

        new_classifier = DocumentClassifier()
@@ -35,7 +35,6 @@ from .utils import DirectoriesMixin

class TestAttributes(TestCase):
    TAGS = ("tag1", "tag2", "tag3")

    def _test_guess_attributes_from_name(self, filename, sender, title, tags):

@@ -68,7 +67,6 @@ class TestAttributes(TestCase):

class TestFieldPermutations(TestCase):
    valid_dates = (
        "20150102030405Z",
        "20150102Z",

@@ -85,7 +83,6 @@ class TestFieldPermutations(TestCase):
        title=None,
        tags=None,
    ):
        info = FileInfo.from_filename(filename)

        # Created

@@ -132,7 +129,6 @@ class TestFieldPermutations(TestCase):
        self.assertIsNone(info.created)

    def test_filename_parse_transforms(self):
        filename = "tag1,tag2_20190908_180610_0001.pdf"
        all_patt = re.compile("^.*$")
        none_patt = re.compile("$a")

@@ -215,7 +211,6 @@ class FaultyParser(DocumentParser):

def fake_magic_from_file(file, mime=False):
    if mime:
        if os.path.splitext(file)[1] == ".pdf":
            return "application/pdf"

@@ -240,7 +235,6 @@ class TestConsumer(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
        last_progress=100,
        last_progress_max=100,
    ):
        self._send_progress.assert_called()

        args, kwargs = self._send_progress.call_args_list[0]

@@ -315,7 +309,6 @@ class TestConsumer(DirectoriesMixin, FileSystemAssertsMixin, TestCase):

    @override_settings(FILENAME_FORMAT=None, TIME_ZONE="America/Chicago")
    def testNormalOperation(self):
        filename = self.get_test_file()

        # Get the local time, as an aware datetime

@@ -437,7 +430,6 @@ class TestConsumer(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
        self._assert_first_last_send_progress()

    def testNotAFile(self):
        self.assertRaisesMessage(
            ConsumerError,
            "File not found",

@@ -545,7 +537,6 @@ class TestConsumer(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
    @override_settings(FILENAME_FORMAT="{correspondent}/{title}")
    @mock.patch("documents.signals.handlers.generate_unique_filename")
    def testFilenameHandlingUnstableFormat(self, m):
        filenames = ["this", "that", "now this", "i cant decide"]

        def get_filename():

@@ -792,7 +783,6 @@ class TestConsumerCreatedDate(DirectoriesMixin, TestCase):

class PreConsumeTestCase(TestCase):
    def setUp(self) -> None:
        # this prevents websocket message reports during testing.
        patcher = mock.patch("documents.consumer.Consumer._send_progress")
        self._send_progress = patcher.start()

@@ -900,7 +890,6 @@ class PreConsumeTestCase(TestCase):

class PostConsumeTestCase(TestCase):
    def setUp(self) -> None:
        # this prevents websocket message reports during testing.
        patcher = mock.patch("documents.consumer.Consumer._send_progress")
        self._send_progress = patcher.start()

@@ -13,7 +13,6 @@ from documents.parsers import parse_date_generator

class TestDate(TestCase):
    SAMPLE_FILES = os.path.join(
        os.path.dirname(__file__),
        "../../paperless_tesseract/tests/samples",
@@ -52,7 +52,6 @@ class TestDocument(TestCase):
        self.assertEqual(mock_unlink.call_count, 2)

    def test_file_name(self):
        doc = Document(
            mime_type="application/pdf",
            title="test",

@@ -64,7 +63,6 @@ class TestDocument(TestCase):
        TIME_ZONE="Europe/Berlin",
    )
    def test_file_name_with_timezone(self):
        # See https://docs.djangoproject.com/en/4.0/ref/utils/#django.utils.timezone.now
        # The default for created is an aware datetime in UTC
        # This does that, just manually, with a fixed date

@@ -107,7 +105,6 @@ class TestDocument(TestCase):
        self.assertEqual(doc.get_public_filename(), "2020-01-01 test.pdf")

    def test_file_name_jpg(self):
        doc = Document(
            mime_type="image/jpeg",
            title="test",

@@ -116,7 +113,6 @@ class TestDocument(TestCase):
        self.assertEqual(doc.get_public_filename(), "2020-12-25 test.jpg")

    def test_file_name_unknown(self):
        doc = Document(
            mime_type="application/zip",
            title="test",

@@ -125,7 +121,6 @@ class TestDocument(TestCase):
        self.assertEqual(doc.get_public_filename(), "2020-12-25 test.zip")

    def test_file_name_invalid_type(self):
        doc = Document(
            mime_type="image/jpegasd",
            title="test",
@@ -119,7 +119,6 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):

    @override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
    def test_file_renaming_database_error(self):
        Document.objects.create(
            mime_type="application/pdf",
            storage_type=Document.STORAGE_TYPE_UNENCRYPTED,

@@ -842,7 +841,6 @@ class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, Test
    @override_settings(FILENAME_FORMAT="{correspondent}/{title}")
    def test_database_error(self):
        original = os.path.join(settings.ORIGINALS_DIR, "0000001.pdf")
        archive = os.path.join(settings.ARCHIVE_DIR, "0000001.pdf")
        Path(original).touch()

@@ -868,7 +866,6 @@
class TestFilenameGeneration(DirectoriesMixin, TestCase):
    @override_settings(FILENAME_FORMAT="{title}")
    def test_invalid_characters(self):
        doc = Document.objects.create(
            title="This. is the title.",
            mime_type="application/pdf",
@@ -23,7 +23,6 @@ class TestImporter(TestCase):
        )

    def test_check_manifest(self):
        cmd = Command()
        cmd.source = Path("/tmp")

@@ -54,7 +53,6 @@ class TestImporter(TestCase):
        - CommandError is raised indicating the issue
        """
        with tempfile.TemporaryDirectory() as temp_dir:
            # Create empty files
            original_path = Path(temp_dir) / "original.pdf"
            archive_path = Path(temp_dir) / "archive.pdf"
@@ -9,7 +9,6 @@ from documents.tests.utils import DirectoriesMixin

class TestAutoComplete(DirectoriesMixin, TestCase):
    def test_auto_complete(self):
        doc1 = Document.objects.create(
            title="doc1",
            checksum="A",
@@ -30,7 +30,6 @@ class TestArchiver(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
        )

    def test_archiver(self):
        doc = self.make_models()
        shutil.copy(
            sample_file,

@@ -40,7 +39,6 @@ class TestArchiver(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
        call_command("document_archiver")

    def test_handle_document(self):
        doc = self.make_models()
        shutil.copy(
            sample_file,

@@ -114,7 +112,6 @@ class TestDecryptDocuments(FileSystemAssertsMixin, TestCase):
        )
    @mock.patch("documents.management.commands.decrypt_documents.input")
    def test_decrypt(self, m):
        media_dir = tempfile.mkdtemp()
        originals_dir = os.path.join(media_dir, "documents", "originals")
        thumb_dir = os.path.join(media_dir, "documents", "thumbnails")
@@ -150,7 +150,6 @@ class TestConsumer(DirectoriesMixin, ConsumerThreadMixin, TransactionTestCase):

    @mock.patch("documents.management.commands.document_consumer.logger.error")
    def test_slow_write_pdf(self, error_logger):
        self.consume_file_mock.side_effect = self.bogus_task

        self.t_start()

@@ -171,7 +170,6 @@ class TestConsumer(DirectoriesMixin, ConsumerThreadMixin, TransactionTestCase):

    @mock.patch("documents.management.commands.document_consumer.logger.error")
    def test_slow_write_and_move(self, error_logger):
        self.consume_file_mock.side_effect = self.bogus_task

        self.t_start()

@@ -194,7 +192,6 @@ class TestConsumer(DirectoriesMixin, ConsumerThreadMixin, TransactionTestCase):

    @mock.patch("documents.management.commands.document_consumer.logger.error")
    def test_slow_write_incomplete(self, error_logger):
        self.consume_file_mock.side_effect = self.bogus_task

        self.t_start()

@@ -215,12 +212,10 @@ class TestConsumer(DirectoriesMixin, ConsumerThreadMixin, TransactionTestCase):

    @override_settings(CONSUMPTION_DIR="does_not_exist")
    def test_consumption_directory_invalid(self):
        self.assertRaises(CommandError, call_command, "document_consumer", "--oneshot")

    @override_settings(CONSUMPTION_DIR="")
    def test_consumption_directory_unset(self):
        self.assertRaises(CommandError, call_command, "document_consumer", "--oneshot")

    def test_mac_write(self):

@@ -332,7 +327,6 @@ class TestConsumer(DirectoriesMixin, ConsumerThreadMixin, TransactionTestCase):

    @mock.patch("documents.management.commands.document_consumer.open")
    def test_consume_file_busy(self, open_mock):
        # Calling this mock always raises this
        open_mock.side_effect = OSError

@@ -378,7 +372,6 @@ class TestConsumerRecursivePolling(TestConsumer):
class TestConsumerTags(DirectoriesMixin, ConsumerThreadMixin, TransactionTestCase):
    @override_settings(CONSUMER_RECURSIVE=True, CONSUMER_SUBDIRS_AS_TAGS=True)
    def test_consume_file_with_path_tags(self):
        tag_names = ("existingTag", "Space Tag")
        # Create a Tag prior to consuming a file using it in path
        tag_ids = [
@@ -364,7 +364,6 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
        )

    def test_export_missing_files(self):
        target = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, target)
        Document.objects.create(

@@ -458,7 +457,6 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
        args = ["document_exporter", "/tmp/foo/bar"]

        with self.assertRaises(CommandError) as e:
            call_command(*args)

        self.assertEqual("That path isn't a directory", str(e))

@@ -474,11 +472,9 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
        """

        with tempfile.NamedTemporaryFile() as tmp_file:
            args = ["document_exporter", tmp_file.name]

            with self.assertRaises(CommandError) as e:
                call_command(*args)

            self.assertEqual("That path isn't a directory", str(e))

@@ -493,13 +489,11 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
        - Error is raised
        """
        with tempfile.TemporaryDirectory() as tmp_dir:
            os.chmod(tmp_dir, 0o000)

            args = ["document_exporter", tmp_dir]

            with self.assertRaises(CommandError) as e:
                call_command(*args)

            self.assertEqual("That path doesn't appear to be writable", str(e))
@@ -11,7 +11,6 @@ from documents.tests.utils import DirectoriesMixin

class TestRetagger(DirectoriesMixin, TestCase):
    def make_models(self):
        self.sp1 = StoragePath.objects.create(
            name="dummy a",
            path="{created_data}/{title}",
@@ -48,7 +48,6 @@ class _TestMatchingBase(TestCase):

class TestMatching(_TestMatchingBase):
    def test_match_none(self):
        self._test_matching(
            "",
            "MATCH_NONE",

@@ -60,7 +59,6 @@ class TestMatching(_TestMatchingBase):
        )

    def test_match_all(self):
        self._test_matching(
            "alpha charlie gamma",
            "MATCH_ALL",

@@ -107,7 +105,6 @@ class TestMatching(_TestMatchingBase):
        )

    def test_match_any(self):
        self._test_matching(
            "alpha charlie gamma",
            "MATCH_ANY",

@@ -152,7 +149,6 @@ class TestMatching(_TestMatchingBase):
        )

    def test_match_literal(self):
        self._test_matching(
            "alpha charlie gamma",
            "MATCH_LITERAL",

@@ -187,7 +183,6 @@ class TestMatching(_TestMatchingBase):
        )

    def test_match_regex(self):
        self._test_matching(
            r"alpha\w+gamma",
            "MATCH_REGEX",

@@ -211,7 +206,6 @@ class TestMatching(_TestMatchingBase):
        self._test_matching("[", "MATCH_REGEX", [], ["Don't match this"])

    def test_match_fuzzy(self):
        self._test_matching(
            "Springfield, Miss.",
            "MATCH_FUZZY",

@@ -331,7 +325,6 @@ class TestCaseSensitiveMatching(_TestMatchingBase):
        )

    def test_match_literal(self):
        self._test_matching(
            "alpha charlie gamma",
            "MATCH_LITERAL",
@@ -114,7 +114,6 @@ simple_png2 = os.path.join(os.path.dirname(__file__), "examples", "no-text.png")

@override_settings(FILENAME_FORMAT="")
class TestMigrateArchiveFiles(DirectoriesMixin, FileSystemAssertsMixin, TestMigrations):
    migrate_from = "1011_auto_20210101_2340"
    migrate_to = "1012_fix_archive_files"

@@ -282,13 +281,11 @@ def fake_parse_wrapper(parser, path, mime_type, file_name):

@override_settings(FILENAME_FORMAT="")
class TestMigrateArchiveFilesErrors(DirectoriesMixin, TestMigrations):
    migrate_from = "1011_auto_20210101_2340"
    migrate_to = "1012_fix_archive_files"
    auto_migrate = False

    def test_archive_missing(self):
        Document = self.apps.get_model("documents", "Document")

        doc = make_test_document(

@@ -454,12 +451,10 @@ class TestMigrateArchiveFilesBackwards(
    FileSystemAssertsMixin,
    TestMigrations,
):
    migrate_from = "1012_fix_archive_files"
    migrate_to = "1011_auto_20210101_2340"

    def setUpBeforeMigration(self, apps):
        Document = apps.get_model("documents", "Document")

        make_test_document(

@@ -519,13 +514,11 @@ class TestMigrateArchiveFilesBackwardsWithFilenameFormat(

@override_settings(FILENAME_FORMAT="")
class TestMigrateArchiveFilesBackwardsErrors(DirectoriesMixin, TestMigrations):
    migrate_from = "1012_fix_archive_files"
    migrate_to = "1011_auto_20210101_2340"
    auto_migrate = False

    def test_filename_clash(self):
        Document = self.apps.get_model("documents", "Document")

        self.clashA = make_test_document(

@@ -554,7 +547,6 @@ class TestMigrateArchiveFilesBackwardsErrors(DirectoriesMixin, TestMigrations):
        )

    def test_filename_exists(self):
        Document = self.apps.get_model("documents", "Document")

        self.clashA = make_test_document(
@@ -40,7 +40,6 @@ def source_path_after(doc):

@override_settings(PASSPHRASE="test")
class TestMigrateMimeType(DirectoriesMixin, TestMigrations):
    migrate_from = "1002_auto_20201111_1105"
    migrate_to = "1003_mime_types"

@@ -86,7 +85,6 @@ class TestMigrateMimeType(DirectoriesMixin, TestMigrations):

@override_settings(PASSPHRASE="test")
class TestMigrateMimeTypeBackwards(DirectoriesMixin, TestMigrations):
    migrate_from = "1003_mime_types"
    migrate_to = "1002_auto_20201111_1105"
@@ -3,7 +3,6 @@ from documents.tests.utils import TestMigrations

class TestMigrateNullCharacters(DirectoriesMixin, TestMigrations):
    migrate_from = "1014_auto_20210228_1614"
    migrate_to = "1015_remove_null_characters"
Some files were not shown because too many files have changed in this diff.