Upgrades black to v23, upgrades ruff

Trenton H 2023-04-25 09:59:24 -07:00
parent 3bcbd05252
commit 6f163111ce
147 changed files with 74 additions and 387 deletions
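
Most of the churn below is mechanical restyling rather than behavioural change: the bulk of the deleted lines are blank lines that sat directly under a class or def header, which the upgraded formatter no longer keeps, and a couple of hunks (autocomplete and FileInfo.from_filename) drop redundant parentheses around tuple targets in for loops. A minimal, runnable sketch of both patterns, using invented names rather than code from the repository:

# Illustrative only; this class and data are not from the repository.
class ExampleAdmin:
    # black 22 tolerated a blank line directly under "class ExampleAdmin:";
    # the 2023 style removes it, which accounts for most deletions below.
    list_display = ("name", "colour")


pairs = [(1, "a"), (2, "b")]
# "for (score, term) in pairs:" loses its redundant parentheses:
for score, term in pairs:
    print(score, term)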


@ -200,7 +200,6 @@ class RegistryTagsCleaner:
tag,
)
for manifest in image_index.image_pointers:
if manifest.digest in untagged_versions:
logger.info(
f"Skipping deletion of {manifest.digest},"
@ -287,7 +286,6 @@ class RegistryTagsCleaner:
logger.info("Beginning confirmation step")
a_tag_failed = False
for tag in sorted(self.tags_to_keep):
try:
image_index = ImageIndex(
f"ghcr.io/{self.repo_owner}/{self.package_name}",
@ -301,7 +299,6 @@ class RegistryTagsCleaner:
digest_name = f"ghcr.io/{self.repo_owner}/{self.package_name}@{manifest.digest}"
try:
subprocess.run(
[
shutil.which("docker"),
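
The hunk above is cut off inside a subprocess.run call that locates the docker binary with shutil.which before invoking it. A self-contained sketch of that pattern, assuming a hypothetical subcommand and image name (neither is shown in the hunk):

import shutil
import subprocess

# Resolve the docker executable first so a missing binary fails with a clear
# error rather than an opaque exception from subprocess.
docker = shutil.which("docker")
if docker is None:
    raise RuntimeError("docker executable not found on PATH")

# Hypothetical invocation; the real arguments are truncated in the diff above.
subprocess.run(
    [docker, "manifest", "inspect", "ghcr.io/example-owner/example-image:latest"],
    check=True,
)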


@ -37,16 +37,16 @@ repos:
exclude: "(^Pipfile\\.lock$)"
# Python hooks
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: 'v0.0.259'
rev: 'v0.0.263'
hooks:
- id: ruff
- repo: https://github.com/psf/black
rev: 22.12.0
rev: 23.3.0
hooks:
- id: black
# Dockerfile hooks
- repo: https://github.com/AleksaC/hadolint-py
rev: v2.10.0
rev: v2.12.0.2
hooks:
- id: hadolint
# Shell script hooks

Pipfile.lock (generated)

@ -2256,34 +2256,35 @@
},
"black": {
"hashes": [
"sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd",
"sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555",
"sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481",
"sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468",
"sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9",
"sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a",
"sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958",
"sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580",
"sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26",
"sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32",
"sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8",
"sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753",
"sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b",
"sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074",
"sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651",
"sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24",
"sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6",
"sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad",
"sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac",
"sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221",
"sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06",
"sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27",
"sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648",
"sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739",
"sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"
"sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5",
"sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915",
"sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326",
"sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940",
"sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b",
"sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30",
"sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c",
"sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c",
"sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab",
"sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27",
"sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2",
"sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961",
"sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9",
"sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb",
"sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70",
"sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331",
"sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2",
"sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266",
"sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d",
"sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6",
"sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b",
"sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925",
"sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8",
"sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4",
"sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"
],
"index": "pypi",
"version": "==23.1.0"
"markers": "python_version >= '3.7'",
"version": "==23.3.0"
},
"certifi": {
"hashes": [
@ -2718,11 +2719,11 @@
},
"packaging": {
"hashes": [
"sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2",
"sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"
"sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61",
"sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"
],
"markers": "python_version >= '3.7'",
"version": "==23.0"
"version": "==23.1"
},
"pathspec": {
"hashes": [
@ -2817,11 +2818,11 @@
},
"platformdirs": {
"hashes": [
"sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa",
"sha256:e5986afb596e4bb5bde29a79ac9061aa955b94fca2399b7aaac4090860920dd8"
"sha256:64370d47dc3fca65b4879f89bdead8197e93e05d696d6d1816243ebae8595da5",
"sha256:ea61fd7b85554beecbbd3e9b37fb26689b227ffae38f73353cbcc1cf8bd01878"
],
"markers": "python_version >= '3.7'",
"version": "==3.1.1"
"version": "==3.3.0"
},
"pluggy": {
"hashes": [
@ -3071,26 +3072,27 @@
},
"ruff": {
"hashes": [
"sha256:22e1e35bf5f12072cd644d22afd9203641ccf258bc14ff91aa1c43dc14f6047d",
"sha256:29e2b77b7d5da6a7dd5cf9b738b511355c5734ece56f78e500d4b5bffd58c1a0",
"sha256:38704f151323aa5858370a2f792e122cc25e5d1aabe7d42ceeab83da18f0b456",
"sha256:40ae87f2638484b7e8a7567b04a7af719f1c484c5bf132038b702bb32e1f6577",
"sha256:428507fb321b386dda70d66cd1a8aa0abf51d7c197983d83bb9e4fa5ee60300b",
"sha256:49e903bcda19f6bb0725a962c058eb5d61f40d84ef52ed53b61939b69402ab4e",
"sha256:5b3c1beacf6037e7f0781d4699d9a2dd4ba2462f475be5b1f45cf84c4ba3c69d",
"sha256:71f0ef1985e9a6696fa97da8459917fa34bdaa2c16bd33bd5edead585b7d44f7",
"sha256:79b02fa17ec1fd8d306ae302cb47fb614b71e1f539997858243769bcbe78c6d9",
"sha256:7cfef26619cba184d59aa7fa17b48af5891d51fc0b755a9bc533478a10d4d066",
"sha256:8b56496063ab3bfdf72339a5fbebb8bd46e5c5fee25ef11a9f03b208fa0562ec",
"sha256:aa9449b898287e621942cc71b9327eceb8f0c357e4065fecefb707ef2d978df8",
"sha256:c5fbaea9167f1852757f02133e5daacdb8c75b3431343205395da5b10499927a",
"sha256:d2fb20e89e85d147c85caa807707a1488bccc1f3854dc3d53533e89b52a0c5ff",
"sha256:daaea322e7e85f4c13d82be9536309e1c4b8b9851bb0cbc7eeb15d490fd46bf9",
"sha256:e4f39e18702de69faaaee3969934b92d7467285627f99a5b6ecd55a7d9f5d086",
"sha256:f3938dc45e2a3f818e9cbd53007265c22246fbfded8837b2c563bf0ebde1a226"
"sha256:04e0b280dd246448564c892bce5607d820ad1f14944f3d535db98692e2a7ac07",
"sha256:1008f211ad8aa1d998517ac5bf3d68fbc68ec516d1da89b6081f25ff2f30b687",
"sha256:15386933dd8e03aafa3186f9e996d6823105492817311338fbcb64d0ecbcd95f",
"sha256:3e9fcee3f81129eabc75da005d839235e32d7d374f2d4c0db0c708dad4703d6e",
"sha256:4010b156f2e9fa6e74b5581098467f6ff68beac48945599b3a9239481e578ab4",
"sha256:4f75fa1632ea065b8f10678e7b6ae9873f84d5046bdf146990112751e98af42a",
"sha256:7890499c2c3dcb1e60de2a8b4c5f5775b2bfcdff7d3e68e38db5cb2d65b12006",
"sha256:82c41f276106017b6f075dd2f2cc68e1a0b434cc75488f816fc98bd41982628d",
"sha256:981e3c4d773f7ff52479c4fd74a65e408f1e13fa5f889b72214d400cd1299ce4",
"sha256:9af932f665e177de62e172901704257fd6e5bfabb95893867ff7382a851459d3",
"sha256:bed1d3fba306e3f7e13ce226927b84200350e25abd1e754e06ee361c6d41de15",
"sha256:c2b79919ebd93674b93dfc2c843e264bf8e52fbe737467e9b58521775c85f4ad",
"sha256:c3b7d4b365207f3e4c40d235127091478e595b31e35b6cd57d940920cdfae68b",
"sha256:ddcee0d91629a4fa4bc9faebf5b94d4615d50d1cd76d1098fa71fbe1c54f4104",
"sha256:ddf4503595b560bfa5fae92fa2e4cb09ec465ee4cf88cc248f10ad2e956deec3",
"sha256:ebc778d95f29c9917e6e7608b2b67815707e6ab8eb5af9341617beda479c3edf",
"sha256:ee6c7a77f142c427fa73e1f5f603fc1a39413a36fe6966ed0fc55e97f6921d9c"
],
"index": "pypi",
"version": "==0.0.259"
"markers": "python_version >= '3.7'",
"version": "==0.0.263"
},
"scipy": {
"hashes": [
@ -3158,7 +3160,7 @@
"sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb",
"sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"
],
"markers": "python_version >= '3.7'",
"markers": "python_version < '3.10'",
"version": "==4.5.0"
},
"urllib3": {


@ -12,7 +12,6 @@ from typing import Final
from redis import Redis
if __name__ == "__main__":
MAX_RETRY_COUNT: Final[int] = 5
RETRY_SLEEP_SECONDS: Final[int] = 5


@ -13,28 +13,24 @@ from .models import Tag
class CorrespondentAdmin(GuardedModelAdmin):
list_display = ("name", "match", "matching_algorithm")
list_filter = ("matching_algorithm",)
list_editable = ("match", "matching_algorithm")
class TagAdmin(GuardedModelAdmin):
list_display = ("name", "color", "match", "matching_algorithm")
list_filter = ("color", "matching_algorithm")
list_editable = ("color", "match", "matching_algorithm")
class DocumentTypeAdmin(GuardedModelAdmin):
list_display = ("name", "match", "matching_algorithm")
list_filter = ("matching_algorithm",)
list_editable = ("match", "matching_algorithm")
class DocumentAdmin(GuardedModelAdmin):
search_fields = ("correspondent__name", "title", "content", "tags__name")
readonly_fields = (
"added",
@ -99,7 +95,6 @@ class RuleInline(admin.TabularInline):
class SavedViewAdmin(GuardedModelAdmin):
list_display = ("name", "owner")
inlines = [RuleInline]
@ -116,7 +111,6 @@ class StoragePathAdmin(GuardedModelAdmin):
class TaskAdmin(admin.ModelAdmin):
list_display = ("task_id", "task_file_name", "task_name", "date_done", "status")
list_filter = ("status", "date_done", "task_file_name", "task_name")
search_fields = ("task_name", "task_id", "status")
@ -133,7 +127,6 @@ class TaskAdmin(admin.ModelAdmin):
class NotesAdmin(GuardedModelAdmin):
list_display = ("user", "created", "note", "document")
list_filter = ("created", "user")
list_display_links = ("created",)


@ -3,7 +3,6 @@ from django.utils.translation import gettext_lazy as _
class DocumentsConfig(AppConfig):
name = "documents"
verbose_name = _("Documents")


@ -55,7 +55,6 @@ def set_document_type(doc_ids, document_type):
def add_tag(doc_ids, tag):
qs = Document.objects.filter(Q(id__in=doc_ids) & ~Q(tags__id=tag))
affected_docs = [doc.id for doc in qs]
@ -71,7 +70,6 @@ def add_tag(doc_ids, tag):
def remove_tag(doc_ids, tag):
qs = Document.objects.filter(Q(id__in=doc_ids) & Q(tags__id=tag))
affected_docs = [doc.id for doc in qs]
@ -123,7 +121,6 @@ def delete(doc_ids):
def redo_ocr(doc_ids):
for document_id in doc_ids:
update_document_archive_file.delay(
document_id=document_id,
@ -133,7 +130,6 @@ def redo_ocr(doc_ids):
def set_permissions(doc_ids, set_permissions, owner=None):
qs = Document.objects.filter(id__in=doc_ids)
qs.update(owner=owner)


@ -23,7 +23,6 @@ def changed_password_check(app_configs, **kwargs):
return [] # No documents table yet
if encrypted_doc:
if not settings.PASSPHRASE:
return [
Error(
@ -53,7 +52,6 @@ def changed_password_check(app_configs, **kwargs):
@register()
def parser_check(app_configs, **kwargs):
parsers = []
for response in document_consumer_declaration.send(None):
parsers.append(response[1])


@ -60,7 +60,6 @@ def load_classifier() -> Optional["DocumentClassifier"]:
class DocumentClassifier:
# v7 - Updated scikit-learn package version
# v8 - Added storage path classifier
# v9 - Changed from hashing to time/ids for re-train check
@ -141,7 +140,6 @@ class DocumentClassifier:
target_file_temp.rename(target_file)
def train(self):
# Get non-inbox documents
docs_queryset = Document.objects.exclude(
tags__is_inbox_tag=True,
@ -160,7 +158,6 @@ class DocumentClassifier:
logger.debug("Gathering data from database...")
hasher = sha256()
for doc in docs_queryset:
y = -1
dt = doc.document_type
if dt and dt.matching_algorithm == MatchingModel.MATCH_AUTO:
@ -335,7 +332,6 @@ class DocumentClassifier:
# If the NLTK language is supported, do further processing
if settings.NLTK_LANGUAGE is not None and settings.NLTK_ENABLED:
import nltk
from nltk.corpus import stopwords
from nltk.stem import SnowballStemmer


@ -60,7 +60,6 @@ MESSAGE_FINISHED = "finished"
class Consumer(LoggingMixin):
logging_name = "paperless.consumer"
def _send_progress(
@ -426,7 +425,6 @@ class Consumer(LoggingMixin):
# in the system. This will be a transaction and reasonably fast.
try:
with transaction.atomic():
# store the document.
document = self._store(text=text, date=date, mime_type=mime_type)
@ -520,7 +518,6 @@ class Consumer(LoggingMixin):
date: Optional[datetime.datetime],
mime_type: str,
) -> Document:
# If someone gave us the original filename, use it instead of doc.
file_info = FileInfo.from_filename(self.filename)


@ -82,7 +82,6 @@ class TitleContentFilter(Filter):
class DocumentFilterSet(FilterSet):
is_tagged = BooleanFilter(
label="Is tagged",
field_name="tags",


@ -331,7 +331,7 @@ class DelayedMoreLikeThisQuery(DelayedQuery):
def autocomplete(ix, term, limit=10):
with ix.reader() as reader:
terms = []
for (score, t) in reader.most_distinctive_terms(
for score, t in reader.most_distinctive_terms(
"content",
number=limit,
prefix=term.lower(),


@ -3,7 +3,6 @@ import uuid
class LoggingMixin:
logging_group = None
logging_name = None


@ -9,14 +9,12 @@ from paperless.db import GnuPG
class Command(BaseCommand):
help = (
"This is how you migrate your stored documents from an encrypted "
"state to an unencrypted one (or vice-versa)"
)
def add_arguments(self, parser):
parser.add_argument(
"--passphrase",
help="If PAPERLESS_PASSPHRASE isn't set already, you need to "
@ -24,7 +22,6 @@ class Command(BaseCommand):
)
def handle(self, *args, **options):
try:
print(
"\n\nWARNING: This script is going to work directly on your "
@ -49,13 +46,11 @@ class Command(BaseCommand):
@staticmethod
def __gpg_to_unencrypted(passphrase):
encrypted_files = Document.objects.filter(
storage_type=Document.STORAGE_TYPE_GPG,
)
for document in encrypted_files:
print(f"Decrypting {document}".encode())
old_paths = [document.source_path, document.thumbnail_path]


@ -14,7 +14,6 @@ logger = logging.getLogger("paperless.management.archiver")
class Command(BaseCommand):
help = """
Using the current classification model, assigns correspondents, tags
and document types to all documents, effectively allowing you to
@ -51,7 +50,6 @@ class Command(BaseCommand):
)
def handle(self, *args, **options):
os.makedirs(settings.SCRATCH_DIR, exist_ok=True)
overwrite = options["overwrite"]
@ -74,7 +72,6 @@ class Command(BaseCommand):
db.connections.close_all()
try:
logging.getLogger().handlers[0].level = logging.ERROR
with multiprocessing.Pool(processes=settings.TASK_WORKERS) as pool:
list(


@ -4,7 +4,6 @@ from documents.tasks import train_classifier
class Command(BaseCommand):
help = """
Trains the classifier on your data and saves the resulting models to a
file. The document consumer will then automatically use this new model.


@ -40,7 +40,6 @@ from paperless_mail.models import MailRule
class Command(BaseCommand):
help = """
Decrypt and rename all files in our collection into a given target
directory. And include a manifest file containing document data for
@ -144,7 +143,6 @@ class Command(BaseCommand):
self.no_thumbnail = False
def handle(self, *args, **options):
self.target = Path(options["target"]).resolve()
self.split_manifest = options["split_manifest"]
self.compare_checksums = options["compare_checksums"]


@ -36,7 +36,6 @@ def disable_signal(sig, receiver, sender):
class Command(BaseCommand):
help = """
Using a manifest.json file, load the data from there, and import the
documents it refers to.
@ -61,7 +60,6 @@ class Command(BaseCommand):
self.version = None
def handle(self, *args, **options):
logging.getLogger().handlers[0].level = logging.ERROR
self.source = Path(options["source"]).resolve()
@ -163,7 +161,6 @@ class Command(BaseCommand):
"""
self.stdout.write("Checking the manifest")
for record in self.manifest:
if record["model"] != "documents.document":
continue
@ -205,7 +202,6 @@ class Command(BaseCommand):
) from e
def _import_files_from_manifest(self, progress_bar_disable):
os.makedirs(settings.ORIGINALS_DIR, exist_ok=True)
os.makedirs(settings.THUMBNAIL_DIR, exist_ok=True)
os.makedirs(settings.ARCHIVE_DIR, exist_ok=True)
@ -217,7 +213,6 @@ class Command(BaseCommand):
)
for record in tqdm.tqdm(manifest_documents, disable=progress_bar_disable):
document = Document.objects.get(pk=record["pk"])
doc_file = record[EXPORTER_FILE_NAME]


@ -6,7 +6,6 @@ from documents.tasks import index_reindex
class Command(BaseCommand):
help = "Manages the document index."
def add_arguments(self, parser):


@ -8,7 +8,6 @@ from documents.models import Document
class Command(BaseCommand):
help = """
This will rename all documents to match the latest filename format.
""".replace(
@ -25,7 +24,6 @@ class Command(BaseCommand):
)
def handle(self, *args, **options):
logging.getLogger().handlers[0].level = logging.ERROR
for document in tqdm.tqdm(


@ -14,7 +14,6 @@ logger = logging.getLogger("paperless.management.retagger")
class Command(BaseCommand):
help = """
Using the current classification model, assigns correspondents, tags
and document types to all documents, effectively allowing you to
@ -78,7 +77,6 @@ class Command(BaseCommand):
classifier = load_classifier()
for document in tqdm.tqdm(documents, disable=options["no_progress_bar"]):
if options["correspondent"]:
set_correspondent(
sender=None,


@ -4,7 +4,6 @@ from documents.sanity_checker import check_sanity
class Command(BaseCommand):
help = """
This command checks your document archive for issues.
""".replace(
@ -21,7 +20,6 @@ class Command(BaseCommand):
)
def handle(self, *args, **options):
messages = check_sanity(progress=not options["no_progress_bar"])
messages.log_messages()


@ -21,7 +21,6 @@ def _process_document(doc_in):
return
try:
thumb = parser.get_thumbnail(
document.source_path,
document.mime_type,
@ -34,7 +33,6 @@ def _process_document(doc_in):
class Command(BaseCommand):
help = """
This will regenerate the thumbnails for all documents.
""".replace(


@ -8,7 +8,6 @@ logger = logging.getLogger("paperless.management.superuser")
class Command(BaseCommand):
help = """
Creates a Django superuser:
User named: admin
@ -24,7 +23,6 @@ class Command(BaseCommand):
)
def handle(self, *args, **options):
username = os.getenv("PAPERLESS_ADMIN_USER", "admin")
mail = os.getenv("PAPERLESS_ADMIN_MAIL", "root@localhost")
password = os.getenv("PAPERLESS_ADMIN_PASSWORD")


@ -6,7 +6,6 @@ from django.db import models
class Migration(migrations.Migration):
initial = True
dependencies = []


@ -6,7 +6,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0001_initial"),
]


@ -9,7 +9,6 @@ DOCUMENT_SENDER_MAP = {}
def move_sender_strings_to_sender_model(apps, schema_editor):
sender_model = apps.get_model("documents", "Sender")
document_model = apps.get_model("documents", "Document")


@ -6,7 +6,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0003_sender"),
]


@ -4,7 +4,6 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("documents", "0004_auto_20160114_1844"),
]


@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0005_auto_20160123_0313"),
]


@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0006_auto_20160123_0430"),
]


@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0007_auto_20160126_2114"),
]


@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0008_document_file_type"),
]


@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0009_auto_20160214_0040"),
]


@ -34,7 +34,6 @@ class GnuPG:
def move_documents_and_create_thumbnails(apps, schema_editor):
os.makedirs(
os.path.join(settings.MEDIA_ROOT, "documents", "originals"),
exist_ok=True,
@ -67,7 +66,6 @@ def move_documents_and_create_thumbnails(apps, schema_editor):
pass
for f in sorted(documents):
if not f.endswith("gpg"):
continue


@ -6,7 +6,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0012_auto_20160305_0040"),
]


@ -75,7 +75,6 @@ class Document:
def set_checksums(apps, schema_editor):
document_model = apps.get_model("documents", "Document")
if not document_model.objects.all().exists():
@ -95,7 +94,6 @@ def set_checksums(apps, schema_editor):
sums = {}
for d in document_model.objects.all():
document = Document(d)
print(


@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0014_document_checksum"),
]


@ -6,7 +6,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0015_add_insensitive_to_match"),
]


@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0016_auto_20170325_1558"),
]


@ -6,7 +6,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0017_auto_20170512_0507"),
]


@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0020_document_added"),
]


@ -20,7 +20,6 @@ def re_slug_all_the_things(apps, schema_editor):
class Migration(migrations.Migration):
dependencies = [
("documents", "0021_document_storage_type"),
]


@ -19,7 +19,6 @@ def set_filename(apps, schema_editor):
class Migration(migrations.Migration):
dependencies = [
("documents", "0022_auto_20181007_1420"),
]


@ -15,7 +15,6 @@ def logs_set_default_group(apps, schema_editor):
class Migration(migrations.Migration):
dependencies = [
("documents", "0023_document_current_filename"),
]


@ -4,7 +4,6 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("documents", "1000_update_paperless_all"),
]


@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1001_auto_20201109_1636"),
]


@ -30,7 +30,6 @@ def add_mime_types(apps, schema_editor):
for d in documents:
f = open(source_path(d), "rb")
if d.storage_type == STORAGE_TYPE_GPG:
data = GnuPG.decrypted(f)
else:
data = f.read(1024)
@ -51,7 +50,6 @@ def add_file_extensions(apps, schema_editor):
class Migration(migrations.Migration):
dependencies = [
("documents", "1002_auto_20201111_1105"),
]


@ -5,7 +5,6 @@ from django.db.migrations import RunPython
class Migration(migrations.Migration):
dependencies = [
("documents", "1003_mime_types"),
]


@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1004_sanity_check_schedule"),
]


@ -4,7 +4,6 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("documents", "1005_checksums"),
]


@ -7,7 +7,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
("documents", "1006_auto_20201208_2209"),


@ -5,7 +5,6 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("documents", "1007_savedview_savedviewfilterrule"),
]


@ -4,7 +4,6 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("documents", "1008_auto_20201216_1736"),
]


@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1009_auto_20201216_2005"),
]


@ -8,7 +8,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
("documents", "1010_auto_20210101_2159"),


@ -254,7 +254,6 @@ def move_old_to_new_locations(apps, schema_editor):
)
for doc in Document.objects.filter(archive_checksum__isnull=False):
if doc.id in affected_document_ids:
old_path = archive_path_old(doc)
# remove affected archive versions
@ -304,7 +303,6 @@ def move_new_to_old_locations(apps, schema_editor):
class Migration(migrations.Migration):
dependencies = [
("documents", "1011_auto_20210101_2340"),
]


@ -47,7 +47,6 @@ def reverse(apps, schema_editor):
class Migration(migrations.Migration):
dependencies = [
("documents", "1012_fix_archive_files"),
]


@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1013_migrate_tag_colour"),
]


@ -18,7 +18,6 @@ def remove_null_characters(apps, schema_editor):
class Migration(migrations.Migration):
dependencies = [
("documents", "1014_auto_20210228_1614"),
]


@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1015_remove_null_characters"),
]


@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1016_auto_20210317_1351"),
]


@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1017_alter_savedviewfilterrule_rule_type"),
]


@ -6,7 +6,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1018_alter_savedviewfilterrule_value"),
]


@ -7,7 +7,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
("documents", "1018_alter_savedviewfilterrule_value"),


@ -4,7 +4,6 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("documents", "1019_storagepath_document_storage_path"),
("documents", "1019_uisettings"),


@ -17,7 +17,6 @@ logger = logging.getLogger("paperless.migrations")
def _do_convert(work_package):
existing_thumbnail, converted_thumbnail = work_package
try:
logger.info(f"Converting thumbnail: {existing_thumbnail}")
# Run actual conversion
@ -51,7 +50,6 @@ def _convert_thumbnails_to_webp(apps, schema_editor):
start = time.time()
with tempfile.TemporaryDirectory() as tempdir:
work_packages = []
for file in Path(settings.THUMBNAIL_DIR).glob("*.png"):
@ -73,7 +71,6 @@ def _convert_thumbnails_to_webp(apps, schema_editor):
)
if len(work_packages):
logger.info(
"\n\n"
" This is a one-time only migration to convert thumbnails for all of your\n"
@ -95,7 +92,6 @@ def _convert_thumbnails_to_webp(apps, schema_editor):
class Migration(migrations.Migration):
dependencies = [
("documents", "1020_merge_20220518_1839"),
]


@ -6,7 +6,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1021_webp_thumbnail_conversion"),
]


@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1023_add_comments"),
]


@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1024_document_original_filename"),
]


@ -6,7 +6,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("django_celery_results", "0011_taskresult_periodic_task_name"),
("documents", "1025_alter_savedviewfilterrule_rule_type"),


@ -6,7 +6,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1026_transition_to_celery"),
]


@ -4,7 +4,6 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("documents", "1027_remove_paperlesstask_attempted_task_and_more"),
]


@ -6,7 +6,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1028_remove_paperlesstask_task_args_and_more"),
]


@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1029_alter_document_archive_serial_number"),
]


@ -7,7 +7,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
("documents", "1030_alter_paperlesstask_task_file_name"),


@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1031_remove_savedview_user_correspondent_owner_and_more"),
]


@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1032_alter_correspondent_matching_algorithm_and_more"),
]


@ -5,7 +5,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1033_alter_documenttype_options_alter_tag_options_and_more"),
]


@ -7,7 +7,6 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
("documents", "1034_alter_savedviewfilterrule_rule_type"),


@ -38,7 +38,6 @@ class ModelWithOwner(models.Model):
class MatchingModel(ModelWithOwner):
MATCH_NONE = 0
MATCH_ANY = 1
MATCH_ALL = 2
@ -95,7 +94,6 @@ class Correspondent(MatchingModel):
class Tag(MatchingModel):
color = models.CharField(_("color"), max_length=7, default="#a6cee3")
is_inbox_tag = models.BooleanField(
@ -130,7 +128,6 @@ class StoragePath(MatchingModel):
class Document(ModelWithOwner):
STORAGE_TYPE_UNENCRYPTED = "unencrypted"
STORAGE_TYPE_GPG = "gpg"
STORAGE_TYPES = (
@ -280,7 +277,6 @@ class Document(ModelWithOwner):
verbose_name_plural = _("documents")
def __str__(self) -> str:
# Convert UTC database time to local time
created = datetime.date.isoformat(timezone.localdate(self.created))
@ -365,7 +361,6 @@ class Document(ModelWithOwner):
class Log(models.Model):
LEVELS = (
(logging.DEBUG, _("debug")),
(logging.INFO, _("information")),
@ -397,7 +392,6 @@ class Log(models.Model):
class SavedView(ModelWithOwner):
class Meta:
ordering = ("name",)
verbose_name = _("saved view")
verbose_name_plural = _("saved views")
@ -481,7 +475,6 @@ class SavedViewFilterRule(models.Model):
# the filename, if possible, as a higher priority than either document filename or
# content parsing
class FileInfo:
REGEXES = OrderedDict(
[
(
@ -503,7 +496,6 @@ class FileInfo:
tags=(),
extension=None,
):
self.created = created
self.title = title
self.extension = extension
@ -530,7 +522,7 @@ class FileInfo:
def from_filename(cls, filename) -> "FileInfo":
# Mutate filename in-place before parsing its components
# by applying at most one of the configured transformations.
for (pattern, repl) in settings.FILENAME_PARSE_TRANSFORMS:
for pattern, repl in settings.FILENAME_PARSE_TRANSFORMS:
(filename, count) = pattern.subn(repl, filename)
if count:
break
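
The loop reformatted in the hunk above applies at most one entry from settings.FILENAME_PARSE_TRANSFORMS, a sequence of (compiled pattern, replacement) pairs, stopping after the first substitution that matches. A standalone sketch of that behaviour with an invented transform list:

import re

# Invented stand-in for settings.FILENAME_PARSE_TRANSFORMS.
FILENAME_PARSE_TRANSFORMS = [
    (re.compile(r"\.gpg$"), ""),   # strip a trailing .gpg suffix
    (re.compile(r"^scan_"), ""),   # drop a scanner prefix
]


def apply_first_transform(filename: str) -> str:
    # Mirrors the loop above: stop after the first pattern that matches.
    for pattern, repl in FILENAME_PARSE_TRANSFORMS:
        filename, count = pattern.subn(repl, filename)
        if count:
            break
    return filename


# Only the first matching transform runs, so the "scan_" prefix survives here.
print(apply_first_transform("scan_invoice.pdf.gpg"))  # -> scan_invoice.pdf
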
@ -564,7 +556,6 @@ class FileInfo:
# Extending User Model Using a One-To-One Link
class UiSettings(models.Model):
user = models.OneToOneField(
User,
on_delete=models.CASCADE,


@ -140,7 +140,6 @@ def run_convert(
extra=None,
logging_group=None,
) -> None:
environment = os.environ.copy()
if settings.CONVERT_MEMORY_LIMIT:
environment["MAGICK_MEMORY_LIMIT"] = settings.CONVERT_MEMORY_LIMIT


@ -2,6 +2,7 @@ from django.contrib.auth.models import Group
from django.contrib.auth.models import Permission
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from guardian.core import ObjectPermissionChecker
from guardian.models import GroupObjectPermission
from guardian.shortcuts import assign_perm
from guardian.shortcuts import get_objects_for_user
@ -9,7 +10,6 @@ from guardian.shortcuts import get_users_with_perms
from guardian.shortcuts import remove_perm
from rest_framework.permissions import BasePermission
from rest_framework.permissions import DjangoObjectPermissions
from guardian.core import ObjectPermissionChecker
class PaperlessObjectPermissions(DjangoObjectPermissions):


@ -33,7 +33,6 @@ class SanityCheckMessages:
if len(self._messages) == 0:
logger.info("Sanity checker detected no issues.")
else:
# Query once
all_docs = Document.objects.all()


@ -58,7 +58,6 @@ class DynamicFieldsModelSerializer(serializers.ModelSerializer):
class MatchingModelSerializer(serializers.ModelSerializer):
document_count = serializers.IntegerField(read_only=True)
def get_slug(self, obj):
@ -221,7 +220,6 @@ class OwnedObjectSerializer(serializers.ModelSerializer, SetPermissionsMixin):
class CorrespondentSerializer(MatchingModelSerializer, OwnedObjectSerializer):
last_correspondence = serializers.DateTimeField(read_only=True)
class Meta:
@ -259,7 +257,6 @@ class DocumentTypeSerializer(MatchingModelSerializer, OwnedObjectSerializer):
class ColorField(serializers.Field):
COLOURS = (
(1, "#a6cee3"),
(2, "#1f78b4"),
@ -290,7 +287,6 @@ class ColorField(serializers.Field):
class TagSerializerVersion1(MatchingModelSerializer, OwnedObjectSerializer):
colour = ColorField(source="color", default="#a6cee3")
class Meta:
@ -373,7 +369,6 @@ class StoragePathField(serializers.PrimaryKeyRelatedField):
class DocumentSerializer(OwnedObjectSerializer, DynamicFieldsModelSerializer):
correspondent = CorrespondentField(allow_null=True)
tags = TagsField(many=True)
document_type = DocumentTypeField(allow_null=True)
@ -454,7 +449,6 @@ class SavedViewFilterRuleSerializer(serializers.ModelSerializer):
class SavedViewSerializer(OwnedObjectSerializer):
filter_rules = SavedViewFilterRuleSerializer(many=True)
class Meta:
@ -500,7 +494,6 @@ class SavedViewSerializer(OwnedObjectSerializer):
class DocumentListSerializer(serializers.Serializer):
documents = serializers.ListField(
required=True,
label="Documents",
@ -525,7 +518,6 @@ class DocumentListSerializer(serializers.Serializer):
class BulkEditSerializer(DocumentListSerializer, SetPermissionsMixin):
method = serializers.ChoiceField(
choices=[
"set_correspondent",
@ -651,7 +643,6 @@ class BulkEditSerializer(DocumentListSerializer, SetPermissionsMixin):
self._validate_owner(parameters["owner"])
def validate(self, attrs):
method = attrs["method"]
parameters = attrs["parameters"]
@ -672,7 +663,6 @@ class BulkEditSerializer(DocumentListSerializer, SetPermissionsMixin):
class PostDocumentSerializer(serializers.Serializer):
created = serializers.DateTimeField(
label="Created",
allow_null=True,
@ -754,7 +744,6 @@ class PostDocumentSerializer(serializers.Serializer):
class BulkDownloadSerializer(DocumentListSerializer):
content = serializers.ChoiceField(
choices=["archive", "originals", "both"],
default="archive",
@ -905,7 +894,6 @@ class TasksViewSerializer(serializers.ModelSerializer):
class AcknowledgeTasksViewSerializer(serializers.Serializer):
tasks = serializers.ListField(
required=True,
label="Tasks",


@ -175,7 +175,6 @@ def set_tags(
color=False,
**kwargs,
):
if replace:
Document.tags.through.objects.filter(document=document).exclude(
Q(tag__is_inbox_tag=True),
@ -376,7 +375,6 @@ def validate_move(instance, old_path, new_path):
@receiver(models.signals.m2m_changed, sender=Document.tags.through)
@receiver(models.signals.post_save, sender=Document)
def update_filename_and_move_files(sender, instance: Document, **kwargs):
if not instance.filename:
# Can't update the filename if there is no filename to begin with
# This happens when the consumer creates a new document.
@ -390,7 +388,6 @@ def update_filename_and_move_files(sender, instance: Document, **kwargs):
with FileLock(settings.MEDIA_LOCK):
try:
# If this was waiting for the lock, the filename or archive_filename
# of this document may have been updated. This happens if multiple updates
# get queued from the UI for the same document
@ -407,7 +404,6 @@ def update_filename_and_move_files(sender, instance: Document, **kwargs):
old_archive_path = instance.archive_path
if instance.has_archive_version:
instance.archive_filename = generate_unique_filename(
instance,
archive_filename=True,
@ -487,7 +483,6 @@ def update_filename_and_move_files(sender, instance: Document, **kwargs):
def set_log_entry(sender, document=None, logging_group=None, **kwargs):
ct = ContentType.objects.get(model="document")
user = User.objects.get(username="consumer")


@ -65,7 +65,6 @@ def train_classifier():
and not Correspondent.objects.filter(matching_algorithm=Tag.MATCH_AUTO).exists()
and not StoragePath.objects.filter(matching_algorithm=Tag.MATCH_AUTO).exists()
):
return
classifier = load_classifier()
@ -91,7 +90,6 @@ def consume_file(
input_doc: ConsumableDocument,
overrides: Optional[DocumentMetadataOverrides] = None,
):
# Default no overrides
if overrides is None:
overrides = DocumentMetadataOverrides()
@ -117,7 +115,6 @@ def consume_file(
)
if document_list:
# If the file is an upload, it's in the scratch directory
# Move it to consume directory to be picked up
# Otherwise, use the current parent to keep possible tags


@ -55,7 +55,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.client.force_authenticate(user=self.user)
def testDocuments(self):
response = self.client.get("/api/documents/").data
self.assertEqual(response["count"], 0)
@ -171,7 +170,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.assertEqual(len(results[0]), 0)
def test_document_actions(self):
_, filename = tempfile.mkstemp(dir=self.dirs.originals_dir)
content = b"This is a test"
@ -270,7 +268,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
@override_settings(FILENAME_FORMAT="")
def test_download_with_archive(self):
content = b"This is a test"
content_archive = b"This is the same test but archived"
@ -312,7 +309,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.assertEqual(response.content, content)
def test_document_actions_not_existing_file(self):
doc = Document.objects.create(
title="none",
filename=os.path.basename("asd"),
@ -329,7 +325,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_document_filters(self):
doc1 = Document.objects.create(
title="none1",
checksum="A",
@ -427,7 +422,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.assertEqual(len(results), 0)
def test_documents_title_content_filter(self):
doc1 = Document.objects.create(
title="title A",
content="content A",
@ -1101,7 +1095,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
)
def test_statistics(self):
doc1 = Document.objects.create(
title="none1",
checksum="A",
@ -1149,7 +1142,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.assertEqual(response.data["inbox_tag"], None)
def test_upload(self):
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
@ -1177,7 +1169,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.assertIsNone(overrides.tag_ids)
def test_upload_empty_metadata(self):
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
@ -1205,7 +1196,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.assertIsNone(overrides.tag_ids)
def test_upload_invalid_form(self):
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
@ -1222,7 +1212,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.consume_file_mock.assert_not_called()
def test_upload_invalid_file(self):
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
@ -1239,7 +1228,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.consume_file_mock.assert_not_called()
def test_upload_with_title(self):
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
@ -1264,7 +1252,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.assertIsNone(overrides.tag_ids)
def test_upload_with_correspondent(self):
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
@ -1290,7 +1277,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.assertIsNone(overrides.tag_ids)
def test_upload_with_invalid_correspondent(self):
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
@ -1308,7 +1294,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.consume_file_mock.assert_not_called()
def test_upload_with_document_type(self):
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
@ -1334,7 +1319,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.assertIsNone(overrides.tag_ids)
def test_upload_with_invalid_document_type(self):
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
@ -1352,7 +1336,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.consume_file_mock.assert_not_called()
def test_upload_with_tags(self):
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
@ -1379,7 +1362,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.assertIsNone(overrides.title)
def test_upload_with_invalid_tags(self):
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
@ -1399,7 +1381,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.consume_file_mock.assert_not_called()
def test_upload_with_created(self):
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
@ -1431,7 +1412,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
self.assertEqual(overrides.created, created)
def test_upload_with_asn(self):
self.consume_file_mock.return_value = celery.result.AsyncResult(
id=str(uuid.uuid4()),
)
@ -1655,7 +1635,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
)
def test_create_update_patch(self):
User.objects.create_user("user1")
view = {
@ -2134,7 +2113,6 @@ class TestDocumentApiV2(DirectoriesMixin, APITestCase):
class TestApiUiSettings(DirectoriesMixin, APITestCase):
ENDPOINT = "/api/ui_settings/"
def setUp(self):
@ -2930,7 +2908,6 @@ class TestBulkEdit(DirectoriesMixin, APITestCase):
class TestBulkDownload(DirectoriesMixin, APITestCase):
ENDPOINT = "/api/documents/bulk_download/"
def setUp(self):
@ -3252,7 +3229,6 @@ class TestBulkDownload(DirectoriesMixin, APITestCase):
class TestApiAuth(DirectoriesMixin, APITestCase):
def test_auth_required(self):
d = Document.objects.create(title="Test")
self.assertEqual(
@ -3317,7 +3293,6 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
)
def test_api_version_no_auth(self):
response = self.client.get("/api/")
self.assertNotIn("X-Api-Version", response)
self.assertNotIn("X-Version", response)
@ -3430,7 +3405,6 @@ class TestApiRemoteVersion(DirectoriesMixin, APITestCase):
@mock.patch("urllib.request.urlopen")
def test_remote_version_enabled_no_update_prefix(self, urlopen_mock):
cm = MagicMock()
cm.getcode.return_value = status.HTTP_200_OK
cm.read.return_value = json.dumps({"tag_name": "ngx-1.6.0"}).encode()
@ -3450,7 +3424,6 @@ class TestApiRemoteVersion(DirectoriesMixin, APITestCase):
@mock.patch("urllib.request.urlopen")
def test_remote_version_enabled_no_update_no_prefix(self, urlopen_mock):
cm = MagicMock()
cm.getcode.return_value = status.HTTP_200_OK
cm.read.return_value = json.dumps(
@ -3472,7 +3445,6 @@ class TestApiRemoteVersion(DirectoriesMixin, APITestCase):
@mock.patch("urllib.request.urlopen")
def test_remote_version_enabled_update(self, urlopen_mock):
new_version = (
version.__version__[0],
version.__version__[1],
@ -3501,7 +3473,6 @@ class TestApiRemoteVersion(DirectoriesMixin, APITestCase):
@mock.patch("urllib.request.urlopen")
def test_remote_version_bad_json(self, urlopen_mock):
cm = MagicMock()
cm.getcode.return_value = status.HTTP_200_OK
cm.read.return_value = b'{ "blah":'
@ -3521,7 +3492,6 @@ class TestApiRemoteVersion(DirectoriesMixin, APITestCase):
@mock.patch("urllib.request.urlopen")
def test_remote_version_exception(self, urlopen_mock):
cm = MagicMock()
cm.getcode.return_value = status.HTTP_200_OK
cm.read.side_effect = urllib.error.URLError("an error")


@ -36,7 +36,6 @@ class TestDocumentChecks(TestCase):
@mock.patch("paperless.db.GnuPG.decrypted")
@mock.patch("documents.models.Document.source_file")
def test_encrypted_decrypt_fails(self, mock_decrypted, mock_source_file):
mock_decrypted.return_value = None
mock_source_file.return_value = b""
@ -61,7 +60,6 @@ class TestDocumentChecks(TestCase):
)
def test_parser_check(self):
self.assertEqual(parser_check(None), [])
with mock.patch("documents.checks.document_consumer_declaration.send") as m:


@ -326,7 +326,6 @@ class TestClassifier(DirectoriesMixin, TestCase):
classifier2.load()
def testSaveClassifier(self):
self.generate_train_and_save()
new_classifier = DocumentClassifier()
@ -336,7 +335,6 @@ class TestClassifier(DirectoriesMixin, TestCase):
self.assertFalse(new_classifier.train())
def test_load_and_classify(self):
self.generate_train_and_save()
new_classifier = DocumentClassifier()


@ -35,7 +35,6 @@ from .utils import DirectoriesMixin
class TestAttributes(TestCase):
TAGS = ("tag1", "tag2", "tag3")
def _test_guess_attributes_from_name(self, filename, sender, title, tags):
@ -68,7 +67,6 @@ class TestAttributes(TestCase):
class TestFieldPermutations(TestCase):
valid_dates = (
"20150102030405Z",
"20150102Z",
@ -85,7 +83,6 @@ class TestFieldPermutations(TestCase):
title=None,
tags=None,
):
info = FileInfo.from_filename(filename)
# Created
@ -132,7 +129,6 @@ class TestFieldPermutations(TestCase):
self.assertIsNone(info.created)
def test_filename_parse_transforms(self):
filename = "tag1,tag2_20190908_180610_0001.pdf"
all_patt = re.compile("^.*$")
none_patt = re.compile("$a")
@ -215,7 +211,6 @@ class FaultyParser(DocumentParser):
def fake_magic_from_file(file, mime=False):
if mime:
if os.path.splitext(file)[1] == ".pdf":
return "application/pdf"
@ -240,7 +235,6 @@ class TestConsumer(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
last_progress=100,
last_progress_max=100,
):
self._send_progress.assert_called()
args, kwargs = self._send_progress.call_args_list[0]
@ -315,7 +309,6 @@ class TestConsumer(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
@override_settings(FILENAME_FORMAT=None, TIME_ZONE="America/Chicago")
def testNormalOperation(self):
filename = self.get_test_file()
# Get the local time, as an aware datetime
@ -437,7 +430,6 @@ class TestConsumer(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
self._assert_first_last_send_progress()
def testNotAFile(self):
self.assertRaisesMessage(
ConsumerError,
"File not found",
@ -545,7 +537,6 @@ class TestConsumer(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
@override_settings(FILENAME_FORMAT="{correspondent}/{title}")
@mock.patch("documents.signals.handlers.generate_unique_filename")
def testFilenameHandlingUnstableFormat(self, m):
filenames = ["this", "that", "now this", "i cant decide"]
def get_filename():
@ -792,7 +783,6 @@ class TestConsumerCreatedDate(DirectoriesMixin, TestCase):
class PreConsumeTestCase(TestCase):
def setUp(self) -> None:
# this prevents websocket message reports during testing.
patcher = mock.patch("documents.consumer.Consumer._send_progress")
self._send_progress = patcher.start()
@ -900,7 +890,6 @@ class PreConsumeTestCase(TestCase):
class PostConsumeTestCase(TestCase):
def setUp(self) -> None:
# this prevents websocket message reports during testing.
patcher = mock.patch("documents.consumer.Consumer._send_progress")
self._send_progress = patcher.start()


@ -13,7 +13,6 @@ from documents.parsers import parse_date_generator
class TestDate(TestCase):
SAMPLE_FILES = os.path.join(
os.path.dirname(__file__),
"../../paperless_tesseract/tests/samples",


@ -52,7 +52,6 @@ class TestDocument(TestCase):
self.assertEqual(mock_unlink.call_count, 2)
def test_file_name(self):
doc = Document(
mime_type="application/pdf",
title="test",
@ -64,7 +63,6 @@ class TestDocument(TestCase):
TIME_ZONE="Europe/Berlin",
)
def test_file_name_with_timezone(self):
# See https://docs.djangoproject.com/en/4.0/ref/utils/#django.utils.timezone.now
# The default for created is an aware datetime in UTC
# This does that, just manually, with a fixed date
@ -107,7 +105,6 @@ class TestDocument(TestCase):
self.assertEqual(doc.get_public_filename(), "2020-01-01 test.pdf")
def test_file_name_jpg(self):
doc = Document(
mime_type="image/jpeg",
title="test",
@ -116,7 +113,6 @@ class TestDocument(TestCase):
self.assertEqual(doc.get_public_filename(), "2020-12-25 test.jpg")
def test_file_name_unknown(self):
doc = Document(
mime_type="application/zip",
title="test",
@ -125,7 +121,6 @@ class TestDocument(TestCase):
self.assertEqual(doc.get_public_filename(), "2020-12-25 test.zip")
def test_file_name_invalid_type(self):
doc = Document(
mime_type="image/jpegasd",
title="test",


@ -119,7 +119,6 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
@override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
def test_file_renaming_database_error(self):
Document.objects.create(
mime_type="application/pdf",
storage_type=Document.STORAGE_TYPE_UNENCRYPTED,
@ -842,7 +841,6 @@ class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, Test
@override_settings(FILENAME_FORMAT="{correspondent}/{title}")
def test_database_error(self):
original = os.path.join(settings.ORIGINALS_DIR, "0000001.pdf")
archive = os.path.join(settings.ARCHIVE_DIR, "0000001.pdf")
Path(original).touch()
@ -868,7 +866,6 @@ class TestFileHandlingWithArchive(DirectoriesMixin, FileSystemAssertsMixin, Test
class TestFilenameGeneration(DirectoriesMixin, TestCase):
@override_settings(FILENAME_FORMAT="{title}")
def test_invalid_characters(self):
doc = Document.objects.create(
title="This. is the title.",
mime_type="application/pdf",


@ -23,7 +23,6 @@ class TestImporter(TestCase):
)
def test_check_manifest(self):
cmd = Command()
cmd.source = Path("/tmp")
@ -54,7 +53,6 @@ class TestImporter(TestCase):
- CommandError is raised indicating the issue
"""
with tempfile.TemporaryDirectory() as temp_dir:
# Create empty files
original_path = Path(temp_dir) / "original.pdf"
archive_path = Path(temp_dir) / "archive.pdf"


@ -9,7 +9,6 @@ from documents.tests.utils import DirectoriesMixin
class TestAutoComplete(DirectoriesMixin, TestCase):
def test_auto_complete(self):
doc1 = Document.objects.create(
title="doc1",
checksum="A",


@ -30,7 +30,6 @@ class TestArchiver(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
)
def test_archiver(self):
doc = self.make_models()
shutil.copy(
sample_file,
@ -40,7 +39,6 @@ class TestArchiver(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
call_command("document_archiver")
def test_handle_document(self):
doc = self.make_models()
shutil.copy(
sample_file,
@ -114,7 +112,6 @@ class TestDecryptDocuments(FileSystemAssertsMixin, TestCase):
)
@mock.patch("documents.management.commands.decrypt_documents.input")
def test_decrypt(self, m):
media_dir = tempfile.mkdtemp()
originals_dir = os.path.join(media_dir, "documents", "originals")
thumb_dir = os.path.join(media_dir, "documents", "thumbnails")


@ -150,7 +150,6 @@ class TestConsumer(DirectoriesMixin, ConsumerThreadMixin, TransactionTestCase):
@mock.patch("documents.management.commands.document_consumer.logger.error")
def test_slow_write_pdf(self, error_logger):
self.consume_file_mock.side_effect = self.bogus_task
self.t_start()
@ -171,7 +170,6 @@ class TestConsumer(DirectoriesMixin, ConsumerThreadMixin, TransactionTestCase):
@mock.patch("documents.management.commands.document_consumer.logger.error")
def test_slow_write_and_move(self, error_logger):
self.consume_file_mock.side_effect = self.bogus_task
self.t_start()
@ -194,7 +192,6 @@ class TestConsumer(DirectoriesMixin, ConsumerThreadMixin, TransactionTestCase):
@mock.patch("documents.management.commands.document_consumer.logger.error")
def test_slow_write_incomplete(self, error_logger):
self.consume_file_mock.side_effect = self.bogus_task
self.t_start()
@ -215,12 +212,10 @@ class TestConsumer(DirectoriesMixin, ConsumerThreadMixin, TransactionTestCase):
@override_settings(CONSUMPTION_DIR="does_not_exist")
def test_consumption_directory_invalid(self):
self.assertRaises(CommandError, call_command, "document_consumer", "--oneshot")
@override_settings(CONSUMPTION_DIR="")
def test_consumption_directory_unset(self):
self.assertRaises(CommandError, call_command, "document_consumer", "--oneshot")
def test_mac_write(self):
@ -332,7 +327,6 @@ class TestConsumer(DirectoriesMixin, ConsumerThreadMixin, TransactionTestCase):
@mock.patch("documents.management.commands.document_consumer.open")
def test_consume_file_busy(self, open_mock):
# Calling this mock always raises this
open_mock.side_effect = OSError
@ -378,7 +372,6 @@ class TestConsumerRecursivePolling(TestConsumer):
class TestConsumerTags(DirectoriesMixin, ConsumerThreadMixin, TransactionTestCase):
@override_settings(CONSUMER_RECURSIVE=True, CONSUMER_SUBDIRS_AS_TAGS=True)
def test_consume_file_with_path_tags(self):
tag_names = ("existingTag", "Space Tag")
# Create a Tag prior to consuming a file using it in path
tag_ids = [


@ -364,7 +364,6 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
)
def test_export_missing_files(self):
target = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, target)
Document.objects.create(
@ -458,7 +457,6 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
args = ["document_exporter", "/tmp/foo/bar"]
with self.assertRaises(CommandError) as e:
call_command(*args)
self.assertEqual("That path isn't a directory", str(e))
@ -474,11 +472,9 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
"""
with tempfile.NamedTemporaryFile() as tmp_file:
args = ["document_exporter", tmp_file.name]
with self.assertRaises(CommandError) as e:
call_command(*args)
self.assertEqual("That path isn't a directory", str(e))
@ -493,13 +489,11 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
- Error is raised
"""
with tempfile.TemporaryDirectory() as tmp_dir:
os.chmod(tmp_dir, 0o000)
args = ["document_exporter", tmp_dir]
with self.assertRaises(CommandError) as e:
call_command(*args)
self.assertEqual("That path doesn't appear to be writable", str(e))


@ -11,7 +11,6 @@ from documents.tests.utils import DirectoriesMixin
class TestRetagger(DirectoriesMixin, TestCase):
def make_models(self):
self.sp1 = StoragePath.objects.create(
name="dummy a",
path="{created_data}/{title}",

Some files were not shown because too many files have changed in this diff.