Mirror of https://github.com/paperless-ngx/paperless-ngx.git
Runs the pre-commit hooks over all the Python files
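This is a mechanical formatting pass: the hooks split combined imports onto one line each, add trailing commas to multi-line calls, and drop unused exception bindings ("except Exception as e:" becomes "except Exception:") without changing behaviour. As an illustration only (the repository's hook configuration is not part of this view), a pass like this is typically produced by running the configured hooks over the whole working tree instead of just the staged files:

    # one-time setup of the git hook, then a full-tree run of every configured hook
    pre-commit install
    pre-commit run --all-files

The individual hooks behind these edits (for example an import reorderer and a trailing-comma fixer) are inferred from the shape of the diff; the commit itself does not name them.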
@@ -1,8 +1,8 @@
 import os

 from django.conf import settings
-from django.core.management.base import BaseCommand, CommandError
-
+from django.core.management.base import BaseCommand
+from django.core.management.base import CommandError
 from documents.models import Document
 from paperless.db import GnuPG

@@ -31,9 +31,9 @@ class Command(BaseCommand):
                "this unless you've got a recent backup\nWARNING: handy. It "
                "*should* work without a hitch, but be safe and backup your\n"
                "WARNING: stuff first.\n\nHit Ctrl+C to exit now, or Enter to "
-               "continue.\n\n"
+               "continue.\n\n",
            )
-           __ = input()
+           _ = input()
        except KeyboardInterrupt:
            return

@@ -41,7 +41,7 @@ class Command(BaseCommand):
        if not passphrase:
            raise CommandError(
                "Passphrase not defined. Please set it with --passphrase or "
-               "by declaring it in your environment or your config."
+               "by declaring it in your environment or your config.",
            )

        self.__gpg_to_unencrypted(passphrase)
@@ -50,7 +50,7 @@ class Command(BaseCommand):
    def __gpg_to_unencrypted(passphrase):

        encrypted_files = Document.objects.filter(
-           storage_type=Document.STORAGE_TYPE_GPG
+           storage_type=Document.STORAGE_TYPE_GPG,
        )

        for document in encrypted_files:
@@ -71,7 +71,7 @@ class Command(BaseCommand):
            if not ext == ".gpg":
                raise CommandError(
                    f"Abort: encrypted file {document.source_path} does not "
-                   f"end with .gpg"
+                   f"end with .gpg",
                )

            document.filename = os.path.splitext(document.filename)[0]
@@ -83,7 +83,8 @@ class Command(BaseCommand):
                f.write(raw_thumb)

            Document.objects.filter(id=document.id).update(
-               storage_type=document.storage_type, filename=document.filename
+               storage_type=document.storage_type,
+               filename=document.filename,
            )

            for path in old_paths:

@@ -1,7 +1,6 @@
 import hashlib
-import multiprocessing
-
 import logging
+import multiprocessing
 import os
 import shutil
 import uuid
@@ -11,12 +10,12 @@ from django import db
 from django.conf import settings
 from django.core.management.base import BaseCommand
 from django.db import transaction
+from filelock import FileLock
 from whoosh.writing import AsyncWriter

 from documents.models import Document
-from filelock import FileLock

 from ... import index
-from ...file_handling import create_source_path_directory, generate_unique_filename
+from ...file_handling import create_source_path_directory
+from ...file_handling import generate_unique_filename
 from ...parsers import get_parser_class_for_mime_type
@@ -33,7 +32,7 @@ def handle_document(document_id):
    if not parser_class:
        logger.error(
            f"No parser found for mime type {mime_type}, cannot "
-           f"archive document {document} (ID: {document_id})"
+           f"archive document {document} (ID: {document_id})",
        )
        return

@@ -43,7 +42,9 @@ def handle_document(document_id):
        parser.parse(document.source_path, mime_type, document.get_public_filename())

        thumbnail = parser.get_optimised_thumbnail(
-           document.source_path, mime_type, document.get_public_filename()
+           document.source_path,
+           mime_type,
+           document.get_public_filename(),
        )

        if parser.get_archive_path():
@@ -55,7 +56,8 @@ def handle_document(document_id):
                # We also don't use save() since that triggers the filehandling
                # logic, and we don't want that yet (file not yet in place)
                document.archive_filename = generate_unique_filename(
-                   document, archive_filename=True
+                   document,
+                   archive_filename=True,
                )
                Document.objects.filter(pk=document.pk).update(
                    archive_checksum=checksum,
@@ -70,9 +72,9 @@ def handle_document(document_id):
            with index.open_index_writer() as writer:
                index.update_document(writer, document)

-   except Exception as e:
+   except Exception:
        logger.exception(
-           f"Error while parsing document {document} " f"(ID: {document_id})"
+           f"Error while parsing document {document} " f"(ID: {document_id})",
        )
    finally:
        parser.cleanup()
@@ -86,7 +88,8 @@ class Command(BaseCommand):
        back-tag all previously indexed documents with metadata created (or
        modified) after their initial import.
    """.replace(
-       " ", ""
+       " ",
+       "",
    )

    def add_arguments(self, parser):
@@ -129,7 +132,7 @@ class Command(BaseCommand):
            map(
                lambda doc: doc.id,
                filter(lambda d: overwrite or not d.has_archive_version, documents),
-           )
+           ),
        )

        # Note to future self: this prevents django from reusing database
@@ -146,7 +149,7 @@ class Command(BaseCommand):
                        pool.imap_unordered(handle_document, document_ids),
                        total=len(document_ids),
                        disable=options["no_progress_bar"],
-                   )
+                   ),
                )
        except KeyboardInterrupt:
            print("Aborting...")

@@ -1,17 +1,18 @@
 import logging
 import os
-from pathlib import Path, PurePath
+from pathlib import Path
+from pathlib import PurePath
 from threading import Thread
 from time import sleep

 from django.conf import settings
-from django.core.management.base import BaseCommand, CommandError
+from django.core.management.base import BaseCommand
+from django.core.management.base import CommandError
 from django_q.tasks import async_task
-from watchdog.events import FileSystemEventHandler
-from watchdog.observers.polling import PollingObserver
-
 from documents.models import Tag
 from documents.parsers import is_file_ext_supported
+from watchdog.events import FileSystemEventHandler
+from watchdog.observers.polling import PollingObserver

 try:
     from inotifyrecursive import INotify, flags
@@ -29,7 +30,7 @@ def _tags_from_path(filepath):
    path_parts = Path(filepath).relative_to(settings.CONSUMPTION_DIR).parent.parts
    for part in path_parts:
        tag_ids.add(
-           Tag.objects.get_or_create(name__iexact=part, defaults={"name": part})[0].pk
+           Tag.objects.get_or_create(name__iexact=part, defaults={"name": part})[0].pk,
        )

    return tag_ids
@@ -56,7 +57,7 @@ def _consume(filepath):
    try:
        if settings.CONSUMER_SUBDIRS_AS_TAGS:
            tag_ids = _tags_from_path(filepath)
-   except Exception as e:
+   except Exception:
        logger.exception("Error creating tags from path")

    try:
@@ -67,7 +68,7 @@ def _consume(filepath):
            override_tag_ids=tag_ids if tag_ids else None,
            task_name=os.path.basename(filepath)[:100],
        )
-   except Exception as e:
+   except Exception:
        # Catch all so that the consumer won't crash.
        # This is also what the test case is listening for to check for
        # errors.
@@ -86,7 +87,7 @@ def _consume_wait_unmodified(file):
            new_mtime = os.stat(file).st_mtime
        except FileNotFoundError:
            logger.debug(
-               f"File {file} moved while waiting for it to remain " f"unmodified."
+               f"File {file} moved while waiting for it to remain " f"unmodified.",
            )
            return
        if new_mtime == mtime:

@@ -9,7 +9,8 @@ class Command(BaseCommand):
        Trains the classifier on your data and saves the resulting models to a
        file. The document consumer will then automatically use this new model.
    """.replace(
-       " ", ""
+       " ",
+       "",
    )

    def __init__(self, *args, **kwargs):

@@ -6,28 +6,28 @@ import time

 import tqdm
 from django.conf import settings
-from django.contrib.auth.models import User, Group
+from django.contrib.auth.models import Group
+from django.contrib.auth.models import User
 from django.core import serializers
-from django.core.management.base import BaseCommand, CommandError
+from django.core.management.base import BaseCommand
+from django.core.management.base import CommandError
 from django.db import transaction
+from documents.models import Correspondent
+from documents.models import Document
+from documents.models import DocumentType
+from documents.models import SavedView
+from documents.models import SavedViewFilterRule
+from documents.models import Tag
+from documents.settings import EXPORTER_ARCHIVE_NAME
+from documents.settings import EXPORTER_FILE_NAME
+from documents.settings import EXPORTER_THUMBNAIL_NAME
 from filelock import FileLock

-from documents.models import (
-    Document,
-    Correspondent,
-    Tag,
-    DocumentType,
-    SavedView,
-    SavedViewFilterRule,
-)
-from documents.settings import (
-    EXPORTER_FILE_NAME,
-    EXPORTER_THUMBNAIL_NAME,
-    EXPORTER_ARCHIVE_NAME,
-)
 from paperless.db import GnuPG
-from paperless_mail.models import MailAccount, MailRule
-from ...file_handling import generate_filename, delete_empty_directories
+from paperless_mail.models import MailAccount
+from paperless_mail.models import MailRule
+
+from ...file_handling import delete_empty_directories
+from ...file_handling import generate_filename


 class Command(BaseCommand):
@@ -37,7 +37,8 @@ class Command(BaseCommand):
        directory. And include a manifest file containing document data for
        easy import.
    """.replace(
-       " ", ""
+       " ",
+       "",
    )

    def add_arguments(self, parser):
@@ -107,20 +108,20 @@ class Command(BaseCommand):
        # 1. Take a snapshot of what files exist in the current export folder
        for root, dirs, files in os.walk(self.target):
            self.files_in_export_dir.extend(
-               map(lambda f: os.path.abspath(os.path.join(root, f)), files)
+               map(lambda f: os.path.abspath(os.path.join(root, f)), files),
            )

        # 2. Create manifest, containing all correspondents, types, tags and
        # documents
        with transaction.atomic():
            manifest = json.loads(
-               serializers.serialize("json", Correspondent.objects.all())
+               serializers.serialize("json", Correspondent.objects.all()),
            )

            manifest += json.loads(serializers.serialize("json", Tag.objects.all()))

            manifest += json.loads(
-               serializers.serialize("json", DocumentType.objects.all())
+               serializers.serialize("json", DocumentType.objects.all()),
            )

            documents = Document.objects.order_by("id")
@@ -129,19 +130,19 @@ class Command(BaseCommand):
            manifest += document_manifest

            manifest += json.loads(
-               serializers.serialize("json", MailAccount.objects.all())
+               serializers.serialize("json", MailAccount.objects.all()),
            )

            manifest += json.loads(
-               serializers.serialize("json", MailRule.objects.all())
+               serializers.serialize("json", MailRule.objects.all()),
            )

            manifest += json.loads(
-               serializers.serialize("json", SavedView.objects.all())
+               serializers.serialize("json", SavedView.objects.all()),
            )

            manifest += json.loads(
-               serializers.serialize("json", SavedViewFilterRule.objects.all())
+               serializers.serialize("json", SavedViewFilterRule.objects.all()),
            )

            manifest += json.loads(serializers.serialize("json", Group.objects.all()))
@@ -155,9 +156,7 @@ class Command(BaseCommand):
            disable=progress_bar_disable,
        ):
            # 3.1. store files unencrypted
-           document_dict["fields"][
-               "storage_type"
-           ] = Document.STORAGE_TYPE_UNENCRYPTED  # NOQA: E501
+           document_dict["fields"]["storage_type"] = Document.STORAGE_TYPE_UNENCRYPTED

            document = document_map[document_dict["pk"]]

@@ -166,7 +165,9 @@ class Command(BaseCommand):
            while True:
                if self.use_filename_format:
                    base_name = generate_filename(
-                       document, counter=filename_counter, append_gpg=False
+                       document,
+                       counter=filename_counter,
+                       append_gpg=False,
                    )
                else:
                    base_name = document.get_public_filename(counter=filename_counter)
@@ -217,14 +218,18 @@ class Command(BaseCommand):
                os.utime(archive_target, times=(t, t))
            else:
                self.check_and_copy(
-                   document.source_path, document.checksum, original_target
+                   document.source_path,
+                   document.checksum,
+                   original_target,
                )

            self.check_and_copy(document.thumbnail_path, None, thumbnail_target)

            if archive_target:
                self.check_and_copy(
-                   document.archive_path, document.archive_checksum, archive_target
+                   document.archive_path,
+                   document.archive_checksum,
+                   archive_target,
                )

        # 4. write manifest to target forlder
@@ -243,7 +248,8 @@ class Command(BaseCommand):
            os.remove(f)

            delete_empty_directories(
-               os.path.abspath(os.path.dirname(f)), os.path.abspath(self.target)
+               os.path.abspath(os.path.dirname(f)),
+               os.path.abspath(self.target),
            )

    def check_and_copy(self, source, source_checksum, target):

@@ -7,16 +7,16 @@ from contextlib import contextmanager
 import tqdm
 from django.conf import settings
 from django.core.management import call_command
-from django.core.management.base import BaseCommand, CommandError
-from django.db.models.signals import post_save, m2m_changed
+from django.core.management.base import BaseCommand
+from django.core.management.base import CommandError
+from django.db.models.signals import m2m_changed
+from django.db.models.signals import post_save
+from documents.models import Document
+from documents.settings import EXPORTER_ARCHIVE_NAME
+from documents.settings import EXPORTER_FILE_NAME
+from documents.settings import EXPORTER_THUMBNAIL_NAME
 from filelock import FileLock

-from documents.models import Document
-from documents.settings import (
-    EXPORTER_FILE_NAME,
-    EXPORTER_THUMBNAIL_NAME,
-    EXPORTER_ARCHIVE_NAME,
-)
 from ...file_handling import create_source_path_directory
 from ...signals.handlers import update_filename_and_move_files
@@ -36,7 +36,8 @@ class Command(BaseCommand):
        Using a manifest.json file, load the data from there, and import the
        documents it refers to.
    """.replace(
-       " ", ""
+       " ",
+       "",
    )

    def add_arguments(self, parser):
@@ -73,7 +74,9 @@ class Command(BaseCommand):

        self._check_manifest()
        with disable_signal(
-           post_save, receiver=update_filename_and_move_files, sender=Document
+           post_save,
+           receiver=update_filename_and_move_files,
+           sender=Document,
        ):
            with disable_signal(
                m2m_changed,
@@ -92,7 +95,7 @@ class Command(BaseCommand):
    def _check_manifest_exists(path):
        if not os.path.exists(path):
            raise CommandError(
-               "That directory doesn't appear to contain a manifest.json " "file."
+               "That directory doesn't appear to contain a manifest.json " "file.",
            )

    def _check_manifest(self):
@@ -105,14 +108,14 @@ class Command(BaseCommand):
            if EXPORTER_FILE_NAME not in record:
                raise CommandError(
                    "The manifest file contains a record which does not "
-                   "refer to an actual document file."
+                   "refer to an actual document file.",
                )

            doc_file = record[EXPORTER_FILE_NAME]
            if not os.path.exists(os.path.join(self.source, doc_file)):
                raise CommandError(
                    'The manifest file refers to "{}" which does not '
-                   "appear to be in the source directory.".format(doc_file)
+                   "appear to be in the source directory.".format(doc_file),
                )

            if EXPORTER_ARCHIVE_NAME in record:
@@ -120,7 +123,7 @@ class Command(BaseCommand):
                if not os.path.exists(os.path.join(self.source, archive_file)):
                    raise CommandError(
                        f"The manifest file refers to {archive_file} which "
-                       f"does not appear to be in the source directory."
+                       f"does not appear to be in the source directory.",
                    )

    def _import_files_from_manifest(self, progress_bar_disable):
@@ -132,7 +135,7 @@ class Command(BaseCommand):
        print("Copy files into paperless...")

        manifest_documents = list(
-           filter(lambda r: r["model"] == "documents.document", self.manifest)
+           filter(lambda r: r["model"] == "documents.document", self.manifest),
        )

        for record in tqdm.tqdm(manifest_documents, disable=progress_bar_disable):

@@ -1,7 +1,7 @@
 from django.core.management import BaseCommand
 from django.db import transaction
-
-from documents.tasks import index_reindex, index_optimize
+from documents.tasks import index_optimize
+from documents.tasks import index_reindex


 class Command(BaseCommand):

@@ -3,7 +3,6 @@ import logging
 import tqdm
 from django.core.management.base import BaseCommand
 from django.db.models.signals import post_save
-
 from documents.models import Document


@@ -12,7 +11,8 @@ class Command(BaseCommand):
    help = """
        This will rename all documents to match the latest filename format.
    """.replace(
-       " ", ""
+       " ",
+       "",
    )

    def add_arguments(self, parser):
@@ -28,6 +28,7 @@ class Command(BaseCommand):
        logging.getLogger().handlers[0].level = logging.ERROR

        for document in tqdm.tqdm(
-           Document.objects.all(), disable=options["no_progress_bar"]
+           Document.objects.all(),
+           disable=options["no_progress_bar"],
        ):
            post_save.send(Document, instance=document)

@@ -2,10 +2,12 @@ import logging

 import tqdm
 from django.core.management.base import BaseCommand

 from documents.classifier import load_classifier
 from documents.models import Document
-from ...signals.handlers import set_correspondent, set_document_type, set_tags

+from ...signals.handlers import set_correspondent
+from ...signals.handlers import set_document_type
+from ...signals.handlers import set_tags


 logger = logging.getLogger("paperless.management.retagger")
@@ -19,7 +21,8 @@ class Command(BaseCommand):
        back-tag all previously indexed documents with metadata created (or
        modified) after their initial import.
    """.replace(
-       " ", ""
+       " ",
+       "",
    )

    def add_arguments(self, parser):
@@ -57,7 +60,8 @@ class Command(BaseCommand):
            help="Return the suggestion, don't change anything.",
        )
        parser.add_argument(
-           "--base-url", help="The base URL to use to build the link to the documents."
+           "--base-url",
+           help="The base URL to use to build the link to the documents.",
        )

    def handle(self, *args, **options):

@@ -7,7 +7,8 @@ class Command(BaseCommand):
    help = """
        This command checks your document archive for issues.
    """.replace(
-       " ", ""
+       " ",
+       "",
    )

    def add_arguments(self, parser):

@@ -5,8 +5,8 @@ import shutil
 import tqdm
 from django import db
 from django.core.management.base import BaseCommand

 from documents.models import Document
+
 from ...parsers import get_parser_class_for_mime_type
-

@@ -22,7 +22,9 @@ def _process_document(doc_in):

    try:
        thumb = parser.get_optimised_thumbnail(
-           document.source_path, document.mime_type, document.get_public_filename()
+           document.source_path,
+           document.mime_type,
+           document.get_public_filename(),
        )

        shutil.move(thumb, document.thumbnail_path)
@@ -35,7 +37,8 @@ class Command(BaseCommand):
    help = """
        This will regenerate the thumbnails for all documents.
    """.replace(
-       " ", ""
+       " ",
+       "",
    )

    def add_arguments(self, parser):
@@ -76,5 +79,5 @@ class Command(BaseCommand):
                    pool.imap_unordered(_process_document, ids),
                    total=len(ids),
                    disable=options["no_progress_bar"],
-               )
+               ),
            )

@@ -2,7 +2,7 @@ import logging
 import os

 from django.contrib.auth.models import User
-from django.core.management.base import BaseCommand, CommandError
+from django.core.management.base import BaseCommand


 logger = logging.getLogger("paperless.management.superuser")
@@ -13,7 +13,8 @@ class Command(BaseCommand):
    help = """
        Creates a Django superuser based on env variables.
    """.replace(
-       " ", ""
+       " ",
+       "",
    )

    def handle(self, *args, **options):
@@ -39,5 +40,5 @@ class Command(BaseCommand):
            self.stdout.write(f'Did not create superuser "{username}".')
            self.stdout.write(
                'Make sure you specified "PAPERLESS_ADMIN_PASSWORD" in your '
-               '"docker-compose.env" file.'
+               '"docker-compose.env" file.',
            )