Wrong indentation level
@@ -327,48 +327,50 @@ class Command(CryptMixin, BaseCommand):
         for index, document_dict in enumerate(document_manifest):
             document = document_map[document_dict["pk"]]

             # 3.1. generate a unique filename
             base_name = self.generate_base_name(document)

             # 3.2. write filenames into manifest
             original_target, thumbnail_target, archive_target = (
                 self.generate_document_targets(document, base_name, document_dict)
             )

-        # 3.3. write files to target folder
-        if not self.data_only:
-            self.copy_document_files(
-                document,
-                original_target,
-                thumbnail_target,
-                archive_target,
-            )
-
-        if self.split_manifest:
-            manifest_name = base_name.with_name(f"{base_name.stem}-manifest.json")
-            if self.use_folder_prefix:
-                manifest_name = Path("json") / manifest_name
-            manifest_name = (self.target / manifest_name).resolve()
-            manifest_name.parent.mkdir(parents=True, exist_ok=True)
-            content = [document_manifest[index]]
-            content += list(
-                filter(
-                    lambda d: d["fields"]["document"] == document_dict["pk"],
-                    manifest_dict["notes"],
-                ),
-            )
-            content += list(
-                filter(
-                    lambda d: d["fields"]["document"] == document_dict["pk"],
-                    manifest_dict["custom_field_instances"],
-                ),
-            )
-
-            self.check_and_write_json(
-                content,
-                manifest_name,
-            )
-        progress.update(task, advance=1)
+            # 3.3. write files to target folder
+            if not self.data_only:
+                self.copy_document_files(
+                    document,
+                    original_target,
+                    thumbnail_target,
+                    archive_target,
+                )
+
+            if self.split_manifest:
+                manifest_name = base_name.with_name(
+                    f"{base_name.stem}-manifest.json",
+                )
+                if self.use_folder_prefix:
+                    manifest_name = Path("json") / manifest_name
+                manifest_name = (self.target / manifest_name).resolve()
+                manifest_name.parent.mkdir(parents=True, exist_ok=True)
+                content = [document_manifest[index]]
+                content += list(
+                    filter(
+                        lambda d: d["fields"]["document"] == document_dict["pk"],
+                        manifest_dict["notes"],
+                    ),
+                )
+                content += list(
+                    filter(
+                        lambda d: d["fields"]["document"] == document_dict["pk"],
+                        manifest_dict["custom_field_instances"],
+                    ),
+                )
+
+                self.check_and_write_json(
+                    content,
+                    manifest_name,
+                )
+            progress.update(task, advance=1)

         # These were exported already
         if self.split_manifest:
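Why the level matters: at the old eight-space indent, the whole "write files / write split manifest" block sat at the same level as the for statement itself, so it executed once, after the loop, against whatever the final iteration left in document, base_name, and the target variables; only the last document's files and per-document manifest would ever be written. A minimal sketch of the failure mode (illustrative only, not repository code):

documents = ["doc-1", "doc-2", "doc-3"]

# Correct level: the block is part of the loop body and runs per document.
for doc in documents:
    base_name = f"{doc}.json"
    print("writing", base_name)   # writes three manifests

# Wrong level: the block trails the loop and runs exactly once,
# seeing only the names bound by the final iteration.
for doc in documents:
    base_name = f"{doc}.json"
print("writing", base_name)       # writes only doc-3.json

The hunk also reflows the single-line manifest_name assignment into a multi-line with_name() call, which accounts for the two extra lines on the new side. The next hunk applies the matching re-indent to the import side; the text of those lines is unchanged.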
@@ -383,56 +383,56 @@ class Command(CryptMixin, BaseCommand):
         for record in manifest_documents:
             document = Document.objects.get(pk=record["pk"])

             doc_file = record[EXPORTER_FILE_NAME]
             document_path = self.source / doc_file

             if EXPORTER_THUMBNAIL_NAME in record:
                 thumb_file = record[EXPORTER_THUMBNAIL_NAME]
                 thumbnail_path = (self.source / thumb_file).resolve()
             else:
                 thumbnail_path = None

             if EXPORTER_ARCHIVE_NAME in record:
                 archive_file = record[EXPORTER_ARCHIVE_NAME]
                 archive_path = self.source / archive_file
             else:
                 archive_path = None

             with FileLock(settings.MEDIA_LOCK):
                 if Path(document.source_path).is_file():
                     raise FileExistsError(document.source_path)

                 create_source_path_directory(document.source_path)

                 copy_file_with_basic_stats(document_path, document.source_path)

                 if thumbnail_path:
                     if thumbnail_path.suffix in {".png", ".PNG"}:
                         run_convert(
                             density=300,
                             scale="500x5000>",
                             alpha="remove",
                             strip=True,
                             trim=False,
                             auto_orient=True,
                             input_file=f"{thumbnail_path}[0]",
                             output_file=str(document.thumbnail_path),
                         )
                     else:
                         copy_file_with_basic_stats(
                             thumbnail_path,
                             document.thumbnail_path,
                         )

                 if archive_path:
                     create_source_path_directory(document.archive_path)
                     # TODO: this assumes that the export is valid and
                     # archive_filename is present on all documents with
                     # archived files
                     copy_file_with_basic_stats(archive_path, document.archive_path)

             document.save()
             progress.update(task, advance=1)

     def decrypt_secret_fields(self) -> None:
         """
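The import loop holds the media lock while it restores each document: it refuses to clobber an existing source file, creates the parent directory, then copies the original (and any thumbnail or archive version) with basic stat preservation. A rough stdlib-only sketch of that guard, with create_source_path_directory and copy_file_with_basic_stats approximated by pathlib and shutil, and the FileLock omitted:

import shutil
from pathlib import Path

def place_file(src: Path, dest: Path) -> None:
    if dest.is_file():
        # Mirrors the `raise FileExistsError(document.source_path)` above:
        # an import must never overwrite a document already at the target.
        raise FileExistsError(dest)
    dest.parent.mkdir(parents=True, exist_ok=True)  # ~ create_source_path_directory
    shutil.copy2(src, dest)                         # ~ copy_file_with_basic_stats (keeps mtime etc.)

The remaining hunks adjust the LLM-index tests.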
@@ -76,6 +76,7 @@ def test_update_llm_index(
     mock_queryset = MagicMock()
     mock_queryset.exists.return_value = True
     mock_queryset.__iter__.return_value = iter([real_document])
+    mock_queryset.count.return_value = 1
     mock_all.return_value = mock_queryset
     indexing.update_llm_index(rebuild=True)

@@ -97,6 +98,7 @@ def test_update_llm_index_removes_meta(
     mock_queryset = MagicMock()
     mock_queryset.exists.return_value = True
     mock_queryset.__iter__.return_value = iter([real_document])
+    mock_queryset.count.return_value = 1
     mock_all.return_value = mock_queryset
     indexing.update_llm_index(rebuild=True)

@@ -129,6 +131,7 @@ def test_update_llm_index_partial_update(
     mock_queryset = MagicMock()
     mock_queryset.exists.return_value = True
     mock_queryset.__iter__.return_value = iter([real_document, doc2])
+    mock_queryset.count.return_value = 2
     mock_all.return_value = mock_queryset

     indexing.update_llm_index(rebuild=True)
@@ -149,6 +152,7 @@ def test_update_llm_index_partial_update(
     mock_queryset = MagicMock()
     mock_queryset.exists.return_value = True
     mock_queryset.__iter__.return_value = iter([updated_document, doc2, doc3])
+    mock_queryset.count.return_value = 3
     mock_all.return_value = mock_queryset

     # assert logs "Updating LLM index with %d new nodes and removing %d old nodes."
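All four test hunks make the same one-line addition: the mocked queryset now pins count.return_value to the number of documents its __iter__ yields. Presumably update_llm_index calls .count() on the queryset, e.g. for the log line quoted above, which formats node counts with %d; left unpinned, a MagicMock answers .count() with another mock rather than an integer. A minimal sketch of the resulting mock pattern (stand-in document values, not the real fixtures):

from unittest.mock import MagicMock

docs = ["doc-1", "doc-2"]  # stand-ins for Document instances

mock_queryset = MagicMock()
mock_queryset.exists.return_value = True
mock_queryset.__iter__.return_value = iter(docs)
mock_queryset.count.return_value = len(docs)  # keep count consistent with iteration

assert mock_queryset.exists() is True
assert list(mock_queryset) == docs  # single-use: iter(docs) is exhausted afterwards
assert mock_queryset.count() == 2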