paperless-ngx — mirror of https://github.com/paperless-ngx/paperless-ngx.git

Compare commits: dependabot ... feature-do (10 commits)

Commits: f3e664b577, 50a5192c89, 3a6bcec27c, 6a58d3ed20, 7e788a0ecd, dac15ea6be, 67f9375d77, 9e8b6071a3, 8897307a10, 26757e4930
@@ -30,16 +30,16 @@ dependencies = [
"django-cachalot~=2.8.0",
"django-celery-results~=2.6.0",
"django-compression-middleware~=0.5.0",
"django-cors-headers~=4.8.0",
"django-cors-headers~=4.7.0",
"django-extensions~=4.1",
"django-filter~=25.1",
"django-guardian~=3.1.2",
"django-guardian~=3.0.3",
"django-multiselectfield~=1.0.1",
"django-soft-delete~=1.0.18",
"djangorestframework~=3.16",
"djangorestframework-guardian~=0.4.0",
"drf-spectacular~=0.28",
"drf-spectacular-sidecar~=2025.9.1",
"drf-spectacular-sidecar~=2025.8.1",
"drf-writable-nested~=0.7.1",
"filelock~=3.19.1",
"flower~=2.0.1",
@@ -1,7 +1,30 @@
<pngx-page-header [(title)]="title">

@if (document?.versions?.length > 0) {
<div class="btn-group" ngbDropdown role="group">
<div class="btn-group" ngbDropdown role="group">
<button class="btn btn-sm btn-outline-secondary dropdown-toggle" ngbDropdownToggle>
<i-bs name="layers"></i-bs>
<span class="d-none d-lg-inline ps-1" i18n>Version</span>
</button>
<div class="dropdown-menu shadow" ngbDropdownMenu>
@for (vid of document.versions; track vid) {
<button ngbDropdownItem (click)="selectVersion(vid)">
<span i18n>Version</span> {{vid}}
@if (selectedVersionId === vid) { <span> ✓</span> }
</button>
}
</div>
</div>
<input #versionFileInput type="file" class="visually-hidden" (change)="onVersionFileSelected($event)" />
<button class="btn btn-sm btn-outline-secondary" title="Upload new version" i18n-title (click)="triggerUploadVersion()" [disabled]="!userIsOwner || !userCanEdit">
<i-bs name="file-earmark-plus"></i-bs><span class="visually-hidden" i18n>Upload new version</span>
</button>
</div>
}
@if (archiveContentRenderType === ContentRenderType.PDF && !useNativePdfViewer) {
@if (previewNumPages) {
<div class="input-group input-group-sm d-none d-md-flex">
<div class="input-group input-group-sm ms-2 d-none d-md-flex">
<div class="input-group-text" i18n>Page</div>
<input class="form-control flex-grow-0 w-auto" type="number" min="1" [max]="previewNumPages" [(ngModel)]="previewCurrentPage" />
<div class="input-group-text" i18n>of {{previewNumPages}}</div>
@@ -220,6 +220,8 @@ export class DocumentDetailComponent
titleSubject: Subject<string> = new Subject()
previewUrl: string
thumbUrl: string
// Versioning: which document ID to use for file preview/download
selectedVersionId: number
previewText: string
previewLoaded: boolean = false
tiffURL: string

@@ -268,6 +270,7 @@ export class DocumentDetailComponent
public readonly DataType = DataType

@ViewChild('nav') nav: NgbNav
@ViewChild('versionFileInput') versionFileInput
@ViewChild('pdfPreview') set pdfPreview(element) {
// this gets called when component added or removed from DOM
if (

@@ -396,7 +399,10 @@ export class DocumentDetailComponent
}

private loadDocument(documentId: number): void {
this.previewUrl = this.documentsService.getPreviewUrl(documentId)
this.selectedVersionId = documentId
this.previewUrl = this.documentsService.getPreviewUrl(
this.selectedVersionId
)
this.http
.get(this.previewUrl, { responseType: 'text' })
.pipe(

@@ -411,7 +417,7 @@ export class DocumentDetailComponent
err.message ?? err.toString()
}`),
})
this.thumbUrl = this.documentsService.getThumbUrl(documentId)
this.thumbUrl = this.documentsService.getThumbUrl(this.selectedVersionId)
this.documentsService
.get(documentId)
.pipe(

@@ -632,6 +638,10 @@ export class DocumentDetailComponent
updateComponent(doc: Document) {
this.document = doc
// Default selected version is the newest version
this.selectedVersionId = doc.versions?.length
? Math.max(...doc.versions)
: doc.id
this.requiresPassword = false
this.updateFormForCustomFields()
if (this.archiveContentRenderType === ContentRenderType.TIFF) {

@@ -696,6 +706,32 @@ export class DocumentDetailComponent
this.prepareForm(doc)
}

// Update file preview and download target to a specific version (by document id)
selectVersion(versionId: number) {
this.selectedVersionId = versionId
this.previewUrl = this.documentsService.getPreviewUrl(
this.documentId,
false,
this.selectedVersionId
)
this.thumbUrl = this.documentsService.getThumbUrl(this.selectedVersionId)
// For text previews, refresh content
this.http
.get(this.previewUrl, { responseType: 'text' })
.pipe(
first(),
takeUntil(this.unsubscribeNotifier),
takeUntil(this.docChangeNotifier)
)
.subscribe({
next: (res) => (this.previewText = res.toString()),
error: (err) =>
(this.previewText = $localize`An error occurred loading content: ${
err.message ?? err.toString()
}`),
})
}

get customFieldFormFields(): FormArray {
return this.documentForm.get('custom_fields') as FormArray
}

@@ -1043,10 +1079,41 @@ export class DocumentDetailComponent
})
}

// Upload a new file version for this document
triggerUploadVersion() {
this.versionFileInput?.nativeElement?.click()
}

onVersionFileSelected(event: Event) {
const input = event.target as HTMLInputElement
if (!input?.files || input.files.length === 0) return
const file = input.files[0]
// Reset input to allow re-selection of the same file later
input.value = ''
this.documentsService
.uploadVersion(this.documentId, file)
.pipe(first())
.subscribe({
next: () => {
this.toastService.showInfo(
$localize`Uploading new version. Processing will happen in the background.`
)
// Refresh metadata to reflect that versions changed (when ready)
this.openDocumentService.refreshDocument(this.documentId)
},
error: (error) => {
this.toastService.showError(
$localize`Error uploading new version`,
error
)
},
})
}

download(original: boolean = false) {
this.downloading = true
const downloadUrl = this.documentsService.getDownloadUrl(
this.documentId,
this.selectedVersionId || this.documentId,
original
)
this.http
@@ -159,6 +159,10 @@ export interface Document extends ObjectWithPermissions {
page_count?: number

// Versioning
head_version?: number
versions?: number[]

// Frontend only
__changedFields?: string[]
}
@@ -163,12 +163,19 @@ export class DocumentService extends AbstractPaperlessService<Document> {
})
}

getPreviewUrl(id: number, original: boolean = false): string {
getPreviewUrl(
id: number,
original: boolean = false,
versionID: number = null
): string {
let url = new URL(this.getResourceUrl(id, 'preview'))
if (this._searchQuery) url.hash = `#search="${this.searchQuery}"`
if (original) {
url.searchParams.append('original', 'true')
}
if (versionID) {
url.searchParams.append('version', versionID.toString())
}
return url.toString()
}

@@ -184,6 +191,16 @@ export class DocumentService extends AbstractPaperlessService<Document> {
return url
}

uploadVersion(documentId: number, file: File) {
const formData = new FormData()
formData.append('document', file, file.name)
return this.http.post(
this.getResourceUrl(documentId, 'update_version'),
formData,
{ reportProgress: true, observe: 'events' }
)
}

getNextAsn(): Observable<number> {
return this.http.get<number>(this.getResourceUrl(null, 'next_asn'))
}
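
For reference, a minimal client-side sketch (not part of the diff) of the two endpoints this service change talks to, written with Python's requests library. The /api/documents/<id>/preview/ and /api/documents/<id>/update_version/ paths and the "document" form field follow the service code above; BASE_URL, the token, and the ids are placeholders.

    # Sketch: exercising the versioning endpoints directly over HTTP.
    # BASE_URL, TOKEN and the document ids are placeholders.
    import requests

    BASE_URL = "http://localhost:8000/api"          # placeholder
    HEADERS = {"Authorization": "Token changeme"}   # placeholder token auth
    DOC_ID = 123                                    # head document id (placeholder)

    # Upload a new file as a new version of the document
    with open("contract_v2.pdf", "rb") as fh:
        resp = requests.post(
            f"{BASE_URL}/documents/{DOC_ID}/update_version/",
            headers=HEADERS,
            files={"document": fh},
        )
    resp.raise_for_status()
    print("consume task id:", resp.json())

    # Fetch the preview of one specific version via the new ?version= parameter
    VERSION_ID = 456  # an entry of document.versions (placeholder)
    preview = requests.get(
        f"{BASE_URL}/documents/{DOC_ID}/preview/",
        headers=HEADERS,
        params={"version": VERSION_ID},
    )
    preview.raise_for_status()
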
@@ -77,6 +77,7 @@ import {
fileEarmarkFill,
fileEarmarkLock,
fileEarmarkMinus,
fileEarmarkPlus,
fileEarmarkRichtext,
fileText,
files,

@@ -286,6 +287,7 @@ const icons = {
fileEarmarkFill,
fileEarmarkLock,
fileEarmarkMinus,
fileEarmarkPlus,
fileEarmarkRichtext,
files,
fileText,
@@ -1,6 +1,5 @@
from __future__ import annotations

import hashlib
import itertools
import logging
import tempfile

@@ -283,10 +282,8 @@ def rotate(doc_ids: list[int], degrees: int) -> Literal["OK"]:
f"Attempting to rotate {len(doc_ids)} documents by {degrees} degrees.",
)
qs = Document.objects.filter(id__in=doc_ids)
affected_docs: list[int] = []
import pikepdf

rotate_tasks = []
for doc in qs:
if doc.mime_type != "application/pdf":
logger.warning(

@@ -294,28 +291,34 @@ def rotate(doc_ids: list[int], degrees: int) -> Literal["OK"]:
)
continue
try:
with pikepdf.open(doc.source_path, allow_overwriting_input=True) as pdf:
# Write rotated output to a temp file and create a new version via consume pipeline
filepath: Path = (
Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
/ f"{doc.id}_rotated.pdf"
)
with pikepdf.open(doc.source_path) as pdf:
for page in pdf.pages:
page.rotate(degrees, relative=True)
pdf.save()
doc.checksum = hashlib.md5(doc.source_path.read_bytes()).hexdigest()
doc.save()
rotate_tasks.append(
update_document_content_maybe_archive_file.s(
document_id=doc.id,
),
)
logger.info(
f"Rotated document {doc.id} by {degrees} degrees",
)
affected_docs.append(doc.id)
pdf.remove_unreferenced_resources()
pdf.save(filepath)

# Preserve metadata/permissions via overrides; mark as new version
overrides = DocumentMetadataOverrides().from_document(doc)

consume_file.delay(
ConsumableDocument(
source=DocumentSource.ConsumeFolder,
original_file=filepath,
head_version_id=doc.id,
),
overrides,
)
logger.info(
f"Queued new rotated version for document {doc.id} by {degrees} degrees",
)
except Exception as e:
logger.exception(f"Error rotating document {doc.id}: {e}")

if len(affected_docs) > 0:
bulk_update_task = bulk_update_documents.si(document_ids=affected_docs)
chord(header=rotate_tasks, body=bulk_update_task).delay()

return "OK"

@@ -478,19 +481,31 @@ def delete_pages(doc_ids: list[int], pages: list[int]) -> Literal["OK"]:
import pikepdf

try:
with pikepdf.open(doc.source_path, allow_overwriting_input=True) as pdf:
# Produce edited PDF to a temp file and create a new version
filepath: Path = (
Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
/ f"{doc.id}_pages_deleted.pdf"
)
with pikepdf.open(doc.source_path) as pdf:
offset = 1 # pages are 1-indexed
for page_num in pages:
pdf.pages.remove(pdf.pages[page_num - offset])
offset += 1 # remove() changes the index of the pages
pdf.remove_unreferenced_resources()
pdf.save()
doc.checksum = hashlib.md5(doc.source_path.read_bytes()).hexdigest()
if doc.page_count is not None:
doc.page_count = doc.page_count - len(pages)
doc.save()
update_document_content_maybe_archive_file.delay(document_id=doc.id)
logger.info(f"Deleted pages {pages} from document {doc.id}")
pdf.save(filepath)

overrides = DocumentMetadataOverrides().from_document(doc)
consume_file.delay(
ConsumableDocument(
source=DocumentSource.ConsumeFolder,
original_file=filepath,
head_version_id=doc.id,
),
overrides,
)
logger.info(
f"Queued new version for document {doc.id} after deleting pages {pages}",
)
except Exception as e:
logger.exception(f"Error deleting pages from document {doc.id}: {e}")

@@ -542,17 +557,29 @@ def edit_pdf(
dst.pages[-1].rotate(op["rotate"], relative=True)

if update_document:
temp_path = doc.source_path.with_suffix(".tmp.pdf")
# Create a new version from the edited PDF rather than replacing in-place
pdf = pdf_docs[0]
pdf.remove_unreferenced_resources()
# save the edited PDF to a temporary file in case of errors
pdf.save(temp_path)
# replace the original document with the edited one
temp_path.replace(doc.source_path)
doc.checksum = hashlib.md5(doc.source_path.read_bytes()).hexdigest()
doc.page_count = len(pdf.pages)
doc.save()
update_document_content_maybe_archive_file.delay(document_id=doc.id)
filepath: Path = (
Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
/ f"{doc.id}_edited.pdf"
)
pdf.save(filepath)
overrides = (
DocumentMetadataOverrides().from_document(doc)
if include_metadata
else DocumentMetadataOverrides()
)
if user is not None:
overrides.owner_id = user.id
consume_file.delay(
ConsumableDocument(
source=DocumentSource.ConsumeFolder,
original_file=filepath,
head_version_id=doc.id,
),
overrides,
)
else:
consume_tasks = []
overrides = (
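
The three bulk operations above now share one pattern instead of rewriting the stored file in place: write the edited PDF into a scratch directory, carry the document's metadata and permissions over via overrides, and queue the file through the consume pipeline with head_version_id pointing at the existing document. A condensed restatement of that shared shape follows; it is a sketch paraphrasing the diff, not additional code in the branch, and the import paths simply follow the paperless-ngx module layout referenced elsewhere in this diff.

    # Sketch: the "edit into a temp file, then consume as a new version" pattern.
    import tempfile
    from pathlib import Path

    from django.conf import settings

    from documents.data_models import ConsumableDocument
    from documents.data_models import DocumentMetadataOverrides
    from documents.data_models import DocumentSource
    from documents.tasks import consume_file


    def queue_new_version(doc, edited_name: str, edit) -> None:
        """Apply `edit(pdf)` to the document's PDF and queue the result as a new version."""
        import pikepdf

        filepath = Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR)) / edited_name
        with pikepdf.open(doc.source_path) as pdf:
            edit(pdf)                            # e.g. rotate or remove pages
            pdf.remove_unreferenced_resources()
            pdf.save(filepath)                   # original file stays untouched

        overrides = DocumentMetadataOverrides().from_document(doc)  # keep metadata/permissions
        consume_file.delay(
            ConsumableDocument(
                source=DocumentSource.ConsumeFolder,
                original_file=filepath,
                head_version_id=doc.id,          # consume as a new version of `doc`
            ),
            overrides,
        )
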
@@ -14,6 +14,51 @@ from documents.classifier import DocumentClassifier
from documents.models import Document


def _resolve_effective_doc(pk: int, request) -> Document | None:
"""
Resolve which Document row should be considered for caching keys:
- If a version is requested, use that version
- If pk is a head doc, use its newest child version if present, else the head.
- Else, pk is a version, use that version.
Returns None if resolution fails (treat as no-cache).
"""
try:
request_doc = Document.objects.only("id", "head_version_id").get(pk=pk)
except Document.DoesNotExist:
return None

head_doc = (
request_doc
if request_doc.head_version_id is None
else Document.objects.only("id").get(id=request_doc.head_version_id)
)

version_param = (
request.query_params.get("version")
if hasattr(request, "query_params")
else None
)
if version_param:
try:
version_id = int(version_param)
candidate = Document.objects.only("id", "head_version_id").get(
id=version_id,
)
if candidate.id != head_doc.id and candidate.head_version_id != head_doc.id:
return None
return candidate
except Exception:
return None

# Default behavior: if pk is a head doc, prefer its newest child version
if request_doc.head_version_id is None:
latest = head_doc.versions.only("id").order_by("id").last()
return latest or head_doc

# pk is already a version
return request_doc


def suggestions_etag(request, pk: int) -> str | None:
"""
Returns an optional string for the ETag, allowing browser caching of

@@ -71,11 +116,10 @@ def metadata_etag(request, pk: int) -> str | None:
Metadata is extracted from the original file, so use its checksum as the
ETag
"""
try:
doc = Document.objects.only("checksum").get(pk=pk)
return doc.checksum
except Document.DoesNotExist: # pragma: no cover
doc = _resolve_effective_doc(pk, request)
if doc is None:
return None
return doc.checksum
return None

@@ -85,11 +129,10 @@ def metadata_last_modified(request, pk: int) -> datetime | None:
not the modification of the original file, but of the database object, but might as well
error on the side of more cautious
"""
try:
doc = Document.objects.only("modified").get(pk=pk)
return doc.modified
except Document.DoesNotExist: # pragma: no cover
doc = _resolve_effective_doc(pk, request)
if doc is None:
return None
return doc.modified
return None

@@ -97,15 +140,15 @@ def preview_etag(request, pk: int) -> str | None:
"""
ETag for the document preview, using the original or archive checksum, depending on the request
"""
try:
doc = Document.objects.only("checksum", "archive_checksum").get(pk=pk)
use_original = (
"original" in request.query_params
and request.query_params["original"] == "true"
)
return doc.checksum if use_original else doc.archive_checksum
except Document.DoesNotExist: # pragma: no cover
doc = _resolve_effective_doc(pk, request)
if doc is None:
return None
use_original = (
hasattr(request, "query_params")
and "original" in request.query_params
and request.query_params["original"] == "true"
)
return doc.checksum if use_original else doc.archive_checksum
return None

@@ -114,11 +157,10 @@ def preview_last_modified(request, pk: int) -> datetime | None:
Uses the documents modified time to set the Last-Modified header. Not strictly
speaking correct, but close enough and quick
"""
try:
doc = Document.objects.only("modified").get(pk=pk)
return doc.modified
except Document.DoesNotExist: # pragma: no cover
doc = _resolve_effective_doc(pk, request)
if doc is None:
return None
return doc.modified
return None

@@ -128,10 +170,13 @@ def thumbnail_last_modified(request, pk: int) -> datetime | None:
Cache should be (slightly?) faster than filesystem
"""
try:
doc = Document.objects.only("storage_type").get(pk=pk)
doc = _resolve_effective_doc(pk, request)
if doc is None:
return None
if not doc.thumbnail_path.exists():
return None
doc_key = get_thumbnail_modified_key(pk)
# Use the effective document id for cache key
doc_key = get_thumbnail_modified_key(doc.id)

cache_hit = cache.get(doc_key)
if cache_hit is not None:
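
These helpers only take effect once they are attached to the view actions. The exact decorator stack in views.py is not shown in this diff (only the thumb() hunk's last_modified decorator appears), so the following is a hedged sketch of how such etag/last-modified functions are typically wired up using Django's standard HTTP condition decorators.

    # Sketch only: wiring caching helpers to view methods with Django's
    # condition/last_modified decorators. ExampleDocumentViewSet is a stand-in.
    from django.utils.decorators import method_decorator
    from django.views.decorators.http import condition, last_modified

    from documents.caching import preview_etag, preview_last_modified, thumbnail_last_modified


    class ExampleDocumentViewSet:
        @method_decorator(
            condition(etag_func=preview_etag, last_modified_func=preview_last_modified),
        )
        def preview(self, request, pk=None):
            ...

        # mirrors the decorator visible in the thumb() hunk below
        @method_decorator(last_modified(thumbnail_last_modified))
        def thumb(self, request, pk=None):
            ...
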
@@ -113,6 +113,12 @@ class ConsumerPluginMixin:
self.filename = self.metadata.filename or self.input_doc.original_file.name

if input_doc.head_version_id:
self.log.debug(f"Document head version id: {input_doc.head_version_id}")
head_version = Document.objects.get(pk=input_doc.head_version_id)
version_index = head_version.versions.count()
self.filename += f"_v{version_index}"

def _send_progress(
self,
current_progress: int,

@@ -470,12 +476,44 @@ class ConsumerPlugin(
try:
with transaction.atomic():
# store the document.
document = self._store(
text=text,
date=date,
page_count=page_count,
mime_type=mime_type,
)
if self.input_doc.head_version_id:
# If this is a new version of an existing document, we need
# to make sure we're not creating a new document, but updating
# the existing one.
original_document = Document.objects.get(
pk=self.input_doc.head_version_id,
)
self.log.debug("Saving record for updated version to database")
original_document.pk = None
original_document.head_version = Document.objects.get(
pk=self.input_doc.head_version_id,
)
file_for_checksum = (
self.unmodified_original
if self.unmodified_original is not None
else self.working_copy
)
original_document.checksum = hashlib.md5(
file_for_checksum.read_bytes(),
).hexdigest()
original_document.content = text
original_document.page_count = page_count
original_document.mime_type = mime_type
original_document.original_filename = self.filename
# Clear unique file path fields so they can be generated uniquely later
original_document.filename = None
original_document.archive_filename = None
original_document.archive_checksum = None
original_document.modified = timezone.now()
original_document.save()
document = original_document
else:
document = self._store(
text=text,
date=date,
page_count=page_count,
mime_type=mime_type,
)

# If we get here, it was successful. Proceed with post-consume
# hooks. If they fail, nothing will get changed.
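
The version record above is created by cloning the head document's row: clearing pk before save() makes Django perform an INSERT that inherits every remaining field value, after which the copy is pointed back at its head and the file-specific fields are replaced. A condensed illustration of that idiom (a helper written for this note, not code from the branch):

    # Sketch: the "clone a row by clearing its primary key" idiom used by the consumer.
    from documents.models import Document


    def clone_as_version(head_id: int, **changed_fields) -> Document:
        doc = Document.objects.get(pk=head_id)
        doc.pk = None                      # save() will now INSERT instead of UPDATE
        doc.head_version_id = head_id      # link the copy back to its head document
        for name, value in changed_fields.items():
            setattr(doc, name, value)      # e.g. checksum, content, filename=None
        doc.save()
        return doc
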
@@ -156,6 +156,7 @@ class ConsumableDocument:
source: DocumentSource
original_file: Path
head_version_id: int | None = None
mailrule_id: int | None = None
mime_type: str = dataclasses.field(init=False, default=None)
src/documents/migrations/1069_document_head_version.py (new file, 26 lines)
@@ -0,0 +1,26 @@
# Generated by Django 5.1.6 on 2025-02-26 17:08

import django.db.models.deletion
from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
dependencies = [
("documents", "1068_alter_document_created"),
]

operations = [
migrations.AddField(
model_name="document",
name="head_version",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="versions",
to="documents.document",
verbose_name="head version of document",
),
),
]
@@ -289,6 +289,15 @@ class Document(SoftDeleteModel, ModelWithOwner):
),
)

head_version = models.ForeignKey(
"self",
blank=True,
null=True,
related_name="versions",
on_delete=models.CASCADE,
verbose_name=_("head version of document"),
)

class Meta:
ordering = ("-created",)
verbose_name = _("document")
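
With the self-referential head_version foreign key in place, version bookkeeping stays entirely in the ORM. A few illustrative queries follow; they mirror expressions that appear in caching.py and views.py elsewhere in this diff, with a placeholder id.

    # Illustrative ORM usage of the new self-referential head_version field.
    from documents.models import Document

    doc = Document.objects.get(pk=123)  # placeholder id

    # Head documents are the rows without a head_version (what the list view now returns)
    heads = Document.objects.filter(head_version__isnull=True)

    # All version rows of a head document, oldest to newest (related_name="versions")
    versions = doc.versions.order_by("id")

    # Newest version if any, otherwise fall back to the head document itself
    effective = doc.versions.order_by("id").last() or doc

    # Deleting the head cascades to its versions (on_delete=models.CASCADE)
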
@@ -957,6 +957,10 @@ class DocumentSerializer(
request.version if request else settings.REST_FRAMEWORK["DEFAULT_VERSION"],
)

if doc.get("versions") is not None:
doc["versions"] = sorted(doc["versions"], reverse=True)
doc["versions"].append(doc["id"])

if api_version < 9:
# provide created as a datetime for backwards compatibility
from django.utils import timezone

@@ -1103,6 +1107,8 @@ class DocumentSerializer(
"remove_inbox_tags",
"page_count",
"mime_type",
"head_version",
"versions",
)
list_serializer_class = OwnedObjectListSerializer

@@ -1738,6 +1744,15 @@ class PostDocumentSerializer(serializers.Serializer):
return created.date()


class DocumentVersionSerializer(serializers.Serializer):
document = serializers.FileField(
label="Document",
write_only=True,
)

validate_document = PostDocumentSerializer().validate_document


class BulkDownloadSerializer(DocumentListSerializer):
content = serializers.ChoiceField(
choices=["archive", "originals", "both"],
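
The serializer shapes the versions list for the frontend: the version ids are sorted newest-first and the head document's own id is appended at the end. A small standalone illustration of that transformation, with made-up ids:

    # Standalone illustration of the list shaping done in to_representation();
    # the ids are invented for the example.
    doc = {"id": 10, "versions": [14, 12, 17]}

    if doc.get("versions") is not None:
        doc["versions"] = sorted(doc["versions"], reverse=True)
        doc["versions"].append(doc["id"])

    assert doc["versions"] == [17, 14, 12, 10]
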
@@ -145,13 +145,17 @@ def consume_file(
if overrides is None:
overrides = DocumentMetadataOverrides()

plugins: list[type[ConsumeTaskPlugin]] = [
ConsumerPreflightPlugin,
CollatePlugin,
BarcodePlugin,
WorkflowTriggerPlugin,
ConsumerPlugin,
]
plugins: list[type[ConsumeTaskPlugin]] = (
[ConsumerPreflightPlugin,ConsumerPlugin]
if input_doc.head_version_id is not None
else [
ConsumerPreflightPlugin,
CollatePlugin,
BarcodePlugin,
WorkflowTriggerPlugin,
ConsumerPlugin,
]
)

with (
ProgressManager(
@@ -147,6 +147,7 @@ from documents.serialisers import CustomFieldSerializer
from documents.serialisers import DocumentListSerializer
from documents.serialisers import DocumentSerializer
from documents.serialisers import DocumentTypeSerializer
from documents.serialisers import DocumentVersionSerializer
from documents.serialisers import NotesSerializer
from documents.serialisers import PostDocumentSerializer
from documents.serialisers import RunTaskViewSerializer

@@ -559,7 +560,7 @@ class DocumentViewSet(
GenericViewSet,
):
model = Document
queryset = Document.objects.annotate(num_notes=Count("notes"))
queryset = Document.objects.all()
serializer_class = DocumentSerializer
pagination_class = StandardPagination
permission_classes = (IsAuthenticated, PaperlessObjectPermissions)

@@ -588,7 +589,8 @@ class DocumentViewSet(
def get_queryset(self):
return (
Document.objects.distinct()
Document.objects.filter(head_version__isnull=True)
.distinct()
.order_by("-created")
.annotate(num_notes=Count("notes"))
.select_related("correspondent", "storage_path", "document_type", "owner")

@@ -650,18 +652,55 @@ class DocumentViewSet(
and request.query_params["original"] == "true"
)

def _resolve_file_doc(self, head_doc: Document, request):
version_param = request.query_params.get("version")
if version_param:
try:
version_id = int(version_param)
except (TypeError, ValueError):
raise NotFound("Invalid version parameter")
try:
candidate = Document.global_objects.select_related("owner").get(
id=version_id,
)
except Document.DoesNotExist:
raise Http404
if candidate.id != head_doc.id and candidate.head_version_id != head_doc.id:
raise Http404
return candidate
latest = head_doc.versions.order_by("id").last()
return latest or head_doc

def file_response(self, pk, request, disposition):
doc = Document.global_objects.select_related("owner").get(id=pk)
request_doc = Document.global_objects.select_related("owner").get(id=pk)
head_doc = (
request_doc
if request_doc.head_version_id is None
else Document.global_objects.select_related("owner").get(
id=request_doc.head_version_id,
)
)
if request.user is not None and not has_perms_owner_aware(
request.user,
"view_document",
doc,
head_doc,
):
return HttpResponseForbidden("Insufficient permissions")
# If a version is explicitly requested, use it. Otherwise:
# - if pk is a head document: serve newest version
# - if pk is a version: serve that version
if "version" in request.query_params:
file_doc = self._resolve_file_doc(head_doc, request)
else:
file_doc = (
self._resolve_file_doc(head_doc, request)
if request_doc.head_version_id is None
else request_doc
)
return serve_file(
doc=doc,
doc=file_doc,
use_archive=not self.original_requested(request)
and doc.has_archive_version,
and file_doc.has_archive_version,
disposition=disposition,
)

@@ -696,16 +735,33 @@ class DocumentViewSet(
)
def metadata(self, request, pk=None):
try:
doc = Document.objects.select_related("owner").get(pk=pk)
request_doc = Document.objects.select_related("owner").get(pk=pk)
head_doc = (
request_doc
if request_doc.head_version_id is None
else Document.objects.select_related("owner").get(
id=request_doc.head_version_id,
)
)
if request.user is not None and not has_perms_owner_aware(
request.user,
"view_document",
doc,
head_doc,
):
return HttpResponseForbidden("Insufficient permissions")
except Document.DoesNotExist:
raise Http404

# Choose the effective document (newest version by default, or explicit via ?version=)
if "version" in request.query_params:
doc = self._resolve_file_doc(head_doc, request)
else:
doc = (
self._resolve_file_doc(head_doc, request)
if request_doc.head_version_id is None
else request_doc
)

document_cached_metadata = get_metadata_cache(doc.pk)

archive_metadata = None

@@ -807,8 +863,36 @@ class DocumentViewSet(
)
def preview(self, request, pk=None):
try:
response = self.file_response(pk, request, "inline")
return response
request_doc = Document.objects.select_related("owner").get(id=pk)
head_doc = (
request_doc
if request_doc.head_version_id is None
else Document.objects.select_related("owner").get(
id=request_doc.head_version_id,
)
)
if request.user is not None and not has_perms_owner_aware(
request.user,
"view_document",
head_doc,
):
return HttpResponseForbidden("Insufficient permissions")

if "version" in request.query_params:
file_doc = self._resolve_file_doc(head_doc, request)
else:
file_doc = (
self._resolve_file_doc(head_doc, request)
if request_doc.head_version_id is None
else request_doc
)

return serve_file(
doc=file_doc,
use_archive=not self.original_requested(request)
and file_doc.has_archive_version,
disposition="inline",
)
except (FileNotFoundError, Document.DoesNotExist):
raise Http404

@@ -817,17 +901,32 @@ class DocumentViewSet(
@method_decorator(last_modified(thumbnail_last_modified))
def thumb(self, request, pk=None):
try:
doc = Document.objects.select_related("owner").get(id=pk)
request_doc = Document.objects.select_related("owner").get(id=pk)
head_doc = (
request_doc
if request_doc.head_version_id is None
else Document.objects.select_related("owner").get(
id=request_doc.head_version_id,
)
)
if request.user is not None and not has_perms_owner_aware(
request.user,
"view_document",
doc,
head_doc,
):
return HttpResponseForbidden("Insufficient permissions")
if doc.storage_type == Document.STORAGE_TYPE_GPG:
handle = GnuPG.decrypted(doc.thumbnail_file)
if "version" in request.query_params:
file_doc = self._resolve_file_doc(head_doc, request)
else:
handle = doc.thumbnail_file
file_doc = (
self._resolve_file_doc(head_doc, request)
if request_doc.head_version_id is None
else request_doc
)
if file_doc.storage_type == Document.STORAGE_TYPE_GPG:
handle = GnuPG.decrypted(file_doc.thumbnail_file)
else:
handle = file_doc.thumbnail_file

return HttpResponse(handle, content_type="image/webp")
except (FileNotFoundError, Document.DoesNotExist):

@@ -1095,6 +1194,56 @@ class DocumentViewSet(
"Error emailing document, check logs for more detail.",
)

@action(methods=["post"], detail=True)
def update_version(self, request, pk=None):
serializer = DocumentVersionSerializer(data=request.data)
serializer.is_valid(raise_exception=True)

try:
doc = Document.objects.select_related("owner").get(pk=pk)
if request.user is not None and not has_perms_owner_aware(
request.user,
"change_document",
doc,
):
return HttpResponseForbidden("Insufficient permissions")
except Document.DoesNotExist:
raise Http404

try:
doc_name, doc_data = serializer.validated_data.get("document")

t = int(mktime(datetime.now().timetuple()))

settings.SCRATCH_DIR.mkdir(parents=True, exist_ok=True)

temp_file_path = Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR)) / Path(
pathvalidate.sanitize_filename(doc_name),
)

temp_file_path.write_bytes(doc_data)

os.utime(temp_file_path, times=(t, t))

input_doc = ConsumableDocument(
source=DocumentSource.ApiUpload,
original_file=temp_file_path,
head_version_id=doc.pk,
)

async_task = consume_file.delay(
input_doc,
)
logger.debug(
f"Updated document {doc.id} with new version",
)
return Response(async_task.id)
except Exception as e:
logger.warning(f"An error occurred updating document: {e!s}")
return HttpResponseServerError(
"Error updating document, check logs for more detail.",
)


@extend_schema_view(
list=extend_schema(
uv.lock (generated, 31 lines changed)
@@ -644,15 +644,15 @@ wheels = [

[[package]]
name = "django"
version = "5.2.6"
version = "5.2.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "asgiref", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "sqlparse", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/4c/8c/2a21594337250a171d45dda926caa96309d5136becd1f48017247f9cdea0/django-5.2.6.tar.gz", hash = "sha256:da5e00372763193d73cecbf71084a3848458cecf4cee36b9a1e8d318d114a87b", size = 10858861, upload-time = "2025-09-03T13:04:03.23Z" }
sdist = { url = "https://files.pythonhosted.org/packages/62/9b/779f853c3d2d58b9e08346061ff3e331cdec3fe3f53aae509e256412a593/django-5.2.5.tar.gz", hash = "sha256:0745b25681b129a77aae3d4f6549b62d3913d74407831abaa0d9021a03954bae", size = 10859748, upload-time = "2025-08-06T08:26:29.978Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f5/af/6593f6d21404e842007b40fdeb81e73c20b6649b82d020bb0801b270174c/django-5.2.6-py3-none-any.whl", hash = "sha256:60549579b1174a304b77e24a93d8d9fafe6b6c03ac16311f3e25918ea5a20058", size = 8303111, upload-time = "2025-09-03T13:03:47.808Z" },
{ url = "https://files.pythonhosted.org/packages/9d/6e/98a1d23648e0085bb5825326af17612ecd8fc76be0ce96ea4dc35e17b926/django-5.2.5-py3-none-any.whl", hash = "sha256:2b2ada0ee8a5ff743a40e2b9820d1f8e24c11bac9ae6469cd548f0057ea6ddcd", size = 8302999, upload-time = "2025-08-06T08:26:23.562Z" },
]

[[package]]

@@ -730,15 +730,15 @@ wheels = [

[[package]]
name = "django-cors-headers"
version = "4.8.0"
version = "4.7.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "asgiref", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "django", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/89/8e/6225441edcfe179bf4861e9e67489e33375e0b66316c8d7b9edaae863d37/django_cors_headers-4.8.0.tar.gz", hash = "sha256:0a12a2efcd59a3cea741e44db8ab589e929949de5bc4cdf35a29c6ae77297686", size = 21425, upload-time = "2025-09-08T15:58:05.34Z" }
sdist = { url = "https://files.pythonhosted.org/packages/93/6c/16f6cb6064c63074fd5b2bd494eb319afd846236d9c1a6c765946df2c289/django_cors_headers-4.7.0.tar.gz", hash = "sha256:6fdf31bf9c6d6448ba09ef57157db2268d515d94fc5c89a0a1028e1fc03ee52b", size = 21037, upload-time = "2025-02-06T22:15:28.924Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ac/b3/29ef49d6ff7800f323f3d98cde7777b3cfdda133de8feea84cffafea4578/django_cors_headers-4.8.0-py3-none-any.whl", hash = "sha256:3b883f4c6d07848673218456a5e070d8ab51f97341c1f27d0242ca167e7272ab", size = 12804, upload-time = "2025-09-08T15:58:03.882Z" },
{ url = "https://files.pythonhosted.org/packages/7e/a2/7bcfff86314bd9dd698180e31ba00604001606efb518a06cca6833a54285/django_cors_headers-4.7.0-py3-none-any.whl", hash = "sha256:f1c125dcd58479fe7a67fe2499c16ee38b81b397463cf025f0e2c42937421070", size = 12794, upload-time = "2025-02-06T22:15:24.341Z" },
]

[[package]]

@@ -782,15 +782,14 @@ wheels = [

[[package]]
name = "django-guardian"
version = "3.1.2"
version = "3.0.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "django", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "typing-extensions", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux')" },
]
sdist = { url = "https://files.pythonhosted.org/packages/28/ac/5a8f7301c0181ee9020e020a4fa519f9851726b8fd3c1177656c1f5a1be0/django_guardian-3.1.2.tar.gz", hash = "sha256:6fc93b55e5eacd1a062a959c5578b433d999a286742aa3e7e713c71046813538", size = 93422, upload-time = "2025-09-08T15:43:51.361Z" }
sdist = { url = "https://files.pythonhosted.org/packages/30/c2/3ed43813dd7313f729dbaa829b4f9ed4a647530151f672cfb5f843c12edf/django_guardian-3.0.3.tar.gz", hash = "sha256:4e59eab4d836da5a027cf0c176d14bc2a4e22cbbdf753159a03946c08c8a196d", size = 85410, upload-time = "2025-06-25T20:42:17.475Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/20/50/3a4a891f809c9865d30864f59f993f9535b18e3935dfad278c9682fc537f/django_guardian-3.1.2-py3-none-any.whl", hash = "sha256:6c10f88d0b7efd171ae65d7ac487a666f41eb373c6f94d80016b1a19bdfbf212", size = 127451, upload-time = "2025-09-08T15:43:49.987Z" },
{ url = "https://files.pythonhosted.org/packages/8b/13/e6f629a978ef5fab8b8d2760cacc3e451016cef952cf4c049d672c5c6b07/django_guardian-3.0.3-py3-none-any.whl", hash = "sha256:d2164cea9f03c369d7ade21802710f3ab23ca6734bcc7dfcfb385906783916c7", size = 118198, upload-time = "2025-06-25T20:42:15.377Z" },
]

[[package]]

@@ -918,14 +917,14 @@ wheels = [

[[package]]
name = "drf-spectacular-sidecar"
version = "2025.9.1"
version = "2025.8.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "django", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/51/e2/85a0b8dbed8631165a6b49b2aee57636da8e4e710c444566636ffd972a7b/drf_spectacular_sidecar-2025.9.1.tar.gz", hash = "sha256:da2aa45da48fff76de7a1e357b84d1eb0b9df40ca89ec19d5fe94ad1037bb3c8", size = 2420902, upload-time = "2025-09-01T11:23:24.156Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a9/f7/0cb2f520723f1823ef7b6651d447927f61ba92d152a5d68132599b90624f/drf_spectacular_sidecar-2025.8.1.tar.gz", hash = "sha256:1944ae0eb5136cff5aa135211bec31084cef1af03a04de9b7f2f912b3c59c251", size = 2407787, upload-time = "2025-08-01T11:28:01.319Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/96/24/db59146ba89491fe1d44ca8aef239c94bf3c7fd41523976090f099430312/drf_spectacular_sidecar-2025.9.1-py3-none-any.whl", hash = "sha256:8e80625209b8a23ff27616db305b9ab71c2e2d1069dacd99720a9c11e429af50", size = 2440255, upload-time = "2025-09-01T11:23:22.822Z" },
{ url = "https://files.pythonhosted.org/packages/1f/3b/0fcdc6eb294a11ed6e3ddc02fc29968bf403d3ce31645764eedfc91f87a6/drf_spectacular_sidecar-2025.8.1-py3-none-any.whl", hash = "sha256:c65a2a423000cc067395150b4dc28e7398a762d66ee101c4c38a4fb0d29a42a2", size = 2427849, upload-time = "2025-08-01T11:27:59.648Z" },
]

[[package]]

@@ -2169,16 +2168,16 @@ requires-dist = [
{ name = "django-cachalot", specifier = "~=2.8.0" },
{ name = "django-celery-results", specifier = "~=2.6.0" },
{ name = "django-compression-middleware", specifier = "~=0.5.0" },
{ name = "django-cors-headers", specifier = "~=4.8.0" },
{ name = "django-cors-headers", specifier = "~=4.7.0" },
{ name = "django-extensions", specifier = "~=4.1" },
{ name = "django-filter", specifier = "~=25.1" },
{ name = "django-guardian", specifier = "~=3.1.2" },
{ name = "django-guardian", specifier = "~=3.0.3" },
{ name = "django-multiselectfield", specifier = "~=1.0.1" },
{ name = "django-soft-delete", specifier = "~=1.0.18" },
{ name = "djangorestframework", specifier = "~=3.16" },
{ name = "djangorestframework-guardian", specifier = "~=0.4.0" },
{ name = "drf-spectacular", specifier = "~=0.28" },
{ name = "drf-spectacular-sidecar", specifier = "~=2025.9.1" },
{ name = "drf-spectacular-sidecar", specifier = "~=2025.8.1" },
{ name = "drf-writable-nested", specifier = "~=0.7.1" },
{ name = "filelock", specifier = "~=3.19.1" },
{ name = "flower", specifier = "~=2.0.1" },