Compare commits


31 Commits

Author SHA1 Message Date
shamoon 9a36c7bb9e More testing 2026-01-09 20:44:32 -08:00
shamoon ababa0f150 Update serialisers.py 2026-01-09 20:44:32 -08:00
shamoon 3c3b4c69b0 Update 1075_sharelinkbundle.py 2026-01-09 20:44:32 -08:00
shamoon 3c6e0ff4b4 More backend coverage, rename 2026-01-09 20:44:32 -08:00
shamoon 69848874a3 Coverage 2026-01-09 20:44:32 -08:00
shamoon 4d3692e1ee Sonar fixes 2026-01-09 20:44:32 -08:00
shamoon d1a6849b8a Use a confirm button 2026-01-09 20:44:32 -08:00
shamoon ef98030738 Fix scheduled tasks tests 2026-01-09 20:44:32 -08:00
shamoon 56e806e47a Frontend coverage for bulk editor changes, sharelink bundle service 2026-01-09 20:44:32 -08:00
shamoon d36a828e8d Frontend tests 2026-01-09 20:44:32 -08:00
shamoon 03e7274088 Backend tests 2026-01-09 20:44:32 -08:00
shamoon 933343d290 Docs 2026-01-09 20:44:32 -08:00
shamoon 203137d69f Dialog tweaks 2026-01-09 20:44:32 -08:00
shamoon 18d29025de Consistent naming to Share Link Bundle 2026-01-09 20:44:32 -08:00
shamoon 8d2b085778 Nice badge 2026-01-09 20:44:32 -08:00
shamoon b3292841e7 Use titles not IDs 2026-01-09 20:44:32 -08:00
shamoon 0dae45edc8 Remove standalone 2026-01-09 20:44:32 -08:00
shamoon 29ca9b113f Trim slug in zip name 2026-01-09 20:44:32 -08:00
shamoon ff058dad84 Better explanation 2026-01-09 20:44:32 -08:00
shamoon 75efabd024 Cleanup expired task 2026-01-09 20:44:32 -08:00
shamoon 539e175fe6 Unify labels 2026-01-09 20:44:32 -08:00
shamoon b8aacba499 Manage dialog polling 2026-01-09 20:44:32 -08:00
shamoon 9b6d5faf9c Initial result display in create dialog 2026-01-09 20:44:32 -08:00
shamoon 34fb597114 Initial task for building 2026-01-09 20:44:32 -08:00
shamoon 73c8819198 Basic wiring of existing bundles 2026-01-09 20:44:32 -08:00
shamoon 284d7d632e Backend initial stuff 2026-01-09 20:44:32 -08:00
shamoon 2430671b32 Random cleanup stuff 2026-01-09 20:44:32 -08:00
shamoon 700a97a66f Skeleton bundle component some more 2026-01-09 20:44:32 -08:00
shamoon 275ccbde82 Generic warning 2026-01-09 20:44:32 -08:00
shamoon c2b36242d4 Skeleton share bundle component 2026-01-09 20:44:32 -08:00
shamoon 4e5006c958 Add send menu to bulk editor 2026-01-09 20:44:32 -08:00
34 changed files with 3568 additions and 1520 deletions

View File

@@ -1603,6 +1603,16 @@ processing. This only has an effect if
Defaults to `0 1 * * *`, once per day.
## Share links
#### [`PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON=<cron expression>`](#PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON) {#PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON}
: Controls how often Paperless-ngx removes expired share link bundles (and their generated ZIP archives).
: If set to the string "disable", expired bundles are not cleaned up automatically.
Defaults to `0 2 * * *`, once per day at 02:00.
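: For example (illustrative schedule), `PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON=0 3 * * 0` would run the cleanup weekly at 03:00 on Sundays, while `PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON=disable` turns automatic cleanup off entirely.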
## Binaries
There are a few external software packages that Paperless expects to

View File

@@ -286,12 +286,14 @@ or using [email](#workflow-action-email) or [webhook](#workflow-action-webhook)
### Share Links
"Share links" are shareable public links to files and can be created and managed under the 'Send' button on the document detail screen.
"Share links" are public links to files (or an archive of files) and can be created and managed under the 'Send' button on the document detail screen or from the bulk editor.
- Share links do not require a user to log in and thus link directly to a file.
- Share links do not require a user to log in and thus link directly to a file or bundled download.
- Links are unique and are of the form `{paperless-url}/share/{randomly-generated-slug}`.
- Links can optionally have an expiration time set.
- After a link expires or is deleted users will be redirected to the regular paperless-ngx login.
- From the document detail screen you can create a share link for that single document.
- From the bulk editor you can create a **share link bundle** for any selection. Paperless-ngx prepares a ZIP archive in the background and exposes a single share link. You can revisit the "Manage share link bundles" dialog to monitor progress, retry failed bundles, or delete links.
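  As an illustration (hypothetical host and slug), a bundle link has the same shape as a single-document share link, e.g. `https://paperless.example.com/share/aBcDeF123`; once the background ZIP build completes, opening it downloads one archive containing the selected documents.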
!!! tip

View File

@@ -49,6 +49,7 @@ dependencies = [
"gotenberg-client~=0.12.0",
"httpx-oauth~=0.16",
"imap-tools~=1.11.0",
"inotifyrecursive~=0.3",
"jinja2~=3.1.5",
"langdetect~=1.0.9",
"nltk~=3.9.1",
@@ -68,7 +69,7 @@ dependencies = [
"setproctitle~=1.3.4",
"tika-client~=0.10.0",
"tqdm~=4.67.1",
"watchfiles>=1.1.1",
"watchdog~=6.0",
"whitenoise~=6.9",
"whoosh-reloaded>=2.7.5",
"zxing-cpp~=2.3.0",

View File

@@ -0,0 +1,129 @@
<div class="modal-header">
<h4 class="modal-title">{{ title }}</h4>
<button type="button" class="btn-close" aria-label="Close" (click)="cancel()"></button>
</div>
<div class="modal-body">
@if (!createdBundle) {
<form [formGroup]="form" class="d-flex flex-column gap-3">
<div>
<p class="mb-1">
<ng-container i18n>Selected documents:</ng-container>
{{ selectionCount }}
</p>
@if (documentPreview.length > 0) {
<ul class="list-unstyled small mb-0">
@for (doc of documentPreview; track doc.id) {
<li>
<strong>{{ doc.title | documentTitle }}</strong>
</li>
}
@if (selectionCount > documentPreview.length) {
<li>
<ng-container i18n>+ {{ selectionCount - documentPreview.length }} more…</ng-container>
</li>
}
</ul>
}
</div>
<div class="d-flex align-items-center justify-content-between">
<div class="input-group">
<label class="input-group-text" for="expirationDays"><ng-container i18n>Expires</ng-container>:</label>
<select class="form-select" id="expirationDays" formControlName="expirationDays">
@for (option of expirationOptions; track option.value) {
<option [ngValue]="option.value">{{ option.label }}</option>
}
</select>
</div>
<div class="form-check form-switch w-100 ms-3">
<input
class="form-check-input"
type="checkbox"
role="switch"
id="shareArchiveSwitch"
formControlName="shareArchiveVersion"
aria-checked="{{ shareArchiveVersion }}"
/>
<label class="form-check-label" for="shareArchiveSwitch" i18n>Share archive version (if available)</label>
</div>
</div>
</form>
} @else {
<div class="d-flex flex-column gap-3">
<div class="alert alert-success mb-0" role="status">
<h6 class="alert-heading mb-1" i18n>Share link bundle requested</h6>
<p class="mb-0 small" i18n>
You can copy the share link below or open the manager to monitor progress. The link will start working once the bundle is ready.
</p>
</div>
<dl class="row mb-0 small">
<dt class="col-sm-4" i18n>Status</dt>
<dd class="col-sm-8">
<span class="badge text-bg-secondary text-uppercase">{{ statusLabel(createdBundle.status) }}</span>
</dd>
<dt class="col-sm-4" i18n>Slug</dt>
<dd class="col-sm-8"><code>{{ createdBundle.slug }}</code></dd>
<dt class="col-sm-4" i18n>Link</dt>
<dd class="col-sm-8">
<div class="input-group input-group-sm">
<input class="form-control" type="text" [value]="getShareUrl(createdBundle)" readonly>
<button
class="btn btn-outline-primary"
type="button"
(click)="copy(createdBundle)"
>
@if (copied) {
<i-bs name="clipboard-check"></i-bs>
}
@if (!copied) {
<i-bs name="clipboard"></i-bs>
}
<span class="visually-hidden" i18n>Copy link</span>
</button>
</div>
</dd>
<dt class="col-sm-4" i18n>Documents</dt>
<dd class="col-sm-8">{{ createdBundle.document_count }}</dd>
<dt class="col-sm-4" i18n>Expires</dt>
<dd class="col-sm-8">
@if (createdBundle.expiration) {
{{ createdBundle.expiration | date: 'short' }}
}
@if (!createdBundle.expiration) {
<span i18n>Never</span>
}
</dd>
<dt class="col-sm-4" i18n>File version</dt>
<dd class="col-sm-8">{{ fileVersionLabel(createdBundle.file_version) }}</dd>
@if (createdBundle.size_bytes !== undefined && createdBundle.size_bytes !== null) {
<dt class="col-sm-4" i18n>Size</dt>
<dd class="col-sm-8">{{ createdBundle.size_bytes | fileSize }}</dd>
}
</dl>
</div>
}
</div>
<div class="modal-footer">
<div class="d-flex align-items-center gap-2 w-100">
<div class="text-light fst-italic small">
<ng-container i18n>A zip file containing the selected documents will be created for this share link bundle. This process happens in the background and may take some time, especially for large bundles.</ng-container>
</div>
<button type="button" class="btn btn-outline-secondary btn-sm ms-auto" (click)="cancel()">{{ cancelBtnCaption }}</button>
@if (createdBundle) {
<button type="button" class="btn btn-outline-secondary btn-sm text-nowrap" (click)="openManage()" i18n>Manage share link bundles</button>
}
@if (!createdBundle) {
<button
type="button"
class="btn btn-primary btn-sm d-inline-flex align-items-center gap-2 text-nowrap"
(click)="submit()"
[disabled]="loading || !buttonsEnabled">
@if (loading) {
<span class="spinner-border spinner-border-sm" role="status" aria-hidden="true"></span>
}
{{ btnCaption }}
</button>
}
</div>
</div>

View File

@@ -0,0 +1,161 @@
import { Clipboard } from '@angular/cdk/clipboard'
import {
ComponentFixture,
TestBed,
fakeAsync,
tick,
} from '@angular/core/testing'
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap'
import { NgxBootstrapIconsModule, allIcons } from 'ngx-bootstrap-icons'
import { FileVersion } from 'src/app/data/share-link'
import {
ShareLinkBundleStatus,
ShareLinkBundleSummary,
} from 'src/app/data/share-link-bundle'
import { ToastService } from 'src/app/services/toast.service'
import { environment } from 'src/environments/environment'
import { ShareLinkBundleDialogComponent } from './share-link-bundle-dialog.component'
class MockToastService {
showInfo = jest.fn()
showError = jest.fn()
}
describe('ShareLinkBundleDialogComponent', () => {
let component: ShareLinkBundleDialogComponent
let fixture: ComponentFixture<ShareLinkBundleDialogComponent>
let clipboard: Clipboard
let toastService: MockToastService
let activeModal: NgbActiveModal
let originalApiBaseUrl: string
beforeEach(() => {
originalApiBaseUrl = environment.apiBaseUrl
toastService = new MockToastService()
TestBed.configureTestingModule({
imports: [
ShareLinkBundleDialogComponent,
NgxBootstrapIconsModule.pick(allIcons),
],
providers: [
NgbActiveModal,
{ provide: ToastService, useValue: toastService },
],
})
fixture = TestBed.createComponent(ShareLinkBundleDialogComponent)
component = fixture.componentInstance
clipboard = TestBed.inject(Clipboard)
activeModal = TestBed.inject(NgbActiveModal)
fixture.detectChanges()
})
afterEach(() => {
jest.clearAllTimers()
environment.apiBaseUrl = originalApiBaseUrl
})
it('builds payload and emits confirm on submit', () => {
const confirmSpy = jest.spyOn(component.confirmClicked, 'emit')
component.documents = [
{ id: 1, title: 'Doc 1' } as any,
{ id: 2, title: 'Doc 2' } as any,
]
component.form.setValue({
shareArchiveVersion: false,
expirationDays: 3,
})
component.submit()
expect(component.payload).toEqual({
document_ids: [1, 2],
file_version: FileVersion.Original,
expiration_days: 3,
})
expect(component.buttonsEnabled).toBe(false)
expect(confirmSpy).toHaveBeenCalled()
component.form.setValue({
shareArchiveVersion: true,
expirationDays: 7,
})
component.submit()
expect(component.payload).toEqual({
document_ids: [1, 2],
file_version: FileVersion.Archive,
expiration_days: 7,
})
})
it('ignores submit when bundle already created', () => {
component.createdBundle = { id: 1 } as ShareLinkBundleSummary
const confirmSpy = jest.spyOn(component, 'confirm')
component.submit()
expect(confirmSpy).not.toHaveBeenCalled()
})
it('limits preview to ten documents', () => {
const docs = Array.from({ length: 12 }).map((_, index) => ({
id: index + 1,
}))
component.documents = docs as any
expect(component.selectionCount).toBe(12)
expect(component.documentPreview).toHaveLength(10)
expect(component.documentPreview[0].id).toBe(1)
})
it('copies share link and resets state after timeout', fakeAsync(() => {
const copySpy = jest.spyOn(clipboard, 'copy').mockReturnValue(true)
const bundle = {
slug: 'bundle-slug',
status: ShareLinkBundleStatus.Ready,
} as ShareLinkBundleSummary
component.copy(bundle)
expect(copySpy).toHaveBeenCalledWith(component.getShareUrl(bundle))
expect(component.copied).toBe(true)
expect(toastService.showInfo).toHaveBeenCalled()
tick(3000)
expect(component.copied).toBe(false)
}))
it('generates share URLs based on API base URL', () => {
environment.apiBaseUrl = 'https://example.com/api/'
expect(
component.getShareUrl({ slug: 'abc' } as ShareLinkBundleSummary)
).toBe('https://example.com/share/abc')
})
it('opens manage dialog when callback provided', () => {
const manageSpy = jest.fn()
component.onOpenManage = manageSpy
component.openManage()
expect(manageSpy).toHaveBeenCalled()
})
it('falls back to cancel when manage callback missing', () => {
const cancelSpy = jest.spyOn(component, 'cancel')
component.onOpenManage = undefined
component.openManage()
expect(cancelSpy).toHaveBeenCalled()
})
it('maps status and file version labels', () => {
expect(component.statusLabel(ShareLinkBundleStatus.Processing)).toContain(
'Processing'
)
expect(component.fileVersionLabel(FileVersion.Archive)).toContain('Archive')
})
it('closes dialog when cancel invoked', () => {
const closeSpy = jest.spyOn(activeModal, 'close')
component.cancel()
expect(closeSpy).toHaveBeenCalled()
})
})

View File

@@ -0,0 +1,118 @@
import { Clipboard } from '@angular/cdk/clipboard'
import { CommonModule } from '@angular/common'
import { Component, Input, inject } from '@angular/core'
import { FormBuilder, FormGroup, ReactiveFormsModule } from '@angular/forms'
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
import { Document } from 'src/app/data/document'
import {
FileVersion,
SHARE_LINK_EXPIRATION_OPTIONS,
} from 'src/app/data/share-link'
import {
SHARE_LINK_BUNDLE_FILE_VERSION_LABELS,
SHARE_LINK_BUNDLE_STATUS_LABELS,
ShareLinkBundleCreatePayload,
ShareLinkBundleStatus,
ShareLinkBundleSummary,
} from 'src/app/data/share-link-bundle'
import { DocumentTitlePipe } from 'src/app/pipes/document-title.pipe'
import { FileSizePipe } from 'src/app/pipes/file-size.pipe'
import { ToastService } from 'src/app/services/toast.service'
import { environment } from 'src/environments/environment'
import { ConfirmDialogComponent } from '../confirm-dialog/confirm-dialog.component'
@Component({
selector: 'pngx-share-link-bundle-dialog',
templateUrl: './share-link-bundle-dialog.component.html',
imports: [
CommonModule,
ReactiveFormsModule,
NgxBootstrapIconsModule,
FileSizePipe,
DocumentTitlePipe,
],
providers: [],
})
export class ShareLinkBundleDialogComponent extends ConfirmDialogComponent {
private readonly formBuilder = inject(FormBuilder)
private readonly clipboard = inject(Clipboard)
private readonly toastService = inject(ToastService)
private _documents: Document[] = []
selectionCount = 0
documentPreview: Document[] = []
form: FormGroup = this.formBuilder.group({
shareArchiveVersion: true,
expirationDays: [7],
})
payload: ShareLinkBundleCreatePayload | null = null
readonly expirationOptions = SHARE_LINK_EXPIRATION_OPTIONS
createdBundle: ShareLinkBundleSummary | null = null
copied = false
onOpenManage?: () => void
readonly statuses = ShareLinkBundleStatus
constructor() {
super()
this.loading = false
this.title = $localize`Create share link bundle`
this.btnCaption = $localize`Create link`
}
@Input()
set documents(docs: Document[]) {
this._documents = docs.concat()
this.selectionCount = this._documents.length
this.documentPreview = this._documents.slice(0, 10)
}
submit() {
if (this.createdBundle) return
this.payload = {
document_ids: this._documents.map((doc) => doc.id),
file_version: this.form.value.shareArchiveVersion
? FileVersion.Archive
: FileVersion.Original,
expiration_days: this.form.value.expirationDays,
}
this.buttonsEnabled = false
super.confirm()
}
getShareUrl(bundle: ShareLinkBundleSummary): string {
const apiURL = new URL(environment.apiBaseUrl)
return `${apiURL.origin}${apiURL.pathname.replace(/\/api\/$/, '/share/')}${
bundle.slug
}`
}
copy(bundle: ShareLinkBundleSummary): void {
const success = this.clipboard.copy(this.getShareUrl(bundle))
if (success) {
this.copied = true
this.toastService.showInfo($localize`Share link copied to clipboard.`)
setTimeout(() => {
this.copied = false
}, 3000)
}
}
openManage(): void {
if (this.onOpenManage) {
this.onOpenManage()
} else {
this.cancel()
}
}
statusLabel(status: ShareLinkBundleSummary['status']): string {
return SHARE_LINK_BUNDLE_STATUS_LABELS[status] ?? status
}
fileVersionLabel(version: FileVersion): string {
return SHARE_LINK_BUNDLE_FILE_VERSION_LABELS[version] ?? version
}
}

View File

@@ -0,0 +1,131 @@
<div class="modal-header">
<h4 class="modal-title">{{ title }}</h4>
<button type="button" class="btn-close" aria-label="Close" (click)="close()"></button>
</div>
<div class="modal-body">
@if (loading) {
<div class="d-flex align-items-center gap-2">
<div class="spinner-border spinner-border-sm" role="status"></div>
<span i18n>Loading share link bundles…</span>
</div>
}
@if (!loading && error) {
<div class="alert alert-danger mb-0" role="alert">
{{ error }}
</div>
}
@if (!loading && !error) {
<div class="d-flex justify-content-between align-items-center mb-2">
<p class="mb-0 text-muted small">
<ng-container i18n>Status updates every few seconds while bundles are being prepared.</ng-container>
</p>
</div>
@if (bundles.length === 0) {
<p class="mb-0 text-muted fst-italic" i18n>No share link bundles currently exist.</p>
}
@if (bundles.length > 0) {
<div class="table-responsive">
<table class="table table-sm align-middle mb-0">
<thead>
<tr>
<th scope="col" i18n>Created</th>
<th scope="col" i18n>Status</th>
<th scope="col" i18n>Size</th>
<th scope="col" i18n>Expires</th>
<th scope="col" i18n>Documents</th>
<th scope="col" i18n>File version</th>
<th scope="col" class="text-end" i18n>Actions</th>
</tr>
</thead>
<tbody>
@for (bundle of bundles; track bundle.id) {
<tr>
<td>
<div>{{ bundle.created | date: 'short' }}</div>
@if (bundle.built_at) {
<div class="small text-muted">
<ng-container i18n>Built:</ng-container> {{ bundle.built_at | date: 'short' }}
</div>
}
</td>
<td>
<div class="d-flex align-items-center gap-2">
@if (bundle.status === statuses.Processing || bundle.status === statuses.Pending) {
<span class="spinner-border spinner-border-sm" role="status"></span>
}
<span class="badge text-bg-secondary text-uppercase">{{ statusLabel(bundle.status) }}</span>
</div>
@if (bundle.last_error && bundle.status === statuses.Failed) {
<div class="small text-danger mt-1">{{ bundle.last_error }}</div>
}
</td>
<td>
@if (bundle.size_bytes !== undefined && bundle.size_bytes !== null) {
{{ bundle.size_bytes | fileSize }}
}
@if (bundle.size_bytes === undefined || bundle.size_bytes === null) {
<span class="text-muted">&mdash;</span>
}
</td>
<td>
@if (bundle.expiration) {
{{ bundle.expiration | date: 'short' }}
}
@if (!bundle.expiration) {
<span i18n>Never</span>
}
</td>
<td>{{ bundle.document_count }}</td>
<td>{{ fileVersionLabel(bundle.file_version) }}</td>
<td class="text-end">
<div class="btn-group btn-group-sm">
<button
type="button"
class="btn btn-outline-primary"
[disabled]="bundle.status !== statuses.Ready"
(click)="copy(bundle)"
title="Copy share link"
i18n-title
>
@if (copiedSlug === bundle.slug) {
<i-bs name="clipboard-check"></i-bs>
}
@if (copiedSlug !== bundle.slug) {
<i-bs name="clipboard"></i-bs>
}
<span class="visually-hidden" i18n>Copy share link</span>
</button>
@if (bundle.status === statuses.Failed) {
<button
type="button"
class="btn btn-outline-warning"
[disabled]="loading"
(click)="retry(bundle)"
>
<i-bs name="arrow-clockwise"></i-bs>
<span class="visually-hidden" i18n>Retry</span>
</button>
}
<pngx-confirm-button
buttonClasses="btn btn-sm btn-outline-danger"
[disabled]="loading"
(confirm)="delete(bundle)"
iconName="trash"
>
<span class="visually-hidden" i18n>Delete share link bundle</span>
</pngx-confirm-button>
</div>
</td>
</tr>
}
</tbody>
</table>
</div>
}
}
</div>
<div class="modal-footer">
<button type="button" class="btn btn-outline-secondary btn-sm" (click)="close()" i18n>Close</button>
</div>

View File

@@ -0,0 +1,250 @@
import { Clipboard } from '@angular/cdk/clipboard'
import {
ComponentFixture,
TestBed,
fakeAsync,
tick,
} from '@angular/core/testing'
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap'
import { NgxBootstrapIconsModule, allIcons } from 'ngx-bootstrap-icons'
import { of, throwError } from 'rxjs'
import { FileVersion } from 'src/app/data/share-link'
import {
ShareLinkBundleStatus,
ShareLinkBundleSummary,
} from 'src/app/data/share-link-bundle'
import { ShareLinkBundleService } from 'src/app/services/rest/share-link-bundle.service'
import { ToastService } from 'src/app/services/toast.service'
import { environment } from 'src/environments/environment'
import { ShareLinkBundleManageDialogComponent } from './share-link-bundle-manage-dialog.component'
class MockShareLinkBundleService {
listAllBundles = jest.fn()
delete = jest.fn()
rebuildBundle = jest.fn()
}
class MockToastService {
showInfo = jest.fn()
showError = jest.fn()
}
describe('ShareLinkBundleManageDialogComponent', () => {
let component: ShareLinkBundleManageDialogComponent
let fixture: ComponentFixture<ShareLinkBundleManageDialogComponent>
let service: MockShareLinkBundleService
let toastService: MockToastService
let clipboard: Clipboard
let activeModal: NgbActiveModal
let originalApiBaseUrl: string
beforeEach(() => {
service = new MockShareLinkBundleService()
toastService = new MockToastService()
originalApiBaseUrl = environment.apiBaseUrl
service.listAllBundles.mockReturnValue(of([]))
service.delete.mockReturnValue(of(true))
service.rebuildBundle.mockReturnValue(of(sampleBundle()))
TestBed.configureTestingModule({
imports: [
ShareLinkBundleManageDialogComponent,
NgxBootstrapIconsModule.pick(allIcons),
],
providers: [
NgbActiveModal,
{ provide: ShareLinkBundleService, useValue: service },
{ provide: ToastService, useValue: toastService },
],
})
fixture = TestBed.createComponent(ShareLinkBundleManageDialogComponent)
component = fixture.componentInstance
clipboard = TestBed.inject(Clipboard)
activeModal = TestBed.inject(NgbActiveModal)
})
afterEach(() => {
component.ngOnDestroy()
fixture.destroy()
environment.apiBaseUrl = originalApiBaseUrl
jest.clearAllMocks()
})
const sampleBundle = (overrides: Partial<ShareLinkBundleSummary> = {}) =>
({
id: 1,
slug: 'bundle-slug',
created: new Date().toISOString(),
document_count: 1,
documents: [1],
status: ShareLinkBundleStatus.Pending,
file_version: FileVersion.Archive,
...overrides,
}) as ShareLinkBundleSummary
it('loads bundles on init and polls periodically', fakeAsync(() => {
const bundles = [sampleBundle({ status: ShareLinkBundleStatus.Ready })]
service.listAllBundles.mockReset()
service.listAllBundles
.mockReturnValueOnce(of(bundles))
.mockReturnValue(of(bundles))
fixture.detectChanges()
tick()
expect(service.listAllBundles).toHaveBeenCalledTimes(1)
expect(component.bundles).toEqual(bundles)
expect(component.loading).toBe(false)
expect(component.error).toBeNull()
tick(5000)
expect(service.listAllBundles).toHaveBeenCalledTimes(2)
}))
it('handles errors when loading bundles', fakeAsync(() => {
service.listAllBundles.mockReset()
service.listAllBundles
.mockReturnValueOnce(throwError(() => new Error('load fail')))
.mockReturnValue(of([]))
fixture.detectChanges()
tick()
expect(component.error).toContain('Failed to load share link bundles.')
expect(toastService.showError).toHaveBeenCalled()
expect(component.loading).toBe(false)
tick(5000)
expect(service.listAllBundles).toHaveBeenCalledTimes(2)
}))
it('copies bundle links when ready', fakeAsync(() => {
jest.spyOn(clipboard, 'copy').mockReturnValue(true)
fixture.detectChanges()
tick()
const readyBundle = sampleBundle({
slug: 'ready-slug',
status: ShareLinkBundleStatus.Ready,
})
component.copy(readyBundle)
expect(clipboard.copy).toHaveBeenCalledWith(
component.getShareUrl(readyBundle)
)
expect(component.copiedSlug).toBe('ready-slug')
expect(toastService.showInfo).toHaveBeenCalled()
tick(3000)
expect(component.copiedSlug).toBeNull()
}))
it('ignores copy requests for non-ready bundles', fakeAsync(() => {
const copySpy = jest.spyOn(clipboard, 'copy')
fixture.detectChanges()
tick()
component.copy(sampleBundle({ status: ShareLinkBundleStatus.Pending }))
expect(copySpy).not.toHaveBeenCalled()
}))
it('deletes bundles and refreshes list', fakeAsync(() => {
service.listAllBundles.mockReturnValue(of([]))
service.delete.mockReturnValue(of(true))
fixture.detectChanges()
tick()
component.delete(sampleBundle())
tick()
expect(service.delete).toHaveBeenCalled()
expect(toastService.showInfo).toHaveBeenCalledWith(
expect.stringContaining('deleted.')
)
expect(service.listAllBundles).toHaveBeenCalledTimes(2)
expect(component.loading).toBe(false)
}))
it('handles delete errors gracefully', fakeAsync(() => {
service.listAllBundles.mockReturnValue(of([]))
service.delete.mockReturnValue(throwError(() => new Error('delete fail')))
fixture.detectChanges()
tick()
component.delete(sampleBundle())
tick()
expect(toastService.showError).toHaveBeenCalled()
expect(component.loading).toBe(false)
}))
it('retries bundle build and replaces existing entry', fakeAsync(() => {
service.listAllBundles.mockReturnValue(of([]))
const updated = sampleBundle({ status: ShareLinkBundleStatus.Ready })
service.rebuildBundle.mockReturnValue(of(updated))
fixture.detectChanges()
tick()
component.bundles = [sampleBundle()]
component.retry(component.bundles[0])
tick()
expect(service.rebuildBundle).toHaveBeenCalledWith(updated.id)
expect(component.bundles[0].status).toBe(ShareLinkBundleStatus.Ready)
expect(toastService.showInfo).toHaveBeenCalled()
}))
it('adds new bundle when retry returns unknown entry', fakeAsync(() => {
service.listAllBundles.mockReturnValue(of([]))
service.rebuildBundle.mockReturnValue(
of(sampleBundle({ id: 99, slug: 'new-slug' }))
)
fixture.detectChanges()
tick()
component.bundles = [sampleBundle()]
component.retry({ id: 99 } as ShareLinkBundleSummary)
tick()
expect(component.bundles.find((bundle) => bundle.id === 99)).toBeTruthy()
}))
it('handles retry errors', fakeAsync(() => {
service.listAllBundles.mockReturnValue(of([]))
service.rebuildBundle.mockReturnValue(throwError(() => new Error('fail')))
fixture.detectChanges()
tick()
component.retry(sampleBundle())
tick()
expect(toastService.showError).toHaveBeenCalled()
}))
it('maps helpers and closes dialog', fakeAsync(() => {
service.listAllBundles.mockReturnValue(of([]))
fixture.detectChanges()
tick()
expect(component.statusLabel(ShareLinkBundleStatus.Processing)).toContain(
'Processing'
)
expect(component.fileVersionLabel(FileVersion.Original)).toContain(
'Original'
)
environment.apiBaseUrl = 'https://example.com/api/'
const url = component.getShareUrl(sampleBundle({ slug: 'sluggy' }))
expect(url).toBe('https://example.com/share/sluggy')
const closeSpy = jest.spyOn(activeModal, 'close')
component.close()
expect(closeSpy).toHaveBeenCalled()
}))
})

View File

@@ -0,0 +1,175 @@
import { Clipboard } from '@angular/cdk/clipboard'
import { CommonModule } from '@angular/common'
import { Component, OnDestroy, OnInit, inject } from '@angular/core'
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap'
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
import { Subject, catchError, of, switchMap, takeUntil, timer } from 'rxjs'
import { FileVersion } from 'src/app/data/share-link'
import {
SHARE_LINK_BUNDLE_FILE_VERSION_LABELS,
SHARE_LINK_BUNDLE_STATUS_LABELS,
ShareLinkBundleStatus,
ShareLinkBundleSummary,
} from 'src/app/data/share-link-bundle'
import { FileSizePipe } from 'src/app/pipes/file-size.pipe'
import { ShareLinkBundleService } from 'src/app/services/rest/share-link-bundle.service'
import { ToastService } from 'src/app/services/toast.service'
import { environment } from 'src/environments/environment'
import { LoadingComponentWithPermissions } from '../../loading-component/loading.component'
import { ConfirmButtonComponent } from '../confirm-button/confirm-button.component'
@Component({
selector: 'pngx-share-link-bundle-manage-dialog',
templateUrl: './share-link-bundle-manage-dialog.component.html',
imports: [
ConfirmButtonComponent,
CommonModule,
NgxBootstrapIconsModule,
FileSizePipe,
],
})
export class ShareLinkBundleManageDialogComponent
extends LoadingComponentWithPermissions
implements OnInit, OnDestroy
{
private readonly activeModal = inject(NgbActiveModal)
private readonly shareLinkBundleService = inject(ShareLinkBundleService)
private readonly toastService = inject(ToastService)
private readonly clipboard = inject(Clipboard)
title = $localize`Share link bundles`
bundles: ShareLinkBundleSummary[] = []
error: string | null = null
copiedSlug: string | null = null
readonly statuses = ShareLinkBundleStatus
readonly fileVersions = FileVersion
private readonly refresh$ = new Subject<boolean>()
ngOnInit(): void {
this.refresh$
.pipe(
switchMap((silent) => {
if (!silent) {
this.loading = true
}
this.error = null
return this.shareLinkBundleService.listAllBundles().pipe(
catchError((error) => {
if (!silent) {
this.loading = false
}
this.error = $localize`Failed to load share link bundles.`
this.toastService.showError(
$localize`Error retrieving share link bundles.`,
error
)
return of(null)
})
)
}),
takeUntil(this.unsubscribeNotifier)
)
.subscribe((results) => {
if (results) {
this.bundles = results
this.copiedSlug = null
}
this.loading = false
})
this.triggerRefresh(false)
timer(5000, 5000)
.pipe(takeUntil(this.unsubscribeNotifier))
.subscribe(() => this.triggerRefresh(true))
}
ngOnDestroy(): void {
super.ngOnDestroy()
}
getShareUrl(bundle: ShareLinkBundleSummary): string {
const apiURL = new URL(environment.apiBaseUrl)
return `${apiURL.origin}${apiURL.pathname.replace(/\/api\/$/, '/share/')}${
bundle.slug
}`
}
copy(bundle: ShareLinkBundleSummary): void {
if (bundle.status !== ShareLinkBundleStatus.Ready) {
return
}
const success = this.clipboard.copy(this.getShareUrl(bundle))
if (success) {
this.copiedSlug = bundle.slug
setTimeout(() => {
this.copiedSlug = null
}, 3000)
this.toastService.showInfo($localize`Share link copied to clipboard.`)
}
}
delete(bundle: ShareLinkBundleSummary): void {
this.error = null
this.loading = true
this.shareLinkBundleService.delete(bundle).subscribe({
next: () => {
this.toastService.showInfo($localize`Share link bundle deleted.`)
this.triggerRefresh(false)
},
error: (e) => {
this.loading = false
this.toastService.showError(
$localize`Error deleting share link bundle.`,
e
)
},
})
}
retry(bundle: ShareLinkBundleSummary): void {
this.error = null
this.shareLinkBundleService.rebuildBundle(bundle.id).subscribe({
next: (updated) => {
this.toastService.showInfo(
$localize`Share link bundle rebuild requested.`
)
this.replaceBundle(updated)
},
error: (e) => {
this.toastService.showError($localize`Error requesting rebuild.`, e)
},
})
}
statusLabel(status: ShareLinkBundleStatus): string {
return SHARE_LINK_BUNDLE_STATUS_LABELS[status] ?? status
}
fileVersionLabel(version: FileVersion): string {
return SHARE_LINK_BUNDLE_FILE_VERSION_LABELS[version] ?? version
}
close(): void {
this.activeModal.close()
}
private replaceBundle(updated: ShareLinkBundleSummary): void {
const index = this.bundles.findIndex((bundle) => bundle.id === updated.id)
if (index >= 0) {
this.bundles = [
...this.bundles.slice(0, index),
updated,
...this.bundles.slice(index + 1),
]
} else {
this.bundles = [updated, ...this.bundles]
}
}
private triggerRefresh(silent: boolean): void {
this.refresh$.next(silent)
}
}

View File

@@ -51,7 +51,7 @@
<div class="input-group w-100 mt-2">
<label class="input-group-text" for="addLink"><ng-container i18n>Expires</ng-container>:</label>
<select class="form-select fs-6" [(ngModel)]="expirationDays">
@for (option of EXPIRATION_OPTIONS; track option) {
@for (option of expirationOptions; track option) {
<option [ngValue]="option.value">{{ option.label }}</option>
}
</select>

View File

@@ -4,7 +4,11 @@ import { FormsModule, ReactiveFormsModule } from '@angular/forms'
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap'
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
import { first } from 'rxjs'
import { FileVersion, ShareLink } from 'src/app/data/share-link'
import {
FileVersion,
SHARE_LINK_EXPIRATION_OPTIONS,
ShareLink,
} from 'src/app/data/share-link'
import { ShareLinkService } from 'src/app/services/rest/share-link.service'
import { ToastService } from 'src/app/services/toast.service'
import { environment } from 'src/environments/environment'
@@ -21,12 +25,7 @@ export class ShareLinksDialogComponent implements OnInit {
private toastService = inject(ToastService)
private clipboard = inject(Clipboard)
EXPIRATION_OPTIONS = [
{ label: $localize`1 day`, value: 1 },
{ label: $localize`7 days`, value: 7 },
{ label: $localize`30 days`, value: 30 },
{ label: $localize`Never`, value: null },
]
readonly expirationOptions = SHARE_LINK_EXPIRATION_OPTIONS
@Input()
title = $localize`Share Links`

View File

@@ -96,14 +96,36 @@
<button ngbDropdownItem (click)="mergeSelected()" [disabled]="!userCanAdd || list.selected.size < 2">
<i-bs name="journals"></i-bs>&nbsp;<ng-container i18n>Merge</ng-container>
</button>
@if (emailEnabled) {
<button ngbDropdownItem (click)="emailSelected()">
<i-bs name="envelope"></i-bs>&nbsp;<ng-container i18n>Email</ng-container>
</button>
}
</div>
</div>
</div>
<div class="btn-toolbar" ngbDropdown>
<button
class="btn btn-sm btn-outline-primary"
id="dropdownSend"
ngbDropdownToggle
[disabled]="disabled || list.selected.size === 0"
>
<i-bs name="send"></i-bs>
<div class="d-none d-sm-inline">
&nbsp;<ng-container i18n>Send</ng-container>
</div>
</button>
<div ngbDropdownMenu aria-labelledby="dropdownSend" class="shadow">
<button ngbDropdownItem (click)="createShareLinkBundle()">
<i-bs name="link"></i-bs>&nbsp;<ng-container i18n>Create a share link bundle</ng-container>
</button>
<button ngbDropdownItem (click)="manageShareLinkBundles()">
<i-bs name="list-ul"></i-bs>&nbsp;<ng-container i18n>Manage share link bundles</ng-container>
</button>
<div class="dropdown-divider"></div>
@if (emailEnabled) {
<button ngbDropdownItem (click)="emailSelected()">
<i-bs name="envelope"></i-bs>&nbsp;<ng-container i18n>Email</ng-container>
</button>
}
</div>
</div>
<div class="btn-group btn-group-sm">
<button class="btn btn-sm btn-outline-primary" [disabled]="awaitingDownload" (click)="downloadSelected()">
@if (!awaitingDownload) {

View File

@@ -3,6 +3,7 @@ import {
HttpTestingController,
provideHttpClientTesting,
} from '@angular/common/http/testing'
import { EventEmitter } from '@angular/core'
import { ComponentFixture, TestBed } from '@angular/core/testing'
import { By } from '@angular/platform-browser'
import { NgbModal, NgbModalRef } from '@ng-bootstrap/ng-bootstrap'
@@ -25,6 +26,7 @@ import {
SelectionData,
} from 'src/app/services/rest/document.service'
import { GroupService } from 'src/app/services/rest/group.service'
import { ShareLinkBundleService } from 'src/app/services/rest/share-link-bundle.service'
import { StoragePathService } from 'src/app/services/rest/storage-path.service'
import { TagService } from 'src/app/services/rest/tag.service'
import { UserService } from 'src/app/services/rest/user.service'
@@ -38,6 +40,8 @@ import { EditDialogMode } from '../../common/edit-dialog/edit-dialog.component'
import { StoragePathEditDialogComponent } from '../../common/edit-dialog/storage-path-edit-dialog/storage-path-edit-dialog.component'
import { TagEditDialogComponent } from '../../common/edit-dialog/tag-edit-dialog/tag-edit-dialog.component'
import { FilterableDropdownComponent } from '../../common/filterable-dropdown/filterable-dropdown.component'
import { ShareLinkBundleDialogComponent } from '../../common/share-link-bundle-dialog/share-link-bundle-dialog.component'
import { ShareLinkBundleManageDialogComponent } from '../../common/share-link-bundle-manage-dialog/share-link-bundle-manage-dialog.component'
import { BulkEditorComponent } from './bulk-editor.component'
const selectionData: SelectionData = {
@@ -72,6 +76,7 @@ describe('BulkEditorComponent', () => {
let storagePathService: StoragePathService
let customFieldsService: CustomFieldsService
let httpTestingController: HttpTestingController
let shareLinkBundleService: ShareLinkBundleService
beforeEach(async () => {
TestBed.configureTestingModule({
@@ -152,6 +157,15 @@ describe('BulkEditorComponent', () => {
}),
},
},
{
provide: ShareLinkBundleService,
useValue: {
createBundle: jest.fn(),
listAllBundles: jest.fn(),
rebuildBundle: jest.fn(),
delete: jest.fn(),
},
},
provideHttpClient(withInterceptorsFromDi()),
provideHttpClientTesting(),
],
@@ -168,6 +182,7 @@ describe('BulkEditorComponent', () => {
storagePathService = TestBed.inject(StoragePathService)
customFieldsService = TestBed.inject(CustomFieldsService)
httpTestingController = TestBed.inject(HttpTestingController)
shareLinkBundleService = TestBed.inject(ShareLinkBundleService)
fixture = TestBed.createComponent(BulkEditorComponent)
component = fixture.componentInstance
@@ -1454,4 +1469,130 @@ describe('BulkEditorComponent', () => {
`${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id`
) // listAllFilteredIds
})
it('should create share link bundle and enable manage callback', () => {
jest.spyOn(permissionsService, 'currentUserCan').mockReturnValue(true)
jest
.spyOn(documentListViewService, 'documents', 'get')
.mockReturnValue([{ id: 5 }, { id: 7 }] as any)
jest
.spyOn(documentListViewService, 'selected', 'get')
.mockReturnValue(new Set([5, 7]))
const confirmClicked = new EventEmitter<void>()
const modalRef: Partial<NgbModalRef> = {
close: jest.fn(),
componentInstance: {
documents: [],
confirmClicked,
payload: {
document_ids: [5, 7],
file_version: 'archive',
expiration_days: 7,
},
loading: false,
buttonsEnabled: true,
copied: false,
},
}
const openSpy = jest.spyOn(modalService, 'open')
openSpy.mockReturnValueOnce(modalRef as NgbModalRef)
openSpy.mockReturnValueOnce({} as NgbModalRef)
;(shareLinkBundleService.createBundle as jest.Mock).mockReturnValueOnce(
of({ id: 42 })
)
const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
component.createShareLinkBundle()
expect(openSpy).toHaveBeenNthCalledWith(
1,
ShareLinkBundleDialogComponent,
expect.objectContaining({ backdrop: 'static', size: 'lg' })
)
const dialogInstance = modalRef.componentInstance as any
expect(dialogInstance.documents).toEqual([{ id: 5 }, { id: 7 }])
confirmClicked.emit()
expect(shareLinkBundleService.createBundle).toHaveBeenCalledWith({
document_ids: [5, 7],
file_version: 'archive',
expiration_days: 7,
})
expect(dialogInstance.loading).toBe(false)
expect(dialogInstance.buttonsEnabled).toBe(false)
expect(dialogInstance.createdBundle).toEqual({ id: 42 })
expect(typeof dialogInstance.onOpenManage).toBe('function')
expect(toastInfoSpy).toHaveBeenCalledWith(
$localize`Share link bundle creation requested.`
)
dialogInstance.onOpenManage()
expect(modalRef.close).toHaveBeenCalled()
expect(openSpy).toHaveBeenNthCalledWith(
2,
ShareLinkBundleManageDialogComponent,
expect.objectContaining({ backdrop: 'static', size: 'lg' })
)
openSpy.mockRestore()
})
it('should handle share link bundle creation errors', () => {
jest.spyOn(permissionsService, 'currentUserCan').mockReturnValue(true)
jest
.spyOn(documentListViewService, 'documents', 'get')
.mockReturnValue([{ id: 9 }] as any)
jest
.spyOn(documentListViewService, 'selected', 'get')
.mockReturnValue(new Set([9]))
const confirmClicked = new EventEmitter<void>()
const modalRef: Partial<NgbModalRef> = {
componentInstance: {
documents: [],
confirmClicked,
payload: {
document_ids: [9],
file_version: 'original',
expiration_days: null,
},
loading: false,
buttonsEnabled: true,
},
}
const openSpy = jest
.spyOn(modalService, 'open')
.mockReturnValue(modalRef as NgbModalRef)
;(shareLinkBundleService.createBundle as jest.Mock).mockReturnValueOnce(
throwError(() => new Error('bundle failure'))
)
const toastErrorSpy = jest.spyOn(toastService, 'showError')
component.createShareLinkBundle()
const dialogInstance = modalRef.componentInstance as any
confirmClicked.emit()
expect(toastErrorSpy).toHaveBeenCalledWith(
$localize`Share link bundle creation is not available yet.`,
expect.any(Error)
)
expect(dialogInstance.loading).toBe(false)
expect(dialogInstance.buttonsEnabled).toBe(true)
openSpy.mockRestore()
})
it('should open share link bundle management dialog', () => {
const openSpy = jest.spyOn(modalService, 'open')
component.manageShareLinkBundles()
expect(openSpy).toHaveBeenCalledWith(
ShareLinkBundleManageDialogComponent,
expect.objectContaining({ backdrop: 'static', size: 'lg' })
)
openSpy.mockRestore()
})
})

View File

@@ -33,6 +33,7 @@ import {
SelectionDataItem,
} from 'src/app/services/rest/document.service'
import { SavedViewService } from 'src/app/services/rest/saved-view.service'
import { ShareLinkBundleService } from 'src/app/services/rest/share-link-bundle.service'
import { StoragePathService } from 'src/app/services/rest/storage-path.service'
import { TagService } from 'src/app/services/rest/tag.service'
import { SettingsService } from 'src/app/services/settings.service'
@@ -54,6 +55,8 @@ import {
} from '../../common/filterable-dropdown/filterable-dropdown.component'
import { ToggleableItemState } from '../../common/filterable-dropdown/toggleable-dropdown-button/toggleable-dropdown-button.component'
import { PermissionsDialogComponent } from '../../common/permissions-dialog/permissions-dialog.component'
import { ShareLinkBundleDialogComponent } from '../../common/share-link-bundle-dialog/share-link-bundle-dialog.component'
import { ShareLinkBundleManageDialogComponent } from '../../common/share-link-bundle-manage-dialog/share-link-bundle-manage-dialog.component'
import { ComponentWithPermissions } from '../../with-permissions/with-permissions.component'
import { CustomFieldsBulkEditDialogComponent } from './custom-fields-bulk-edit-dialog/custom-fields-bulk-edit-dialog.component'
@@ -87,6 +90,7 @@ export class BulkEditorComponent
private customFieldService = inject(CustomFieldsService)
private permissionService = inject(PermissionsService)
private savedViewService = inject(SavedViewService)
private readonly shareLinkBundleService = inject(ShareLinkBundleService)
tagSelectionModel = new FilterableDropdownSelectionModel(true)
correspondentSelectionModel = new FilterableDropdownSelectionModel()
@@ -908,6 +912,58 @@ export class BulkEditorComponent
return this.settings.get(SETTINGS_KEYS.EMAIL_ENABLED)
}
createShareLinkBundle() {
const modal = this.modalService.open(ShareLinkBundleDialogComponent, {
backdrop: 'static',
size: 'lg',
})
const dialog = modal.componentInstance as ShareLinkBundleDialogComponent
const selectedDocuments = this.list.documents.filter((d) =>
this.list.selected.has(d.id)
)
dialog.documents = selectedDocuments
dialog.confirmClicked
.pipe(takeUntil(this.unsubscribeNotifier))
.subscribe(() => {
dialog.loading = true
dialog.buttonsEnabled = false
this.shareLinkBundleService
.createBundle(dialog.payload)
.pipe(first())
.subscribe({
next: (result) => {
dialog.loading = false
dialog.buttonsEnabled = false
dialog.createdBundle = result
dialog.copied = false
dialog.payload = null
dialog.onOpenManage = () => {
modal.close()
this.manageShareLinkBundles()
}
this.toastService.showInfo(
$localize`Share link bundle creation requested.`
)
},
error: (error) => {
dialog.loading = false
dialog.buttonsEnabled = true
this.toastService.showError(
$localize`Share link bundle creation is not available yet.`,
error
)
},
})
})
}
manageShareLinkBundles() {
this.modalService.open(ShareLinkBundleManageDialogComponent, {
backdrop: 'static',
size: 'lg',
})
}
emailSelected() {
const allHaveArchiveVersion = this.list.documents
.filter((d) => this.list.selected.has(d.id))

View File

@@ -0,0 +1,46 @@
import { FileVersion } from './share-link'
export enum ShareLinkBundleStatus {
Pending = 'pending',
Processing = 'processing',
Ready = 'ready',
Failed = 'failed',
}
export interface ShareLinkBundleSummary {
id: number
slug: string
created: string // Date
expiration?: string // Date
documents: number[]
document_count: number
file_version: FileVersion
status: ShareLinkBundleStatus
built_at?: string
size_bytes?: number
last_error?: string
}
export interface ShareLinkBundleCreatePayload {
document_ids: number[]
file_version: FileVersion
expiration_days: number | null
}
export const SHARE_LINK_BUNDLE_STATUS_LABELS: Record<
ShareLinkBundleStatus,
string
> = {
[ShareLinkBundleStatus.Pending]: $localize`Pending`,
[ShareLinkBundleStatus.Processing]: $localize`Processing`,
[ShareLinkBundleStatus.Ready]: $localize`Ready`,
[ShareLinkBundleStatus.Failed]: $localize`Failed`,
}
export const SHARE_LINK_BUNDLE_FILE_VERSION_LABELS: Record<
FileVersion,
string
> = {
[FileVersion.Archive]: $localize`Archive`,
[FileVersion.Original]: $localize`Original`,
}

View File

@@ -5,6 +5,18 @@ export enum FileVersion {
Original = 'original',
}
export interface ShareLinkExpirationOption {
label: string
value: number | null
}
export const SHARE_LINK_EXPIRATION_OPTIONS: ShareLinkExpirationOption[] = [
{ label: $localize`1 day`, value: 1 },
{ label: $localize`7 days`, value: 7 },
{ label: $localize`30 days`, value: 30 },
{ label: $localize`Never`, value: null },
]
export interface ShareLink extends ObjectWithPermissions {
created: string // Date

View File

@@ -0,0 +1,60 @@
import { HttpTestingController } from '@angular/common/http/testing'
import { TestBed } from '@angular/core/testing'
import { Subscription } from 'rxjs'
import { environment } from 'src/environments/environment'
import { commonAbstractPaperlessServiceTests } from './abstract-paperless-service.spec'
import { ShareLinkBundleService } from './share-link-bundle.service'
const endpoint = 'share_link_bundles'
commonAbstractPaperlessServiceTests(endpoint, ShareLinkBundleService)
describe('ShareLinkBundleService', () => {
let httpTestingController: HttpTestingController
let service: ShareLinkBundleService
let subscription: Subscription | undefined
beforeEach(() => {
httpTestingController = TestBed.inject(HttpTestingController)
service = TestBed.inject(ShareLinkBundleService)
})
afterEach(() => {
subscription?.unsubscribe()
httpTestingController.verify()
})
it('creates bundled share links', () => {
const payload = {
document_ids: [1, 2],
file_version: 'archive',
expiration_days: 7,
}
subscription = service.createBundle(payload as any).subscribe()
const req = httpTestingController.expectOne(
`${environment.apiBaseUrl}${endpoint}/`
)
expect(req.request.method).toBe('POST')
expect(req.request.body).toEqual(payload)
req.flush({})
})
it('rebuilds bundles', () => {
subscription = service.rebuildBundle(12).subscribe()
const req = httpTestingController.expectOne(
`${environment.apiBaseUrl}${endpoint}/12/rebuild/`
)
expect(req.request.method).toBe('POST')
expect(req.request.body).toEqual({})
req.flush({})
})
it('lists bundles with expected parameters', () => {
subscription = service.listAllBundles().subscribe()
const req = httpTestingController.expectOne(
`${environment.apiBaseUrl}${endpoint}/?page=1&page_size=1000&ordering=-created`
)
expect(req.request.method).toBe('GET')
req.flush({ results: [] })
})
})

View File

@@ -0,0 +1,41 @@
import { Injectable } from '@angular/core'
import { Observable } from 'rxjs'
import { map } from 'rxjs/operators'
import {
ShareLinkBundleCreatePayload,
ShareLinkBundleSummary,
} from 'src/app/data/share-link-bundle'
import { AbstractNameFilterService } from './abstract-name-filter-service'
@Injectable({
providedIn: 'root',
})
export class ShareLinkBundleService extends AbstractNameFilterService<ShareLinkBundleSummary> {
constructor() {
super()
this.resourceName = 'share_link_bundles'
}
createBundle(
payload: ShareLinkBundleCreatePayload
): Observable<ShareLinkBundleSummary> {
this.clearCache()
return this.http.post<ShareLinkBundleSummary>(
this.getResourceUrl(),
payload
)
}
rebuildBundle(bundleId: number): Observable<ShareLinkBundleSummary> {
this.clearCache()
return this.http.post<ShareLinkBundleSummary>(
this.getResourceUrl(bundleId, 'rebuild'),
{}
)
}
listAllBundles(): Observable<ShareLinkBundleSummary[]> {
return this.list(1, 1000, 'created', true).pipe(
map((response) => response.results)
)
}
}

View File

@@ -13,6 +13,7 @@ from documents.models import PaperlessTask
from documents.models import SavedView
from documents.models import SavedViewFilterRule
from documents.models import ShareLink
from documents.models import ShareLinkBundle
from documents.models import StoragePath
from documents.models import Tag
from documents.tasks import update_document_parent_tags
@@ -185,6 +186,22 @@ class ShareLinksAdmin(GuardedModelAdmin):
return super().get_queryset(request).select_related("document__correspondent")
class ShareLinkBundleAdmin(GuardedModelAdmin):
list_display = ("created", "status", "expiration", "owner", "slug")
list_filter = ("status", "created", "expiration", "owner")
search_fields = ("slug",)
def get_queryset(self, request): # pragma: no cover
return (
super()
.get_queryset(request)
.select_related("owner")
.prefetch_related(
"documents",
)
)
class CustomFieldsAdmin(GuardedModelAdmin):
fields = ("name", "created", "data_type")
readonly_fields = ("created", "data_type")
@@ -216,6 +233,7 @@ admin.site.register(StoragePath, StoragePathAdmin)
admin.site.register(PaperlessTask, TaskAdmin)
admin.site.register(Note, NotesAdmin)
admin.site.register(ShareLink, ShareLinksAdmin)
admin.site.register(ShareLinkBundle, ShareLinkBundleAdmin)
admin.site.register(CustomField, CustomFieldsAdmin)
admin.site.register(CustomFieldInstance, CustomFieldInstancesAdmin)

View File

@@ -39,6 +39,7 @@ from documents.models import Document
from documents.models import DocumentType
from documents.models import PaperlessTask
from documents.models import ShareLink
from documents.models import ShareLinkBundle
from documents.models import StoragePath
from documents.models import Tag
@@ -796,6 +797,29 @@ class ShareLinkFilterSet(FilterSet):
}
class ShareLinkBundleFilterSet(FilterSet):
documents = Filter(method="filter_documents")
class Meta:
model = ShareLinkBundle
fields = {
"created": DATETIME_KWARGS,
"expiration": DATETIME_KWARGS,
"status": ["exact"],
}
def filter_documents(self, queryset, name, value):
if not value:
return queryset
try:
ids = [int(item) for item in value.split(",") if item]
except ValueError:
return queryset.none()
if not ids:
return queryset
return queryset.filter(documents__in=ids).distinct()
class PaperlessTaskFilterSet(FilterSet):
acknowledged = BooleanFilter(
label="Acknowledged",

View File

@@ -1,362 +1,135 @@
"""
Document consumer management command.
Watches a consumption directory for new documents and queues them for processing.
Uses watchfiles for efficient file system monitoring with support for both
native OS notifications and polling fallback.
"""
from __future__ import annotations
import logging
import re
from dataclasses import dataclass
import os
from concurrent.futures import ThreadPoolExecutor
from fnmatch import filter
from pathlib import Path
from pathlib import PurePath
from threading import Event
from time import monotonic
from typing import TYPE_CHECKING
from time import sleep
from typing import Final
from django import db
from django.conf import settings
from django.core.management.base import BaseCommand
from django.core.management.base import CommandError
from watchfiles import Change
from watchfiles import DefaultFilter
from watchfiles import watch
from watchdog.events import FileSystemEventHandler
from watchdog.observers.polling import PollingObserver
from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
from documents.data_models import DocumentSource
from documents.models import Tag
from documents.parsers import get_supported_file_extensions
from documents.parsers import is_file_ext_supported
from documents.tasks import consume_file
if TYPE_CHECKING:
from collections.abc import Iterator
try:
from inotifyrecursive import INotify
from inotifyrecursive import flags
except ImportError: # pragma: no cover
INotify = flags = None
logger = logging.getLogger("paperless.management.consumer")
@dataclass
class TrackedFile:
"""Represents a file being tracked for stability."""
path: Path
last_event_time: float
last_mtime: float | None = None
last_size: int | None = None
def update_stats(self) -> bool:
"""
Update file stats. Returns True if file exists and stats were updated.
"""
try:
stat = self.path.stat()
self.last_mtime = stat.st_mtime
self.last_size = stat.st_size
return True
except (FileNotFoundError, PermissionError, OSError):
return False
def is_unchanged(self) -> bool:
"""
Check if file stats match the previously recorded values.
Returns False if file doesn't exist or stats changed.
"""
try:
stat = self.path.stat()
return stat.st_mtime == self.last_mtime and stat.st_size == self.last_size
except (FileNotFoundError, PermissionError, OSError):
return False
class FileStabilityTracker:
def _tags_from_path(filepath: Path) -> list[int]:
"""
Tracks file events and determines when files are stable for consumption.
A file is considered stable when:
1. No new events have been received for it within the stability delay
2. Its size and modification time haven't changed
3. It still exists as a regular file
This handles various edge cases:
- Network copies that write in chunks
- Scanners that open/close files multiple times
- Temporary files that get renamed
- Files that are deleted before becoming stable
"""
def __init__(self, stability_delay: float = 1.0) -> None:
"""
Initialize the tracker.
Args:
stability_delay: Time in seconds a file must remain unchanged
before being considered stable.
"""
self.stability_delay = stability_delay
self._tracked: dict[Path, TrackedFile] = {}
def track(self, path: Path, change: Change) -> None:
"""
Register a file event.
Args:
path: The file path that changed.
change: The type of change (added, modified, deleted).
"""
path = path.resolve()
match change:
case Change.deleted:
self._tracked.pop(path, None)
logger.debug(f"Stopped tracking deleted file: {path}")
case Change.added | Change.modified:
current_time = monotonic()
if path in self._tracked:
tracked = self._tracked[path]
tracked.last_event_time = current_time
tracked.update_stats()
logger.debug(f"Updated tracking for: {path}")
else:
tracked = TrackedFile(path=path, last_event_time=current_time)
if tracked.update_stats():
self._tracked[path] = tracked
logger.debug(f"Started tracking: {path}")
else:
logger.debug(f"Could not stat file, not tracking: {path}")
def get_stable_files(self) -> Iterator[Path]:
"""
Yield files that have been stable for the configured delay.
Files are removed from tracking once yielded or determined to be invalid.
"""
current_time = monotonic()
to_remove: list[Path] = []
to_yield: list[Path] = []
for path, tracked in list(self._tracked.items()):
time_since_event = current_time - tracked.last_event_time
if time_since_event < self.stability_delay:
continue
# File has waited long enough, verify it's unchanged
if not tracked.is_unchanged():
# Stats changed or file gone - update and wait again
if tracked.update_stats():
tracked.last_event_time = current_time
logger.debug(f"File changed during stability check: {path}")
else:
# File no longer exists, remove from tracking
to_remove.append(path)
logger.debug(f"File disappeared during stability check: {path}")
continue
# File is stable - verify it's a regular file
try:
if path.is_file():
to_yield.append(path)
logger.info(f"File is stable: {path}")
else:
# Not a regular file (directory, symlink, etc.)
to_remove.append(path)
logger.debug(f"Path is not a regular file: {path}")
except (PermissionError, OSError) as e:
logger.warning(f"Cannot access {path}: {e}")
to_remove.append(path)
# Remove files that are no longer valid
for path in to_remove:
self._tracked.pop(path, None)
# Remove and yield stable files
for path in to_yield:
self._tracked.pop(path, None)
yield path
def has_pending_files(self) -> bool:
"""Check if there are files waiting for stability check."""
return len(self._tracked) > 0
def clear(self) -> None:
"""Clear all tracked files."""
self._tracked.clear()
@property
def pending_count(self) -> int:
"""Number of files being tracked."""
return len(self._tracked)
class ConsumerFilter(DefaultFilter):
"""
Custom filter for the document consumer.
Filters files based on:
- Supported file extensions
- User-configured ignore patterns (regex)
- Default ignore patterns for common system files
"""
# Default regex patterns to ignore (matched against filename only)
DEFAULT_IGNORE_PATTERNS: Final[frozenset[str]] = frozenset(
{
r"^\.DS_Store$",
r"^\.DS_STORE$",
r"^\._.*",
r"^desktop\.ini$",
r"^Thumbs\.db$",
},
)
# Directories to always ignore (matched by name via DefaultFilter)
DEFAULT_IGNORE_DIRS: Final[tuple[str, ...]] = (
".stfolder",
".stversions",
".localized",
"@eaDir",
".Spotlight-V100",
".Trashes",
"__MACOSX",
)
def __init__(
self,
*,
supported_extensions: frozenset[str] | None = None,
ignore_patterns: list[str] | None = None,
consumption_dir: Path | None = None,
) -> None:
"""
Initialize the consumer filter.
Args:
supported_extensions: Set of supported file extensions (e.g., {".pdf", ".png"}).
If None, uses get_supported_file_extensions().
ignore_patterns: Additional regex patterns to ignore (matched against filename).
consumption_dir: Base consumption directory (unused, kept for API compatibility).
"""
# Combine default and user patterns
all_patterns = set(self.DEFAULT_IGNORE_PATTERNS)
if ignore_patterns:
all_patterns.update(ignore_patterns)
# Compile all patterns
self._ignore_regexes: list[re.Pattern[str]] = [
re.compile(pattern) for pattern in all_patterns
]
# Get supported extensions
if supported_extensions is None:
supported_extensions = frozenset(get_supported_file_extensions())
self._supported_extensions = supported_extensions
# Call parent with directory ignore list
# DefaultFilter.ignore_dirs matches directory names, not full paths
super().__init__(
ignore_dirs=self.DEFAULT_IGNORE_DIRS,
ignore_entity_patterns=None,
ignore_paths=None,
)
def __call__(self, change: Change, path: str) -> bool:
"""
Filter function for watchfiles.
Returns True if the path should be watched, False to ignore.
"""
# Let parent filter handle directory ignoring and basic checks
if not super().__call__(change, path):
return False
path_obj = Path(path)
# For directories, parent filter already handled ignore_dirs
if path_obj.is_dir():
return True
# For files, check extension
if not self._has_supported_extension(path_obj):
return False
# Check filename against ignore patterns
return not self._matches_ignore_pattern(path_obj.name)
def _has_supported_extension(self, path: Path) -> bool:
"""Check if the file has a supported extension."""
suffix = path.suffix.lower()
return suffix in self._supported_extensions
def _matches_ignore_pattern(self, filename: str) -> bool:
"""Check if the filename matches any ignore pattern."""
for regex in self._ignore_regexes:
if regex.match(filename):
logger.debug(
f"Filename {filename} matched ignore pattern {regex.pattern}",
)
return True
return False
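# --- Illustrative sketch (not part of the diff): passing a ConsumerFilter to
# watchfiles.watch(). The directory, extensions and extra ignore pattern are
# assumptions for demonstration only.
from watchfiles import watch

example_filter = ConsumerFilter(
    supported_extensions=frozenset({".pdf", ".png"}),
    ignore_patterns=[r"^~\$.*"],  # e.g. Office temporary/lock files
)
for changes in watch("/consume", watch_filter=example_filter, recursive=True):
    for change, changed_path in changes:
        print(change.name, changed_path)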
def _tags_from_path(filepath: Path, consumption_dir: Path) -> list[int]:
    """
    Walk up the directory tree from filepath to consumption_dir
    and get or create Tag IDs for every directory.
    Returns a list of Tag primary keys.
    """
    db.close_old_connections()
    tag_ids: set[int] = set()
    path_parts = filepath.relative_to(consumption_dir).parent.parts
    for part in path_parts:
        tag, _ = Tag.objects.get_or_create(
            name__iexact=part,
            defaults={"name": part},
        )
        tag_ids.add(tag.pk)
    return list(tag_ids)
def _consume_file(
filepath: Path,
consumption_dir: Path,
*,
subdirs_as_tags: bool,
) -> None:
def _is_ignored(filepath: Path) -> bool:
"""
Queue a file for consumption.
Checks if the given file should be ignored, based on configured
patterns.
Args:
filepath: Path to the file to consume.
consumption_dir: Base consumption directory.
subdirs_as_tags: Whether to create tags from subdirectory names.
Returns True if the file is ignored, False otherwise
"""
# Verify file still exists and is accessible
    # Trim out the consume directory, leaving only the filename and its
    # path relative to the consume directory
filepath_relative = PurePath(filepath).relative_to(settings.CONSUMPTION_DIR)
# March through the components of the path, including directories and the filename
# looking for anything matching
# foo/bar/baz/file.pdf -> (foo, bar, baz, file.pdf)
parts = []
for part in filepath_relative.parts:
# If the part is not the name (ie, it's a dir)
# Need to append the trailing slash or fnmatch doesn't match
# fnmatch("dir", "dir/*") == False
# fnmatch("dir/", "dir/*") == True
if part != filepath_relative.name:
part = part + "/"
parts.append(part)
for pattern in settings.CONSUMER_IGNORE_PATTERNS:
if len(filter(parts, pattern)):
return True
return False
def _consume(filepath: Path) -> None:
# Check permissions early
try:
if not filepath.is_file():
logger.debug(f"Not consuming {filepath}: not a file or doesn't exist")
return
except (PermissionError, OSError) as e:
logger.warning(f"Not consuming {filepath}: {e}")
filepath.stat()
except (PermissionError, OSError):
logger.warning(f"Not consuming file {filepath}: Permission denied.")
return
# Get tags from path if configured
tag_ids: list[int] | None = None
if subdirs_as_tags:
try:
tag_ids = _tags_from_path(filepath, consumption_dir)
except Exception:
logger.exception(f"Error creating tags from path for {filepath}")
if filepath.is_dir() or _is_ignored(filepath):
return
# Queue for consumption
if not filepath.is_file():
logger.debug(f"Not consuming file {filepath}: File has moved.")
return
if not is_file_ext_supported(filepath.suffix):
logger.warning(f"Not consuming file {filepath}: Unknown file extension.")
return
# Total wait time: up to 500ms
os_error_retry_count: Final[int] = 50
os_error_retry_wait: Final[float] = 0.01
read_try_count = 0
file_open_ok = False
os_error_str = None
while (read_try_count < os_error_retry_count) and not file_open_ok:
try:
with filepath.open("rb"):
file_open_ok = True
except OSError as e:
read_try_count += 1
os_error_str = str(e)
sleep(os_error_retry_wait)
if read_try_count >= os_error_retry_count:
logger.warning(f"Not consuming file {filepath}: OS reports {os_error_str}")
return
tag_ids = None
try:
logger.info(f"Adding {filepath} to the task queue")
if settings.CONSUMER_SUBDIRS_AS_TAGS:
tag_ids = _tags_from_path(filepath)
except Exception:
logger.exception("Error creating tags from path")
try:
logger.info(f"Adding {filepath} to the task queue.")
consume_file.delay(
ConsumableDocument(
source=DocumentSource.ConsumeFolder,
@@ -365,206 +138,228 @@ def _consume_file(
DocumentMetadataOverrides(tag_ids=tag_ids),
)
except Exception:
logger.exception(f"Error while queuing document {filepath}")
# Catch all so that the consumer won't crash.
# This is also what the test case is listening for to check for
# errors.
logger.exception("Error while consuming document")
def _consume_wait_unmodified(file: Path) -> None:
"""
Waits for the given file to appear unmodified based on file size
and modification time. Will wait a configured number of seconds
and retry a configured number of times before either consuming or
giving up
"""
if _is_ignored(file):
return
logger.debug(f"Waiting for file {file} to remain unmodified")
mtime = -1
size = -1
current_try = 0
while current_try < settings.CONSUMER_POLLING_RETRY_COUNT:
try:
stat_data = file.stat()
new_mtime = stat_data.st_mtime
new_size = stat_data.st_size
except FileNotFoundError:
logger.debug(
f"File {file} moved while waiting for it to remain unmodified.",
)
return
if new_mtime == mtime and new_size == size:
_consume(file)
return
mtime = new_mtime
size = new_size
sleep(settings.CONSUMER_POLLING_DELAY)
current_try += 1
logger.error(f"Timeout while waiting on file {file} to remain unmodified.")
class Handler(FileSystemEventHandler):
def __init__(self, pool: ThreadPoolExecutor) -> None:
super().__init__()
self._pool = pool
def on_created(self, event):
self._pool.submit(_consume_wait_unmodified, Path(event.src_path))
def on_moved(self, event):
self._pool.submit(_consume_wait_unmodified, Path(event.dest_path))
class Command(BaseCommand):
"""
Watch a consumption directory and queue new documents for processing.
Uses watchfiles for efficient file system monitoring. Supports both
native OS notifications (inotify on Linux, FSEvents on macOS) and
polling for network filesystems.
On every iteration of an infinite loop, consume what we can from the
consumption directory.
"""
help = "Watch the consumption directory for new documents"
# For testing - allows tests to stop the consumer
stop_flag: Event = Event()
# Testing timeout in seconds
# This is here primarily for the tests and is irrelevant in production.
stop_flag = Event()
# Also only for testing, configures in one place the timeout used before checking
# the stop flag
testing_timeout_s: Final[float] = 0.5
testing_timeout_ms: Final[float] = testing_timeout_s * 1000.0
def add_arguments(self, parser) -> None:
def add_arguments(self, parser):
parser.add_argument(
"directory",
default=None,
default=settings.CONSUMPTION_DIR,
nargs="?",
help="The consumption directory (defaults to CONSUMPTION_DIR setting)",
)
parser.add_argument(
"--oneshot",
action="store_true",
help="Process existing files and exit without watching",
help="The consumption directory.",
)
parser.add_argument("--oneshot", action="store_true", help="Run only once.")
# Only use during unit testing, will configure a timeout
# Leaving it unset or false and the consumer will exit when it
# receives SIGINT
parser.add_argument(
"--testing",
action="store_true",
help="Enable testing mode with shorter timeouts",
help="Flag used only for unit testing",
default=False,
)
def handle(self, *args, **options) -> None:
# Resolve consumption directory
directory = options.get("directory")
def handle(self, *args, **options):
directory = options["directory"]
recursive = settings.CONSUMER_RECURSIVE
if not directory:
directory = getattr(settings, "CONSUMPTION_DIR", None)
if not directory:
raise CommandError("CONSUMPTION_DIR is not configured")
raise CommandError("CONSUMPTION_DIR does not appear to be set.")
directory = Path(directory).resolve()
if not directory.exists():
raise CommandError(f"Consumption directory does not exist: {directory}")
if not directory.is_dir():
raise CommandError(f"Consumption path is not a directory: {directory}")
raise CommandError(f"Consumption directory {directory} does not exist")
# Ensure scratch directory exists
# Consumer will need this
settings.SCRATCH_DIR.mkdir(parents=True, exist_ok=True)
# Get settings
recursive: bool = getattr(settings, "CONSUMER_RECURSIVE", False)
subdirs_as_tags: bool = getattr(settings, "CONSUMER_SUBDIRS_AS_TAGS", False)
polling_interval: float = getattr(settings, "CONSUMER_POLLING_INTERVAL", 0)
stability_delay: float = getattr(settings, "CONSUMER_STABILITY_DELAY", 1.0)
ignore_patterns: list[str] = getattr(settings, "CONSUMER_IGNORE_PATTERNS", [])
is_testing: bool = options.get("testing", False)
is_oneshot: bool = options.get("oneshot", False)
if recursive:
for dirpath, _, filenames in os.walk(directory):
for filename in filenames:
filepath = Path(dirpath) / filename
_consume(filepath)
else:
for filepath in directory.iterdir():
_consume(filepath)
# Create filter
consumer_filter = ConsumerFilter(
ignore_patterns=ignore_patterns,
consumption_dir=directory,
)
# Process existing files
self._process_existing_files(
directory=directory,
recursive=recursive,
subdirs_as_tags=subdirs_as_tags,
consumer_filter=consumer_filter,
)
if is_oneshot:
logger.info("Oneshot mode: processed existing files, exiting")
if options["oneshot"]:
return
# Start watching
self._watch_directory(
directory=directory,
recursive=recursive,
subdirs_as_tags=subdirs_as_tags,
consumer_filter=consumer_filter,
polling_interval=polling_interval,
stability_delay=stability_delay,
is_testing=is_testing,
)
logger.debug("Consumer exiting")
def _process_existing_files(
self,
*,
directory: Path,
recursive: bool,
subdirs_as_tags: bool,
consumer_filter: ConsumerFilter,
) -> None:
"""Process any existing files in the consumption directory."""
logger.info(f"Processing existing files in {directory}")
glob_pattern = "**/*" if recursive else "*"
for filepath in directory.glob(glob_pattern):
# Use filter to check if file should be processed
if not filepath.is_file():
continue
if not consumer_filter(Change.added, str(filepath)):
continue
_consume_file(
filepath=filepath,
consumption_dir=directory,
subdirs_as_tags=subdirs_as_tags,
)
def _watch_directory(
self,
*,
directory: Path,
recursive: bool,
subdirs_as_tags: bool,
consumer_filter: ConsumerFilter,
polling_interval: float,
stability_delay: float,
is_testing: bool,
) -> None:
"""Watch directory for changes and process stable files."""
use_polling = polling_interval > 0
poll_delay_ms = int(polling_interval * 1000) if use_polling else 0
if use_polling:
logger.info(
f"Watching {directory} using polling (interval: {polling_interval}s)",
)
if settings.CONSUMER_POLLING == 0 and INotify:
self.handle_inotify(directory, recursive, is_testing=options["testing"])
else:
logger.info(f"Watching {directory} using native file system events")
if INotify is None and settings.CONSUMER_POLLING == 0: # pragma: no cover
logger.warning("Using polling as INotify import failed")
self.handle_polling(directory, recursive, is_testing=options["testing"])
# Create stability tracker
tracker = FileStabilityTracker(stability_delay=stability_delay)
logger.debug("Consumer exiting.")
# Calculate timeouts
stability_timeout_ms = int(stability_delay * 1000)
testing_timeout_ms = int(self.testing_timeout_s * 1000)
def handle_polling(self, directory, recursive, *, is_testing: bool):
logger.info(f"Polling directory for changes: {directory}")
# Start with no timeout (wait indefinitely for first event)
# unless in testing mode
timeout_ms = testing_timeout_ms if is_testing else 0
timeout = None
if is_testing:
timeout = self.testing_timeout_s
logger.debug(f"Configuring timeout to {timeout}s")
self.stop_flag.clear()
polling_interval = settings.CONSUMER_POLLING
if polling_interval == 0: # pragma: no cover
# Only happens if INotify failed to import
logger.warning("Using polling of 10s, consider setting this")
polling_interval = 10
while not self.stop_flag.is_set():
with ThreadPoolExecutor(max_workers=4) as pool:
observer = PollingObserver(timeout=polling_interval)
observer.schedule(Handler(pool), directory, recursive=recursive)
observer.start()
try:
for changes in watch(
directory,
watch_filter=consumer_filter,
rust_timeout=timeout_ms,
yield_on_timeout=True,
force_polling=use_polling,
poll_delay_ms=poll_delay_ms,
recursive=recursive,
stop_event=self.stop_flag,
):
# Process each change
for change_type, path in changes:
path = Path(path).resolve()
logger.debug(f"Event: {change_type.name} for {path}")
tracker.track(path, change_type)
# Check for stable files
for stable_path in tracker.get_stable_files():
_consume_file(
filepath=stable_path,
consumption_dir=directory,
subdirs_as_tags=subdirs_as_tags,
)
# Exit watch loop to reconfigure timeout
break
# Determine next timeout
if tracker.has_pending_files():
# Check pending files at stability interval
timeout_ms = stability_timeout_ms
elif is_testing:
# In testing, use short timeout to check stop flag
timeout_ms = testing_timeout_ms
else:
# No pending files, wait indefinitely
timeout_ms = 0
while observer.is_alive():
observer.join(timeout)
if self.stop_flag.is_set():
observer.stop()
except KeyboardInterrupt:
logger.info("Received interrupt, stopping consumer")
self.stop_flag.set()
observer.stop()
observer.join()
def handle_inotify(self, directory, recursive, *, is_testing: bool):
logger.info(f"Using inotify to watch directory for changes: {directory}")
timeout_ms = None
if is_testing:
timeout_ms = self.testing_timeout_ms
logger.debug(f"Configuring timeout to {timeout_ms}ms")
inotify = INotify()
inotify_flags = flags.CLOSE_WRITE | flags.MOVED_TO | flags.MODIFY
if recursive:
inotify.add_watch_recursive(directory, inotify_flags)
else:
inotify.add_watch(directory, inotify_flags)
inotify_debounce_secs: Final[float] = settings.CONSUMER_INOTIFY_DELAY
inotify_debounce_ms: Final[int] = inotify_debounce_secs * 1000
finished = False
notified_files = {}
try:
while not finished:
try:
for event in inotify.read(timeout=timeout_ms):
path = inotify.get_path(event.wd) if recursive else directory
filepath = Path(path) / event.name
if flags.MODIFY in flags.from_mask(event.mask):
notified_files.pop(filepath, None)
else:
notified_files[filepath] = monotonic()
# Check the files against the timeout
still_waiting = {}
# last_event_time is time of the last inotify event for this file
for filepath, last_event_time in notified_files.items():
# Current time - last time over the configured timeout
waited_long_enough = (
monotonic() - last_event_time
) > inotify_debounce_secs
# Also make sure the file exists still, some scanners might write a
# temporary file first
try:
file_still_exists = filepath.exists() and filepath.is_file()
except (PermissionError, OSError): # pragma: no cover
# If we can't check, let it fail in the _consume function
file_still_exists = True
continue
if waited_long_enough and file_still_exists:
_consume(filepath)
elif file_still_exists:
still_waiting[filepath] = last_event_time
# These files are still waiting to hit the timeout
notified_files = still_waiting
# If files are waiting, need to exit read() to check them
# Otherwise, go back to infinite sleep time, but only if not testing
if len(notified_files) > 0:
timeout_ms = inotify_debounce_ms
elif is_testing:
timeout_ms = self.testing_timeout_ms
else:
timeout_ms = None
if self.stop_flag.is_set():
logger.debug("Finishing because event is set")
finished = True
except KeyboardInterrupt:
logger.info("Received SIGINT, stopping inotify")
finished = True
finally:
inotify.close()
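# --- Illustrative sketch (not part of the diff): invoking the consumer from
# Python, e.g. in a test. Assumes the command is registered as
# "document_consumer", as in paperless-ngx; flags mirror add_arguments() above.
from django.core.management import call_command

# Process whatever is already in the consumption directory, then exit.
call_command("document_consumer", "--oneshot")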

View File

@@ -0,0 +1,187 @@
# Generated by Django 5.2.7 on 2025-11-04 18:34
import django.db.models.deletion
import django.utils.timezone
from django.conf import settings
from django.contrib.auth.management import create_permissions
from django.contrib.auth.models import Group
from django.contrib.auth.models import Permission
from django.contrib.auth.models import User
from django.db import migrations
from django.db import models
def grant_share_link_bundle_permissions(apps, schema_editor):
# Ensure newly introduced permissions are created for all apps
for app_config in apps.get_app_configs():
app_config.models_module = True
create_permissions(app_config, apps=apps, verbosity=0)
app_config.models_module = None
add_document_perm = Permission.objects.filter(codename="add_document").first()
share_bundle_permissions = Permission.objects.filter(
codename__contains="sharelinkbundle",
)
users = User.objects.filter(user_permissions=add_document_perm).distinct()
for user in users:
user.user_permissions.add(*share_bundle_permissions)
groups = Group.objects.filter(permissions=add_document_perm).distinct()
for group in groups:
group.permissions.add(*share_bundle_permissions)
def revoke_share_link_bundle_permissions(apps, schema_editor):
share_bundle_permissions = Permission.objects.filter(
codename__contains="sharelinkbundle",
)
for user in User.objects.all():
user.user_permissions.remove(*share_bundle_permissions)
for group in Group.objects.all():
group.permissions.remove(*share_bundle_permissions)
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
("documents", "1074_workflowrun_deleted_at_workflowrun_restored_at_and_more"),
]
operations = [
migrations.CreateModel(
name="ShareLinkBundle",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"created",
models.DateTimeField(
blank=True,
db_index=True,
default=django.utils.timezone.now,
editable=False,
verbose_name="created",
),
),
(
"expiration",
models.DateTimeField(
blank=True,
db_index=True,
null=True,
verbose_name="expiration",
),
),
(
"slug",
models.SlugField(
blank=True,
editable=False,
unique=True,
verbose_name="slug",
),
),
(
"file_version",
models.CharField(
choices=[("archive", "Archive"), ("original", "Original")],
default="archive",
max_length=50,
),
),
(
"status",
models.CharField(
choices=[
("pending", "Pending"),
("processing", "Processing"),
("ready", "Ready"),
("failed", "Failed"),
],
default="pending",
max_length=50,
),
),
(
"size_bytes",
models.BigIntegerField(
blank=True,
null=True,
verbose_name="size (bytes)",
),
),
(
"last_error",
models.TextField(
blank=True,
verbose_name="last error",
),
),
(
"file_path",
models.CharField(
blank=True,
max_length=512,
verbose_name="file path",
),
),
(
"built_at",
models.DateTimeField(
blank=True,
null=True,
verbose_name="built at",
),
),
(
"owner",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="share_link_bundles",
to=settings.AUTH_USER_MODEL,
verbose_name="owner",
),
),
(
"deleted_at",
models.DateTimeField(blank=True, null=True),
),
(
"restored_at",
models.DateTimeField(blank=True, null=True),
),
(
"transaction_id",
models.UUIDField(blank=True, null=True),
),
],
options={
"ordering": ("-created",),
"verbose_name": "share link bundle",
"verbose_name_plural": "share link bundles",
},
),
migrations.AddField(
model_name="sharelinkbundle",
name="documents",
field=models.ManyToManyField(
related_name="share_link_bundles",
to="documents.document",
verbose_name="documents",
),
),
migrations.RunPython(
grant_share_link_bundle_permissions,
reverse_code=revoke_share_link_bundle_permissions,
),
]

View File

@@ -777,6 +777,120 @@ class ShareLink(SoftDeleteModel):
return f"Share Link for {self.document.title}"
class ShareLinkBundle(SoftDeleteModel):
class Status(models.TextChoices):
PENDING = ("pending", _("Pending"))
PROCESSING = ("processing", _("Processing"))
READY = ("ready", _("Ready"))
FAILED = ("failed", _("Failed"))
class Meta:
ordering = ("-created",)
verbose_name = _("share link bundle")
verbose_name_plural = _("share link bundles")
created = models.DateTimeField(
_("created"),
default=timezone.now,
db_index=True,
blank=True,
editable=False,
)
expiration = models.DateTimeField(
_("expiration"),
blank=True,
null=True,
db_index=True,
)
slug = models.SlugField(
_("slug"),
db_index=True,
unique=True,
blank=True,
editable=False,
)
owner = models.ForeignKey(
User,
blank=True,
null=True,
related_name="share_link_bundles",
on_delete=models.SET_NULL,
verbose_name=_("owner"),
)
file_version = models.CharField(
max_length=50,
choices=ShareLink.FileVersion.choices,
default=ShareLink.FileVersion.ARCHIVE,
)
status = models.CharField(
max_length=50,
choices=Status.choices,
default=Status.PENDING,
)
size_bytes = models.BigIntegerField(
_("size (bytes)"),
blank=True,
null=True,
)
last_error = models.TextField(
_("last error"),
blank=True,
)
file_path = models.CharField(
_("file path"),
max_length=512,
blank=True,
)
built_at = models.DateTimeField(
_("built at"),
null=True,
blank=True,
)
documents = models.ManyToManyField(
"documents.Document",
related_name="share_link_bundles",
verbose_name=_("documents"),
)
def __str__(self):
return _("Share link bundle %(slug)s") % {"slug": self.slug}
@property
def absolute_file_path(self) -> Path | None:
if not self.file_path:
return None
file_path = Path(self.file_path)
if not file_path.is_absolute():
file_path = (settings.MEDIA_ROOT / file_path).resolve()
return file_path
def remove_file(self):
path = self.absolute_file_path
if path and path.exists():
try:
path.unlink()
except OSError:
pass
def delete(self, using=None, *, keep_parents=False):
self.remove_file()
return super().delete(using=using, keep_parents=keep_parents)
def hard_delete(self, using=None, *, keep_parents=False):
self.remove_file()
return super().hard_delete(using=using, keep_parents=keep_parents)
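# --- Illustrative sketch (not part of the diff): how absolute_file_path treats
# relative vs. absolute values of file_path. The paths are assumptions for
# demonstration only.
bundle = ShareLinkBundle(file_path="documents/share_link_bundles/abc123.zip")
print(bundle.absolute_file_path)   # relative paths are resolved against settings.MEDIA_ROOT

bundle.file_path = "/data/share_link_bundles/abc123.zip"
print(bundle.absolute_file_path)   # absolute paths are returned unchanged

bundle.remove_file()               # best-effort unlink; OSError is swallowed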
class CustomField(models.Model):
"""
Defines the name and type of a custom field

View File

@@ -4,6 +4,7 @@ import logging
import math
import re
from datetime import datetime
from datetime import timedelta
from decimal import Decimal
from typing import TYPE_CHECKING
from typing import Literal
@@ -24,6 +25,7 @@ from django.core.validators import RegexValidator
from django.core.validators import integer_validator
from django.db.models import Count
from django.db.models.functions import Lower
from django.utils import timezone
from django.utils.crypto import get_random_string
from django.utils.dateparse import parse_datetime
from django.utils.text import slugify
@@ -61,6 +63,7 @@ from documents.models import PaperlessTask
from documents.models import SavedView
from documents.models import SavedViewFilterRule
from documents.models import ShareLink
from documents.models import ShareLinkBundle
from documents.models import StoragePath
from documents.models import Tag
from documents.models import UiSettings
@@ -2168,6 +2171,109 @@ class ShareLinkSerializer(OwnedObjectSerializer):
return super().create(validated_data)
class ShareLinkBundleSerializer(OwnedObjectSerializer):
document_ids = serializers.ListField(
child=serializers.IntegerField(min_value=1),
allow_empty=False,
write_only=True,
)
expiration_days = serializers.IntegerField(
required=False,
allow_null=True,
min_value=1,
write_only=True,
)
documents = serializers.PrimaryKeyRelatedField(
many=True,
read_only=True,
)
document_count = SerializerMethodField()
class Meta:
model = ShareLinkBundle
fields = (
"id",
"created",
"expiration",
"expiration_days",
"slug",
"file_version",
"status",
"size_bytes",
"last_error",
"built_at",
"documents",
"document_ids",
"document_count",
)
read_only_fields = (
"id",
"created",
"expiration",
"slug",
"status",
"size_bytes",
"last_error",
"built_at",
"documents",
"document_count",
)
def validate_document_ids(self, value):
unique_ids = set(value)
if len(unique_ids) != len(value):
raise serializers.ValidationError(
_("Duplicate document identifiers are not allowed."),
)
return value
def create(self, validated_data):
document_ids = validated_data.pop("document_ids")
expiration_days = validated_data.pop("expiration_days", None)
documents = validated_data.pop("documents", None)
validated_data["slug"] = get_random_string(50)
if expiration_days:
validated_data["expiration"] = timezone.now() + timedelta(
days=expiration_days,
)
else:
validated_data["expiration"] = None
share_link_bundle = super().create(validated_data)
if documents is None:
documents = list(
Document.objects.filter(pk__in=document_ids).only(
"pk",
),
)
else:
documents = list(documents)
documents_by_id = {doc.pk: doc for doc in documents}
missing = [
str(doc_id) for doc_id in document_ids if doc_id not in documents_by_id
]
if missing:
raise serializers.ValidationError(
{
"document_ids": _(
"Documents not found: %(ids)s",
)
% {"ids": ", ".join(missing)},
},
)
ordered_documents = [documents_by_id[doc_id] for doc_id in document_ids]
share_link_bundle.documents.set(ordered_documents)
share_link_bundle.document_total = len(ordered_documents)
return share_link_bundle
def get_document_count(self, obj: ShareLinkBundle) -> int:
        return getattr(obj, "document_total", None) or obj.documents.count()
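# --- Illustrative sketch (not part of the diff): the kind of payload this
# serializer accepts when creating a bundle via POST /api/share_link_bundles/.
# The ids and values are assumptions for demonstration only.
example_payload = {
    "document_ids": [1, 2, 3],      # order is preserved in the bundle
    "file_version": "archive",      # or "original"
    "expiration_days": 7,           # omit or null for a non-expiring bundle
}
# serializer = ShareLinkBundleSerializer(data=example_payload)
# serializer.is_valid(raise_exception=True)
# bundle = serializer.save(owner=request.user)   # "request" assumed from a view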
class BulkEditObjectsSerializer(SerializerWithPerms, SetPermissionsMixin):
objects = serializers.ListField(
required=True,

View File

@@ -3,7 +3,9 @@ import hashlib
import logging
import shutil
import uuid
import zipfile
from pathlib import Path
from tempfile import NamedTemporaryFile
from tempfile import TemporaryDirectory
import tqdm
@@ -22,6 +24,8 @@ from whoosh.writing import AsyncWriter
from documents import index
from documents import sanity_checker
from documents.barcodes import BarcodePlugin
from documents.bulk_download import ArchiveOnlyStrategy
from documents.bulk_download import OriginalsOnlyStrategy
from documents.caching import clear_document_caches
from documents.classifier import DocumentClassifier
from documents.classifier import load_classifier
@@ -39,6 +43,8 @@ from documents.models import CustomFieldInstance
from documents.models import Document
from documents.models import DocumentType
from documents.models import PaperlessTask
from documents.models import ShareLink
from documents.models import ShareLinkBundle
from documents.models import StoragePath
from documents.models import Tag
from documents.models import WorkflowRun
@@ -558,3 +564,121 @@ def update_document_parent_tags(tag: Tag, new_parent: Tag) -> None:
if affected:
bulk_update_documents.delay(document_ids=list(affected))
@shared_task
def build_share_link_bundle(bundle_id: int):
try:
bundle = (
ShareLinkBundle.objects.filter(pk=bundle_id)
.prefetch_related("documents")
.get()
)
except ShareLinkBundle.DoesNotExist:
logger.warning("Share link bundle %s no longer exists.", bundle_id)
return
bundle.remove_file()
bundle.status = ShareLinkBundle.Status.PROCESSING
bundle.last_error = ""
bundle.size_bytes = None
bundle.built_at = None
bundle.file_path = ""
bundle.save(
update_fields=[
"status",
"last_error",
"size_bytes",
"built_at",
"file_path",
],
)
documents = list(bundle.documents.all().order_by("pk"))
with NamedTemporaryFile(
dir=settings.SCRATCH_DIR,
suffix=".zip",
delete=False,
) as temp_zip:
temp_zip_path = Path(temp_zip.name)
try:
strategy_class = (
ArchiveOnlyStrategy
if bundle.file_version == ShareLink.FileVersion.ARCHIVE
else OriginalsOnlyStrategy
)
with zipfile.ZipFile(temp_zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
strategy = strategy_class(zipf)
for document in documents:
strategy.add_document(document)
output_dir = settings.SHARE_LINK_BUNDLE_DIR
output_dir.mkdir(parents=True, exist_ok=True)
final_path = (output_dir / f"{bundle.slug}.zip").resolve()
if final_path.exists():
final_path.unlink()
shutil.move(str(temp_zip_path), final_path)
try:
bundle.file_path = str(final_path.relative_to(settings.MEDIA_ROOT))
except ValueError:
bundle.file_path = str(final_path)
bundle.size_bytes = final_path.stat().st_size
bundle.status = ShareLinkBundle.Status.READY
bundle.built_at = timezone.now()
bundle.last_error = ""
bundle.save(
update_fields=[
"file_path",
"size_bytes",
"status",
"built_at",
"last_error",
],
)
logger.info("Built share link bundle %s", bundle.pk)
except Exception as exc:
logger.exception(
"Failed to build share link bundle %s: %s",
bundle_id,
exc,
)
bundle.status = ShareLinkBundle.Status.FAILED
bundle.last_error = str(exc)
bundle.save(update_fields=["status", "last_error"])
try:
temp_zip_path.unlink()
except OSError:
pass
raise
finally:
if temp_zip_path.exists():
try:
temp_zip_path.unlink()
except OSError:
pass
@shared_task
def cleanup_expired_share_link_bundles():
now = timezone.now()
expired_qs = ShareLinkBundle.objects.filter(
deleted_at__isnull=True,
expiration__isnull=False,
expiration__lt=now,
)
count = 0
for bundle in expired_qs.iterator():
count += 1
try:
bundle.hard_delete()
except Exception as exc:
logger.warning(
"Failed to delete expired share link bundle %s: %s",
bundle.pk,
exc,
)
if count:
logger.info("Deleted %s expired share link bundle(s)", count)

File diff suppressed because it is too large

View File

@@ -0,0 +1,51 @@
from documents.tests.utils import TestMigrations
class TestMigrateShareLinkBundlePermissions(TestMigrations):
migrate_from = "1074_workflowrun_deleted_at_workflowrun_restored_at_and_more"
migrate_to = "1075_sharelinkbundle"
def setUpBeforeMigration(self, apps):
User = apps.get_model("auth", "User")
Group = apps.get_model("auth", "Group")
self.Permission = apps.get_model("auth", "Permission")
self.user = User.objects.create(username="user1")
self.group = Group.objects.create(name="group1")
add_document = self.Permission.objects.get(codename="add_document")
self.user.user_permissions.add(add_document.id)
self.group.permissions.add(add_document.id)
def test_share_link_permissions_granted_to_add_document_holders(self):
share_perms = self.Permission.objects.filter(
codename__contains="sharelinkbundle",
)
self.assertTrue(self.user.user_permissions.filter(pk__in=share_perms).exists())
self.assertTrue(self.group.permissions.filter(pk__in=share_perms).exists())
class TestReverseMigrateShareLinkBundlePermissions(TestMigrations):
migrate_from = "1075_sharelinkbundle"
migrate_to = "1074_workflowrun_deleted_at_workflowrun_restored_at_and_more"
def setUpBeforeMigration(self, apps):
User = apps.get_model("auth", "User")
Group = apps.get_model("auth", "Group")
self.Permission = apps.get_model("auth", "Permission")
self.user = User.objects.create(username="user1")
self.group = Group.objects.create(name="group1")
add_document = self.Permission.objects.get(codename="add_document")
share_perms = self.Permission.objects.filter(
codename__contains="sharelinkbundle",
)
self.share_perm_ids = list(share_perms.values_list("id", flat=True))
self.user.user_permissions.add(add_document.id, *self.share_perm_ids)
self.group.permissions.add(add_document.id, *self.share_perm_ids)
def test_share_link_permissions_revoked_on_reverse(self):
self.assertFalse(
self.user.user_permissions.filter(pk__in=self.share_perm_ids).exists(),
)
self.assertFalse(
self.group.permissions.filter(pk__in=self.share_perm_ids).exists(),
)

View File

@@ -0,0 +1,619 @@
from __future__ import annotations
import zipfile
from datetime import timedelta
from pathlib import Path
from unittest import mock
from django.conf import settings
from django.contrib.auth.models import User
from django.utils import timezone
from rest_framework import serializers
from rest_framework import status
from rest_framework.test import APITestCase
from documents.filters import ShareLinkBundleFilterSet
from documents.models import ShareLink
from documents.models import ShareLinkBundle
from documents.serialisers import ShareLinkBundleSerializer
from documents.tasks import build_share_link_bundle
from documents.tasks import cleanup_expired_share_link_bundles
from documents.tests.factories import DocumentFactory
from documents.tests.utils import DirectoriesMixin
class ShareLinkBundleAPITests(DirectoriesMixin, APITestCase):
ENDPOINT = "/api/share_link_bundles/"
def setUp(self):
super().setUp()
self.user = User.objects.create_superuser(username="bundle_admin")
self.client.force_authenticate(self.user)
self.document = DocumentFactory.create()
@mock.patch("documents.views.build_share_link_bundle.delay")
def test_create_bundle_triggers_build_job(self, delay_mock):
payload = {
"document_ids": [self.document.pk],
"file_version": ShareLink.FileVersion.ARCHIVE,
"expiration_days": 7,
}
response = self.client.post(self.ENDPOINT, payload, format="json")
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
bundle = ShareLinkBundle.objects.get(pk=response.data["id"])
self.assertEqual(bundle.documents.count(), 1)
self.assertEqual(bundle.status, ShareLinkBundle.Status.PENDING)
delay_mock.assert_called_once_with(bundle.pk)
def test_create_bundle_rejects_missing_documents(self):
payload = {
"document_ids": [9999],
"file_version": ShareLink.FileVersion.ARCHIVE,
"expiration_days": 7,
}
response = self.client.post(self.ENDPOINT, payload, format="json")
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn("document_ids", response.data)
@mock.patch("documents.views.has_perms_owner_aware", return_value=False)
def test_create_bundle_rejects_insufficient_permissions(self, perms_mock):
payload = {
"document_ids": [self.document.pk],
"file_version": ShareLink.FileVersion.ARCHIVE,
"expiration_days": 7,
}
response = self.client.post(self.ENDPOINT, payload, format="json")
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn("document_ids", response.data)
perms_mock.assert_called()
@mock.patch("documents.views.build_share_link_bundle.delay")
def test_rebuild_bundle_resets_state(self, delay_mock):
bundle = ShareLinkBundle.objects.create(
slug="rebuild-slug",
file_version=ShareLink.FileVersion.ARCHIVE,
status=ShareLinkBundle.Status.FAILED,
)
bundle.documents.set([self.document])
bundle.last_error = "Something went wrong"
bundle.size_bytes = 100
bundle.file_path = "path/to/file.zip"
bundle.save()
response = self.client.post(f"{self.ENDPOINT}{bundle.pk}/rebuild/")
self.assertEqual(response.status_code, status.HTTP_200_OK)
bundle.refresh_from_db()
self.assertEqual(bundle.status, ShareLinkBundle.Status.PENDING)
self.assertEqual(bundle.last_error, "")
self.assertIsNone(bundle.size_bytes)
self.assertEqual(bundle.file_path, "")
delay_mock.assert_called_once_with(bundle.pk)
def test_rebuild_bundle_rejects_processing_status(self):
bundle = ShareLinkBundle.objects.create(
slug="processing-slug",
file_version=ShareLink.FileVersion.ARCHIVE,
status=ShareLinkBundle.Status.PROCESSING,
)
bundle.documents.set([self.document])
response = self.client.post(f"{self.ENDPOINT}{bundle.pk}/rebuild/")
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn("detail", response.data)
def test_create_bundle_rejects_duplicate_documents(self):
payload = {
"document_ids": [self.document.pk, self.document.pk],
"file_version": ShareLink.FileVersion.ARCHIVE,
"expiration_days": 7,
}
response = self.client.post(self.ENDPOINT, payload, format="json")
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn("document_ids", response.data)
def test_download_ready_bundle_streams_file(self):
bundle_file = Path(self.dirs.media_dir) / "bundles" / "ready.zip"
bundle_file.parent.mkdir(parents=True, exist_ok=True)
bundle_file.write_bytes(b"binary-zip-content")
bundle = ShareLinkBundle.objects.create(
slug="readyslug",
file_version=ShareLink.FileVersion.ARCHIVE,
status=ShareLinkBundle.Status.READY,
file_path=str(bundle_file),
)
bundle.documents.set([self.document])
self.client.logout()
response = self.client.get(f"/share/{bundle.slug}/")
content = b"".join(response.streaming_content)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response["Content-Type"], "application/zip")
self.assertEqual(content, b"binary-zip-content")
self.assertIn("attachment;", response["Content-Disposition"])
def test_download_pending_bundle_returns_202(self):
bundle = ShareLinkBundle.objects.create(
slug="pendingslug",
file_version=ShareLink.FileVersion.ARCHIVE,
status=ShareLinkBundle.Status.PENDING,
)
bundle.documents.set([self.document])
self.client.logout()
response = self.client.get(f"/share/{bundle.slug}/")
self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
@mock.patch("documents.views.build_share_link_bundle.delay")
def test_download_failed_bundle_triggers_rebuild(self, delay_mock):
bundle_path = (
Path(settings.MEDIA_ROOT)
/ "documents"
/ "share_link_bundles"
/ "failed.zip"
)
bundle_path.parent.mkdir(parents=True, exist_ok=True)
bundle_path.write_bytes(b"old-content")
bundle = ShareLinkBundle.objects.create(
slug="failedslug",
file_version=ShareLink.FileVersion.ARCHIVE,
status=ShareLinkBundle.Status.FAILED,
file_path=str(bundle_path.relative_to(settings.MEDIA_ROOT)),
last_error="Boom",
size_bytes=10,
)
bundle.documents.set([self.document])
self.client.logout()
response = self.client.get(f"/share/{bundle.slug}/")
self.assertEqual(response.status_code, status.HTTP_503_SERVICE_UNAVAILABLE)
bundle.refresh_from_db()
self.assertEqual(bundle.status, ShareLinkBundle.Status.PENDING)
self.assertEqual(bundle.last_error, "")
self.assertIsNone(bundle.size_bytes)
self.assertEqual(bundle.file_path, "")
delay_mock.assert_called_once_with(bundle.pk)
self.assertFalse(bundle_path.exists())
@mock.patch("documents.views.build_share_link_bundle.delay")
def test_download_missing_file_triggers_rebuild(self, delay_mock):
bundle = ShareLinkBundle.objects.create(
slug="missingfileslug",
file_version=ShareLink.FileVersion.ARCHIVE,
status=ShareLinkBundle.Status.READY,
file_path=str(Path(self.dirs.media_dir) / "does-not-exist.zip"),
)
bundle.documents.set([self.document])
self.client.logout()
response = self.client.get(f"/share/{bundle.slug}/")
self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
bundle.refresh_from_db()
self.assertEqual(bundle.status, ShareLinkBundle.Status.PENDING)
delay_mock.assert_called_once_with(bundle.pk)
def test_expired_share_link_redirects(self):
share_link = ShareLink.objects.create(
slug="expiredlink",
document=self.document,
file_version=ShareLink.FileVersion.ORIGINAL,
expiration=timezone.now() - timedelta(hours=1),
)
self.client.logout()
response = self.client.get(f"/share/{share_link.slug}/")
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
self.assertIn("sharelink_expired=1", response["Location"])
def test_unknown_share_link_redirects(self):
self.client.logout()
response = self.client.get("/share/unknownsharelink/")
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
self.assertIn("sharelink_notfound=1", response["Location"])
class ShareLinkBundleTaskTests(DirectoriesMixin, APITestCase):
def setUp(self):
super().setUp()
self.document = DocumentFactory.create()
def test_cleanup_expired_share_link_bundles(self):
expired_path = Path(self.dirs.media_dir) / "expired.zip"
expired_path.parent.mkdir(parents=True, exist_ok=True)
expired_path.write_bytes(b"expired")
active_path = Path(self.dirs.media_dir) / "active.zip"
active_path.write_bytes(b"active")
expired_bundle = ShareLinkBundle.objects.create(
slug="expired-bundle",
file_version=ShareLink.FileVersion.ARCHIVE,
status=ShareLinkBundle.Status.READY,
expiration=timezone.now() - timedelta(days=1),
file_path=str(expired_path),
)
expired_bundle.documents.set([self.document])
active_bundle = ShareLinkBundle.objects.create(
slug="active-bundle",
file_version=ShareLink.FileVersion.ARCHIVE,
status=ShareLinkBundle.Status.READY,
expiration=timezone.now() + timedelta(days=1),
file_path=str(active_path),
)
active_bundle.documents.set([self.document])
cleanup_expired_share_link_bundles()
self.assertFalse(ShareLinkBundle.objects.filter(pk=expired_bundle.pk).exists())
self.assertTrue(ShareLinkBundle.objects.filter(pk=active_bundle.pk).exists())
self.assertFalse(expired_path.exists())
self.assertTrue(active_path.exists())
def test_cleanup_expired_share_link_bundles_logs_on_failure(self):
expired_bundle = ShareLinkBundle.objects.create(
slug="expired-bundle",
file_version=ShareLink.FileVersion.ARCHIVE,
status=ShareLinkBundle.Status.READY,
expiration=timezone.now() - timedelta(days=1),
)
expired_bundle.documents.set([self.document])
with mock.patch.object(
ShareLinkBundle,
"hard_delete",
side_effect=RuntimeError("fail"),
):
with self.assertLogs("paperless.tasks", level="WARNING") as logs:
cleanup_expired_share_link_bundles()
self.assertTrue(
any(
"Failed to delete expired share link bundle" in msg
for msg in logs.output
),
)
class ShareLinkBundleBuildTaskTests(DirectoriesMixin, APITestCase):
def setUp(self):
super().setUp()
self.document = DocumentFactory.create(
mime_type="application/pdf",
checksum="123",
)
self.document.archive_checksum = ""
self.document.save()
self.addCleanup(
setattr,
settings,
"SHARE_LINK_BUNDLE_DIR",
settings.SHARE_LINK_BUNDLE_DIR,
)
settings.SHARE_LINK_BUNDLE_DIR = (
Path(settings.MEDIA_ROOT) / "documents" / "share_link_bundles"
)
def _write_document_file(self, *, archive: bool, content: bytes) -> Path:
if archive:
self.document.archive_filename = f"{self.document.pk:07}.pdf"
self.document.save()
path = self.document.archive_path
else:
path = self.document.source_path
path.parent.mkdir(parents=True, exist_ok=True)
path.write_bytes(content)
return path
def test_build_share_link_bundle_creates_zip_and_sets_metadata(self):
self._write_document_file(archive=False, content=b"source")
archive_path = self._write_document_file(archive=True, content=b"archive")
bundle = ShareLinkBundle.objects.create(
slug="build-archive",
file_version=ShareLink.FileVersion.ARCHIVE,
)
bundle.documents.set([self.document])
build_share_link_bundle(bundle.pk)
bundle.refresh_from_db()
self.assertEqual(bundle.status, ShareLinkBundle.Status.READY)
self.assertEqual(bundle.last_error, "")
self.assertIsNotNone(bundle.built_at)
self.assertGreater(bundle.size_bytes or 0, 0)
final_path = bundle.absolute_file_path
self.assertIsNotNone(final_path)
self.assertTrue(final_path.exists())
with zipfile.ZipFile(final_path) as zipf:
names = zipf.namelist()
self.assertEqual(len(names), 1)
self.assertEqual(zipf.read(names[0]), archive_path.read_bytes())
def test_build_share_link_bundle_overwrites_existing_file(self):
self._write_document_file(archive=False, content=b"source")
bundle = ShareLinkBundle.objects.create(
slug="overwrite",
file_version=ShareLink.FileVersion.ORIGINAL,
)
bundle.documents.set([self.document])
existing = settings.SHARE_LINK_BUNDLE_DIR / "overwrite.zip"
existing.parent.mkdir(parents=True, exist_ok=True)
existing.write_bytes(b"old")
build_share_link_bundle(bundle.pk)
bundle.refresh_from_db()
final_path = bundle.absolute_file_path
self.assertIsNotNone(final_path)
self.assertTrue(final_path.exists())
self.assertNotEqual(final_path.read_bytes(), b"old")
def test_build_share_link_bundle_stores_absolute_path_outside_media_root(self):
settings.SHARE_LINK_BUNDLE_DIR = Path(settings.DATA_DIR) / "share_link_bundles"
self._write_document_file(archive=False, content=b"source")
bundle = ShareLinkBundle.objects.create(
slug="outside-media",
file_version=ShareLink.FileVersion.ORIGINAL,
)
bundle.documents.set([self.document])
build_share_link_bundle(bundle.pk)
bundle.refresh_from_db()
self.assertTrue(Path(bundle.file_path).is_absolute())
self.assertEqual(bundle.status, ShareLinkBundle.Status.READY)
def test_build_share_link_bundle_failure_marks_failed(self):
self._write_document_file(archive=False, content=b"source")
bundle = ShareLinkBundle.objects.create(
slug="fail-bundle",
file_version=ShareLink.FileVersion.ORIGINAL,
)
bundle.documents.set([self.document])
with (
mock.patch(
"documents.tasks.OriginalsOnlyStrategy.add_document",
side_effect=RuntimeError("zip failure"),
),
mock.patch("pathlib.Path.unlink") as unlink_mock,
):
unlink_mock.side_effect = [OSError("unlink"), OSError("unlink-finally")] + [
None,
] * 5
with self.assertRaises(RuntimeError):
build_share_link_bundle(bundle.pk)
bundle.refresh_from_db()
self.assertEqual(bundle.status, ShareLinkBundle.Status.FAILED)
self.assertEqual(bundle.last_error, "zip failure")
scratch_zips = list(Path(settings.SCRATCH_DIR).glob("*.zip"))
self.assertTrue(scratch_zips)
for path in scratch_zips:
path.unlink(missing_ok=True)
def test_build_share_link_bundle_missing_bundle_noop(self):
# Should not raise when bundle does not exist
build_share_link_bundle(99999)
class ShareLinkBundleFilterSetTests(DirectoriesMixin, APITestCase):
def setUp(self):
super().setUp()
self.document = DocumentFactory.create()
self.document.checksum = "doc1checksum"
self.document.save()
self.other_document = DocumentFactory.create()
self.other_document.checksum = "doc2checksum"
self.other_document.save()
self.bundle_one = ShareLinkBundle.objects.create(
slug="bundle-one",
file_version=ShareLink.FileVersion.ORIGINAL,
)
self.bundle_one.documents.set([self.document])
self.bundle_two = ShareLinkBundle.objects.create(
slug="bundle-two",
file_version=ShareLink.FileVersion.ORIGINAL,
)
self.bundle_two.documents.set([self.other_document])
def test_filter_documents_returns_all_for_empty_value(self):
filterset = ShareLinkBundleFilterSet(
data={"documents": ""},
queryset=ShareLinkBundle.objects.all(),
)
self.assertCountEqual(filterset.qs, [self.bundle_one, self.bundle_two])
def test_filter_documents_handles_invalid_input(self):
filterset = ShareLinkBundleFilterSet(
data={"documents": "invalid"},
queryset=ShareLinkBundle.objects.all(),
)
self.assertFalse(filterset.qs.exists())
def test_filter_documents_filters_by_multiple_ids(self):
filterset = ShareLinkBundleFilterSet(
data={"documents": f"{self.document.pk},{self.other_document.pk}"},
queryset=ShareLinkBundle.objects.all(),
)
self.assertCountEqual(filterset.qs, [self.bundle_one, self.bundle_two])
def test_filter_documents_returns_queryset_for_empty_ids(self):
filterset = ShareLinkBundleFilterSet(
data={"documents": ","},
queryset=ShareLinkBundle.objects.all(),
)
self.assertCountEqual(filterset.qs, [self.bundle_one, self.bundle_two])
class ShareLinkBundleModelTests(DirectoriesMixin, APITestCase):
def test_absolute_file_path_handles_relative_and_absolute(self):
relative_path = Path("documents/share_link_bundles/relative.zip")
bundle = ShareLinkBundle.objects.create(
slug="relative-bundle",
file_version=ShareLink.FileVersion.ORIGINAL,
file_path=str(relative_path),
)
self.assertEqual(
bundle.absolute_file_path,
(Path(settings.MEDIA_ROOT) / relative_path).resolve(),
)
absolute_path = Path(self.dirs.media_dir) / "absolute.zip"
bundle.file_path = str(absolute_path)
self.assertEqual(bundle.absolute_file_path.resolve(), absolute_path.resolve())
def test_str_returns_translated_slug(self):
bundle = ShareLinkBundle.objects.create(
slug="string-slug",
file_version=ShareLink.FileVersion.ORIGINAL,
)
self.assertIn("string-slug", str(bundle))
def test_remove_file_deletes_existing_file(self):
bundle_path = (
Path(settings.MEDIA_ROOT)
/ "documents"
/ "share_link_bundles"
/ "remove.zip"
)
bundle_path.parent.mkdir(parents=True, exist_ok=True)
bundle_path.write_bytes(b"remove-me")
bundle = ShareLinkBundle.objects.create(
slug="remove-bundle",
file_version=ShareLink.FileVersion.ORIGINAL,
file_path=str(bundle_path.relative_to(settings.MEDIA_ROOT)),
)
bundle.remove_file()
self.assertFalse(bundle_path.exists())
def test_remove_file_handles_oserror(self):
bundle_path = (
Path(settings.MEDIA_ROOT)
/ "documents"
/ "share_link_bundles"
/ "remove-error.zip"
)
bundle_path.parent.mkdir(parents=True, exist_ok=True)
bundle_path.write_bytes(b"remove-me")
bundle = ShareLinkBundle.objects.create(
slug="remove-error",
file_version=ShareLink.FileVersion.ORIGINAL,
file_path=str(bundle_path.relative_to(settings.MEDIA_ROOT)),
)
with mock.patch("pathlib.Path.unlink", side_effect=OSError("fail")):
bundle.remove_file()
self.assertTrue(bundle_path.exists())
def test_delete_and_hard_delete_call_remove_file(self):
bundle_path = (
Path(settings.MEDIA_ROOT)
/ "documents"
/ "share_link_bundles"
/ "delete.zip"
)
bundle_path.parent.mkdir(parents=True, exist_ok=True)
bundle_path.write_bytes(b"remove-me")
bundle = ShareLinkBundle.objects.create(
slug="delete-bundle",
file_version=ShareLink.FileVersion.ORIGINAL,
file_path=str(bundle_path.relative_to(settings.MEDIA_ROOT)),
)
bundle.delete()
self.assertFalse(bundle_path.exists())
bundle2_path = (
Path(settings.MEDIA_ROOT)
/ "documents"
/ "share_link_bundles"
/ "harddelete.zip"
)
bundle2_path.parent.mkdir(parents=True, exist_ok=True)
bundle2_path.write_bytes(b"remove-me")
bundle2 = ShareLinkBundle.objects.create(
slug="harddelete-bundle",
file_version=ShareLink.FileVersion.ORIGINAL,
file_path=str(bundle2_path.relative_to(settings.MEDIA_ROOT)),
)
bundle2.hard_delete()
self.assertFalse(bundle2_path.exists())
class ShareLinkBundleSerializerTests(DirectoriesMixin, APITestCase):
def setUp(self):
super().setUp()
self.document = DocumentFactory.create()
def test_validate_document_ids_rejects_duplicates(self):
serializer = ShareLinkBundleSerializer(
data={
"document_ids": [self.document.pk, self.document.pk],
"file_version": ShareLink.FileVersion.ORIGINAL,
},
)
self.assertFalse(serializer.is_valid())
self.assertIn("document_ids", serializer.errors)
def test_create_assigns_documents_and_expiration(self):
serializer = ShareLinkBundleSerializer(
data={
"document_ids": [self.document.pk],
"file_version": ShareLink.FileVersion.ORIGINAL,
"expiration_days": 3,
},
)
self.assertTrue(serializer.is_valid(), serializer.errors)
bundle = serializer.save()
self.assertEqual(list(bundle.documents.all()), [self.document])
expected_expiration = timezone.now() + timedelta(days=3)
self.assertAlmostEqual(
bundle.expiration,
expected_expiration,
delta=timedelta(seconds=10),
)
def test_create_raises_when_missing_documents(self):
serializer = ShareLinkBundleSerializer(
data={
"document_ids": [self.document.pk, 9999],
"file_version": ShareLink.FileVersion.ORIGINAL,
},
)
self.assertTrue(serializer.is_valid(), serializer.errors)
with self.assertRaises(serializers.ValidationError):
serializer.save(documents=[self.document])

View File

@@ -50,6 +50,7 @@ from django.utils import timezone
from django.utils.decorators import method_decorator
from django.utils.timezone import make_aware
from django.utils.translation import get_language
from django.utils.translation import gettext_lazy as _
from django.views import View
from django.views.decorators.cache import cache_control
from django.views.decorators.http import condition
@@ -69,6 +70,7 @@ from packaging import version as packaging_version
from redis import Redis
from rest_framework import parsers
from rest_framework import serializers
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.exceptions import NotFound
from rest_framework.exceptions import ValidationError
@@ -117,6 +119,7 @@ from documents.filters import DocumentTypeFilterSet
from documents.filters import ObjectOwnedOrGrantedPermissionsFilter
from documents.filters import ObjectOwnedPermissionsFilter
from documents.filters import PaperlessTaskFilterSet
from documents.filters import ShareLinkBundleFilterSet
from documents.filters import ShareLinkFilterSet
from documents.filters import StoragePathFilterSet
from documents.filters import TagFilterSet
@@ -134,6 +137,7 @@ from documents.models import Note
from documents.models import PaperlessTask
from documents.models import SavedView
from documents.models import ShareLink
from documents.models import ShareLinkBundle
from documents.models import StoragePath
from documents.models import Tag
from documents.models import UiSettings
@@ -167,6 +171,7 @@ from documents.serialisers import PostDocumentSerializer
from documents.serialisers import RunTaskViewSerializer
from documents.serialisers import SavedViewSerializer
from documents.serialisers import SearchResultSerializer
from documents.serialisers import ShareLinkBundleSerializer
from documents.serialisers import ShareLinkSerializer
from documents.serialisers import StoragePathSerializer
from documents.serialisers import StoragePathTestSerializer
@@ -179,6 +184,7 @@ from documents.serialisers import WorkflowActionSerializer
from documents.serialisers import WorkflowSerializer
from documents.serialisers import WorkflowTriggerSerializer
from documents.signals import document_updated
from documents.tasks import build_share_link_bundle
from documents.tasks import consume_file
from documents.tasks import empty_trash
from documents.tasks import index_optimize
@@ -2272,7 +2278,7 @@ class BulkDownloadView(GenericAPIView):
follow_filename_format = serializer.validated_data.get("follow_formatting")
for document in documents:
if not has_perms_owner_aware(request.user, "view_document", document):
if not has_perms_owner_aware(request.user, "change_document", document):
return HttpResponseForbidden("Insufficient permissions")
settings.SCRATCH_DIR.mkdir(parents=True, exist_ok=True)
@@ -2619,21 +2625,225 @@ class ShareLinkViewSet(ModelViewSet, PassUserMixin):
ordering_fields = ("created", "expiration", "document")
class ShareLinkBundleViewSet(ModelViewSet, PassUserMixin):
model = ShareLinkBundle
queryset = ShareLinkBundle.objects.all()
serializer_class = ShareLinkBundleSerializer
pagination_class = StandardPagination
permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
filter_backends = (
DjangoFilterBackend,
OrderingFilter,
ObjectOwnedOrGrantedPermissionsFilter,
)
filterset_class = ShareLinkBundleFilterSet
ordering_fields = ("created", "expiration", "status")
def get_queryset(self):
return (
super()
.get_queryset()
.prefetch_related("documents")
.annotate(document_total=Count("documents", distinct=True))
)
def create(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
document_ids = serializer.validated_data["document_ids"]
documents_qs = Document.objects.filter(pk__in=document_ids).select_related(
"owner",
)
found_ids = set(documents_qs.values_list("pk", flat=True))
missing = sorted(set(document_ids) - found_ids)
if missing:
raise ValidationError(
{
"document_ids": _(
"Documents not found: %(ids)s",
)
% {"ids": ", ".join(str(item) for item in missing)},
},
)
documents = list(documents_qs)
for document in documents:
if not has_perms_owner_aware(request.user, "view_document", document):
raise ValidationError(
{
"document_ids": _(
"Insufficient permissions to share document %(id)s.",
)
% {"id": document.pk},
},
)
document_map = {document.pk: document for document in documents}
ordered_documents = [document_map[doc_id] for doc_id in document_ids]
bundle = serializer.save(
owner=request.user,
documents=ordered_documents,
)
bundle.remove_file()
bundle.status = ShareLinkBundle.Status.PENDING
bundle.last_error = ""
bundle.size_bytes = None
bundle.built_at = None
bundle.file_path = ""
bundle.save(
update_fields=[
"status",
"last_error",
"size_bytes",
"built_at",
"file_path",
],
)
build_share_link_bundle.delay(bundle.pk)
bundle.document_total = len(ordered_documents)
response_serializer = self.get_serializer(bundle)
headers = self.get_success_headers(response_serializer.data)
return Response(
response_serializer.data,
status=status.HTTP_201_CREATED,
headers=headers,
)
@action(detail=True, methods=["post"])
def rebuild(self, request, pk=None):
bundle = self.get_object()
if bundle.status == ShareLinkBundle.Status.PROCESSING:
return Response(
{"detail": _("Bundle is already being processed.")},
status=status.HTTP_400_BAD_REQUEST,
)
bundle.remove_file()
bundle.status = ShareLinkBundle.Status.PENDING
bundle.last_error = ""
bundle.size_bytes = None
bundle.built_at = None
bundle.file_path = ""
bundle.save(
update_fields=[
"status",
"last_error",
"size_bytes",
"built_at",
"file_path",
],
)
build_share_link_bundle.delay(bundle.pk)
bundle.document_total = (
getattr(bundle, "document_total", None) or bundle.documents.count()
)
serializer = self.get_serializer(bundle)
return Response(serializer.data)
class SharedLinkView(View):
authentication_classes = []
permission_classes = []
def get(self, request, slug):
share_link = ShareLink.objects.filter(slug=slug).first()
- if share_link is None:
+ if share_link is not None:
if (
share_link.expiration is not None
and share_link.expiration < timezone.now()
):
return HttpResponseRedirect("/accounts/login/?sharelink_expired=1")
return serve_file(
doc=share_link.document,
use_archive=share_link.file_version == "archive",
disposition="inline",
)
bundle = ShareLinkBundle.objects.filter(slug=slug).first()
if bundle is None:
return HttpResponseRedirect("/accounts/login/?sharelink_notfound=1")
- if share_link.expiration is not None and share_link.expiration < timezone.now():
+ if bundle.expiration is not None and bundle.expiration < timezone.now():
return HttpResponseRedirect("/accounts/login/?sharelink_expired=1")
- return serve_file(
- doc=share_link.document,
- use_archive=share_link.file_version == "archive",
- disposition="inline",
if bundle.status in {
ShareLinkBundle.Status.PENDING,
ShareLinkBundle.Status.PROCESSING,
}:
return HttpResponse(
_(
"The share link bundle is still being prepared. Please try again later.",
),
status=status.HTTP_202_ACCEPTED,
)
if bundle.status == ShareLinkBundle.Status.FAILED:
bundle.remove_file()
bundle.status = ShareLinkBundle.Status.PENDING
bundle.last_error = ""
bundle.size_bytes = None
bundle.built_at = None
bundle.file_path = ""
bundle.save(
update_fields=[
"status",
"last_error",
"size_bytes",
"built_at",
"file_path",
],
)
build_share_link_bundle.delay(bundle.pk)
return HttpResponse(
_(
"The share link bundle is temporarily unavailable. A rebuild has been scheduled. Please try again later.",
),
status=status.HTTP_503_SERVICE_UNAVAILABLE,
)
file_path = bundle.absolute_file_path
if file_path is None or not file_path.exists():
bundle.status = ShareLinkBundle.Status.PENDING
bundle.last_error = ""
bundle.size_bytes = None
bundle.built_at = None
bundle.file_path = ""
bundle.save(
update_fields=[
"status",
"last_error",
"size_bytes",
"built_at",
"file_path",
],
)
build_share_link_bundle.delay(bundle.pk)
return HttpResponse(
_(
"The share link bundle is being prepared. Please try again later.",
),
status=status.HTTP_202_ACCEPTED,
)
response = FileResponse(file_path.open("rb"), content_type="application/zip")
short_slug = bundle.slug[:12]
download_name = f"paperless-share-{short_slug}.zip"
filename_normalized = (
normalize("NFKD", download_name)
.encode(
"ascii",
"ignore",
)
.decode("ascii")
)
filename_encoded = quote(download_name)
response["Content-Disposition"] = (
f"attachment; filename='{filename_normalized}'; "
f"filename*=utf-8''{filename_encoded}"
)
return response
def serve_file(*, doc: Document, use_archive: bool, disposition: str):
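Taken together with the share_link_bundles route registered in urls.py further down, the new viewset implies a simple client flow: POST a list of document IDs, poll the bundle until the background task has built the ZIP, then hand out the public /share/{slug} URL. A minimal sketch of that flow (not part of the diff), assuming token authentication and assuming the serializer exposes id, slug and status fields with lowercase status strings:

    import time

    import requests

    BASE = "http://localhost:8000"  # assumed Paperless-ngx instance
    HEADERS = {"Authorization": "Token <api-token>"}  # assumed token auth

    # Create a bundle; "document_ids" is the field validated by
    # ShareLinkBundleViewSet.create() above.
    bundle = requests.post(
        f"{BASE}/api/share_link_bundles/",
        json={"document_ids": [1, 2, 3]},
        headers=HEADERS,
    ).json()

    # The ZIP is built asynchronously by build_share_link_bundle, so poll until
    # the bundle leaves the pending/processing states (status values assumed).
    while bundle["status"] in ("pending", "processing"):
        time.sleep(2)
        bundle = requests.get(
            f"{BASE}/api/share_link_bundles/{bundle['id']}/",
            headers=HEADERS,
        ).json()

    # The public link needs no login; while a build is still running the view
    # answers 202, afterwards it streams the ZIP as an attachment.
    print(f"{BASE}/share/{bundle['slug']}")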

View File

@@ -229,6 +229,17 @@ def _parse_beat_schedule() -> dict:
"expires": 59.0 * 60.0,
},
},
{
"name": "Cleanup expired share link bundles",
"env_key": "PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON",
# Default daily at 02:00
"env_default": "0 2 * * *",
"task": "documents.tasks.cleanup_expired_share_link_bundles",
"options": {
# 1 hour before default schedule sends again
"expires": 23.0 * 60.0 * 60.0,
},
},
]
for task in tasks:
# Either get the environment setting or use the default
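The schedule entry above only names documents.tasks.cleanup_expired_share_link_bundles; its implementation is not part of this hunk. As a rough, hypothetical sketch of what such a cleanup does, based only on the model usage visible in the views above (expiration and remove_file()), with the import path assumed:

    from django.utils import timezone

    from documents.models import ShareLinkBundle  # import path assumed

    def cleanup_expired_share_link_bundles():
        # Hypothetical sketch, not the shipped implementation: delete bundles
        # whose expiration has passed, removing their generated ZIP files first.
        expired = ShareLinkBundle.objects.filter(
            expiration__isnull=False,
            expiration__lt=timezone.now(),
        )
        for bundle in expired:
            bundle.remove_file()
        expired.delete()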
@@ -267,6 +278,7 @@ MEDIA_ROOT = __get_path("PAPERLESS_MEDIA_ROOT", BASE_DIR.parent / "media")
ORIGINALS_DIR = MEDIA_ROOT / "documents" / "originals"
ARCHIVE_DIR = MEDIA_ROOT / "documents" / "archive"
THUMBNAIL_DIR = MEDIA_ROOT / "documents" / "thumbnails"
SHARE_LINK_BUNDLE_DIR = MEDIA_ROOT / "documents" / "share_link_bundles"
DATA_DIR = __get_path("PAPERLESS_DATA_DIR", BASE_DIR.parent / "data")
@@ -1033,7 +1045,7 @@ CONSUMER_IGNORE_PATTERNS = list(
json.loads(
os.getenv(
"PAPERLESS_CONSUMER_IGNORE_PATTERNS",
- json.dumps([]),
+ json.dumps(IGNORABLE_FILES),
),
),
)

View File

@@ -160,6 +160,7 @@ class TestCeleryScheduleParsing(TestCase):
SANITY_EXPIRE_TIME = ((7.0 * 24.0) - 1.0) * 60.0 * 60.0
EMPTY_TRASH_EXPIRE_TIME = 23.0 * 60.0 * 60.0
RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME = 59.0 * 60.0
CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME = 23.0 * 60.0 * 60.0
def test_schedule_configuration_default(self):
"""
@@ -204,6 +205,13 @@ class TestCeleryScheduleParsing(TestCase):
"schedule": crontab(minute="5", hour="*/1"),
"options": {"expires": self.RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME},
},
"Cleanup expired share link bundles": {
"task": "documents.tasks.cleanup_expired_share_link_bundles",
"schedule": crontab(minute=0, hour=2),
"options": {
"expires": self.CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME,
},
},
},
schedule,
)
@@ -256,6 +264,13 @@ class TestCeleryScheduleParsing(TestCase):
"schedule": crontab(minute="5", hour="*/1"),
"options": {"expires": self.RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME},
},
"Cleanup expired share link bundles": {
"task": "documents.tasks.cleanup_expired_share_link_bundles",
"schedule": crontab(minute=0, hour=2),
"options": {
"expires": self.CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME,
},
},
},
schedule,
)
@@ -300,6 +315,13 @@ class TestCeleryScheduleParsing(TestCase):
"schedule": crontab(minute="5", hour="*/1"),
"options": {"expires": self.RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME},
},
"Cleanup expired share link bundles": {
"task": "documents.tasks.cleanup_expired_share_link_bundles",
"schedule": crontab(minute=0, hour=2),
"options": {
"expires": self.CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME,
},
},
},
schedule,
)
@@ -322,6 +344,7 @@ class TestCeleryScheduleParsing(TestCase):
"PAPERLESS_INDEX_TASK_CRON": "disable",
"PAPERLESS_EMPTY_TRASH_TASK_CRON": "disable",
"PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON": "disable",
"PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON": "disable",
},
):
schedule = _parse_beat_schedule()

View File

@@ -30,6 +30,7 @@ from documents.views import SavedViewViewSet
from documents.views import SearchAutoCompleteView
from documents.views import SelectionDataView
from documents.views import SharedLinkView
from documents.views import ShareLinkBundleViewSet
from documents.views import ShareLinkViewSet
from documents.views import StatisticsView
from documents.views import StoragePathViewSet
@@ -72,6 +73,7 @@ api_router.register(r"users", UserViewSet, basename="users")
api_router.register(r"groups", GroupViewSet, basename="groups")
api_router.register(r"mail_accounts", MailAccountViewSet)
api_router.register(r"mail_rules", MailRuleViewSet)
api_router.register(r"share_link_bundles", ShareLinkBundleViewSet)
api_router.register(r"share_links", ShareLinkViewSet)
api_router.register(r"workflow_triggers", WorkflowTriggerViewSet)
api_router.register(r"workflow_actions", WorkflowActionViewSet)

uv.lock generated
View File

@@ -1458,6 +1458,26 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
]
[[package]]
name = "inotify-simple"
version = "2.0.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e3/5c/bfe40e15d684bc30b0073aa97c39be410a5fbef3d33cad6f0bf2012571e0/inotify_simple-2.0.1.tar.gz", hash = "sha256:f010bbbd8283bd71a9f4eb2de94765804ede24bd47320b0e6ef4136e541cdc2c", size = 7101, upload-time = "2025-08-25T06:28:20.998Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e3/86/8be1ac7e90f80b413e81f1e235148e8db771218886a2353392f02da01be3/inotify_simple-2.0.1-py3-none-any.whl", hash = "sha256:e5da495f2064889f8e68b67f9358b0d102e03b783c2d42e5b8e132ab859a5d8a", size = 7449, upload-time = "2025-08-25T06:28:19.919Z" },
]
[[package]]
name = "inotifyrecursive"
version = "0.3.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "inotify-simple", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/19/3a/9ed038cb750a3ba8090869cf3ad50f5628077a936d911aee14ca83e40f6a/inotifyrecursive-0.3.5.tar.gz", hash = "sha256:a2c450b317693e4538416f90eb1d7858506dafe6b8b885037bd2dd9ae2dafa1e", size = 4576, upload-time = "2020-11-20T12:38:48.035Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c7/fc/4e5a141c3f7c7bed550ac1f69e599e92b6be449dd4677ec09f325cad0955/inotifyrecursive-0.3.5-py3-none-any.whl", hash = "sha256:7e5f4a2e1dc2bef0efa3b5f6b339c41fb4599055a2b54909d020e9e932cc8d2f", size = 8009, upload-time = "2020-11-20T12:38:46.981Z" },
]
[[package]]
name = "isodate"
@@ -2166,6 +2186,7 @@ dependencies = [
{ name = "gotenberg-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "httpx-oauth", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "imap-tools", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "inotifyrecursive", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "jinja2", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "langdetect", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "nltk", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -2185,7 +2206,7 @@ dependencies = [
{ name = "setproctitle", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "tika-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "watchfiles", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "watchdog", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "whitenoise", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "whoosh-reloaded", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "zxing-cpp", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version != '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version != '3.12.*' and platform_machine == 'x86_64' and sys_platform == 'linux') or (platform_machine != 'aarch64' and platform_machine != 'x86_64' and sys_platform == 'linux') or sys_platform == 'darwin'" },
@@ -2304,6 +2325,7 @@ requires-dist = [
{ name = "granian", extras = ["uvloop"], marker = "extra == 'webserver'", specifier = "~=2.5.1" },
{ name = "httpx-oauth", specifier = "~=0.16" },
{ name = "imap-tools", specifier = "~=1.11.0" },
{ name = "inotifyrecursive", specifier = "~=0.3" },
{ name = "jinja2", specifier = "~=3.1.5" },
{ name = "langdetect", specifier = "~=1.0.9" },
{ name = "mysqlclient", marker = "extra == 'mariadb'", specifier = "~=2.2.7" },
@@ -2329,7 +2351,7 @@ requires-dist = [
{ name = "setproctitle", specifier = "~=1.3.4" },
{ name = "tika-client", specifier = "~=0.10.0" },
{ name = "tqdm", specifier = "~=4.67.1" },
{ name = "watchfiles", specifier = ">=1.1.1" },
{ name = "watchdog", specifier = "~=6.0" },
{ name = "whitenoise", specifier = "~=6.9" },
{ name = "whoosh-reloaded", specifier = ">=2.7.5" },
{ name = "zxing-cpp", marker = "(python_full_version != '3.12.*' and platform_machine == 'aarch64') or (python_full_version != '3.12.*' and platform_machine == 'x86_64') or (platform_machine != 'aarch64' and platform_machine != 'x86_64') or sys_platform != 'linux'", specifier = "~=2.3.0" },
@@ -4305,95 +4327,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078, upload-time = "2024-11-01T14:07:07.547Z" },
]
[[package]]
name = "watchfiles"
version = "1.1.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440, upload-time = "2025-10-14T15:06:21.08Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a7/1a/206e8cf2dd86fddf939165a57b4df61607a1e0add2785f170a3f616b7d9f/watchfiles-1.1.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:eef58232d32daf2ac67f42dea51a2c80f0d03379075d44a587051e63cc2e368c", size = 407318, upload-time = "2025-10-14T15:04:18.753Z" },
{ url = "https://files.pythonhosted.org/packages/b3/0f/abaf5262b9c496b5dad4ed3c0e799cbecb1f8ea512ecb6ddd46646a9fca3/watchfiles-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:03fa0f5237118a0c5e496185cafa92878568b652a2e9a9382a5151b1a0380a43", size = 394478, upload-time = "2025-10-14T15:04:20.297Z" },
{ url = "https://files.pythonhosted.org/packages/b1/04/9cc0ba88697b34b755371f5ace8d3a4d9a15719c07bdc7bd13d7d8c6a341/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca65483439f9c791897f7db49202301deb6e15fe9f8fe2fed555bf986d10c31", size = 449894, upload-time = "2025-10-14T15:04:21.527Z" },
{ url = "https://files.pythonhosted.org/packages/d2/9c/eda4615863cd8621e89aed4df680d8c3ec3da6a4cf1da113c17decd87c7f/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f0ab1c1af0cb38e3f598244c17919fb1a84d1629cc08355b0074b6d7f53138ac", size = 459065, upload-time = "2025-10-14T15:04:22.795Z" },
{ url = "https://files.pythonhosted.org/packages/84/13/f28b3f340157d03cbc8197629bc109d1098764abe1e60874622a0be5c112/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bc570d6c01c206c46deb6e935a260be44f186a2f05179f52f7fcd2be086a94d", size = 488377, upload-time = "2025-10-14T15:04:24.138Z" },
{ url = "https://files.pythonhosted.org/packages/86/93/cfa597fa9389e122488f7ffdbd6db505b3b915ca7435ecd7542e855898c2/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e84087b432b6ac94778de547e08611266f1f8ffad28c0ee4c82e028b0fc5966d", size = 595837, upload-time = "2025-10-14T15:04:25.057Z" },
{ url = "https://files.pythonhosted.org/packages/57/1e/68c1ed5652b48d89fc24d6af905d88ee4f82fa8bc491e2666004e307ded1/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:620bae625f4cb18427b1bb1a2d9426dc0dd5a5ba74c7c2cdb9de405f7b129863", size = 473456, upload-time = "2025-10-14T15:04:26.497Z" },
{ url = "https://files.pythonhosted.org/packages/d5/dc/1a680b7458ffa3b14bb64878112aefc8f2e4f73c5af763cbf0bd43100658/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:544364b2b51a9b0c7000a4b4b02f90e9423d97fbbf7e06689236443ebcad81ab", size = 455614, upload-time = "2025-10-14T15:04:27.539Z" },
{ url = "https://files.pythonhosted.org/packages/61/a5/3d782a666512e01eaa6541a72ebac1d3aae191ff4a31274a66b8dd85760c/watchfiles-1.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bbe1ef33d45bc71cf21364df962af171f96ecaeca06bd9e3d0b583efb12aec82", size = 630690, upload-time = "2025-10-14T15:04:28.495Z" },
{ url = "https://files.pythonhosted.org/packages/9b/73/bb5f38590e34687b2a9c47a244aa4dd50c56a825969c92c9c5fc7387cea1/watchfiles-1.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1a0bb430adb19ef49389e1ad368450193a90038b5b752f4ac089ec6942c4dff4", size = 622459, upload-time = "2025-10-14T15:04:29.491Z" },
{ url = "https://files.pythonhosted.org/packages/1f/f8/2c5f479fb531ce2f0564eda479faecf253d886b1ab3630a39b7bf7362d46/watchfiles-1.1.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f57b396167a2565a4e8b5e56a5a1c537571733992b226f4f1197d79e94cf0ae5", size = 406529, upload-time = "2025-10-14T15:04:32.899Z" },
{ url = "https://files.pythonhosted.org/packages/fe/cd/f515660b1f32f65df671ddf6f85bfaca621aee177712874dc30a97397977/watchfiles-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:421e29339983e1bebc281fab40d812742268ad057db4aee8c4d2bce0af43b741", size = 394384, upload-time = "2025-10-14T15:04:33.761Z" },
{ url = "https://files.pythonhosted.org/packages/7b/c3/28b7dc99733eab43fca2d10f55c86e03bd6ab11ca31b802abac26b23d161/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e43d39a741e972bab5d8100b5cdacf69db64e34eb19b6e9af162bccf63c5cc6", size = 448789, upload-time = "2025-10-14T15:04:34.679Z" },
{ url = "https://files.pythonhosted.org/packages/4a/24/33e71113b320030011c8e4316ccca04194bf0cbbaeee207f00cbc7d6b9f5/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f537afb3276d12814082a2e9b242bdcf416c2e8fd9f799a737990a1dbe906e5b", size = 460521, upload-time = "2025-10-14T15:04:35.963Z" },
{ url = "https://files.pythonhosted.org/packages/f4/c3/3c9a55f255aa57b91579ae9e98c88704955fa9dac3e5614fb378291155df/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2cd9e04277e756a2e2d2543d65d1e2166d6fd4c9b183f8808634fda23f17b14", size = 488722, upload-time = "2025-10-14T15:04:37.091Z" },
{ url = "https://files.pythonhosted.org/packages/49/36/506447b73eb46c120169dc1717fe2eff07c234bb3232a7200b5f5bd816e9/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3f58818dc0b07f7d9aa7fe9eb1037aecb9700e63e1f6acfed13e9fef648f5d", size = 596088, upload-time = "2025-10-14T15:04:38.39Z" },
{ url = "https://files.pythonhosted.org/packages/82/ab/5f39e752a9838ec4d52e9b87c1e80f1ee3ccdbe92e183c15b6577ab9de16/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb9f66367023ae783551042d31b1d7fd422e8289eedd91f26754a66f44d5cff", size = 472923, upload-time = "2025-10-14T15:04:39.666Z" },
{ url = "https://files.pythonhosted.org/packages/af/b9/a419292f05e302dea372fa7e6fda5178a92998411f8581b9830d28fb9edb/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebfd0861a83e6c3d1110b78ad54704486555246e542be3e2bb94195eabb2606", size = 456080, upload-time = "2025-10-14T15:04:40.643Z" },
{ url = "https://files.pythonhosted.org/packages/b0/c3/d5932fd62bde1a30c36e10c409dc5d54506726f08cb3e1d8d0ba5e2bc8db/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fac835b4ab3c6487b5dbad78c4b3724e26bcc468e886f8ba8cc4306f68f6701", size = 629432, upload-time = "2025-10-14T15:04:41.789Z" },
{ url = "https://files.pythonhosted.org/packages/f7/77/16bddd9779fafb795f1a94319dc965209c5641db5bf1edbbccace6d1b3c0/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:399600947b170270e80134ac854e21b3ccdefa11a9529a3decc1327088180f10", size = 623046, upload-time = "2025-10-14T15:04:42.718Z" },
{ url = "https://files.pythonhosted.org/packages/74/d5/f039e7e3c639d9b1d09b07ea412a6806d38123f0508e5f9b48a87b0a76cc/watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d", size = 404745, upload-time = "2025-10-14T15:04:46.731Z" },
{ url = "https://files.pythonhosted.org/packages/a5/96/a881a13aa1349827490dab2d363c8039527060cfcc2c92cc6d13d1b1049e/watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610", size = 391769, upload-time = "2025-10-14T15:04:48.003Z" },
{ url = "https://files.pythonhosted.org/packages/4b/5b/d3b460364aeb8da471c1989238ea0e56bec24b6042a68046adf3d9ddb01c/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af", size = 449374, upload-time = "2025-10-14T15:04:49.179Z" },
{ url = "https://files.pythonhosted.org/packages/b9/44/5769cb62d4ed055cb17417c0a109a92f007114a4e07f30812a73a4efdb11/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6", size = 459485, upload-time = "2025-10-14T15:04:50.155Z" },
{ url = "https://files.pythonhosted.org/packages/19/0c/286b6301ded2eccd4ffd0041a1b726afda999926cf720aab63adb68a1e36/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce", size = 488813, upload-time = "2025-10-14T15:04:51.059Z" },
{ url = "https://files.pythonhosted.org/packages/c7/2b/8530ed41112dd4a22f4dcfdb5ccf6a1baad1ff6eed8dc5a5f09e7e8c41c7/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa", size = 594816, upload-time = "2025-10-14T15:04:52.031Z" },
{ url = "https://files.pythonhosted.org/packages/ce/d2/f5f9fb49489f184f18470d4f99f4e862a4b3e9ac2865688eb2099e3d837a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb", size = 475186, upload-time = "2025-10-14T15:04:53.064Z" },
{ url = "https://files.pythonhosted.org/packages/cf/68/5707da262a119fb06fbe214d82dd1fe4a6f4af32d2d14de368d0349eb52a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803", size = 456812, upload-time = "2025-10-14T15:04:55.174Z" },
{ url = "https://files.pythonhosted.org/packages/66/ab/3cbb8756323e8f9b6f9acb9ef4ec26d42b2109bce830cc1f3468df20511d/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94", size = 630196, upload-time = "2025-10-14T15:04:56.22Z" },
{ url = "https://files.pythonhosted.org/packages/78/46/7152ec29b8335f80167928944a94955015a345440f524d2dfe63fc2f437b/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43", size = 622657, upload-time = "2025-10-14T15:04:57.521Z" },
{ url = "https://files.pythonhosted.org/packages/bb/f4/f750b29225fe77139f7ae5de89d4949f5a99f934c65a1f1c0b248f26f747/watchfiles-1.1.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:130e4876309e8686a5e37dba7d5e9bc77e6ed908266996ca26572437a5271e18", size = 404321, upload-time = "2025-10-14T15:05:02.063Z" },
{ url = "https://files.pythonhosted.org/packages/2b/f9/f07a295cde762644aa4c4bb0f88921d2d141af45e735b965fb2e87858328/watchfiles-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f3bde70f157f84ece3765b42b4a52c6ac1a50334903c6eaf765362f6ccca88a", size = 391783, upload-time = "2025-10-14T15:05:03.052Z" },
{ url = "https://files.pythonhosted.org/packages/bc/11/fc2502457e0bea39a5c958d86d2cb69e407a4d00b85735ca724bfa6e0d1a/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e0b1fe858430fc0251737ef3824c54027bedb8c37c38114488b8e131cf8219", size = 449279, upload-time = "2025-10-14T15:05:04.004Z" },
{ url = "https://files.pythonhosted.org/packages/e3/1f/d66bc15ea0b728df3ed96a539c777acfcad0eb78555ad9efcaa1274688f0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f27db948078f3823a6bb3b465180db8ebecf26dd5dae6f6180bd87383b6b4428", size = 459405, upload-time = "2025-10-14T15:05:04.942Z" },
{ url = "https://files.pythonhosted.org/packages/be/90/9f4a65c0aec3ccf032703e6db02d89a157462fbb2cf20dd415128251cac0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059098c3a429f62fc98e8ec62b982230ef2c8df68c79e826e37b895bc359a9c0", size = 488976, upload-time = "2025-10-14T15:05:05.905Z" },
{ url = "https://files.pythonhosted.org/packages/37/57/ee347af605d867f712be7029bb94c8c071732a4b44792e3176fa3c612d39/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfb5862016acc9b869bb57284e6cb35fdf8e22fe59f7548858e2f971d045f150", size = 595506, upload-time = "2025-10-14T15:05:06.906Z" },
{ url = "https://files.pythonhosted.org/packages/a8/78/cc5ab0b86c122047f75e8fc471c67a04dee395daf847d3e59381996c8707/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:319b27255aacd9923b8a276bb14d21a5f7ff82564c744235fc5eae58d95422ae", size = 474936, upload-time = "2025-10-14T15:05:07.906Z" },
{ url = "https://files.pythonhosted.org/packages/62/da/def65b170a3815af7bd40a3e7010bf6ab53089ef1b75d05dd5385b87cf08/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c755367e51db90e75b19454b680903631d41f9e3607fbd941d296a020c2d752d", size = 456147, upload-time = "2025-10-14T15:05:09.138Z" },
{ url = "https://files.pythonhosted.org/packages/57/99/da6573ba71166e82d288d4df0839128004c67d2778d3b566c138695f5c0b/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c22c776292a23bfc7237a98f791b9ad3144b02116ff10d820829ce62dff46d0b", size = 630007, upload-time = "2025-10-14T15:05:10.117Z" },
{ url = "https://files.pythonhosted.org/packages/a8/51/7439c4dd39511368849eb1e53279cd3454b4a4dbace80bab88feeb83c6b5/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3a476189be23c3686bc2f4321dd501cb329c0a0469e77b7b534ee10129ae6374", size = 622280, upload-time = "2025-10-14T15:05:11.146Z" },
{ url = "https://files.pythonhosted.org/packages/79/42/e0a7d749626f1e28c7108a99fb9bf524b501bbbeb9b261ceecde644d5a07/watchfiles-1.1.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:563b116874a9a7ce6f96f87cd0b94f7faf92d08d0021e837796f0a14318ef8da", size = 403389, upload-time = "2025-10-14T15:05:15.777Z" },
{ url = "https://files.pythonhosted.org/packages/15/49/08732f90ce0fbbc13913f9f215c689cfc9ced345fb1bcd8829a50007cc8d/watchfiles-1.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3ad9fe1dae4ab4212d8c91e80b832425e24f421703b5a42ef2e4a1e215aff051", size = 389964, upload-time = "2025-10-14T15:05:16.85Z" },
{ url = "https://files.pythonhosted.org/packages/27/0d/7c315d4bd5f2538910491a0393c56bf70d333d51bc5b34bee8e68e8cea19/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce70f96a46b894b36eba678f153f052967a0d06d5b5a19b336ab0dbbd029f73e", size = 448114, upload-time = "2025-10-14T15:05:17.876Z" },
{ url = "https://files.pythonhosted.org/packages/c3/24/9e096de47a4d11bc4df41e9d1e61776393eac4cb6eb11b3e23315b78b2cc/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cb467c999c2eff23a6417e58d75e5828716f42ed8289fe6b77a7e5a91036ca70", size = 460264, upload-time = "2025-10-14T15:05:18.962Z" },
{ url = "https://files.pythonhosted.org/packages/cc/0f/e8dea6375f1d3ba5fcb0b3583e2b493e77379834c74fd5a22d66d85d6540/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:836398932192dae4146c8f6f737d74baeac8b70ce14831a239bdb1ca882fc261", size = 487877, upload-time = "2025-10-14T15:05:20.094Z" },
{ url = "https://files.pythonhosted.org/packages/ac/5b/df24cfc6424a12deb41503b64d42fbea6b8cb357ec62ca84a5a3476f654a/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:743185e7372b7bc7c389e1badcc606931a827112fbbd37f14c537320fca08620", size = 595176, upload-time = "2025-10-14T15:05:21.134Z" },
{ url = "https://files.pythonhosted.org/packages/8f/b5/853b6757f7347de4e9b37e8cc3289283fb983cba1ab4d2d7144694871d9c/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afaeff7696e0ad9f02cbb8f56365ff4686ab205fcf9c4c5b6fdfaaa16549dd04", size = 473577, upload-time = "2025-10-14T15:05:22.306Z" },
{ url = "https://files.pythonhosted.org/packages/e1/f7/0a4467be0a56e80447c8529c9fce5b38eab4f513cb3d9bf82e7392a5696b/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7eb7da0eb23aa2ba036d4f616d46906013a68caf61b7fdbe42fc8b25132e77", size = 455425, upload-time = "2025-10-14T15:05:23.348Z" },
{ url = "https://files.pythonhosted.org/packages/8e/e0/82583485ea00137ddf69bc84a2db88bd92ab4a6e3c405e5fb878ead8d0e7/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:831a62658609f0e5c64178211c942ace999517f5770fe9436be4c2faeba0c0ef", size = 628826, upload-time = "2025-10-14T15:05:24.398Z" },
{ url = "https://files.pythonhosted.org/packages/28/9a/a785356fccf9fae84c0cc90570f11702ae9571036fb25932f1242c82191c/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:f9a2ae5c91cecc9edd47e041a930490c31c3afb1f5e6d71de3dc671bfaca02bf", size = 622208, upload-time = "2025-10-14T15:05:25.45Z" },
{ url = "https://files.pythonhosted.org/packages/c3/f4/0872229324ef69b2c3edec35e84bd57a1289e7d3fe74588048ed8947a323/watchfiles-1.1.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:d1715143123baeeaeadec0528bb7441103979a1d5f6fd0e1f915383fea7ea6d5", size = 404315, upload-time = "2025-10-14T15:05:26.501Z" },
{ url = "https://files.pythonhosted.org/packages/7b/22/16d5331eaed1cb107b873f6ae1b69e9ced582fcf0c59a50cd84f403b1c32/watchfiles-1.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:39574d6370c4579d7f5d0ad940ce5b20db0e4117444e39b6d8f99db5676c52fd", size = 390869, upload-time = "2025-10-14T15:05:27.649Z" },
{ url = "https://files.pythonhosted.org/packages/b2/7e/5643bfff5acb6539b18483128fdc0ef2cccc94a5b8fbda130c823e8ed636/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7365b92c2e69ee952902e8f70f3ba6360d0d596d9299d55d7d386df84b6941fb", size = 449919, upload-time = "2025-10-14T15:05:28.701Z" },
{ url = "https://files.pythonhosted.org/packages/51/2e/c410993ba5025a9f9357c376f48976ef0e1b1aefb73b97a5ae01a5972755/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfff9740c69c0e4ed32416f013f3c45e2ae42ccedd1167ef2d805c000b6c71a5", size = 460845, upload-time = "2025-10-14T15:05:30.064Z" },
{ url = "https://files.pythonhosted.org/packages/8e/a4/2df3b404469122e8680f0fcd06079317e48db58a2da2950fb45020947734/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b27cf2eb1dda37b2089e3907d8ea92922b673c0c427886d4edc6b94d8dfe5db3", size = 489027, upload-time = "2025-10-14T15:05:31.064Z" },
{ url = "https://files.pythonhosted.org/packages/ea/84/4587ba5b1f267167ee715b7f66e6382cca6938e0a4b870adad93e44747e6/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526e86aced14a65a5b0ec50827c745597c782ff46b571dbfe46192ab9e0b3c33", size = 595615, upload-time = "2025-10-14T15:05:32.074Z" },
{ url = "https://files.pythonhosted.org/packages/6a/0f/c6988c91d06e93cd0bb3d4a808bcf32375ca1904609835c3031799e3ecae/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04e78dd0b6352db95507fd8cb46f39d185cf8c74e4cf1e4fbad1d3df96faf510", size = 474836, upload-time = "2025-10-14T15:05:33.209Z" },
{ url = "https://files.pythonhosted.org/packages/b4/36/ded8aebea91919485b7bbabbd14f5f359326cb5ec218cd67074d1e426d74/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c85794a4cfa094714fb9c08d4a218375b2b95b8ed1666e8677c349906246c05", size = 455099, upload-time = "2025-10-14T15:05:34.189Z" },
{ url = "https://files.pythonhosted.org/packages/98/e0/8c9bdba88af756a2fce230dd365fab2baf927ba42cd47521ee7498fd5211/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:74d5012b7630714b66be7b7b7a78855ef7ad58e8650c73afc4c076a1f480a8d6", size = 630626, upload-time = "2025-10-14T15:05:35.216Z" },
{ url = "https://files.pythonhosted.org/packages/2a/84/a95db05354bf2d19e438520d92a8ca475e578c647f78f53197f5a2f17aaf/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:8fbe85cb3201c7d380d3d0b90e63d520f15d6afe217165d7f98c9c649654db81", size = 622519, upload-time = "2025-10-14T15:05:36.259Z" },
{ url = "https://files.pythonhosted.org/packages/47/a8/e3af2184707c29f0f14b1963c0aace6529f9d1b8582d5b99f31bbf42f59e/watchfiles-1.1.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:88863fbbc1a7312972f1c511f202eb30866370ebb8493aef2812b9ff28156a21", size = 403820, upload-time = "2025-10-14T15:05:40.932Z" },
{ url = "https://files.pythonhosted.org/packages/c0/ec/e47e307c2f4bd75f9f9e8afbe3876679b18e1bcec449beca132a1c5ffb2d/watchfiles-1.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:55c7475190662e202c08c6c0f4d9e345a29367438cf8e8037f3155e10a88d5a5", size = 390510, upload-time = "2025-10-14T15:05:41.945Z" },
{ url = "https://files.pythonhosted.org/packages/d5/a0/ad235642118090f66e7b2f18fd5c42082418404a79205cdfca50b6309c13/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f53fa183d53a1d7a8852277c92b967ae99c2d4dcee2bfacff8868e6e30b15f7", size = 448408, upload-time = "2025-10-14T15:05:43.385Z" },
{ url = "https://files.pythonhosted.org/packages/df/85/97fa10fd5ff3332ae17e7e40e20784e419e28521549780869f1413742e9d/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6aae418a8b323732fa89721d86f39ec8f092fc2af67f4217a2b07fd3e93c6101", size = 458968, upload-time = "2025-10-14T15:05:44.404Z" },
{ url = "https://files.pythonhosted.org/packages/47/c2/9059c2e8966ea5ce678166617a7f75ecba6164375f3b288e50a40dc6d489/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f096076119da54a6080e8920cbdaac3dbee667eb91dcc5e5b78840b87415bd44", size = 488096, upload-time = "2025-10-14T15:05:45.398Z" },
{ url = "https://files.pythonhosted.org/packages/94/44/d90a9ec8ac309bc26db808a13e7bfc0e4e78b6fc051078a554e132e80160/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00485f441d183717038ed2e887a7c868154f216877653121068107b227a2f64c", size = 596040, upload-time = "2025-10-14T15:05:46.502Z" },
{ url = "https://files.pythonhosted.org/packages/95/68/4e3479b20ca305cfc561db3ed207a8a1c745ee32bf24f2026a129d0ddb6e/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a55f3e9e493158d7bfdb60a1165035f1cf7d320914e7b7ea83fe22c6023b58fc", size = 473847, upload-time = "2025-10-14T15:05:47.484Z" },
{ url = "https://files.pythonhosted.org/packages/4f/55/2af26693fd15165c4ff7857e38330e1b61ab8c37d15dc79118cdba115b7a/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c", size = 455072, upload-time = "2025-10-14T15:05:48.928Z" },
{ url = "https://files.pythonhosted.org/packages/66/1d/d0d200b10c9311ec25d2273f8aad8c3ef7cc7ea11808022501811208a750/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099", size = 629104, upload-time = "2025-10-14T15:05:49.908Z" },
{ url = "https://files.pythonhosted.org/packages/e3/bd/fa9bb053192491b3867ba07d2343d9f2252e00811567d30ae8d0f78136fe/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01", size = 622112, upload-time = "2025-10-14T15:05:50.941Z" },
{ url = "https://files.pythonhosted.org/packages/ba/4c/a888c91e2e326872fa4705095d64acd8aa2fb9c1f7b9bd0588f33850516c/watchfiles-1.1.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:17ef139237dfced9da49fb7f2232c86ca9421f666d78c264c7ffca6601d154c3", size = 409611, upload-time = "2025-10-14T15:06:05.809Z" },
{ url = "https://files.pythonhosted.org/packages/1e/c7/5420d1943c8e3ce1a21c0a9330bcf7edafb6aa65d26b21dbb3267c9e8112/watchfiles-1.1.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:672b8adf25b1a0d35c96b5888b7b18699d27d4194bac8beeae75be4b7a3fc9b2", size = 396889, upload-time = "2025-10-14T15:06:07.035Z" },
{ url = "https://files.pythonhosted.org/packages/0c/e5/0072cef3804ce8d3aaddbfe7788aadff6b3d3f98a286fdbee9fd74ca59a7/watchfiles-1.1.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77a13aea58bc2b90173bc69f2a90de8e282648939a00a602e1dc4ee23e26b66d", size = 451616, upload-time = "2025-10-14T15:06:08.072Z" },
{ url = "https://files.pythonhosted.org/packages/83/4e/b87b71cbdfad81ad7e83358b3e447fedd281b880a03d64a760fe0a11fc2e/watchfiles-1.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b495de0bb386df6a12b18335a0285dda90260f51bdb505503c02bcd1ce27a8b", size = 458413, upload-time = "2025-10-14T15:06:09.209Z" },
{ url = "https://files.pythonhosted.org/packages/d3/8e/e500f8b0b77be4ff753ac94dc06b33d8f0d839377fee1b78e8c8d8f031bf/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db476ab59b6765134de1d4fe96a1a9c96ddf091683599be0f26147ea1b2e4b88", size = 408250, upload-time = "2025-10-14T15:06:10.264Z" },
{ url = "https://files.pythonhosted.org/packages/bd/95/615e72cd27b85b61eec764a5ca51bd94d40b5adea5ff47567d9ebc4d275a/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89eef07eee5e9d1fda06e38822ad167a044153457e6fd997f8a858ab7564a336", size = 396117, upload-time = "2025-10-14T15:06:11.28Z" },
{ url = "https://files.pythonhosted.org/packages/c9/81/e7fe958ce8a7fb5c73cc9fb07f5aeaf755e6aa72498c57d760af760c91f8/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce19e06cbda693e9e7686358af9cd6f5d61312ab8b00488bc36f5aabbaf77e24", size = 450493, upload-time = "2025-10-14T15:06:12.321Z" },
{ url = "https://files.pythonhosted.org/packages/6e/d4/ed38dd3b1767193de971e694aa544356e63353c33a85d948166b5ff58b9e/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e6f39af2eab0118338902798b5aa6664f46ff66bc0280de76fca67a7f262a49", size = 457546, upload-time = "2025-10-14T15:06:13.372Z" },
]
[[package]]
name = "wcwidth"
version = "0.2.14"