Compare commits


1 Commit

Author | SHA1 | Message | Date
shamoon | 902ad8303b | Feature: mcp server | 2026-01-27 00:20:45 -08:00
20 changed files with 1237 additions and 1206 deletions

View File

@@ -60,6 +60,20 @@ The REST api provides five different forms of authentication.
[here](advanced_usage.md#openid-connect-and-social-authentication) for more
information on social accounts.
## Model Context Protocol (MCP)
Paperless-ngx exposes an MCP endpoint powered by `django-mcp-server` so MCP
clients can query data collections, run full-text document search, and invoke
DRF-backed CRUD tools.
- Endpoint: `/mcp/`
- Authentication: identical to the REST API (Basic, Session, Token, or Remote
User depending on your configuration).
The MCP server uses existing DRF viewsets and permissions. It also exposes a
`query_data_collections` tool for structured querying across published models
and a `search_documents` tool for full-text search.
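For illustration, a minimal sketch of talking to the endpoint from a script, assuming a local instance at `http://localhost:8000`, DRF token authentication, and MCP's JSON-RPC-over-HTTP transport; the exact transport details depend on the `django-mcp-server` version in use:

```python
# Hypothetical sketch: list the tools exposed at /mcp/ using token auth.
# PAPERLESS_URL and TOKEN are placeholders, not values from this change.
import requests

PAPERLESS_URL = "http://localhost:8000"
TOKEN = "your-api-token"

payload = {"jsonrpc": "2.0", "id": 1, "method": "tools/list", "params": {}}
response = requests.post(
    f"{PAPERLESS_URL}/mcp/",
    json=payload,
    headers={
        "Authorization": f"Token {TOKEN}",
        # Streamable HTTP servers may answer with JSON or an event stream.
        "Accept": "application/json, text/event-stream",
    },
    timeout=30,
)
print(response.status_code)
print(response.text[:500])
```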
## Searching for documents
Full text searching is available on the `/api/documents/` endpoint. Two

View File

@@ -36,6 +36,7 @@ dependencies = [
"django-extensions~=4.1",
"django-filter~=25.1",
"django-guardian~=3.2.0",
"django-mcp-server~=0.5.7",
"django-multiselectfield~=1.0.1",
"django-soft-delete~=1.0.18",
"django-treenode>=0.23.2",

File diff suppressed because it is too large.

View File

@@ -103,6 +103,22 @@
</div>
<div class="row mb-3">
<div class="col-md-3 col-form-label pt-0">
<span i18n>Items per page</span>
</div>
<div class="col">
<select class="form-select" formControlName="documentListItemPerPage">
<option [ngValue]="10">10</option>
<option [ngValue]="25">25</option>
<option [ngValue]="50">50</option>
<option [ngValue]="100">100</option>
</select>
</div>
</div>
<div class="row">
<div class="col-md-3 col-form-label pt-0">
<span i18n>Sidebar</span>
</div>
@@ -137,28 +153,8 @@
</button>
</div>
</div>
</div>
<div class="col-xl-6 ps-xl-5">
<h5 class="mt-3 mt-md-0" i18n>Global search</h5>
<div class="row">
<div class="col">
<pngx-input-check i18n-title title="Do not include advanced search results" formControlName="searchDbOnly"></pngx-input-check>
</div>
</div>
<div class="row mb-3">
<div class="col-md-3 col-form-label pt-0">
<span i18n>Full search links to</span>
</div>
<div class="col mb-3">
<select class="form-select" formControlName="searchLink">
<option [ngValue]="GlobalSearchType.TITLE_CONTENT" i18n>Title and content search</option>
<option [ngValue]="GlobalSearchType.ADVANCED" i18n>Advanced search</option>
</select>
</div>
</div>
<h5 class="mt-3 mt-md-0" id="update-checking" i18n>Update checking</h5>
<h5 class="mt-3" id="update-checking" i18n>Update checking</h5>
<div class="row mb-3">
<div class="col d-flex flex-row align-items-start">
<pngx-input-check i18n-title title="Enable update checking" formControlName="updateCheckingEnabled"></pngx-input-check>
@@ -183,33 +179,11 @@
<pngx-input-check i18n-title title="Show document counts in sidebar saved views" formControlName="sidebarViewsShowCount"></pngx-input-check>
</div>
</div>
</div>
</div>
<div class="col-xl-6 ps-xl-5">
<h5 class="mt-3 mt-md-0" i18n>Document editing</h5>
</ng-template>
</li>
<li [ngbNavItem]="SettingsNavIDs.Documents">
<a ngbNavLink i18n>Documents</a>
<ng-template ngbNavContent>
<div class="row">
<div class="col-xl-6 pe-xl-5">
<h5 i18n>Documents</h5>
<div class="row mb-3">
<div class="col-md-3 col-form-label pt-0">
<span i18n>Items per page</span>
</div>
<div class="col">
<select class="form-select" formControlName="documentListItemPerPage">
<option [ngValue]="10">10</option>
<option [ngValue]="25">25</option>
<option [ngValue]="50">50</option>
<option [ngValue]="100">100</option>
</select>
</div>
</div>
<h5 class="mt-3" i18n>Document editing</h5>
<div class="row">
<div class="col">
<pngx-input-check i18n-title title="Use PDF viewer provided by the browser" i18n-hint hint="This is usually faster for displaying large PDF documents, but it might not work on some browsers." formControlName="useNativePdfViewer"></pngx-input-check>
@@ -235,31 +209,31 @@
</div>
</div>
<div class="row">
<div class="row mb-3">
<div class="col">
<pngx-input-check i18n-title title="Show document thumbnail during loading" formControlName="documentEditingOverlayThumbnail"></pngx-input-check>
</div>
</div>
<div class="row mb-3">
<h5 class="mt-3" i18n>Global search</h5>
<div class="row">
<div class="col">
<p class="mb-2" i18n>Built-in fields to show:</p>
@for (option of documentDetailFieldOptions; track option.id) {
<div class="form-check ms-3">
<input class="form-check-input" type="checkbox"
[id]="'documentDetailField-' + option.id"
[checked]="isDocumentDetailFieldShown(option.id)"
(change)="toggleDocumentDetailField(option.id, $event.target.checked)" />
<label class="form-check-label" [for]="'documentDetailField-' + option.id">
{{ option.label }}
</label>
</div>
}
<p class="small text-muted mt-1" i18n>Uncheck fields to hide them on the document details page.</p>
<pngx-input-check i18n-title title="Do not include advanced search results" formControlName="searchDbOnly"></pngx-input-check>
</div>
</div>
</div>
<div class="col-xl-6 ps-xl-5">
<div class="row mb-3">
<div class="col-md-3 col-form-label pt-0">
<span i18n>Full search links to</span>
</div>
<div class="col mb-3">
<select class="form-select" formControlName="searchLink">
<option [ngValue]="GlobalSearchType.TITLE_CONTENT" i18n>Title and content search</option>
<option [ngValue]="GlobalSearchType.ADVANCED" i18n>Advanced search</option>
</select>
</div>
</div>
<h5 class="mt-3" i18n>Bulk editing</h5>
<div class="row mb-3">
<div class="col">
@@ -274,8 +248,10 @@
<pngx-input-check i18n-title title="Enable notes" formControlName="notesEnabled"></pngx-input-check>
</div>
</div>
</div>
</div>
</ng-template>
</li>

View File

@@ -201,9 +201,9 @@ describe('SettingsComponent', () => {
const navigateSpy = jest.spyOn(router, 'navigate')
const tabButtons = fixture.debugElement.queryAll(By.directive(NgbNavLink))
tabButtons[1].nativeElement.dispatchEvent(new MouseEvent('click'))
expect(navigateSpy).toHaveBeenCalledWith(['settings', 'documents'])
tabButtons[2].nativeElement.dispatchEvent(new MouseEvent('click'))
expect(navigateSpy).toHaveBeenCalledWith(['settings', 'permissions'])
tabButtons[2].nativeElement.dispatchEvent(new MouseEvent('click'))
expect(navigateSpy).toHaveBeenCalledWith(['settings', 'notifications'])
const initSpy = jest.spyOn(component, 'initialize')
component.isDirty = true // mock dirty
@@ -213,8 +213,8 @@ describe('SettingsComponent', () => {
expect(initSpy).not.toHaveBeenCalled()
navigateSpy.mockResolvedValueOnce(true) // nav accepted even though dirty
tabButtons[2].nativeElement.dispatchEvent(new MouseEvent('click'))
expect(navigateSpy).toHaveBeenCalledWith(['settings', 'permissions'])
tabButtons[1].nativeElement.dispatchEvent(new MouseEvent('click'))
expect(navigateSpy).toHaveBeenCalledWith(['settings', 'notifications'])
expect(initSpy).toHaveBeenCalled()
})
@@ -226,7 +226,7 @@ describe('SettingsComponent', () => {
activatedRoute.snapshot.fragment = '#notifications'
const scrollSpy = jest.spyOn(viewportScroller, 'scrollToAnchor')
component.ngOnInit()
expect(component.activeNavID).toEqual(4) // Notifications
expect(component.activeNavID).toEqual(3) // Notifications
component.ngAfterViewInit()
expect(scrollSpy).toHaveBeenCalledWith('#notifications')
})
@@ -251,7 +251,7 @@ describe('SettingsComponent', () => {
expect(toastErrorSpy).toHaveBeenCalled()
expect(storeSpy).toHaveBeenCalled()
expect(appearanceSettingsSpy).not.toHaveBeenCalled()
expect(setSpy).toHaveBeenCalledTimes(31)
expect(setSpy).toHaveBeenCalledTimes(30)
// succeed
storeSpy.mockReturnValueOnce(of(true))
@@ -366,22 +366,4 @@ describe('SettingsComponent', () => {
settingsService.settingsSaved.emit(true)
expect(maybeRefreshSpy).toHaveBeenCalled()
})
it('should support toggling document detail fields', () => {
completeSetup()
const field = 'storage_path'
expect(
component.settingsForm.get('documentDetailsHiddenFields').value.length
).toEqual(0)
component.toggleDocumentDetailField(field, false)
expect(
component.settingsForm.get('documentDetailsHiddenFields').value.length
).toEqual(1)
expect(component.isDocumentDetailFieldShown(field)).toBeFalsy()
component.toggleDocumentDetailField(field, true)
expect(
component.settingsForm.get('documentDetailsHiddenFields').value.length
).toEqual(0)
expect(component.isDocumentDetailFieldShown(field)).toBeTruthy()
})
})

View File

@@ -70,9 +70,9 @@ import { ComponentWithPermissions } from '../../with-permissions/with-permission
enum SettingsNavIDs {
General = 1,
Documents = 2,
Permissions = 3,
Notifications = 4,
Permissions = 2,
Notifications = 3,
SavedViews = 4,
}
const systemLanguage = { code: '', name: $localize`Use system language` }
@@ -81,25 +81,6 @@ const systemDateFormat = {
name: $localize`Use date format of display language`,
}
export enum DocumentDetailFieldID {
ArchiveSerialNumber = 'archive_serial_number',
Correspondent = 'correspondent',
DocumentType = 'document_type',
StoragePath = 'storage_path',
Tags = 'tags',
}
const documentDetailFieldOptions = [
{
id: DocumentDetailFieldID.ArchiveSerialNumber,
label: $localize`Archive serial number`,
},
{ id: DocumentDetailFieldID.Correspondent, label: $localize`Correspondent` },
{ id: DocumentDetailFieldID.DocumentType, label: $localize`Document type` },
{ id: DocumentDetailFieldID.StoragePath, label: $localize`Storage path` },
{ id: DocumentDetailFieldID.Tags, label: $localize`Tags` },
]
@Component({
selector: 'pngx-settings',
templateUrl: './settings.component.html',
@@ -165,7 +146,6 @@ export class SettingsComponent
pdfViewerDefaultZoom: new FormControl(null),
documentEditingRemoveInboxTags: new FormControl(null),
documentEditingOverlayThumbnail: new FormControl(null),
documentDetailsHiddenFields: new FormControl([]),
searchDbOnly: new FormControl(null),
searchLink: new FormControl(null),
@@ -196,8 +176,6 @@ export class SettingsComponent
public readonly ZoomSetting = ZoomSetting
public readonly documentDetailFieldOptions = documentDetailFieldOptions
get systemStatusHasErrors(): boolean {
return (
this.systemStatus.database.status === SystemStatusItemStatus.ERROR ||
@@ -358,9 +336,6 @@ export class SettingsComponent
documentEditingOverlayThumbnail: this.settings.get(
SETTINGS_KEYS.DOCUMENT_EDITING_OVERLAY_THUMBNAIL
),
documentDetailsHiddenFields: this.settings.get(
SETTINGS_KEYS.DOCUMENT_DETAILS_HIDDEN_FIELDS
),
searchDbOnly: this.settings.get(SETTINGS_KEYS.SEARCH_DB_ONLY),
searchLink: this.settings.get(SETTINGS_KEYS.SEARCH_FULL_TYPE),
}
@@ -551,10 +526,6 @@ export class SettingsComponent
SETTINGS_KEYS.DOCUMENT_EDITING_OVERLAY_THUMBNAIL,
this.settingsForm.value.documentEditingOverlayThumbnail
)
this.settings.set(
SETTINGS_KEYS.DOCUMENT_DETAILS_HIDDEN_FIELDS,
this.settingsForm.value.documentDetailsHiddenFields
)
this.settings.set(
SETTINGS_KEYS.SEARCH_DB_ONLY,
this.settingsForm.value.searchDbOnly
@@ -616,26 +587,6 @@ export class SettingsComponent
this.settingsForm.get('themeColor').patchValue('')
}
isDocumentDetailFieldShown(fieldId: string): boolean {
const hiddenFields =
this.settingsForm.value.documentDetailsHiddenFields || []
return !hiddenFields.includes(fieldId)
}
toggleDocumentDetailField(fieldId: string, checked: boolean) {
const hiddenFields = new Set(
this.settingsForm.value.documentDetailsHiddenFields || []
)
if (checked) {
hiddenFields.delete(fieldId)
} else {
hiddenFields.add(fieldId)
}
this.settingsForm
.get('documentDetailsHiddenFields')
.setValue(Array.from(hiddenFields))
}
showSystemStatus() {
const modal: NgbModalRef = this.modalService.open(
SystemStatusDialogComponent,

View File

@@ -146,26 +146,16 @@
<ng-template ngbNavContent>
<div>
<pngx-input-text #inputTitle i18n-title title="Title" formControlName="title" [horizontal]="true" [suggestion]="suggestions?.title" (keyup)="titleKeyUp($event)" [error]="error?.title"></pngx-input-text>
@if (!isFieldHidden(DocumentDetailFieldID.ArchiveSerialNumber)) {
<pngx-input-number i18n-title title="Archive serial number" [error]="error?.archive_serial_number" [horizontal]="true" formControlName='archive_serial_number'></pngx-input-number>
}
<pngx-input-number i18n-title title="Archive serial number" [error]="error?.archive_serial_number" [horizontal]="true" formControlName='archive_serial_number'></pngx-input-number>
<pngx-input-date i18n-title title="Date created" formControlName="created" [suggestions]="suggestions?.dates" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event)"
[error]="error?.created"></pngx-input-date>
@if (!isFieldHidden(DocumentDetailFieldID.Correspondent)) {
<pngx-input-select [items]="correspondents" i18n-title title="Correspondent" formControlName="correspondent" [allowNull]="true" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.Correspondent)"
(createNew)="createCorrespondent($event)" [hideAddButton]="createDisabled(DataType.Correspondent)" [suggestions]="suggestions?.correspondents" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Correspondent }"></pngx-input-select>
}
@if (!isFieldHidden(DocumentDetailFieldID.DocumentType)) {
<pngx-input-select [items]="documentTypes" i18n-title title="Document type" formControlName="document_type" [allowNull]="true" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.DocumentType)"
(createNew)="createDocumentType($event)" [hideAddButton]="createDisabled(DataType.DocumentType)" [suggestions]="suggestions?.document_types" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.DocumentType }"></pngx-input-select>
}
@if (!isFieldHidden(DocumentDetailFieldID.StoragePath)) {
<pngx-input-select [items]="storagePaths" i18n-title title="Storage path" formControlName="storage_path" [allowNull]="true" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.StoragePath)"
(createNew)="createStoragePath($event)" [hideAddButton]="createDisabled(DataType.StoragePath)" [suggestions]="suggestions?.storage_paths" i18n-placeholder placeholder="Default" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.StoragePath }"></pngx-input-select>
}
@if (!isFieldHidden(DocumentDetailFieldID.Tags)) {
<pngx-input-tags #tagsInput formControlName="tags" [suggestions]="suggestions?.tags" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.Tag)" [hideAddButton]="createDisabled(DataType.Tag)" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Tag }"></pngx-input-tags>
}
<pngx-input-select [items]="correspondents" i18n-title title="Correspondent" formControlName="correspondent" [allowNull]="true" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.Correspondent)"
(createNew)="createCorrespondent($event)" [hideAddButton]="createDisabled(DataType.Correspondent)" [suggestions]="suggestions?.correspondents" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Correspondent }"></pngx-input-select>
<pngx-input-select [items]="documentTypes" i18n-title title="Document type" formControlName="document_type" [allowNull]="true" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.DocumentType)"
(createNew)="createDocumentType($event)" [hideAddButton]="createDisabled(DataType.DocumentType)" [suggestions]="suggestions?.document_types" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.DocumentType }"></pngx-input-select>
<pngx-input-select [items]="storagePaths" i18n-title title="Storage path" formControlName="storage_path" [allowNull]="true" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.StoragePath)"
(createNew)="createStoragePath($event)" [hideAddButton]="createDisabled(DataType.StoragePath)" [suggestions]="suggestions?.storage_paths" i18n-placeholder placeholder="Default" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.StoragePath }"></pngx-input-select>
<pngx-input-tags #tagsInput formControlName="tags" [suggestions]="suggestions?.tags" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.Tag)" [hideAddButton]="createDisabled(DataType.Tag)" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Tag }"></pngx-input-tags>
@for (fieldInstance of document?.custom_fields; track fieldInstance.field; let i = $index) {
<div [formGroup]="customFieldFormFields.controls[i]">
@switch (getCustomFieldFromInstance(fieldInstance)?.data_type) {

View File

@@ -48,7 +48,6 @@ import {
} from 'src/app/data/filter-rule-type'
import { StoragePath } from 'src/app/data/storage-path'
import { Tag } from 'src/app/data/tag'
import { SETTINGS_KEYS } from 'src/app/data/ui-settings'
import { PermissionsGuard } from 'src/app/guards/permissions.guard'
import { CustomDatePipe } from 'src/app/pipes/custom-date.pipe'
import { DocumentTitlePipe } from 'src/app/pipes/document-title.pipe'
@@ -1016,7 +1015,7 @@ describe('DocumentDetailComponent', () => {
it('should display built-in pdf viewer if not disabled', () => {
initNormally()
component.document.archived_file_name = 'file.pdf'
settingsService.set(SETTINGS_KEYS.USE_NATIVE_PDF_VIEWER, false)
jest.spyOn(settingsService, 'get').mockReturnValue(false)
expect(component.useNativePdfViewer).toBeFalsy()
fixture.detectChanges()
expect(fixture.debugElement.query(By.css('pdf-viewer'))).not.toBeNull()
@@ -1025,7 +1024,7 @@ describe('DocumentDetailComponent', () => {
it('should display native pdf viewer if enabled', () => {
initNormally()
component.document.archived_file_name = 'file.pdf'
settingsService.set(SETTINGS_KEYS.USE_NATIVE_PDF_VIEWER, true)
jest.spyOn(settingsService, 'get').mockReturnValue(true)
expect(component.useNativePdfViewer).toBeTruthy()
fixture.detectChanges()
expect(fixture.debugElement.query(By.css('object'))).not.toBeNull()

View File

@@ -84,7 +84,6 @@ import { ToastService } from 'src/app/services/toast.service'
import { getFilenameFromContentDisposition } from 'src/app/utils/http'
import { ISODateAdapter } from 'src/app/utils/ngb-iso-date-adapter'
import * as UTIF from 'utif'
import { DocumentDetailFieldID } from '../admin/settings/settings.component'
import { ConfirmDialogComponent } from '../common/confirm-dialog/confirm-dialog.component'
import { PasswordRemovalConfirmDialogComponent } from '../common/confirm-dialog/password-removal-confirm-dialog/password-removal-confirm-dialog.component'
import { CustomFieldsDropdownComponent } from '../common/custom-fields-dropdown/custom-fields-dropdown.component'
@@ -282,8 +281,6 @@ export class DocumentDetailComponent
public readonly DataType = DataType
public readonly DocumentDetailFieldID = DocumentDetailFieldID
@ViewChild('nav') nav: NgbNav
@ViewChild('pdfPreview') set pdfPreview(element) {
// this gets called when component added or removed from DOM
@@ -330,12 +327,6 @@ export class DocumentDetailComponent
return this.settings.get(SETTINGS_KEYS.DOCUMENT_EDITING_OVERLAY_THUMBNAIL)
}
isFieldHidden(fieldId: DocumentDetailFieldID): boolean {
return this.settings
.get(SETTINGS_KEYS.DOCUMENT_DETAILS_HIDDEN_FIELDS)
.includes(fieldId)
}
private getRenderType(mimeType: string): ContentRenderType {
if (!mimeType) return ContentRenderType.Unknown
if (mimeType === 'application/pdf') {

View File

@@ -70,8 +70,6 @@ export const SETTINGS_KEYS = {
'general-settings:document-editing:remove-inbox-tags',
DOCUMENT_EDITING_OVERLAY_THUMBNAIL:
'general-settings:document-editing:overlay-thumbnail',
DOCUMENT_DETAILS_HIDDEN_FIELDS:
'general-settings:document-details:hidden-fields',
SEARCH_DB_ONLY: 'general-settings:search:db-only',
SEARCH_FULL_TYPE: 'general-settings:search:more-link',
EMPTY_TRASH_DELAY: 'trash_delay',
@@ -257,11 +255,6 @@ export const SETTINGS: UiSetting[] = [
type: 'boolean',
default: true,
},
{
key: SETTINGS_KEYS.DOCUMENT_DETAILS_HIDDEN_FIELDS,
type: 'array',
default: [],
},
{
key: SETTINGS_KEYS.SEARCH_DB_ONLY,
type: 'boolean',

View File

@@ -1,598 +0,0 @@
import math
import uuid
from time import perf_counter
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand
from django.core.management.base import CommandError
from django.db import reset_queries
from django.db.models import Count
from django.db.models import Q
from django.db.models import Subquery
from guardian.shortcuts import assign_perm
from documents.models import CustomField
from documents.models import CustomFieldInstance
from documents.models import Document
from documents.models import Tag
from documents.permissions import get_objects_for_user_owner_aware
from documents.permissions import permitted_document_ids
class Command(BaseCommand):
# e.g. manage.py document_perf_benchmark --documents 500000 --chunk-size 5000 --tags 40 --tags-per-doc 3 --custom-fields 6 --custom-fields-per-doc 2
help = (
"Seed a synthetic dataset and benchmark permission-filtered document queries "
"for superusers vs non-superusers."
)
def add_arguments(self, parser):
parser.add_argument(
"--documents",
type=int,
default=10000,
help="Total documents to generate (default: 10,000)",
)
parser.add_argument(
"--owner-ratio",
type=float,
default=0.6,
help="Fraction owned by the benchmarked user (default: 0.6)",
)
parser.add_argument(
"--unowned-ratio",
type=float,
default=0.1,
help="Fraction of unowned documents (default: 0.1)",
)
parser.add_argument(
"--shared-ratio",
type=float,
default=0.25,
help=(
"Fraction of other-user documents that are shared via object perms "
"with the benchmarked user (default: 0.25)"
),
)
parser.add_argument(
"--chunk-size",
type=int,
default=2000,
help="Bulk create size for documents (default: 2000)",
)
parser.add_argument(
"--iterations",
type=int,
default=3,
help="Number of timing runs per query shape (default: 3)",
)
parser.add_argument(
"--prefix",
default="perf-benchmark",
help="Title prefix used to mark generated documents (default: perf-benchmark)",
)
parser.add_argument(
"--username",
default="perf_user",
help="Username of the non-superuser to benchmark (default: perf_user)",
)
parser.add_argument(
"--other-username",
default="perf_owner",
help="Username used for documents not owned by the benchmarked user (default: perf_owner)",
)
parser.add_argument(
"--super-username",
default="perf_admin",
help="Username of the superuser baseline (default: perf_admin)",
)
parser.add_argument(
"--tags",
type=int,
default=0,
help="Number of tags to create and assign (default: 0)",
)
parser.add_argument(
"--tags-per-doc",
type=int,
default=1,
help="How many tags to attach to each document (default: 1)",
)
parser.add_argument(
"--custom-fields",
type=int,
default=0,
help="Number of string custom fields to create (default: 0)",
)
parser.add_argument(
"--custom-fields-per-doc",
type=int,
default=1,
help="How many custom field instances per document (default: 1)",
)
parser.add_argument(
"--skip-tags",
action="store_true",
help="Skip tag document_count benchmarks (useful for large datasets on Postgres)",
)
parser.add_argument(
"--skip-custom-fields",
action="store_true",
help="Skip custom field document_count benchmarks",
)
parser.add_argument(
"--reuse-existing",
action="store_true",
help="Keep previously generated documents with the given prefix instead of recreating",
)
parser.add_argument(
"--cleanup",
action="store_true",
help="Delete previously generated documents with the given prefix and exit",
)
def handle(self, *args, **options):
# keep options for downstream checks
self.options = options
document_total = options["documents"]
owner_ratio = options["owner_ratio"]
unowned_ratio = options["unowned_ratio"]
shared_ratio = options["shared_ratio"]
chunk_size = options["chunk_size"]
iterations = options["iterations"]
prefix = options["prefix"]
tags = options["tags"]
tags_per_doc = options["tags_per_doc"]
custom_fields = options["custom_fields"]
custom_fields_per_doc = options["custom_fields_per_doc"]
self._validate_ratios(owner_ratio, unowned_ratio)
if tags_per_doc < 0 or custom_fields_per_doc < 0:
raise CommandError("Per-document counts must be non-negative")
target_user, other_user, superuser = self._ensure_users(options)
skip_seed = False
if options["cleanup"]:
removed = self._cleanup(prefix)
self.stdout.write(
self.style.SUCCESS(f"Removed {removed} generated documents"),
)
return
if not options["reuse_existing"]:
removed = self._cleanup(prefix)
if removed:
self.stdout.write(f"Removed existing generated documents: {removed}")
else:
existing = Document.objects.filter(title__startswith=prefix).count()
if existing:
skip_seed = True
self.stdout.write(
f"Reusing existing dataset with prefix '{prefix}': {existing} docs",
)
if skip_seed:
dataset_size = Document.objects.filter(title__startswith=prefix).count()
self.stdout.write(
self.style.SUCCESS(
f"Dataset ready (reused): {dataset_size} docs | prefix={prefix}",
),
)
else:
self.stdout.write(
f"Seeding {document_total} documents (owner_ratio={owner_ratio}, "
f"unowned_ratio={unowned_ratio}, shared_ratio={shared_ratio})",
)
created_counts = self._seed_documents(
total=document_total,
owner_ratio=owner_ratio,
unowned_ratio=unowned_ratio,
shared_ratio=shared_ratio,
chunk_size=chunk_size,
prefix=prefix,
target_user=target_user,
other_user=other_user,
)
created_tags = []
if tags:
created_tags = self._seed_tags(prefix=prefix, count=tags)
if tags_per_doc and created_tags:
self._assign_tags_to_documents(
prefix=prefix,
tags=created_tags,
tags_per_doc=tags_per_doc,
chunk_size=chunk_size,
)
created_custom_fields = []
if custom_fields:
created_custom_fields = self._seed_custom_fields(prefix, custom_fields)
if custom_fields_per_doc and created_custom_fields:
self._seed_custom_field_instances(
prefix=prefix,
custom_fields=created_custom_fields,
per_doc=custom_fields_per_doc,
chunk_size=chunk_size,
)
dataset_size = Document.objects.filter(title__startswith=prefix).count()
self.stdout.write(
self.style.SUCCESS(
f"Dataset ready: {dataset_size} docs | owned by target {created_counts['owned']} | "
f"owned by other {created_counts['other_owned']} | unowned {created_counts['unowned']} | "
f"shared-perms {created_counts['shared']} | tags {len(created_tags)} | "
f"custom fields {len(created_custom_fields)}",
),
)
self.stdout.write("\nRunning benchmarks...\n")
self._run_benchmarks(
iterations=iterations,
target_user=target_user,
superuser=superuser,
prefix=prefix,
)
def _validate_ratios(self, owner_ratio: float, unowned_ratio: float):
if owner_ratio < 0 or unowned_ratio < 0:
raise CommandError("Ratios must be non-negative")
if owner_ratio + unowned_ratio > 1:
raise CommandError("owner-ratio + unowned-ratio cannot exceed 1.0")
def _ensure_users(self, options):
User = get_user_model()
target_user, _ = User.objects.get_or_create(
username=options["username"],
defaults={"email": "perf_user@example.com"},
)
other_user, _ = User.objects.get_or_create(
username=options["other_username"],
defaults={"email": "perf_owner@example.com"},
)
superuser, _ = User.objects.get_or_create(
username=options["super_username"],
defaults={
"email": "perf_admin@example.com",
"is_staff": True,
"is_superuser": True,
},
)
return target_user, other_user, superuser
def _cleanup(self, prefix: str) -> int:
docs_qs = Document.global_objects.filter(title__startswith=prefix)
doc_count = docs_qs.count()
if doc_count:
docs_qs.hard_delete()
tag_count = Tag.objects.filter(name__startswith=prefix).count()
if tag_count:
Tag.objects.filter(name__startswith=prefix).delete()
cf_qs = CustomField.objects.filter(name__startswith=prefix)
cf_count = cf_qs.count()
if cf_count:
cf_qs.delete()
cfi_qs = CustomFieldInstance.global_objects.filter(
document__title__startswith=prefix,
)
cfi_count = cfi_qs.count()
if cfi_count:
cfi_qs.hard_delete()
return doc_count + tag_count + cf_count + cfi_count
def _seed_documents(
self,
*,
total: int,
owner_ratio: float,
unowned_ratio: float,
shared_ratio: float,
chunk_size: int,
prefix: str,
target_user,
other_user,
) -> dict[str, int]:
target_count = math.floor(total * owner_ratio)
unowned_count = math.floor(total * unowned_ratio)
other_count = total - target_count - unowned_count
documents: list[Document] = []
other_docs: list[Document] = []
for idx in range(total):
if idx < target_count:
owner = target_user
elif idx < target_count + other_count:
owner = other_user
else:
owner = None
doc = Document(
owner=owner,
title=f"{prefix}-{idx:07d}",
mime_type="application/pdf",
checksum=self._unique_checksum(idx),
page_count=1,
)
if owner is other_user:
other_docs.append(doc)
documents.append(doc)
if len(documents) >= chunk_size:
Document.objects.bulk_create(documents, batch_size=chunk_size)
documents.clear()
if documents:
Document.objects.bulk_create(documents, batch_size=chunk_size)
shared_target = math.floor(len(other_docs) * shared_ratio)
for doc in other_docs[:shared_target]:
assign_perm("documents.view_document", target_user, doc)
return {
"owned": target_count,
"other_owned": other_count,
"unowned": unowned_count,
"shared": shared_target,
}
def _seed_tags(self, *, prefix: str, count: int) -> list[Tag]:
tags = [
Tag(
name=f"{prefix}-tag-{idx:03d}",
)
for idx in range(count)
]
Tag.objects.bulk_create(tags, ignore_conflicts=True)
return list(Tag.objects.filter(name__startswith=prefix))
def _assign_tags_to_documents(
self,
*,
prefix: str,
tags: list[Tag],
tags_per_doc: int,
chunk_size: int,
):
if not tags or tags_per_doc < 1:
return
rels = []
through = Document.tags.through
tag_ids = [t.id for t in tags]
tag_count = len(tag_ids)
iterator = (
Document.objects.filter(title__startswith=prefix)
.values_list(
"id",
flat=True,
)
.iterator()
)
for idx, doc_id in enumerate(iterator):
start = idx % tag_count
chosen = set()
for offset in range(tags_per_doc):
tag_id = tag_ids[(start + offset) % tag_count]
if tag_id in chosen:
continue
chosen.add(tag_id)
rels.append(through(document_id=doc_id, tag_id=tag_id))
if len(rels) >= chunk_size:
through.objects.bulk_create(rels, ignore_conflicts=True)
rels.clear()
if rels:
through.objects.bulk_create(rels, ignore_conflicts=True)
def _seed_custom_fields(self, prefix: str, count: int) -> list[CustomField]:
fields = [
CustomField(
name=f"{prefix}-cf-{idx:03d}",
data_type=CustomField.FieldDataType.STRING,
)
for idx in range(count)
]
CustomField.objects.bulk_create(fields, ignore_conflicts=True)
return list(CustomField.objects.filter(name__startswith=prefix))
def _seed_custom_field_instances(
self,
*,
prefix: str,
custom_fields: list[CustomField],
per_doc: int,
chunk_size: int,
):
if not custom_fields or per_doc < 1:
return
instances = []
cf_ids = [cf.id for cf in custom_fields]
cf_count = len(cf_ids)
iterator = (
Document.objects.filter(title__startswith=prefix)
.values_list(
"id",
flat=True,
)
.iterator()
)
for idx, doc_id in enumerate(iterator):
start = idx % cf_count
for offset in range(per_doc):
cf_id = cf_ids[(start + offset) % cf_count]
instances.append(
CustomFieldInstance(
document_id=doc_id,
field_id=cf_id,
value_text=f"val-{doc_id}-{cf_id}",
),
)
if len(instances) >= chunk_size:
CustomFieldInstance.objects.bulk_create(
instances,
batch_size=chunk_size,
ignore_conflicts=True,
)
instances.clear()
if instances:
CustomFieldInstance.objects.bulk_create(
instances,
batch_size=chunk_size,
ignore_conflicts=True,
)
def _run_benchmarks(self, *, iterations: int, target_user, superuser, prefix: str):
self.stdout.write("-> doc counts")
self._time_query(
label="non-superuser: id__in(values_list flat=True)",
iterations=iterations,
fn=lambda: self._count_with_values_list(target_user),
)
self._time_query(
label="non-superuser: id__in(Subquery(values_list))",
iterations=iterations,
fn=lambda: self._count_with_subquery(target_user),
)
self._time_query(
label="superuser baseline",
iterations=iterations,
fn=lambda: Document.objects.count(),
)
if not self.options.get("skip_tags"):
self.stdout.write("-> tag counts")
self._time_tag_counts(
iterations=iterations,
prefix=prefix,
user=target_user,
)
if not self.options.get("skip_custom_fields"):
self.stdout.write("-> custom field counts")
self._time_custom_field_counts(
iterations=iterations,
prefix=prefix,
user=target_user,
superuser=superuser,
)
def _count_with_values_list(self, user) -> int:
qs = get_objects_for_user_owner_aware(
user,
"documents.view_document",
Document,
)
return Document.objects.filter(id__in=qs.values_list("id", flat=True)).count()
def _count_with_subquery(self, user) -> int:
qs = get_objects_for_user_owner_aware(
user,
"documents.view_document",
Document,
)
subquery = Subquery(qs.values_list("id"))
return Document.objects.filter(id__in=subquery).count()
def _document_filter(self, user, *, use_subquery: bool):
if user is None or getattr(user, "is_superuser", False):
return Q(documents__deleted_at__isnull=True)
qs = get_objects_for_user_owner_aware(
user,
"documents.view_document",
Document,
)
ids = (
Subquery(qs.values_list("id"))
if use_subquery
else qs.values_list("id", flat=True)
)
return Q(documents__deleted_at__isnull=True, documents__id__in=ids)
def _tag_queryset(self, *, prefix: str, filter_q: Q):
return Tag.objects.filter(name__startswith=prefix).annotate(
document_count=Count("documents", filter=filter_q),
)
def _time_tag_counts(self, *, iterations: int, prefix: str, user):
if not Tag.objects.filter(name__startswith=prefix).exists():
return
self._time_query(
label="tag document_count (grouped)",
iterations=iterations,
fn=lambda: list(
Tag.documents.through.objects.filter(
document_id__in=Subquery(permitted_document_ids(user)),
)
.values("tag_id")
.annotate(c=Count("document_id"))
.values_list("tag_id", "c"),
),
)
def _time_custom_field_counts(
self,
*,
iterations: int,
prefix: str,
user,
superuser,
):
if not CustomField.objects.filter(name__startswith=prefix).exists():
return
permitted = Subquery(permitted_document_ids(user))
super_permitted = CustomFieldInstance.objects.filter(
document__deleted_at__isnull=True,
).values_list("document_id")
def _run(ids_subquery):
return list(
CustomFieldInstance.objects.filter(
document_id__in=ids_subquery,
field__name__startswith=prefix,
)
.values("field_id")
.annotate(c=Count("document_id"))
.values_list("field_id", "c"),
)
self._time_query(
label="custom fields document_count (grouped permitted)",
iterations=iterations,
fn=lambda: _run(permitted),
)
self._time_query(
label="custom fields document_count superuser baseline",
iterations=iterations,
fn=lambda: _run(super_permitted),
)
def _time_query(self, *, label: str, iterations: int, fn):
durations = []
for _ in range(iterations):
reset_queries()
start = perf_counter()
fn()
durations.append(perf_counter() - start)
avg = sum(durations) / len(durations)
self.stdout.write(
f"{label}: min={min(durations):.4f}s avg={avg:.4f}s max={max(durations):.4f}s",
)
def _unique_checksum(self, idx: int) -> str:
return f"{uuid.uuid4().hex}{idx:08d}"[:32]
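For reference, the removed benchmark command above could also be driven programmatically through Django's `call_command`; a hypothetical sketch mirroring the `manage.py` invocation in its comment, with keyword names assumed to match the argparse destinations defined in `add_arguments`:

```python
# Hypothetical sketch: equivalent of
#   manage.py document_perf_benchmark --documents 500000 --chunk-size 5000 \
#       --tags 40 --tags-per-doc 3 --custom-fields 6 --custom-fields-per-doc 2
from django.core.management import call_command

call_command(
    "document_perf_benchmark",
    documents=500_000,
    chunk_size=5_000,
    tags=40,
    tags_per_doc=3,
    custom_fields=6,
    custom_fields_per_doc=2,
)
```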

src/documents/mcp.py (new file, 481 lines)
View File

@@ -0,0 +1,481 @@
from __future__ import annotations
from django.db.models import Q
from django.http import QueryDict
from mcp_server import MCPToolset
from mcp_server import ModelQueryToolset
from mcp_server import drf_publish_create_mcp_tool
from mcp_server import drf_publish_destroy_mcp_tool
from mcp_server import drf_publish_list_mcp_tool
from mcp_server import drf_publish_update_mcp_tool
from rest_framework.response import Response
from documents.models import Correspondent
from documents.models import CustomField
from documents.models import Document
from documents.models import DocumentType
from documents.models import Note
from documents.models import SavedView
from documents.models import ShareLink
from documents.models import StoragePath
from documents.models import Tag
from documents.models import Workflow
from documents.models import WorkflowAction
from documents.models import WorkflowTrigger
from documents.permissions import get_objects_for_user_owner_aware
from documents.views import CorrespondentViewSet
from documents.views import CustomFieldViewSet
from documents.views import DocumentTypeViewSet
from documents.views import SavedViewViewSet
from documents.views import ShareLinkViewSet
from documents.views import StoragePathViewSet
from documents.views import TagViewSet
from documents.views import TasksViewSet
from documents.views import UnifiedSearchViewSet
from documents.views import WorkflowActionViewSet
from documents.views import WorkflowTriggerViewSet
from documents.views import WorkflowViewSet
VIEWSET_ACTIONS = {
"create": {"post": "create"},
"list": {"get": "list"},
"update": {"put": "update"},
"destroy": {"delete": "destroy"},
}
BODY_SCHEMA = {"type": "object", "additionalProperties": True}
VIEWSET_INSTRUCTIONS = {
CorrespondentViewSet: "Manage correspondents.",
TagViewSet: "Manage tags.",
UnifiedSearchViewSet: "Search and manage documents.",
DocumentTypeViewSet: "Manage document types.",
StoragePathViewSet: "Manage storage paths.",
SavedViewViewSet: "Manage saved views.",
ShareLinkViewSet: "Manage share links.",
WorkflowTriggerViewSet: "Manage workflow triggers.",
WorkflowActionViewSet: "Manage workflow actions.",
WorkflowViewSet: "Manage workflows.",
CustomFieldViewSet: "Manage custom fields.",
TasksViewSet: "List background tasks.",
}
class OwnerAwareQueryToolsetMixin:
permission: str
def get_queryset(self):
user = getattr(self.request, "user", None)
if not user or not user.is_authenticated:
return self.model.objects.none()
if user.is_superuser:
return self.model._default_manager.all()
return get_objects_for_user_owner_aware(user, self.permission, self.model)
class DocumentQueryToolset(ModelQueryToolset):
model = Document
search_fields = ["title", "content"]
def get_queryset(self):
user = getattr(self.request, "user", None)
if not user or not user.is_authenticated:
return Document.objects.none()
if user.is_superuser:
return Document.objects.all()
return get_objects_for_user_owner_aware(
user,
"documents.view_document",
Document,
)
class CorrespondentQueryToolset(OwnerAwareQueryToolsetMixin, ModelQueryToolset):
model = Correspondent
permission = "documents.view_correspondent"
class TagQueryToolset(OwnerAwareQueryToolsetMixin, ModelQueryToolset):
model = Tag
permission = "documents.view_tag"
class DocumentTypeQueryToolset(OwnerAwareQueryToolsetMixin, ModelQueryToolset):
model = DocumentType
permission = "documents.view_documenttype"
class StoragePathQueryToolset(OwnerAwareQueryToolsetMixin, ModelQueryToolset):
model = StoragePath
permission = "documents.view_storagepath"
class SavedViewQueryToolset(OwnerAwareQueryToolsetMixin, ModelQueryToolset):
model = SavedView
permission = "documents.view_savedview"
class ShareLinkQueryToolset(OwnerAwareQueryToolsetMixin, ModelQueryToolset):
model = ShareLink
permission = "documents.view_sharelink"
class WorkflowTriggerQueryToolset(OwnerAwareQueryToolsetMixin, ModelQueryToolset):
model = WorkflowTrigger
permission = "documents.view_workflowtrigger"
class WorkflowActionQueryToolset(OwnerAwareQueryToolsetMixin, ModelQueryToolset):
model = WorkflowAction
permission = "documents.view_workflowaction"
class WorkflowQueryToolset(OwnerAwareQueryToolsetMixin, ModelQueryToolset):
model = Workflow
permission = "documents.view_workflow"
class NoteQueryToolset(ModelQueryToolset):
model = Note
def get_queryset(self):
user = getattr(self.request, "user", None)
if not user or not user.is_authenticated:
return Note.objects.none()
if user.is_superuser:
return Note.objects.all()
return Note.objects.filter(
document__in=get_objects_for_user_owner_aware(
user,
"documents.view_document",
Document,
),
)
class CustomFieldQueryToolset(ModelQueryToolset):
model = CustomField
def get_queryset(self):
user = getattr(self.request, "user", None)
base = CustomField.objects.all()
if not user or not user.is_authenticated:
return base.none()
if user.is_superuser:
return base
return base.filter(
Q(
fields__document__id__in=get_objects_for_user_owner_aware(
user,
"documents.view_document",
Document,
),
)
| Q(fields__document__isnull=True),
).distinct()
class DocumentSearchTools(MCPToolset):
def search_documents(
self,
query: str | None = None,
more_like_id: int | None = None,
fields: list[str] | None = None,
page: int | None = None,
page_size: int | None = None,
*,
full_perms: bool | None = None,
) -> dict:
"""Search documents using the full-text index."""
if not query and not more_like_id:
raise ValueError("Provide either query or more_like_id.")
request = self.request
if request is None:
raise ValueError("Request context is required.")
viewset = UnifiedSearchViewSet()
viewset.request = request
viewset.args = ()
viewset.kwargs = {}
viewset.action = "list"
viewset.format_kwarg = None
viewset.check_permissions(request)
query_params = QueryDict(mutable=True)
if query:
query_params["query"] = query
if more_like_id:
query_params["more_like_id"] = str(more_like_id)
if full_perms is not None:
query_params["full_perms"] = str(full_perms).lower()
if page:
query_params["page"] = str(page)
if page_size:
query_params["page_size"] = str(page_size)
if fields:
query_params.setlist("fields", fields)
request._request.GET = query_params
response = viewset.list(request)
if isinstance(response, Response):
return response.data
if hasattr(response, "data"):
return response.data
return {
"detail": getattr(response, "content", b"").decode() or "Search failed.",
}
drf_publish_create_mcp_tool(
CorrespondentViewSet,
actions=VIEWSET_ACTIONS["create"],
instructions=VIEWSET_INSTRUCTIONS[CorrespondentViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_list_mcp_tool(
CorrespondentViewSet,
actions=VIEWSET_ACTIONS["list"],
instructions=VIEWSET_INSTRUCTIONS[CorrespondentViewSet],
)
drf_publish_update_mcp_tool(
CorrespondentViewSet,
actions=VIEWSET_ACTIONS["update"],
instructions=VIEWSET_INSTRUCTIONS[CorrespondentViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_destroy_mcp_tool(
CorrespondentViewSet,
actions=VIEWSET_ACTIONS["destroy"],
instructions=VIEWSET_INSTRUCTIONS[CorrespondentViewSet],
)
drf_publish_create_mcp_tool(
TagViewSet,
actions=VIEWSET_ACTIONS["create"],
instructions=VIEWSET_INSTRUCTIONS[TagViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_list_mcp_tool(
TagViewSet,
actions=VIEWSET_ACTIONS["list"],
instructions=VIEWSET_INSTRUCTIONS[TagViewSet],
)
drf_publish_update_mcp_tool(
TagViewSet,
actions=VIEWSET_ACTIONS["update"],
instructions=VIEWSET_INSTRUCTIONS[TagViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_destroy_mcp_tool(
TagViewSet,
actions=VIEWSET_ACTIONS["destroy"],
instructions=VIEWSET_INSTRUCTIONS[TagViewSet],
)
drf_publish_list_mcp_tool(
UnifiedSearchViewSet,
actions=VIEWSET_ACTIONS["list"],
instructions=VIEWSET_INSTRUCTIONS[UnifiedSearchViewSet],
)
drf_publish_update_mcp_tool(
UnifiedSearchViewSet,
actions=VIEWSET_ACTIONS["update"],
instructions=VIEWSET_INSTRUCTIONS[UnifiedSearchViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_destroy_mcp_tool(
UnifiedSearchViewSet,
actions=VIEWSET_ACTIONS["destroy"],
instructions=VIEWSET_INSTRUCTIONS[UnifiedSearchViewSet],
)
drf_publish_create_mcp_tool(
DocumentTypeViewSet,
actions=VIEWSET_ACTIONS["create"],
instructions=VIEWSET_INSTRUCTIONS[DocumentTypeViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_list_mcp_tool(
DocumentTypeViewSet,
actions=VIEWSET_ACTIONS["list"],
instructions=VIEWSET_INSTRUCTIONS[DocumentTypeViewSet],
)
drf_publish_update_mcp_tool(
DocumentTypeViewSet,
actions=VIEWSET_ACTIONS["update"],
instructions=VIEWSET_INSTRUCTIONS[DocumentTypeViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_destroy_mcp_tool(
DocumentTypeViewSet,
actions=VIEWSET_ACTIONS["destroy"],
instructions=VIEWSET_INSTRUCTIONS[DocumentTypeViewSet],
)
drf_publish_create_mcp_tool(
StoragePathViewSet,
actions=VIEWSET_ACTIONS["create"],
instructions=VIEWSET_INSTRUCTIONS[StoragePathViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_list_mcp_tool(
StoragePathViewSet,
actions=VIEWSET_ACTIONS["list"],
instructions=VIEWSET_INSTRUCTIONS[StoragePathViewSet],
)
drf_publish_update_mcp_tool(
StoragePathViewSet,
actions=VIEWSET_ACTIONS["update"],
instructions=VIEWSET_INSTRUCTIONS[StoragePathViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_destroy_mcp_tool(
StoragePathViewSet,
actions=VIEWSET_ACTIONS["destroy"],
instructions=VIEWSET_INSTRUCTIONS[StoragePathViewSet],
)
drf_publish_create_mcp_tool(
SavedViewViewSet,
actions=VIEWSET_ACTIONS["create"],
instructions=VIEWSET_INSTRUCTIONS[SavedViewViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_list_mcp_tool(
SavedViewViewSet,
actions=VIEWSET_ACTIONS["list"],
instructions=VIEWSET_INSTRUCTIONS[SavedViewViewSet],
)
drf_publish_update_mcp_tool(
SavedViewViewSet,
actions=VIEWSET_ACTIONS["update"],
instructions=VIEWSET_INSTRUCTIONS[SavedViewViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_destroy_mcp_tool(
SavedViewViewSet,
actions=VIEWSET_ACTIONS["destroy"],
instructions=VIEWSET_INSTRUCTIONS[SavedViewViewSet],
)
drf_publish_create_mcp_tool(
ShareLinkViewSet,
actions=VIEWSET_ACTIONS["create"],
instructions=VIEWSET_INSTRUCTIONS[ShareLinkViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_list_mcp_tool(
ShareLinkViewSet,
actions=VIEWSET_ACTIONS["list"],
instructions=VIEWSET_INSTRUCTIONS[ShareLinkViewSet],
)
drf_publish_update_mcp_tool(
ShareLinkViewSet,
actions=VIEWSET_ACTIONS["update"],
instructions=VIEWSET_INSTRUCTIONS[ShareLinkViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_destroy_mcp_tool(
ShareLinkViewSet,
actions=VIEWSET_ACTIONS["destroy"],
instructions=VIEWSET_INSTRUCTIONS[ShareLinkViewSet],
)
drf_publish_create_mcp_tool(
WorkflowTriggerViewSet,
actions=VIEWSET_ACTIONS["create"],
instructions=VIEWSET_INSTRUCTIONS[WorkflowTriggerViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_list_mcp_tool(
WorkflowTriggerViewSet,
actions=VIEWSET_ACTIONS["list"],
instructions=VIEWSET_INSTRUCTIONS[WorkflowTriggerViewSet],
)
drf_publish_update_mcp_tool(
WorkflowTriggerViewSet,
actions=VIEWSET_ACTIONS["update"],
instructions=VIEWSET_INSTRUCTIONS[WorkflowTriggerViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_destroy_mcp_tool(
WorkflowTriggerViewSet,
actions=VIEWSET_ACTIONS["destroy"],
instructions=VIEWSET_INSTRUCTIONS[WorkflowTriggerViewSet],
)
drf_publish_create_mcp_tool(
WorkflowActionViewSet,
actions=VIEWSET_ACTIONS["create"],
instructions=VIEWSET_INSTRUCTIONS[WorkflowActionViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_list_mcp_tool(
WorkflowActionViewSet,
actions=VIEWSET_ACTIONS["list"],
instructions=VIEWSET_INSTRUCTIONS[WorkflowActionViewSet],
)
drf_publish_update_mcp_tool(
WorkflowActionViewSet,
actions=VIEWSET_ACTIONS["update"],
instructions=VIEWSET_INSTRUCTIONS[WorkflowActionViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_destroy_mcp_tool(
WorkflowActionViewSet,
actions=VIEWSET_ACTIONS["destroy"],
instructions=VIEWSET_INSTRUCTIONS[WorkflowActionViewSet],
)
drf_publish_create_mcp_tool(
WorkflowViewSet,
actions=VIEWSET_ACTIONS["create"],
instructions=VIEWSET_INSTRUCTIONS[WorkflowViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_list_mcp_tool(
WorkflowViewSet,
actions=VIEWSET_ACTIONS["list"],
instructions=VIEWSET_INSTRUCTIONS[WorkflowViewSet],
)
drf_publish_update_mcp_tool(
WorkflowViewSet,
actions=VIEWSET_ACTIONS["update"],
instructions=VIEWSET_INSTRUCTIONS[WorkflowViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_destroy_mcp_tool(
WorkflowViewSet,
actions=VIEWSET_ACTIONS["destroy"],
instructions=VIEWSET_INSTRUCTIONS[WorkflowViewSet],
)
drf_publish_create_mcp_tool(
CustomFieldViewSet,
actions=VIEWSET_ACTIONS["create"],
instructions=VIEWSET_INSTRUCTIONS[CustomFieldViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_list_mcp_tool(
CustomFieldViewSet,
actions=VIEWSET_ACTIONS["list"],
instructions=VIEWSET_INSTRUCTIONS[CustomFieldViewSet],
)
drf_publish_update_mcp_tool(
CustomFieldViewSet,
actions=VIEWSET_ACTIONS["update"],
instructions=VIEWSET_INSTRUCTIONS[CustomFieldViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_destroy_mcp_tool(
CustomFieldViewSet,
actions=VIEWSET_ACTIONS["destroy"],
instructions=VIEWSET_INSTRUCTIONS[CustomFieldViewSet],
)
drf_publish_list_mcp_tool(
TasksViewSet,
actions=VIEWSET_ACTIONS["list"],
instructions=VIEWSET_INSTRUCTIONS[TasksViewSet],
)

View File

@@ -139,25 +139,15 @@ def get_document_count_filter_for_user(user):
if getattr(user, "is_superuser", False):
return Q(documents__deleted_at__isnull=True)
return Q(
documents__id__in=permitted_document_ids(user),
documents__deleted_at__isnull=True,
documents__id__in=get_objects_for_user_owner_aware(
user,
"documents.view_document",
Document,
).values_list("id", flat=True),
)
def permitted_document_ids(user):
"""
Return a Subquery of permitted, non-deleted document IDs for the user.
Used to avoid repeated joins to the Document table in count annotations.
"""
if user is None or not getattr(user, "is_authenticated", False):
return Document.objects.none().values_list("id")
qs = get_objects_for_user_owner_aware(
user,
"documents.view_document",
Document,
).filter(deleted_at__isnull=True)
return qs.values_list("id")
def get_objects_for_user_owner_aware(
user,
perms,
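The `permitted_document_ids` docstring above describes feeding permitted, non-deleted document IDs into count annotations without re-joining the Document table; a hypothetical sketch of that pattern, using names from the surrounding code (this commit changes how the counts are built, so treat it as an illustration of the helper rather than the final implementation):

```python
# Hypothetical sketch of the pattern described in the docstring above.
from django.db.models import Count, Q, Subquery

from documents.models import Tag
from documents.permissions import permitted_document_ids

def tags_with_document_counts(user):
    # Reuse one subquery of permitted IDs instead of repeating the permission join.
    permitted = Subquery(permitted_document_ids(user))
    return Tag.objects.annotate(
        document_count=Count(
            "documents",
            filter=Q(
                documents__deleted_at__isnull=True,
                documents__id__in=permitted,
            ),
            distinct=True,
        ),
    )
```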

View File

@@ -24,7 +24,6 @@ from django.core.validators import RegexValidator
from django.core.validators import integer_validator
from django.db.models import Count
from django.db.models import Q
from django.db.models import Subquery
from django.db.models.functions import Lower
from django.utils.crypto import get_random_string
from django.utils.dateparse import parse_datetime
@@ -72,9 +71,9 @@ from documents.models import WorkflowActionEmail
from documents.models import WorkflowActionWebhook
from documents.models import WorkflowTrigger
from documents.parsers import is_mime_type_supported
from documents.permissions import get_document_count_filter_for_user
from documents.permissions import get_groups_with_only_permission
from documents.permissions import get_objects_for_user_owner_aware
from documents.permissions import permitted_document_ids
from documents.permissions import set_permissions_for_object
from documents.regex import validate_regex_pattern
from documents.templating.filepath import validate_filepath_template_and_render
@@ -590,41 +589,18 @@ class TagSerializer(MatchingModelSerializer, OwnedObjectSerializer):
if children_map is not None:
children = children_map.get(obj.pk, [])
else:
filter_q = self.context.get("document_count_filter")
request = self.context.get("request")
user = getattr(request, "user", None) if request else None
if filter_q is None:
user = getattr(request, "user", None) if request else None
filter_q = get_document_count_filter_for_user(user)
self.context["document_count_filter"] = filter_q
filter_kind = self.context.get("document_count_filter")
if filter_kind is None:
filter_kind = (
"superuser"
if user and getattr(user, "is_superuser", False)
else "restricted"
)
self.context["document_count_filter"] = filter_kind
queryset = obj.get_children_queryset().select_related("owner")
if filter_kind == "superuser":
children = queryset.annotate(
document_count=Count(
"documents",
filter=Q(documents__deleted_at__isnull=True),
distinct=True,
),
)
else:
permitted_ids = Subquery(permitted_document_ids(user))
counts = dict(
Tag.documents.through.objects.filter(
document_id__in=permitted_ids,
)
.values("tag_id")
.annotate(c=Count("document_id"))
.values_list("tag_id", "c"),
)
children = list(queryset)
for child in children:
child.document_count = counts.get(child.id, 0)
children = (
obj.get_children_queryset()
.select_related("owner")
.annotate(document_count=Count("documents", filter=filter_q))
)
view = self.context.get("view")
ordering = (
@@ -633,11 +609,7 @@ class TagSerializer(MatchingModelSerializer, OwnedObjectSerializer):
else None
)
ordering = ordering or (Lower("name"),)
if hasattr(children, "order_by"):
children = children.order_by(*ordering)
else:
# children is a list (pre-fetched); apply basic ordering on name
children = sorted(children, key=lambda c: (c.name or "").lower())
children = children.order_by(*ordering)
serializer = TagSerializer(
children,

View File

@@ -33,7 +33,6 @@ from django.db.models import IntegerField
from django.db.models import Max
from django.db.models import Model
from django.db.models import Q
from django.db.models import Subquery
from django.db.models import Sum
from django.db.models import When
from django.db.models.functions import Lower
@@ -154,7 +153,6 @@ from documents.permissions import ViewDocumentsPermissions
from documents.permissions import get_document_count_filter_for_user
from documents.permissions import get_objects_for_user_owner_aware
from documents.permissions import has_perms_owner_aware
from documents.permissions import permitted_document_ids
from documents.permissions import set_permissions_for_object
from documents.schema import generate_object_with_permissions_schema
from documents.serialisers import AcknowledgeTasksViewSerializer
@@ -3009,32 +3007,27 @@ class CustomFieldViewSet(ModelViewSet):
queryset = CustomField.objects.all().order_by("-created")
def get_queryset(self):
user = self.request.user
if user is None or user.is_superuser:
return (
super()
.get_queryset()
.annotate(
document_count=Count(
"fields",
filter=Q(fields__document__deleted_at__isnull=True),
distinct=True,
),
filter = (
Q(fields__document__deleted_at__isnull=True)
if self.request.user is None or self.request.user.is_superuser
else (
Q(
fields__document__deleted_at__isnull=True,
fields__document__id__in=get_objects_for_user_owner_aware(
self.request.user,
"documents.view_document",
Document,
).values_list("id", flat=True),
)
)
permitted_ids = Subquery(permitted_document_ids(user))
)
return (
super()
.get_queryset()
.annotate(
document_count=Count(
"fields",
filter=Q(
fields__document__deleted_at__isnull=True,
fields__document_id__in=permitted_ids,
),
distinct=True,
filter=filter,
),
)
)

src/paperless/mcp.py (new file, 82 lines)
View File

@@ -0,0 +1,82 @@
from mcp_server import drf_publish_create_mcp_tool
from mcp_server import drf_publish_destroy_mcp_tool
from mcp_server import drf_publish_list_mcp_tool
from mcp_server import drf_publish_update_mcp_tool
from paperless.views import ApplicationConfigurationViewSet
from paperless.views import GroupViewSet
from paperless.views import UserViewSet
VIEWSET_ACTIONS = {
"create": {"post": "create"},
"list": {"get": "list"},
"update": {"put": "update"},
"destroy": {"delete": "destroy"},
}
BODY_SCHEMA = {"type": "object", "additionalProperties": True}
VIEWSET_INSTRUCTIONS = {
UserViewSet: "Manage Paperless users.",
GroupViewSet: "Manage Paperless groups.",
ApplicationConfigurationViewSet: "Manage application configuration.",
}
drf_publish_create_mcp_tool(
UserViewSet,
actions=VIEWSET_ACTIONS["create"],
instructions=VIEWSET_INSTRUCTIONS[UserViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_list_mcp_tool(
UserViewSet,
actions=VIEWSET_ACTIONS["list"],
instructions=VIEWSET_INSTRUCTIONS[UserViewSet],
)
drf_publish_update_mcp_tool(
UserViewSet,
actions=VIEWSET_ACTIONS["update"],
instructions=VIEWSET_INSTRUCTIONS[UserViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_destroy_mcp_tool(
UserViewSet,
actions=VIEWSET_ACTIONS["destroy"],
instructions=VIEWSET_INSTRUCTIONS[UserViewSet],
)
drf_publish_create_mcp_tool(
GroupViewSet,
actions=VIEWSET_ACTIONS["create"],
instructions=VIEWSET_INSTRUCTIONS[GroupViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_list_mcp_tool(
GroupViewSet,
actions=VIEWSET_ACTIONS["list"],
instructions=VIEWSET_INSTRUCTIONS[GroupViewSet],
)
drf_publish_update_mcp_tool(
GroupViewSet,
actions=VIEWSET_ACTIONS["update"],
instructions=VIEWSET_INSTRUCTIONS[GroupViewSet],
body_schema=BODY_SCHEMA,
)
drf_publish_destroy_mcp_tool(
GroupViewSet,
actions=VIEWSET_ACTIONS["destroy"],
instructions=VIEWSET_INSTRUCTIONS[GroupViewSet],
)
drf_publish_list_mcp_tool(
ApplicationConfigurationViewSet,
actions=VIEWSET_ACTIONS["list"],
instructions=VIEWSET_INSTRUCTIONS[ApplicationConfigurationViewSet],
)
drf_publish_update_mcp_tool(
ApplicationConfigurationViewSet,
actions=VIEWSET_ACTIONS["update"],
instructions=VIEWSET_INSTRUCTIONS[ApplicationConfigurationViewSet],
body_schema=BODY_SCHEMA,
)

View File

@@ -348,6 +348,7 @@ INSTALLED_APPS = [
"allauth.headless",
"drf_spectacular",
"drf_spectacular_sidecar",
"mcp_server",
"treenode",
*env_apps,
]
@@ -612,6 +613,17 @@ def _parse_remote_user_settings() -> str:
HTTP_REMOTE_USER_HEADER_NAME = _parse_remote_user_settings()
DJANGO_MCP_AUTHENTICATION_CLASSES = REST_FRAMEWORK["DEFAULT_AUTHENTICATION_CLASSES"]
DJANGO_MCP_GLOBAL_SERVER_CONFIG = {
"name": "paperless-ngx",
"instructions": (
"Use the MCP tools to search, query, and manage Paperless-ngx data. "
"Use `search_documents` for full-text search, and `query_data_collections` "
"for structured queries against available collections. "
"Write operations are exposed via DRF-backed tools for create/update/delete."
),
}
# X-Frame options for embedded PDF display:
X_FRAME_OPTIONS = "SAMEORIGIN"

View File

@@ -356,6 +356,7 @@ urlpatterns = [
],
),
),
path("", include("mcp_server.urls")),
# Root of the Frontend
re_path(
r".*",

src/paperless_mail/mcp.py (new file, 129 lines)
View File

@@ -0,0 +1,129 @@
from mcp_server import ModelQueryToolset
from mcp_server import drf_publish_create_mcp_tool
from mcp_server import drf_publish_destroy_mcp_tool
from mcp_server import drf_publish_list_mcp_tool
from mcp_server import drf_publish_update_mcp_tool

from documents.permissions import get_objects_for_user_owner_aware
from paperless_mail.models import MailAccount
from paperless_mail.models import MailRule
from paperless_mail.models import ProcessedMail
from paperless_mail.views import MailAccountViewSet
from paperless_mail.views import MailRuleViewSet
from paperless_mail.views import ProcessedMailViewSet

VIEWSET_ACTIONS = {
    "create": {"post": "create"},
    "list": {"get": "list"},
    "update": {"put": "update"},
    "destroy": {"delete": "destroy"},
}

BODY_SCHEMA = {"type": "object", "additionalProperties": True}

VIEWSET_INSTRUCTIONS = {
    MailAccountViewSet: "Manage mail accounts.",
    MailRuleViewSet: "Manage mail rules.",
    ProcessedMailViewSet: "List processed mail.",
}


class MailAccountQueryToolset(ModelQueryToolset):
    model = MailAccount

    def get_queryset(self):
        user = getattr(self.request, "user", None)
        if not user or not user.is_authenticated:
            return MailAccount.objects.none()
        if user.is_superuser:
            return MailAccount.objects.all()
        return get_objects_for_user_owner_aware(
            user,
            "paperless_mail.view_mailaccount",
            MailAccount,
        )


class MailRuleQueryToolset(ModelQueryToolset):
    model = MailRule

    def get_queryset(self):
        user = getattr(self.request, "user", None)
        if not user or not user.is_authenticated:
            return MailRule.objects.none()
        if user.is_superuser:
            return MailRule.objects.all()
        return get_objects_for_user_owner_aware(
            user,
            "paperless_mail.view_mailrule",
            MailRule,
        )


class ProcessedMailQueryToolset(ModelQueryToolset):
    model = ProcessedMail

    def get_queryset(self):
        user = getattr(self.request, "user", None)
        if not user or not user.is_authenticated:
            return ProcessedMail.objects.none()
        if user.is_superuser:
            return ProcessedMail.objects.all()
        return get_objects_for_user_owner_aware(
            user,
            "paperless_mail.view_processedmail",
            ProcessedMail,
        )


drf_publish_create_mcp_tool(
    MailAccountViewSet,
    actions=VIEWSET_ACTIONS["create"],
    instructions=VIEWSET_INSTRUCTIONS[MailAccountViewSet],
    body_schema=BODY_SCHEMA,
)
drf_publish_list_mcp_tool(
    MailAccountViewSet,
    actions=VIEWSET_ACTIONS["list"],
    instructions=VIEWSET_INSTRUCTIONS[MailAccountViewSet],
)
drf_publish_update_mcp_tool(
    MailAccountViewSet,
    actions=VIEWSET_ACTIONS["update"],
    instructions=VIEWSET_INSTRUCTIONS[MailAccountViewSet],
    body_schema=BODY_SCHEMA,
)
drf_publish_destroy_mcp_tool(
    MailAccountViewSet,
    actions=VIEWSET_ACTIONS["destroy"],
    instructions=VIEWSET_INSTRUCTIONS[MailAccountViewSet],
)

drf_publish_create_mcp_tool(
    MailRuleViewSet,
    actions=VIEWSET_ACTIONS["create"],
    instructions=VIEWSET_INSTRUCTIONS[MailRuleViewSet],
    body_schema=BODY_SCHEMA,
)
drf_publish_list_mcp_tool(
    MailRuleViewSet,
    actions=VIEWSET_ACTIONS["list"],
    instructions=VIEWSET_INSTRUCTIONS[MailRuleViewSet],
)
drf_publish_update_mcp_tool(
    MailRuleViewSet,
    actions=VIEWSET_ACTIONS["update"],
    instructions=VIEWSET_INSTRUCTIONS[MailRuleViewSet],
    body_schema=BODY_SCHEMA,
)
drf_publish_destroy_mcp_tool(
    MailRuleViewSet,
    actions=VIEWSET_ACTIONS["destroy"],
    instructions=VIEWSET_INSTRUCTIONS[MailRuleViewSet],
)

drf_publish_list_mcp_tool(
    ProcessedMailViewSet,
    actions=VIEWSET_ACTIONS["list"],
    instructions=VIEWSET_INSTRUCTIONS[ProcessedMailViewSet],
)
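The three toolsets repeat one pattern: anonymous requests get an empty queryset, superusers see everything, and everyone else is filtered through `get_objects_for_user_owner_aware` with the model's view permission. The same shape would apply to any other model exposed through `query_data_collections`; an illustrative sketch for an additional collection (`Correspondent` is a real `documents` model, but this particular toolset is not part of the change shown here):

```python
# Illustrative only: an owner-aware query toolset for another model, following
# the same pattern as the mail toolsets above.
from mcp_server import ModelQueryToolset

from documents.models import Correspondent
from documents.permissions import get_objects_for_user_owner_aware


class CorrespondentQueryToolset(ModelQueryToolset):
    model = Correspondent

    def get_queryset(self):
        user = getattr(self.request, "user", None)
        if not user or not user.is_authenticated:
            return Correspondent.objects.none()
        if user.is_superuser:
            return Correspondent.objects.all()
        return get_objects_for_user_owner_aware(
            user,
            "documents.view_correspondent",
            Correspondent,
        )
```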

156
uv.lock generated
View File

@@ -1038,6 +1038,22 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/2f/23/63a7d868373a73d25c4a5c2dd3cce3aaeb22fbee82560d42b6e93ba01403/django_guardian-3.2.0-py3-none-any.whl", hash = "sha256:0768565a057988a93fc4a1d93649c4a794abfd7473a8408a079cfbf83c559d77", size = 134674, upload-time = "2025-09-16T10:35:51.69Z" },
]
[[package]]
name = "django-mcp-server"
version = "0.5.7"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "django", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "djangorestframework", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "inflection", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "mcp", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "uritemplate", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b2/70/e2cf268b77d0aa171b72763325279284561dbbd9b80ed4fd6975b4b7bd9c/django_mcp_server-0.5.7.tar.gz", hash = "sha256:5077f8fabf5fb621b5ce490afd0db60f21e57b3a451ed14a9f44aef545ea4eee", size = 23910, upload-time = "2025-10-10T17:13:34.681Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/01/f78a11f51437f70b4ff2d9f131d47acf82c2a4cf78d63e9cf291e3727054/django_mcp_server-0.5.7-py3-none-any.whl", hash = "sha256:04b58bf02623aaee59708c3661ffe17981acd4532587c38b6cfe2c9e7090c6d3", size = 26389, upload-time = "2025-10-10T17:13:33.56Z" },
]
[[package]]
name = "django-multiselectfield"
version = "1.0.1"
@@ -1706,6 +1722,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/45/4b/2b81e876abf77b4af3372aff731f4f6722840ebc7dcfd85778eaba271733/httpx_oauth-0.16.1-py3-none-any.whl", hash = "sha256:2fcad82f80f28d0473a0fc4b4eda223dc952050af7e3a8c8781342d850f09fb5", size = 38056, upload-time = "2024-12-20T07:23:00.394Z" },
]
[[package]]
name = "httpx-sse"
version = "0.4.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/0f/4c/751061ffa58615a32c31b2d82e8482be8dd4a89154f003147acee90f2be9/httpx_sse-0.4.3.tar.gz", hash = "sha256:9b1ed0127459a66014aec3c56bebd93da3c1bc8bb6618c8082039a44889a755d", size = 15943, upload-time = "2025-10-10T21:48:22.271Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d2/fd/6668e5aec43ab844de6fc74927e155a3b37bf40d7c3790e49fc0406b6578/httpx_sse-0.4.3-py3-none-any.whl", hash = "sha256:0ac1c9fe3c0afad2e0ebb25a934a59f4c7823b60792691f779fad2c5568830fc", size = 8960, upload-time = "2025-10-10T21:48:21.158Z" },
]
[[package]]
name = "huggingface-hub"
version = "0.30.2"
@@ -2378,6 +2403,30 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/be/2f/5108cb3ee4ba6501748c4908b908e55f42a5b66245b4cfe0c99326e1ef6e/marshmallow-3.26.2-py3-none-any.whl", hash = "sha256:013fa8a3c4c276c24d26d84ce934dc964e2aa794345a0f8c7e5a7191482c8a73", size = 50964, upload-time = "2025-12-22T06:53:51.801Z" },
]
[[package]]
name = "mcp"
version = "1.26.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "httpx", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "httpx-sse", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "jsonschema", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "pydantic-settings", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "pyjwt", extra = ["crypto"], marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "python-multipart", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "sse-starlette", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "starlette", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "typing-inspection", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "uvicorn", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/fc/6d/62e76bbb8144d6ed86e202b5edd8a4cb631e7c8130f3f4893c3f90262b10/mcp-1.26.0.tar.gz", hash = "sha256:db6e2ef491eecc1a0d93711a76f28dec2e05999f93afd48795da1c1137142c66", size = 608005, upload-time = "2026-01-24T19:40:32.468Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fd/d9/eaa1f80170d2b7c5ba23f3b59f766f3a0bb41155fbc32a69adfa1adaaef9/mcp-1.26.0-py3-none-any.whl", hash = "sha256:904a21c33c25aa98ddbeb47273033c435e595bbacfdb177f4bd87f6dceebe1ca", size = 233615, upload-time = "2026-01-24T19:40:30.652Z" },
]
[[package]]
name = "mdurl"
version = "0.1.2"
@@ -2937,6 +2986,7 @@ dependencies = [
{ name = "django-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "django-filter", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "django-guardian", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "django-mcp-server", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "django-multiselectfield", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "django-soft-delete", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "django-treenode", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -3085,6 +3135,7 @@ requires-dist = [
{ name = "django-extensions", specifier = "~=4.1" },
{ name = "django-filter", specifier = "~=25.1" },
{ name = "django-guardian", specifier = "~=3.2.0" },
{ name = "django-mcp-server", specifier = "~=0.5.7" },
{ name = "django-multiselectfield", specifier = "~=1.0.1" },
{ name = "django-soft-delete", specifier = "~=1.0.18" },
{ name = "django-treenode", specifier = ">=0.23.2" },
@@ -3790,6 +3841,20 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" },
]
[[package]]
name = "pydantic-settings"
version = "2.12.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "python-dotenv", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "typing-inspection", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/43/4b/ac7e0aae12027748076d72a8764ff1c9d82ca75a7a52622e67ed3f765c54/pydantic_settings-2.12.0.tar.gz", hash = "sha256:005538ef951e3c2a68e1c08b292b5f2e71490def8589d4221b95dab00dafcfd0", size = 194184, upload-time = "2025-11-10T14:25:47.013Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809", size = 51880, upload-time = "2025-11-10T14:25:45.546Z" },
]
[[package]]
name = "pygments"
version = "2.19.2"
@@ -4007,6 +4072,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/6c/73/9f872cb81fc5c3bb48f7227872c28975f998f3e7c2b1c16e95e6432bbb90/python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3", size = 13840, upload-time = "2022-06-07T20:16:57.763Z" },
]
[[package]]
name = "python-multipart"
version = "0.0.22"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/94/01/979e98d542a70714b0cb2b6728ed0b7c46792b695e3eaec3e20711271ca3/python_multipart-0.0.22.tar.gz", hash = "sha256:7340bef99a7e0032613f56dc36027b959fd3b30a787ed62d310e951f7c3a3a58", size = 37612, upload-time = "2026-01-25T10:15:56.219Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1b/d0/397f9626e711ff749a95d96b7af99b9c566a9bb5129b8e4c10fc4d100304/python_multipart-0.0.22-py3-none-any.whl", hash = "sha256:2b2cd894c83d21bf49d702499531c7bafd057d730c201782048f7945d82de155", size = 24579, upload-time = "2026-01-25T10:15:54.811Z" },
]
[[package]]
name = "pytz"
version = "2025.2"
@@ -4948,6 +5022,32 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/a9/5c/bfd6bd0bf979426d405cc6e71eceb8701b148b16c21d2dc3c261efc61c7b/sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca", size = 44415, upload-time = "2024-12-10T12:05:27.824Z" },
]
[[package]]
name = "sse-starlette"
version = "3.2.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "starlette", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8b/8d/00d280c03ffd39aaee0e86ec81e2d3b9253036a0f93f51d10503adef0e65/sse_starlette-3.2.0.tar.gz", hash = "sha256:8127594edfb51abe44eac9c49e59b0b01f1039d0c7461c6fd91d4e03b70da422", size = 27253, upload-time = "2026-01-17T13:11:05.62Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/96/7f/832f015020844a8b8f7a9cbc103dd76ba8e3875004c41e08440ea3a2b41a/sse_starlette-3.2.0-py3-none-any.whl", hash = "sha256:5876954bd51920fc2cd51baee47a080eb88a37b5b784e615abb0b283f801cdbf", size = 12763, upload-time = "2026-01-17T13:11:03.775Z" },
]
[[package]]
name = "starlette"
version = "0.52.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "typing-extensions", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux')" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c4/68/79977123bb7be889ad680d79a40f339082c1978b5cfcf62c2d8d196873ac/starlette-0.52.1.tar.gz", hash = "sha256:834edd1b0a23167694292e94f597773bc3f89f362be6effee198165a35d62933", size = 2653702, upload-time = "2026-01-18T13:34:11.062Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl", hash = "sha256:0029d43eb3d273bc4f83a08720b4912ea4b071087a3b48db01b7c839f7954d74", size = 74272, upload-time = "2026-01-18T13:34:09.188Z" },
]
[[package]]
name = "sympy"
version = "1.13.3"
@@ -5108,13 +5208,13 @@ dependencies = [
{ name = "typing-extensions", marker = "sys_platform == 'darwin'" },
]
wheels = [
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp310-none-macosx_11_0_arm64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp311-none-macosx_11_0_arm64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp312-none-macosx_11_0_arm64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp313-cp313t-macosx_11_0_arm64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp313-none-macosx_11_0_arm64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp314-cp314-macosx_11_0_arm64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp314-cp314t-macosx_11_0_arm64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:bf1e68cfb935ae2046374ff02a7aa73dda70351b46342846f557055b3a540bf0" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:a52952a8c90a422c14627ea99b9826b7557203b46b4d0772d3ca5c7699692425" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:287242dd1f830846098b5eca847f817aa5c6015ea57ab4c1287809efea7b77eb" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8924d10d36eac8fe0652a060a03fc2ae52980841850b9a1a2ddb0f27a4f181cd" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:bcee64ae7aa65876ceeae6dcaebe75109485b213528c74939602208a20706e3f" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:defadbeb055cfcf5def58f70937145aecbd7a4bc295238ded1d0e85ae2cf0e1d" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:886f84b181f766f53265ba0a1d503011e60f53fff9d569563ef94f24160e1072" },
]
[[package]]
@@ -5138,20 +5238,20 @@ dependencies = [
{ name = "typing-extensions", marker = "sys_platform == 'linux'" },
]
wheels = [
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp310-cp310-manylinux_2_28_aarch64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp310-cp310-manylinux_2_28_x86_64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp311-cp311-manylinux_2_28_aarch64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp311-cp311-manylinux_2_28_x86_64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp312-cp312-manylinux_2_28_aarch64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp312-cp312-manylinux_2_28_x86_64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313-manylinux_2_28_aarch64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313-manylinux_2_28_x86_64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313t-manylinux_2_28_aarch64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313t-manylinux_2_28_x86_64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314-manylinux_2_28_aarch64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314-manylinux_2_28_x86_64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314t-manylinux_2_28_aarch64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314t-manylinux_2_28_x86_64.whl" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:10866c8a48c4aa5ae3f48538dc8a055b99c57d9c6af2bf5dd715374d9d6ddca3" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:7210713b66943fdbfcc237b2e782871b649123ac5d29f548ce8c85be4223ab38" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:0e611cfb16724e62252b67d31073bc5c490cb83e92ecdc1192762535e0e44487" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:3de2adb9b4443dc9210ef1f1b16da3647ace53553166d6360bbbd7edd6f16e4d" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3bf9b442a51a2948e41216a76d7ab00f0694cfcaaa51b6f9bcab57b7f89843e6" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7417d8c565f219d3455654cb431c6d892a3eb40246055e14d645422de13b9ea1" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:3e532e553b37ee859205a9b2d1c7977fd6922f53bbb1b9bfdd5bdc00d1a60ed4" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:39b3dff6d8fba240ae0d1bede4ca11c2531ae3b47329206512d99e17907ff74b" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:01b1884f724977a20c7da2f640f1c7b37f4a2c117a7f4a6c1c0424d14cb86322" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:031a597147fa81b1e6d79ccf1ad3ccc7fafa27941d6cf26ff5caaa384fb20e92" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:65010ab4aacce6c9a1ddfc935f986c003ca8638ded04348fd326c3e74346237c" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:88adf5157db5da1d54b1c9fe4a6c1d20ceef00e75d854e206a87dbf69e3037dc" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:3ac2b8df2c55430e836dcda31940d47f1f5f94b8731057b6f20300ebea394dd9" },
{ url = "https://download.pytorch.org/whl/cpu/torch-2.9.1%2Bcpu-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:5b688445f928f13563b7418b17c57e97bf955ab559cf73cd8f2b961f8572dbb3" },
]
[[package]]
@@ -5495,6 +5595,20 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/5d/34/257747253ad446fd155e39f0c30afda4597b3b9e28f44a9de5dee76a6509/uv-0.9.6-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:b31377ebf2d0499afc5abe3fe1abded5ca843f3a1161b432fe26eb0ce15bab8e", size = 21597889, upload-time = "2025-10-29T19:40:36.963Z" },
]
[[package]]
name = "uvicorn"
version = "0.40.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "h11", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux')" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c3/d1/8f3c683c9561a4e6689dd3b1d345c815f10f86acd044ee1fb9a4dcd0b8c5/uvicorn-0.40.0.tar.gz", hash = "sha256:839676675e87e73694518b5574fd0f24c9d97b46bea16df7b8c05ea1a51071ea", size = 81761, upload-time = "2025-12-21T14:16:22.45Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3d/d8/2083a1daa7439a66f3a48589a57d576aa117726762618f6bb09fe3798796/uvicorn-0.40.0-py3-none-any.whl", hash = "sha256:c6c8f55bc8bf13eb6fa9ff87ad62308bbbc33d0b67f84293151efe87e0d5f2ee", size = 68502, upload-time = "2025-12-21T14:16:21.041Z" },
]
[[package]]
name = "uvloop"
version = "0.21.0"