Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2025-09-22 00:52:42 -05:00)
Merge branch 'dev' into feature-ai
@@ -49,7 +49,6 @@ services:
       - ./data:/usr/src/paperless/paperless-ngx/data
       - ./media:/usr/src/paperless/paperless-ngx/media
       - ./consume:/usr/src/paperless/paperless-ngx/consume
-      - ~/.gitconfig:/usr/src/paperless/.gitconfig:ro
     environment:
       PAPERLESS_REDIS: redis://broker:6379
       PAPERLESS_TIKA_ENABLED: 1
.gitignore (vendored): 3 changes
@@ -107,3 +107,6 @@ celerybeat-schedule*
 /.devcontainer/data/
 /.devcontainer/media/
 /.devcontainer/redisdata/
+
+# ignore pnpm package store folder created when setting up the devcontainer
+.pnpm-store/
@@ -4,7 +4,7 @@
 repos:
   # General hooks
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v5.0.0
+    rev: v6.0.0
     hooks:
       - id: check-docstring-first
       - id: check-json
@@ -49,7 +49,7 @@ repos:
          - 'prettier-plugin-organize-imports@4.1.0'
   # Python hooks
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.12.2
+    rev: v0.13.0
     hooks:
      - id: ruff-check
      - id: ruff-format
@@ -72,7 +72,7 @@ repos:
        args:
          - "--tab"
  - repo: https://github.com/shellcheck-py/shellcheck-py
-    rev: "v0.10.0.1"
+    rev: "v0.11.0.1"
    hooks:
      - id: shellcheck
  - repo: https://github.com/google/yamlfmt
@@ -32,7 +32,7 @@ RUN set -eux \
 # Purpose: Installs s6-overlay and rootfs
 # Comments:
 #  - Don't leave anything extra in here either
-FROM ghcr.io/astral-sh/uv:0.8.15-python3.12-bookworm-slim AS s6-overlay-base
+FROM ghcr.io/astral-sh/uv:0.8.17-python3.12-bookworm-slim AS s6-overlay-base

 WORKDIR /usr/src/s6

@@ -192,8 +192,8 @@ The endpoint supports the following optional form fields:
 - `tags`: Similar to correspondent. Specify this multiple times to
   have multiple tags added to the document.
 - `archive_serial_number`: An optional archive serial number to set.
-- `custom_fields`: An array of custom field ids to assign (with an empty
-  value) to the document.
+- `custom_fields`: Either an array of custom field ids to assign (with an empty
+  value) to the document or an object mapping field id -> value.

 The endpoint will immediately return HTTP 200 if the document consumption
 process was started successfully, with the UUID of the consumption task
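For illustration only (not part of the diff above), a minimal sketch of an upload that uses the `custom_fields` form field; the host, token, and the exact encoding of the new id -> value mapping are assumptions here, not confirmed by this commit:

```python
import requests

# Hypothetical upload against a local Paperless-ngx instance.
with open("invoice.pdf", "rb") as f:
    response = requests.post(
        "http://localhost:8000/api/documents/post_document/",
        headers={"Authorization": "Token <api-token>"},
        files={"document": f},
        data={
            "title": "Invoice 42",
            # Array form: assign custom fields 1 and 2 with empty values
            # (sent as repeated form values, like the `tags` field).
            "custom_fields": [1, 2],
            # Object form (new in this change) would instead map field id -> value,
            # e.g. {"3": "ACME"}; how that is encoded in the form is an assumption.
        },
    )
    # Per the docs above: HTTP 200 plus the UUID of the consumption task on success.
    print(response.status_code, response.text)
```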
@@ -470,9 +470,14 @@ To get started:

 2. VS Code will prompt you with "Reopen in container". Do so and wait for the environment to start.

-3. Initialize the project by running the task **Project Setup: Run all Init Tasks**. This
+3. In case your host operating system is Windows:
+
+   - The Source Control view in Visual Studio Code might show: "The detected Git repository is potentially unsafe as the folder is owned by someone other than the current user." Use "Manage Unsafe Repositories" to fix this.
+   - Git might have detected modifications for all files, because Windows is using CRLF line endings. Run `git checkout .` in the container's terminal to fix this issue.
+
+4. Initialize the project by running the task **Project Setup: Run all Init Tasks**. This
    will initialize the database tables and create a superuser. Then you can compile the front end
    for production or run the frontend in debug mode.

-4. The project is ready for debugging, start either run the fullstack debug or individual debug
+5. The project is ready for debugging, start either run the fullstack debug or individual debug
    processes. To spin up the project without debugging, run the task **Project Start: Run all Services**
@@ -92,6 +92,16 @@ and more. These areas allow you to view, add, edit, delete and manage permissions
 for these objects. You can also manage saved views, mail accounts, mail rules,
 workflows and more from the management sections.

+### Nested Tags
+
+Paperless-ngx v2.19 introduces support for nested tags, allowing you to create a
+hierarchy of tags, which may be useful for organizing your documents. Tags can
+have a 'parent' tag, creating a tree-like structure, to a maximum depth of 5. When
+a tag is added to a document, all of its parent tags are also added automatically
+and similarly, when a tag is removed from a document, all of its child tags are
+also removed. Additionally, assigning a parent to an existing tag will automatically
+update all documents that have this tag assigned, adding the parent tag as well.
+
 ## Adding documents to Paperless-ngx

 Once you've got Paperless setup, you need to start feeding documents
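As a purely illustrative sketch of the rules described in the new Nested Tags section (the tag names and helper below are hypothetical, not from this commit), adding a tag to a document implies adding all of its ancestors:

```python
# Hypothetical hierarchy: Finance > Invoices > 2024 (well under the maximum depth of 5).
finance = {"id": 1, "name": "Finance", "parent": None}
invoices = {"id": 2, "name": "Invoices", "parent": 1}
year_2024 = {"id": 3, "name": "2024", "parent": 2}
tags_by_id = {t["id"]: t for t in (finance, invoices, year_2024)}


def tags_after_adding(tag_id: int) -> set[int]:
    """Adding a tag to a document also applies every ancestor of that tag."""
    result: set[int] = set()
    current = tags_by_id.get(tag_id)
    while current is not None:
        result.add(current["id"])
        current = tags_by_id.get(current["parent"])
    return result


# Tagging a document with "2024" effectively applies Invoices and Finance too.
assert tags_after_adding(3) == {1, 2, 3}
```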
@@ -36,6 +36,7 @@ dependencies = [
   "django-guardian~=3.1.2",
   "django-multiselectfield~=1.0.1",
   "django-soft-delete~=1.0.18",
+  "django-treenode>=0.23.2",
   "djangorestframework~=3.16",
   "djangorestframework-guardian~=0.4.0",
   "drf-spectacular~=0.28",
@@ -57,7 +58,7 @@ dependencies = [
   "llama-index-llms-openai>=0.3.38",
   "llama-index-vector-stores-faiss>=0.3",
   "nltk~=3.9.1",
-  "ocrmypdf~=16.10.0",
+  "ocrmypdf~=16.11.0",
   "openai>=1.76",
   "pathvalidate~=3.3.1",
   "pdf2image~=1.17.0",
@@ -125,7 +126,7 @@ testing = [
 lint = [
   "pre-commit~=4.3.0",
   "pre-commit-uv~=4.1.3",
-  "ruff~=0.12.2",
+  "ruff~=0.13.0",
 ]

 typing = [
File diff suppressed because it is too large.
@@ -12,6 +12,8 @@

     <pngx-input-color i18n-title title="Color" formControlName="color" [error]="error?.color"></pngx-input-color>

+    <pngx-input-select i18n-title title="Parent" formControlName="parent" [items]="tags" [allowNull]="true" [error]="error?.parent"></pngx-input-select>
+
     <pngx-input-check i18n-title title="Inbox tag" formControlName="is_inbox_tag" i18n-hint hint="Inbox tags are automatically assigned to all consumed documents."></pngx-input-check>
     <pngx-input-select i18n-title title="Matching algorithm" [items]="getMatchingAlgorithms()" formControlName="matching_algorithm"></pngx-input-select>
     @if (patternRequired) {
@@ -35,11 +35,16 @@ import { TextComponent } from '../../input/text/text.component'
   ],
 })
 export class TagEditDialogComponent extends EditDialogComponent<Tag> {
+  tags: Tag[]
+
   constructor() {
     super()
     this.service = inject(TagService)
     this.userService = inject(UserService)
     this.settingsService = inject(SettingsService)
+    this.service.listAll().subscribe((result) => {
+      this.tags = result.results
+    })
   }

   getCreateTitle() {
@@ -55,6 +60,7 @@ export class TagEditDialogComponent extends EditDialogComponent<Tag> {
       name: new FormControl(''),
       color: new FormControl(randomColor()),
       is_inbox_tag: new FormControl(false),
+      parent: new FormControl(null),
       matching_algorithm: new FormControl(DEFAULT_MATCHING_ALGORITHM),
       match: new FormControl(''),
       is_insensitive: new FormControl(true),
@@ -114,6 +114,13 @@ export class FilterableDropdownSelectionModel {
         b.id == NEGATIVE_NULL_FILTER_VALUE)
     ) {
       return 1
+    }
+
+    // Preserve hierarchical order when provided (e.g., Tags)
+    const ao = (a as any)['orderIndex']
+    const bo = (b as any)['orderIndex']
+    if (ao !== undefined && bo !== undefined) {
+      return ao - bo
     } else if (
       this.getNonTemporary(a.id) == ToggleableItemState.NotSelected &&
       this.getNonTemporary(b.id) != ToggleableItemState.NotSelected
@@ -15,12 +15,17 @@
       <i-bs width="1em" height="1em" name="x"></i-bs>
     }
   </div>
-  <div class="me-1">
-    @if (isTag) {
-      <pngx-tag [tag]="item" [clickable]="false"></pngx-tag>
-    } @else {
-      <small>{{item.name}}</small>
+  <div class="me-1 name-cell" [style.--depth]="isTag ? getDepth() + 1 : 1">
+    @if (isTag && getDepth() > 0) {
+      <div class="indicator"></div>
     }
+    <div>
+      @if (isTag) {
+        <pngx-tag [tag]="item" [clickable]="false"></pngx-tag>
+      } @else {
+        <small>{{item.name}}</small>
+      }
+    </div>
   </div>
   @if (!hideCount) {
     <div class="badge bg-light text-dark rounded-pill ms-auto me-1">{{currentCount}}</div>
@@ -2,3 +2,19 @@
   min-width: 1em;
   min-height: 1em;
 }
+
+.name-cell {
+  padding-left: calc(calc(var(--depth) - 2) * 1rem);
+  display: flex;
+  align-items: center;
+
+  .indicator {
+    display: inline-block;
+    width: .8rem;
+    height: .8rem;
+    border-left: 1px solid var(--bs-secondary);
+    border-bottom: 1px solid var(--bs-secondary);
+    margin-right: .25rem;
+    margin-left: .5rem;
+  }
+}
@@ -1,6 +1,7 @@
 import { Component, EventEmitter, Input, Output } from '@angular/core'
 import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
 import { MatchingModel } from 'src/app/data/matching-model'
+import { Tag } from 'src/app/data/tag'
 import { TagComponent } from '../../tag/tag.component'

 export enum ToggleableItemState {
@@ -45,6 +46,10 @@ export class ToggleableDropdownButtonComponent {
     return 'is_inbox_tag' in this.item
   }

+  getDepth(): number {
+    return (this.item as Tag).depth ?? 0
+  }
+
   get currentCount(): number {
     return this.count ?? this.item.document_count
   }
@@ -7,13 +7,14 @@
   <div class="input-group flex-nowrap">
     <ng-select #tagSelect name="tags" [items]="tags" bindLabel="name" bindValue="id" [(ngModel)]="value"
       [disabled]="disabled"
-      [multiple]="true"
+      [multiple]="multiple"
       [closeOnSelect]="false"
       [clearSearchOnAdd]="true"
       [hideSelected]="tags.length > 0"
       [addTag]="allowCreate ? createTagRef : false"
       addTagText="Add tag"
       i18n-addTagText
+      (add)="onAdd($event)"
       (change)="onChange(value)">

       <ng-template ng-label-tmp let-item="item">
@@ -25,9 +26,20 @@
         </button>
       </ng-template>
       <ng-template ng-option-tmp let-item="item" let-index="index" let-search="searchTerm">
-        <div class="tag-wrap">
+        <div class="tag-option-row d-flex align-items-center">
           @if (item.id && tags) {
-            <pngx-tag class="me-2" [tag]="getTag(item.id)"></pngx-tag>
+            @if (getTag(item.id)?.parent) {
+              <i-bs name="list-nested" class="me-1"></i-bs>
+              <span class="hierarchy-reveal d-flex align-items-center">
+                <span class="parents d-flex align-items-center">
+                  @for (p of getParentChain(item.id); track p.id) {
+                    <span class="badge me-1" [style.background]="p.color" [style.color]="p.text_color">{{p.name}}</span>
+                    <i-bs name="chevron-right" width=".8em" height=".8em" class="me-1"></i-bs>
+                  }
+                </span>
+              </span>
+            }
+            <pngx-tag class="current-tag d-flex" [tag]="getTag(item.id)"></pngx-tag>
           }
         </div>
       </ng-template>
@@ -20,3 +20,33 @@
     }
   }
 }
+
+// Dropdown hierarchy reveal for ng-select options
+::ng-deep .ng-dropdown-panel .ng-option {
+  overflow-x: scroll;
+
+  .tag-option-row {
+    font-size: 1rem;
+    width: max-content;
+  }
+
+  .hierarchy-reveal {
+    overflow: hidden;
+    max-width: 0;
+    transition: max-width 200ms ease;
+  }
+
+  .parents .badge {
+    white-space: nowrap;
+  }
+}
+
+::ng-deep .ng-dropdown-panel .ng-option:hover .hierarchy-reveal,
+::ng-deep .ng-dropdown-panel .ng-option.ng-option-marked .hierarchy-reveal {
+  max-width: 1000px;
+}
+
+::ng-deep .ng-dropdown-panel .ng-option:hover .hierarchy-indicator,
+::ng-deep .ng-dropdown-panel .ng-option.ng-option-marked .hierarchy-indicator {
+  background: transparent;
+}
|
|||||||
component.onFilterDocuments()
|
component.onFilterDocuments()
|
||||||
expect(emitSpy).toHaveBeenCalledWith([tags[2]])
|
expect(emitSpy).toHaveBeenCalledWith([tags[2]])
|
||||||
})
|
})
|
||||||
|
|
||||||
|
it('should remove all descendants from selection', () => {
|
||||||
|
const c: Tag = { id: 4, name: 'c' }
|
||||||
|
const b: Tag = { id: 3, name: 'b', children: [c] }
|
||||||
|
const a: Tag = { id: 2, name: 'a' }
|
||||||
|
const root: Tag = { id: 1, name: 'root', children: [a, b] }
|
||||||
|
|
||||||
|
const inputIDs = [2, 3, 4, 99]
|
||||||
|
const result = (component as any).removeChildren(inputIDs, root)
|
||||||
|
expect(result).toEqual([99])
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should append all parents recursively', () => {
|
||||||
|
const root: Tag = { id: 1, name: 'root' }
|
||||||
|
const mid: Tag = { id: 2, name: 'mid', parent: 1 }
|
||||||
|
const leaf: Tag = { id: 3, name: 'leaf', parent: 2 }
|
||||||
|
component.tags = [root, mid, leaf]
|
||||||
|
|
||||||
|
component.value = []
|
||||||
|
component.onAdd(leaf)
|
||||||
|
expect(component.value).toEqual([2, 1])
|
||||||
|
|
||||||
|
// Calling onAdd on a root should not change value
|
||||||
|
component.onAdd(root)
|
||||||
|
expect(component.value).toEqual([2, 1])
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should return ancestors from root to parent using getParentChain', () => {
|
||||||
|
const root: Tag = { id: 1, name: 'root' }
|
||||||
|
const mid: Tag = { id: 2, name: 'mid', parent: 1 }
|
||||||
|
const leaf: Tag = { id: 3, name: 'leaf', parent: 2 }
|
||||||
|
component.tags = [root, mid, leaf]
|
||||||
|
|
||||||
|
expect(component.getParentChain(3).map((t) => t.id)).toEqual([1, 2])
|
||||||
|
expect(component.getParentChain(2).map((t) => t.id)).toEqual([1])
|
||||||
|
expect(component.getParentChain(1).map((t) => t.id)).toEqual([])
|
||||||
|
// Non-existent id
|
||||||
|
expect(component.getParentChain(999).map((t) => t.id)).toEqual([])
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should handle cyclic parents via guard in getParentChain', () => {
|
||||||
|
const one: Tag = { id: 1, name: 'one', parent: 2 }
|
||||||
|
const two: Tag = { id: 2, name: 'two', parent: 1 }
|
||||||
|
component.tags = [one, two]
|
||||||
|
|
||||||
|
const chain = component.getParentChain(1)
|
||||||
|
// Guard avoids infinite loop; chain contains both nodes once
|
||||||
|
expect(chain.map((t) => t.id)).toEqual([1, 2])
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should stop when parent does not exist in getParentChain', () => {
|
||||||
|
const lone: Tag = { id: 5, name: 'lone', parent: 999 }
|
||||||
|
component.tags = [lone]
|
||||||
|
expect(component.getParentChain(5)).toEqual([])
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
@@ -100,6 +100,9 @@ export class TagsComponent implements OnInit, ControlValueAccessor {
   @Input()
   horizontal: boolean = false

+  @Input()
+  multiple: boolean = true
+
   @Output()
   filterDocuments = new EventEmitter<Tag[]>()

@@ -124,13 +127,40 @@ export class TagsComponent implements OnInit, ControlValueAccessor {

     let index = this.value.indexOf(tagID)
     if (index > -1) {
+      const tag = this.getTag(tagID)
+
+      // remove tag
       let oldValue = this.value
       oldValue.splice(index, 1)
+
+      // remove children
+      oldValue = this.removeChildren(oldValue, tag)
+
       this.value = [...oldValue]
       this.onChange(this.value)
     }
   }
+
+  private removeChildren(tagIDs: number[], tag: Tag) {
+    if (tag.children?.length) {
+      const childIDs = tag.children.map((child) => child.id)
+      tagIDs = tagIDs.filter((id) => !childIDs.includes(id))
+      for (const child of tag.children) {
+        tagIDs = this.removeChildren(tagIDs, child)
+      }
+    }
+    return tagIDs
+  }
+
+  public onAdd(tag: Tag) {
+    if (tag.parent) {
+      // add all parents recursively
+      const parent = this.getTag(tag.parent)
+      this.value = [...this.value, parent.id]
+      this.onAdd(parent)
+    }
+  }
+
   createTag(name: string = null, add: boolean = false) {
     var modal = this.modalService.open(TagEditDialogComponent, {
       backdrop: 'static',
@@ -166,6 +196,7 @@ export class TagsComponent implements OnInit, ControlValueAccessor {

   addTag(id) {
     this.value = [...this.value, id]
+    this.onAdd(this.getTag(id))
     this.onChange(this.value)
   }

@@ -180,4 +211,20 @@ export class TagsComponent implements OnInit, ControlValueAccessor {
       this.tags.filter((t) => this.value.includes(t.id))
     )
   }
+
+  getParentChain(id: number): Tag[] {
+    // Returns ancestors from root → immediate parent for a tag id
+    const chain: Tag[] = []
+    let current = this.getTag(id)
+    const guard = new Set<number>()
+    while (current?.parent) {
+      if (guard.has(current.parent)) break
+      guard.add(current.parent)
+      const parent = this.getTag(current.parent)
+      if (!parent) break
+      chain.unshift(parent)
+      current = parent
+    }
+    return chain
+  }
 }
@@ -1,4 +1,8 @@
 @if (tag) {
+  @if (showParents && tag.parent) {
+    <pngx-tag [tagID]="tag.parent" [clickable]="clickable" [linkTitle]="linkTitle"></pngx-tag>
+    >
+  }
   @if (!clickable) {
     <span class="badge" [style.background]="tag.color" [style.color]="tag.text_color">{{tag.name}}</span>
   }
@@ -50,4 +50,7 @@ export class TagComponent {

   @Input()
   clickable: boolean = false
+
+  @Input()
+  showParents: boolean = false
 }
@@ -1204,7 +1204,7 @@ describe('BulkEditorComponent', () => {
       expect(tagListAllSpy).toHaveBeenCalled()

       expect(tagSelectionModelToggleSpy).toHaveBeenCalledWith(newTag.id)
-      expect(component.tagSelectionModel.items).toEqual(
+      expect(component.tagSelectionModel.items).toMatchObject(
        [{ id: null, name: 'Not assigned' }].concat(tags.results as any)
       )
     })
@@ -37,6 +37,7 @@ import { StoragePathService } from 'src/app/services/rest/storage-path.service'
 import { TagService } from 'src/app/services/rest/tag.service'
 import { SettingsService } from 'src/app/services/settings.service'
 import { ToastService } from 'src/app/services/toast.service'
+import { flattenTags } from 'src/app/utils/flatten-tags'
 import { MergeConfirmDialogComponent } from '../../common/confirm-dialog/merge-confirm-dialog/merge-confirm-dialog.component'
 import { RotateConfirmDialogComponent } from '../../common/confirm-dialog/rotate-confirm-dialog/rotate-confirm-dialog.component'
 import { CorrespondentEditDialogComponent } from '../../common/edit-dialog/correspondent-edit-dialog/correspondent-edit-dialog.component'
@@ -164,7 +165,10 @@ export class BulkEditorComponent
       this.tagService
         .listAll()
         .pipe(first())
-        .subscribe((result) => (this.tagSelectionModel.items = result.results))
+        .subscribe(
+          (result) =>
+            (this.tagSelectionModel.items = flattenTags(result.results))
+        )
     }
     if (
       this.permissionService.currentUserCan(
@@ -648,7 +652,7 @@ export class BulkEditorComponent
       )
       .pipe(takeUntil(this.unsubscribeNotifier))
       .subscribe(({ newTag, tags }) => {
-        this.tagSelectionModel.items = tags.results
+        this.tagSelectionModel.items = flattenTags(tags.results)
         this.tagSelectionModel.toggle(newTag.id)
       })
   }
@@ -589,7 +589,7 @@ describe('FilterEditorComponent', () => {
     expect(component.tagSelectionModel.logicalOperator).toEqual(
       LogicalOperator.And
     )
-    expect(component.tagSelectionModel.getSelectedItems()).toEqual(tags)
+    expect(component.tagSelectionModel.getSelectedItems()).toMatchObject(tags)
     // coverage
     component.filterRules = [
       {
@@ -615,7 +615,7 @@ describe('FilterEditorComponent', () => {
     expect(component.tagSelectionModel.logicalOperator).toEqual(
       LogicalOperator.Or
     )
-    expect(component.tagSelectionModel.getSelectedItems()).toEqual(tags)
+    expect(component.tagSelectionModel.getSelectedItems()).toMatchObject(tags)
     // coverage
     component.filterRules = [
       {
@@ -652,7 +652,7 @@ describe('FilterEditorComponent', () => {
     expect(component.tagSelectionModel.logicalOperator).toEqual(
       LogicalOperator.And
     )
-    expect(component.tagSelectionModel.getExcludedItems()).toEqual(tags)
+    expect(component.tagSelectionModel.getExcludedItems()).toMatchObject(tags)
     // coverage
     component.filterRules = [
       {
@@ -97,6 +97,7 @@ import {
   CustomFieldQueryExpression,
 } from 'src/app/utils/custom-field-query-element'
 import { filterRulesDiffer } from 'src/app/utils/filter-rules'
+import { flattenTags } from 'src/app/utils/flatten-tags'
 import {
   CustomFieldQueriesModel,
   CustomFieldsQueryDropdownComponent,
@@ -1134,7 +1135,7 @@ export class FilterEditorComponent
     ) {
       this.loadingCountTotal++
       this.tagService.listAll().subscribe((result) => {
-        this.tagSelectionModel.items = result.results
+        this.tagSelectionModel.items = flattenTags(result.results)
         this.maybeCompleteLoading()
       })
     }
@@ -1,4 +1,4 @@
-import { NgClass, TitleCasePipe } from '@angular/common'
+import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
 import { Component, inject } from '@angular/core'
 import { FormsModule, ReactiveFormsModule } from '@angular/forms'
 import {
@@ -30,6 +30,7 @@ import { ManagementListComponent } from '../management-list/management-list.comp
   FormsModule,
   ReactiveFormsModule,
   NgClass,
+  NgTemplateOutlet,
   NgbDropdownModule,
   NgbPaginationModule,
   NgxBootstrapIconsModule,
@@ -1,4 +1,4 @@
-import { NgClass, TitleCasePipe } from '@angular/common'
+import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
 import { Component, inject } from '@angular/core'
 import { FormsModule, ReactiveFormsModule } from '@angular/forms'
 import {
@@ -28,6 +28,7 @@ import { ManagementListComponent } from '../management-list/management-list.comp
   FormsModule,
   ReactiveFormsModule,
   NgClass,
+  NgTemplateOutlet,
   NgbDropdownModule,
   NgbPaginationModule,
   NgxBootstrapIconsModule,
@@ -54,61 +54,7 @@
       </tr>
     }
     @for (object of data; track object) {
-      <tr (click)="toggleSelected(object); $event.stopPropagation();" class="data-row fade" [class.show]="show">
-        <td>
-          <div class="form-check m-0 ms-2 me-n2">
-            <input type="checkbox" class="form-check-input" id="{{typeName}}{{object.id}}" [checked]="selectedObjects.has(object.id)" (click)="toggleSelected(object); $event.stopPropagation();">
-            <label class="form-check-label" for="{{typeName}}{{object.id}}"></label>
-          </div>
-        </td>
-        <td scope="row"><button class="btn btn-link ms-0 ps-0 text-start" (click)="userCanEdit(object) ? openEditDialog(object) : null; $event.stopPropagation()">{{ object.name }}</button> </td>
-        <td scope="row" class="d-none d-sm-table-cell">{{ getMatching(object) }}</td>
-        <td scope="row">{{ object.document_count }}</td>
-        @for (column of extraColumns; track column) {
-          <td scope="row" [ngClass]="{ 'd-none d-sm-table-cell' : column.hideOnMobile }">
-            @if (column.rendersHtml) {
-              <div [innerHtml]="column.valueFn.call(null, object) | safeHtml"></div>
-            } @else if (column.monospace) {
-              <span class="font-monospace">{{ column.valueFn.call(null, object) }}</span>
-            } @else {
-              {{ column.valueFn.call(null, object) }}
-            }
-          </td>
-        }
-        <td scope="row">
-          <div class="btn-toolbar gap-2">
-            <div class="btn-group d-block d-sm-none">
-              <div ngbDropdown container="body" class="d-inline-block">
-                <button type="button" class="btn btn-link" id="actionsMenuMobile" (click)="$event.stopPropagation()" ngbDropdownToggle>
-                  <i-bs name="three-dots-vertical"></i-bs>
-                </button>
-                <div ngbDropdownMenu aria-labelledby="actionsMenuMobile">
-                  <button (click)="openEditDialog(object)" *pngxIfPermissions="{ action: PermissionAction.Change, type: permissionType }" ngbDropdownItem i18n>Edit</button>
-                  <button class="text-danger" (click)="openDeleteDialog(object)" *pngxIfPermissions="{ action: PermissionAction.Delete, type: permissionType }" ngbDropdownItem i18n>Delete</button>
-                  @if (object.document_count > 0) {
-                    <button (click)="filterDocuments(object)" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }" ngbDropdownItem i18n>Filter Documents ({{ object.document_count }})</button>
-                  }
-                </div>
-              </div>
-            </div>
-            <div class="btn-group d-none d-sm-inline-block">
-              <button class="btn btn-sm btn-outline-secondary" (click)="openEditDialog(object); $event.stopPropagation();" *pngxIfPermissions="{ action: PermissionAction.Change, type: permissionType }" [disabled]="!userCanEdit(object)">
-                <i-bs width="1em" height="1em" name="pencil"></i-bs> <ng-container i18n>Edit</ng-container>
-              </button>
-              <button class="btn btn-sm btn-outline-danger" (click)="openDeleteDialog(object); $event.stopPropagation();" *pngxIfPermissions="{ action: PermissionAction.Delete, type: permissionType }" [disabled]="!userCanDelete(object)">
-                <i-bs width="1em" height="1em" name="trash"></i-bs> <ng-container i18n>Delete</ng-container>
-              </button>
-            </div>
-            @if (object.document_count > 0) {
-              <div class="btn-group d-none d-sm-inline-block">
-                <button class="btn btn-sm btn-outline-secondary" (click)="filterDocuments(object); $event.stopPropagation();" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }">
-                  <i-bs width="1em" height="1em" name="filter"></i-bs> <ng-container i18n>Documents</ng-container><span class="badge bg-light text-secondary ms-2">{{ object.document_count }}</span>
-                </button>
-              </div>
-            }
-          </div>
-        </td>
-      </tr>
+      <ng-container [ngTemplateOutlet]="objectRow" [ngTemplateOutletContext]="{ object: object, depth: 0 }"></ng-container>
     }
   </tbody>
 </table>
@@ -129,3 +75,72 @@
   }
 </div>
 }
+
+<ng-template #objectRow let-object="object" let-depth="depth">
+  <tr (click)="toggleSelected(object); $event.stopPropagation();" class="data-row fade" [class.show]="show">
+    <td>
+      <div class="form-check m-0 ms-2 me-n2">
+        <input type="checkbox" class="form-check-input" id="{{typeName}}{{object.id}}" [checked]="selectedObjects.has(object.id)" (click)="toggleSelected(object); $event.stopPropagation();">
+        <label class="form-check-label" for="{{typeName}}{{object.id}}"></label>
+      </div>
+    </td>
+    <td scope="row" class="name-cell" style="--depth: {{depth}}">
+      @if (depth > 0) {
+        <div class="indicator"></div>
+      }
+      <button class="btn btn-link ms-0 ps-0 text-start" (click)="userCanEdit(object) ? openEditDialog(object) : null; $event.stopPropagation()">{{ object.name }}</button>
+    </td>
+    <td scope="row" class="d-none d-sm-table-cell">{{ getMatching(object) }}</td>
+    <td scope="row">{{ getDocumentCount(object) }}</td>
+    @for (column of extraColumns; track column) {
+      <td scope="row" [ngClass]="{ 'd-none d-sm-table-cell' : column.hideOnMobile }">
+        @if (column.rendersHtml) {
+          <div [innerHtml]="column.valueFn.call(null, object) | safeHtml"></div>
+        } @else if (column.monospace) {
+          <span class="font-monospace">{{ column.valueFn.call(null, object) }}</span>
+        } @else {
+          {{ column.valueFn.call(null, object) }}
+        }
+      </td>
+    }
+    <td scope="row">
+      <div class="btn-toolbar gap-2">
+        <div class="btn-group d-block d-sm-none">
+          <div ngbDropdown container="body" class="d-inline-block">
+            <button type="button" class="btn btn-link" id="actionsMenuMobile" (click)="$event.stopPropagation()" ngbDropdownToggle>
+              <i-bs name="three-dots-vertical"></i-bs>
+            </button>
+            <div ngbDropdownMenu aria-labelledby="actionsMenuMobile">
+              <button (click)="openEditDialog(object)" *pngxIfPermissions="{ action: PermissionAction.Change, type: permissionType }" ngbDropdownItem i18n>Edit</button>
+              <button class="text-danger" (click)="openDeleteDialog(object)" *pngxIfPermissions="{ action: PermissionAction.Delete, type: permissionType }" ngbDropdownItem i18n>Delete</button>
+              @if (getDocumentCount(object) > 0) {
+                <button (click)="filterDocuments(object)" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }" ngbDropdownItem i18n>Filter Documents ({{ getDocumentCount(object) }})</button>
+              }
+            </div>
+          </div>
+        </div>
+        <div class="btn-group d-none d-sm-inline-block">
+          <button class="btn btn-sm btn-outline-secondary" (click)="openEditDialog(object); $event.stopPropagation();" *pngxIfPermissions="{ action: PermissionAction.Change, type: permissionType }" [disabled]="!userCanEdit(object)">
+            <i-bs width="1em" height="1em" name="pencil"></i-bs> <ng-container i18n>Edit</ng-container>
+          </button>
+          <button class="btn btn-sm btn-outline-danger" (click)="openDeleteDialog(object); $event.stopPropagation();" *pngxIfPermissions="{ action: PermissionAction.Delete, type: permissionType }" [disabled]="!userCanDelete(object)">
+            <i-bs width="1em" height="1em" name="trash"></i-bs> <ng-container i18n>Delete</ng-container>
+          </button>
+        </div>
+        @if (getDocumentCount(object) > 0) {
+          <div class="btn-group d-none d-sm-inline-block">
+            <button class="btn btn-sm btn-outline-secondary" (click)="filterDocuments(object); $event.stopPropagation();" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Document }">
+              <i-bs width="1em" height="1em" name="filter"></i-bs> <ng-container i18n>Documents</ng-container><span class="badge bg-light text-secondary ms-2">{{ getDocumentCount(object) }}</span>
+            </button>
+          </div>
+        }
+      </div>
+    </td>
+  </tr>
+
+  @if (object.children && object.children.length > 0) {
+    @for (child of object.children; track child) {
+      <ng-container [ngTemplateOutlet]="objectRow" [ngTemplateOutletContext]="{ object: child, depth: depth + 1 }"></ng-container>
+    }
+  }
+</ng-template>
|
|||||||
.form-check {
|
.form-check {
|
||||||
min-height: 0;
|
min-height: 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
td.name-cell {
|
||||||
|
padding-left: calc(calc(var(--depth) - 1) * 1.1rem);
|
||||||
|
|
||||||
|
.indicator {
|
||||||
|
display: inline-block;
|
||||||
|
width: .8rem;
|
||||||
|
height: .8rem;
|
||||||
|
border-left: 1px solid var(--bs-secondary);
|
||||||
|
border-bottom: 1px solid var(--bs-secondary);
|
||||||
|
margin-right: .25rem;
|
||||||
|
margin-left: .5rem;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
@@ -79,6 +79,7 @@ export abstract class ManagementListComponent<T extends MatchingModel>
   @ViewChildren(SortableDirective) headers: QueryList<SortableDirective>

   public data: T[] = []
+  private unfilteredData: T[] = []

   public page = 1

@@ -132,6 +133,18 @@ export abstract class ManagementListComponent<T extends MatchingModel>
     this.reloadData()
   }

+  protected filterData(data: T[]): T[] {
+    return data
+  }
+
+  getDocumentCount(object: MatchingModel): number {
+    return (
+      object.document_count ??
+      this.unfilteredData.find((d) => d.id == object.id)?.document_count ??
+      0
+    )
+  }
+
   reloadData(extraParams: { [key: string]: any } = null) {
     this.loading = true
     this.clearSelection()
@@ -148,7 +161,8 @@ export abstract class ManagementListComponent<T extends MatchingModel>
       .pipe(
         takeUntil(this.unsubscribeNotifier),
         tap((c) => {
-          this.data = c.results
+          this.unfilteredData = c.results
+          this.data = this.filterData(c.results)
          this.collectionSize = c.count
         }),
         delay(100)
@@ -1,4 +1,4 @@
-import { NgClass, TitleCasePipe } from '@angular/common'
+import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
 import { Component, inject } from '@angular/core'
 import { FormsModule, ReactiveFormsModule } from '@angular/forms'
 import {
@@ -30,6 +30,7 @@ import { ManagementListComponent } from '../management-list/management-list.comp
   FormsModule,
   ReactiveFormsModule,
   NgClass,
+  NgTemplateOutlet,
   NgbDropdownModule,
   NgbPaginationModule,
   NgxBootstrapIconsModule,
@@ -1,4 +1,4 @@
-import { NgClass, TitleCasePipe } from '@angular/common'
+import { NgClass, NgTemplateOutlet, TitleCasePipe } from '@angular/common'
 import { Component, inject } from '@angular/core'
 import { FormsModule, ReactiveFormsModule } from '@angular/forms'
 import {
@@ -30,6 +30,7 @@ import { ManagementListComponent } from '../management-list/management-list.comp
   FormsModule,
   ReactiveFormsModule,
   NgClass,
+  NgTemplateOutlet,
   NgbDropdownModule,
   NgbPaginationModule,
   NgxBootstrapIconsModule,
@@ -59,4 +60,8 @@ export class TagListComponent extends ManagementListComponent<Tag> {
   getDeleteMessage(object: Tag) {
     return $localize`Do you really want to delete the tag "${object.name}"?`
   }
+
+  filterData(data: Tag[]) {
+    return data.filter((tag) => !tag.parent)
+  }
 }
@@ -6,4 +6,12 @@ export interface Tag extends MatchingModel {
   text_color?: string

   is_inbox_tag?: boolean
+
+  parent?: number // Tag ID
+
+  children?: Tag[] // read-only
+
+  // UI-only: computed depth and order for hierarchical dropdowns
+  depth?: number
+  orderIndex?: number
 }
src-ui/src/app/utils/flatten-tags.spec.ts (new file, 63 lines)
@@ -0,0 +1,63 @@
+import type { Tag } from '../data/tag'
+import { flattenTags } from './flatten-tags'
+
+describe('flattenTags', () => {
+  it('returns empty array for empty input', () => {
+    expect(flattenTags([])).toEqual([])
+  })
+
+  it('orders roots and children by name (case-insensitive, numeric) and sets depth/orderIndex', () => {
+    const input: Tag[] = [
+      { id: 11, name: 'A-root' },
+      { id: 10, name: 'B-root' },
+      { id: 101, name: 'Child 10', parent: 11 },
+      { id: 102, name: 'child 2', parent: 11 },
+      { id: 201, name: 'beta', parent: 10 },
+      { id: 202, name: 'Alpha', parent: 10 },
+      { id: 103, name: 'Sub 1', parent: 102 },
+    ]
+
+    const flat = flattenTags(input)
+
+    const names = flat.map((t) => t.name)
+    expect(names).toEqual([
+      'A-root',
+      'child 2',
+      'Sub 1',
+      'Child 10',
+      'B-root',
+      'Alpha',
+      'beta',
+    ])
+
+    expect(flat.map((t) => t.depth)).toEqual([0, 1, 2, 1, 0, 1, 1])
+    expect(flat.map((t) => t.orderIndex)).toEqual([0, 1, 2, 3, 4, 5, 6])
+
+    // Children are rebuilt
+    const aRoot = flat.find((t) => t.name === 'A-root')
+    expect(new Set(aRoot.children?.map((c) => c.name))).toEqual(
+      new Set(['child 2', 'Child 10'])
+    )
+
+    const bRoot = flat.find((t) => t.name === 'B-root')
+    expect(new Set(bRoot.children?.map((c) => c.name))).toEqual(
+      new Set(['Alpha', 'beta'])
+    )
+
+    const child2 = flat.find((t) => t.name === 'child 2')
+    expect(new Set(child2.children?.map((c) => c.name))).toEqual(
+      new Set(['Sub 1'])
+    )
+  })
+
+  it('excludes orphaned nodes (with missing parent)', () => {
+    const input: Tag[] = [
+      { id: 1, name: 'Root' },
+      { id: 2, name: 'Child', parent: 1 },
+      { id: 3, name: 'Orphan', parent: 999 }, // missing parent
+    ]
+
+    const flat = flattenTags(input)
+    expect(flat.map((t) => t.name)).toEqual(['Root', 'Child'])
+  })
+})
src-ui/src/app/utils/flatten-tags.ts (new file, 35 lines)
@@ -0,0 +1,35 @@
+import { Tag } from '../data/tag'
+
+export function flattenTags(all: Tag[]): Tag[] {
+  const map = new Map<number, Tag>(
+    all.map((t) => [t.id, { ...t, children: [] }])
+  )
+  // rebuild children
+  for (const t of map.values()) {
+    if (t.parent) {
+      const p = map.get(t.parent)
+      p?.children.push(t)
+    }
+  }
+  const roots = Array.from(map.values()).filter((t) => !t.parent)
+  const sortByName = (a: Tag, b: Tag) =>
+    a.name.localeCompare(b.name, undefined, {
+      sensitivity: 'base',
+      numeric: true,
+    })
+  const ordered: Tag[] = []
+  let idx = 0
+  const walk = (node: Tag, depth: number) => {
+    node.depth = depth
+    node.orderIndex = idx++
+    ordered.push(node)
+    if (node.children?.length) {
+      for (const child of [...node.children].sort(sortByName)) {
+        walk(child, depth + 1)
+      }
+    }
+  }
+  roots.sort(sortByName)
+  roots.forEach((r) => walk(r, 0))
+  return ordered
+}
@@ -57,6 +57,7 @@ import {
   checkLg,
   chevronDoubleLeft,
   chevronDoubleRight,
+  chevronRight,
   clipboard,
   clipboardCheck,
   clipboardCheckFill,
@@ -96,6 +97,7 @@ import {
   infoCircle,
   journals,
   link,
+  listNested,
   listTask,
   listUl,
   microsoft,
@@ -269,6 +271,7 @@ const icons = {
   checkLg,
   chevronDoubleLeft,
   chevronDoubleRight,
+  chevronRight,
   clipboard,
   clipboardCheck,
   clipboardCheckFill,
@@ -308,6 +311,7 @@ const icons = {
   infoCircle,
   journals,
   link,
+  listNested,
   listTask,
   listUl,
   microsoft,
@@ -1,6 +1,7 @@
 from django.conf import settings
 from django.contrib import admin
 from guardian.admin import GuardedModelAdmin
+from treenode.admin import TreeNodeModelAdmin

 from documents.models import Correspondent
 from documents.models import CustomField
@@ -14,6 +15,7 @@ from documents.models import SavedViewFilterRule
 from documents.models import ShareLink
 from documents.models import StoragePath
 from documents.models import Tag
+from documents.tasks import update_document_parent_tags

 if settings.AUDIT_LOG_ENABLED:
     from auditlog.admin import LogEntryAdmin
@@ -26,12 +28,25 @@ class CorrespondentAdmin(GuardedModelAdmin):
     list_editable = ("match", "matching_algorithm")


-class TagAdmin(GuardedModelAdmin):
+class TagAdmin(GuardedModelAdmin, TreeNodeModelAdmin):
     list_display = ("name", "color", "match", "matching_algorithm")
     list_filter = ("matching_algorithm",)
     list_editable = ("color", "match", "matching_algorithm")
     search_fields = ("color", "name")
+
+    def save_model(self, request, obj, form, change):
+        old_parent = None
+        if change and obj.pk:
+            tag = Tag.objects.get(pk=obj.pk)
+            old_parent = tag.get_parent() if tag else None
+
+        super().save_model(request, obj, form, change)
+
+        # sync parent tags on documents if changed
+        new_parent = obj.get_parent()
+        if new_parent and old_parent != new_parent:
+            update_document_parent_tags(obj, new_parent)


 class DocumentTypeAdmin(GuardedModelAdmin):
     list_display = ("name", "match", "matching_algorithm")
@@ -1,7 +1,6 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import hashlib
|
import hashlib
|
||||||
import itertools
|
|
||||||
import logging
|
import logging
|
||||||
import tempfile
|
import tempfile
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
@@ -13,6 +12,7 @@ from celery import chord
|
|||||||
from celery import group
|
from celery import group
|
||||||
from celery import shared_task
|
from celery import shared_task
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
|
from django.db import transaction
|
||||||
from django.db.models import Q
|
from django.db.models import Q
|
||||||
from django.utils import timezone
|
from django.utils import timezone
|
||||||
|
|
||||||
@@ -25,6 +25,7 @@ from documents.models import CustomFieldInstance
|
|||||||
from documents.models import Document
|
from documents.models import Document
|
||||||
from documents.models import DocumentType
|
from documents.models import DocumentType
|
||||||
from documents.models import StoragePath
|
from documents.models import StoragePath
|
||||||
|
from documents.models import Tag
|
||||||
from documents.permissions import set_permissions_for_object
|
from documents.permissions import set_permissions_for_object
|
||||||
from documents.plugins.helpers import DocumentsStatusManager
|
from documents.plugins.helpers import DocumentsStatusManager
|
||||||
from documents.tasks import bulk_update_documents
|
from documents.tasks import bulk_update_documents
|
||||||
@@ -96,31 +97,45 @@ def set_document_type(doc_ids: list[int], document_type: DocumentType) -> Litera


 def add_tag(doc_ids: list[int], tag: int) -> Literal["OK"]:
-    qs = Document.objects.filter(Q(id__in=doc_ids) & ~Q(tags__id=tag)).only("pk")
-    affected_docs = list(qs.values_list("pk", flat=True))
+    tag_obj = Tag.objects.get(pk=tag)
+    tags_to_add = [tag_obj, *tag_obj.get_ancestors()]

     DocumentTagRelationship = Document.tags.through
+    to_create = []
+    affected_docs: set[int] = set()

-    DocumentTagRelationship.objects.bulk_create(
-        [DocumentTagRelationship(document_id=doc, tag_id=tag) for doc in affected_docs],
-    )
+    for t in tags_to_add:
+        qs = Document.objects.filter(Q(id__in=doc_ids) & ~Q(tags__id=t.id)).only("pk")
+        doc_ids_missing_tag = list(qs.values_list("pk", flat=True))
+        affected_docs.update(doc_ids_missing_tag)
+        to_create.extend(
+            DocumentTagRelationship(document_id=doc, tag_id=t.id)
+            for doc in doc_ids_missing_tag
+        )

-    bulk_update_documents.delay(document_ids=affected_docs)
+    if to_create:
+        DocumentTagRelationship.objects.bulk_create(to_create)
+
+    if affected_docs:
+        bulk_update_documents.delay(document_ids=list(affected_docs))

     return "OK"


 def remove_tag(doc_ids: list[int], tag: int) -> Literal["OK"]:
-    qs = Document.objects.filter(Q(id__in=doc_ids) & Q(tags__id=tag)).only("pk")
-    affected_docs = list(qs.values_list("pk", flat=True))
+    tag_obj = Tag.objects.get(pk=tag)
+    tag_ids = [tag_obj.id, *tag_obj.get_descendants_pks()]

     DocumentTagRelationship = Document.tags.through
+    qs = DocumentTagRelationship.objects.filter(
+        document_id__in=doc_ids,
+        tag_id__in=tag_ids,
+    )
+    affected_docs = list(qs.values_list("document_id", flat=True).distinct())
+    qs.delete()

-    DocumentTagRelationship.objects.filter(
-        Q(document_id__in=affected_docs) & Q(tag_id=tag),
-    ).delete()
-
-    bulk_update_documents.delay(document_ids=affected_docs)
+    if affected_docs:
+        bulk_update_documents.delay(document_ids=affected_docs)

     return "OK"

@@ -132,23 +147,57 @@ def modify_tags(
 ) -> Literal["OK"]:
     qs = Document.objects.filter(id__in=doc_ids).only("pk")
     affected_docs = list(qs.values_list("pk", flat=True))

     DocumentTagRelationship = Document.tags.through

-    DocumentTagRelationship.objects.filter(
-        document_id__in=affected_docs,
-        tag_id__in=remove_tags,
-    ).delete()
+    # add with all ancestors
+    expanded_add_tags: set[int] = set()
+    add_tag_objects = Tag.objects.filter(pk__in=add_tags)
+    for t in add_tag_objects:
+        expanded_add_tags.add(int(t.id))
+        expanded_add_tags.update(int(pk) for pk in t.get_ancestors_pks())

-    DocumentTagRelationship.objects.bulk_create(
-        [
-            DocumentTagRelationship(document_id=doc, tag_id=tag)
-            for (doc, tag) in itertools.product(affected_docs, add_tags)
-        ],
-        ignore_conflicts=True,
-    )
+    # remove with all descendants
+    expanded_remove_tags: set[int] = set()
+    remove_tag_objects = Tag.objects.filter(pk__in=remove_tags)
+    for t in remove_tag_objects:
+        expanded_remove_tags.add(int(t.id))
+        expanded_remove_tags.update(int(pk) for pk in t.get_descendants_pks())

-    bulk_update_documents.delay(document_ids=affected_docs)
+    try:
+        with transaction.atomic():
+            if expanded_remove_tags:
+                DocumentTagRelationship.objects.filter(
+                    document_id__in=affected_docs,
+                    tag_id__in=expanded_remove_tags,
+                ).delete()
+
+            to_create = []
+            if expanded_add_tags:
+                existing_pairs = set(
+                    DocumentTagRelationship.objects.filter(
+                        document_id__in=affected_docs,
+                        tag_id__in=expanded_add_tags,
+                    ).values_list("document_id", "tag_id"),
+                )
+
+                to_create = [
+                    DocumentTagRelationship(document_id=doc, tag_id=tag)
+                    for doc in affected_docs
+                    for tag in expanded_add_tags
+                    if (doc, tag) not in existing_pairs
+                ]
+
+            if to_create:
+                DocumentTagRelationship.objects.bulk_create(
+                    to_create,
+                    ignore_conflicts=True,
+                )
+
+        if affected_docs:
+            bulk_update_documents.delay(document_ids=affected_docs)
+    except Exception as e:
+        logger.error(f"Error modifying tags: {e}")
+        return "ERROR"

     return "OK"

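A minimal usage sketch of the hierarchy-aware bulk edits above, assuming a parent/child pair linked through django-treenode's tn_parent field; the tag and document objects here are illustrative and not part of the change:

    finance = Tag.objects.create(name="finance")
    invoice = Tag.objects.create(name="invoice", tn_parent=finance)

    # adding the child now also attaches its ancestor to every affected document
    bulk_edit.add_tag([doc.pk], invoice.pk)      # doc ends up with {finance, invoice}

    # removing the parent strips the tag and all of its descendants
    bulk_edit.remove_tag([doc.pk], finance.pk)   # doc ends up with no tags
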
@@ -689,7 +689,7 @@ class ConsumerPlugin(

         if self.metadata.tag_ids:
             for tag_id in self.metadata.tag_ids:
-                document.tags.add(Tag.objects.get(pk=tag_id))
+                document.add_nested_tags([Tag.objects.get(pk=tag_id)])

         if self.metadata.storage_path_id:
             document.storage_path = StoragePath.objects.get(
@@ -17,7 +17,7 @@ def move_sender_strings_to_sender_model(apps, schema_editor):
         if document.sender:
             (
                 DOCUMENT_SENDER_MAP[document.pk],
-                created,
+                _,
             ) = sender_model.objects.get_or_create(
                 name=document.sender,
                 defaults={"slug": slugify(document.sender)},
@@ -0,0 +1,159 @@
+# Generated by Django 5.2.6 on 2025-09-12 18:42
+
+import django.core.validators
+import django.db.models.deletion
+from django.db import migrations
+from django.db import models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("documents", "1070_customfieldinstance_value_long_text_and_more"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="tag",
+            name="tn_ancestors_count",
+            field=models.PositiveIntegerField(
+                default=0,
+                editable=False,
+                verbose_name="Ancestors count",
+            ),
+        ),
+        migrations.AddField(
+            model_name="tag",
+            name="tn_ancestors_pks",
+            field=models.TextField(
+                blank=True,
+                default="",
+                editable=False,
+                verbose_name="Ancestors pks",
+            ),
+        ),
+        migrations.AddField(
+            model_name="tag",
+            name="tn_children_count",
+            field=models.PositiveIntegerField(
+                default=0,
+                editable=False,
+                verbose_name="Children count",
+            ),
+        ),
+        migrations.AddField(
+            model_name="tag",
+            name="tn_children_pks",
+            field=models.TextField(
+                blank=True,
+                default="",
+                editable=False,
+                verbose_name="Children pks",
+            ),
+        ),
+        migrations.AddField(
+            model_name="tag",
+            name="tn_depth",
+            field=models.PositiveIntegerField(
+                default=0,
+                editable=False,
+                validators=[
+                    django.core.validators.MinValueValidator(0),
+                    django.core.validators.MaxValueValidator(10),
+                ],
+                verbose_name="Depth",
+            ),
+        ),
+        migrations.AddField(
+            model_name="tag",
+            name="tn_descendants_count",
+            field=models.PositiveIntegerField(
+                default=0,
+                editable=False,
+                verbose_name="Descendants count",
+            ),
+        ),
+        migrations.AddField(
+            model_name="tag",
+            name="tn_descendants_pks",
+            field=models.TextField(
+                blank=True,
+                default="",
+                editable=False,
+                verbose_name="Descendants pks",
+            ),
+        ),
+        migrations.AddField(
+            model_name="tag",
+            name="tn_index",
+            field=models.PositiveIntegerField(
+                default=0,
+                editable=False,
+                verbose_name="Index",
+            ),
+        ),
+        migrations.AddField(
+            model_name="tag",
+            name="tn_level",
+            field=models.PositiveIntegerField(
+                default=1,
+                editable=False,
+                validators=[
+                    django.core.validators.MinValueValidator(1),
+                    django.core.validators.MaxValueValidator(10),
+                ],
+                verbose_name="Level",
+            ),
+        ),
+        migrations.AddField(
+            model_name="tag",
+            name="tn_order",
+            field=models.PositiveIntegerField(
+                default=0,
+                editable=False,
+                verbose_name="Order",
+            ),
+        ),
+        migrations.AddField(
+            model_name="tag",
+            name="tn_parent",
+            field=models.ForeignKey(
+                blank=True,
+                null=True,
+                on_delete=django.db.models.deletion.CASCADE,
+                related_name="tn_children",
+                to="documents.tag",
+                verbose_name="Parent",
+            ),
+        ),
+        migrations.AddField(
+            model_name="tag",
+            name="tn_priority",
+            field=models.PositiveIntegerField(
+                default=0,
+                validators=[
+                    django.core.validators.MinValueValidator(0),
+                    django.core.validators.MaxValueValidator(9999999999),
+                ],
+                verbose_name="Priority",
+            ),
+        ),
+        migrations.AddField(
+            model_name="tag",
+            name="tn_siblings_count",
+            field=models.PositiveIntegerField(
+                default=0,
+                editable=False,
+                verbose_name="Siblings count",
+            ),
+        ),
+        migrations.AddField(
+            model_name="tag",
+            name="tn_siblings_pks",
+            field=models.TextField(
+                blank=True,
+                default="",
+                editable=False,
+                verbose_name="Siblings pks",
+            ),
+        ),
+    ]
|
@@ -7,12 +7,14 @@ from celery import states
|
|||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.contrib.auth.models import Group
|
from django.contrib.auth.models import Group
|
||||||
from django.contrib.auth.models import User
|
from django.contrib.auth.models import User
|
||||||
|
from django.core.exceptions import ValidationError
|
||||||
from django.core.validators import MaxValueValidator
|
from django.core.validators import MaxValueValidator
|
||||||
from django.core.validators import MinValueValidator
|
from django.core.validators import MinValueValidator
|
||||||
from django.db import models
|
from django.db import models
|
||||||
from django.utils import timezone
|
from django.utils import timezone
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
from multiselectfield import MultiSelectField
|
from multiselectfield import MultiSelectField
|
||||||
|
from treenode.models import TreeNodeModel
|
||||||
|
|
||||||
if settings.AUDIT_LOG_ENABLED:
|
if settings.AUDIT_LOG_ENABLED:
|
||||||
from auditlog.registry import auditlog
|
from auditlog.registry import auditlog
|
||||||
@@ -96,8 +98,10 @@ class Correspondent(MatchingModel):
         verbose_name_plural = _("correspondents")


-class Tag(MatchingModel):
+class Tag(MatchingModel, TreeNodeModel):
     color = models.CharField(_("color"), max_length=7, default="#a6cee3")
+    # Maximum allowed nesting depth for tags (root = 1, max depth = 5)
+    MAX_NESTING_DEPTH: Final[int] = 5

     is_inbox_tag = models.BooleanField(
         _("is inbox tag"),
@@ -108,10 +112,30 @@ class Tag(MatchingModel):
         ),
     )

-    class Meta(MatchingModel.Meta):
+    class Meta(MatchingModel.Meta, TreeNodeModel.Meta):
         verbose_name = _("tag")
         verbose_name_plural = _("tags")

+    def clean(self):
+        # Prevent self-parenting and assigning a descendant as parent
+        parent = self.get_parent()
+        if parent == self:
+            raise ValidationError({"parent": _("Cannot set itself as parent.")})
+        if parent and self.pk is not None and self.is_ancestor_of(parent):
+            raise ValidationError({"parent": _("Cannot set parent to a descendant.")})
+
+        # Enforce maximum nesting depth
+        new_parent_depth = 0
+        if parent:
+            new_parent_depth = parent.get_ancestors_count() + 1
+
+        height = 0 if self.pk is None else self.get_depth()
+        deepest_new_depth = (new_parent_depth + 1) + height
+        if deepest_new_depth > self.MAX_NESTING_DEPTH:
+            raise ValidationError(_("Maximum nesting depth exceeded."))
+
+        return super().clean()
+

 class DocumentType(MatchingModel):
     class Meta(MatchingModel.Meta):
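Reading the depth arithmetic in clean() above: with MAX_NESTING_DEPTH at 5, moving a tag that itself has one level of children (height 1) under a parent that already has three ancestors gives deepest_new_depth = (4 + 1) + 1 = 6 and is rejected, while placing the same tag under a root-level parent gives (1 + 1) + 1 = 3 and passes.
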
@@ -398,6 +422,15 @@ class Document(SoftDeleteModel, ModelWithOwner):
     def created_date(self):
         return self.created

+    def add_nested_tags(self, tags):
+        tag_ids = set()
+        for tag in tags:
+            tag_ids.add(tag.id)
+            tag_ids.update(tag.get_ancestors_pks())
+
+        tags_to_add = self.tags.model.objects.filter(id__in=tag_ids)
+        self.tags.add(*tags_to_add)
+

 class SavedView(ModelWithOwner):
     class DisplayMode(models.TextChoices):
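A short usage sketch of the helper added above, reusing the illustrative parent/child tags from the earlier example:

    # attaches the child and every ancestor in a single call
    document.add_nested_tags([invoice])
    assert set(document.tags.all()) == {finance, invoice}
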
@@ -13,6 +13,7 @@ from django.conf import settings
 from django.contrib.auth.models import Group
 from django.contrib.auth.models import User
 from django.contrib.contenttypes.models import ContentType
+from django.core.exceptions import ValidationError
 from django.core.validators import DecimalValidator
 from django.core.validators import MaxLengthValidator
 from django.core.validators import RegexValidator
@@ -540,6 +541,32 @@ class TagSerializer(MatchingModelSerializer, OwnedObjectSerializer):

     text_color = serializers.SerializerMethodField()

+    # map to treenode's tn_parent
+    parent = serializers.PrimaryKeyRelatedField(
+        queryset=Tag.objects.all(),
+        allow_null=True,
+        required=False,
+        source="tn_parent",
+    )
+
+    @extend_schema_field(
+        field=serializers.ListSerializer(
+            child=serializers.PrimaryKeyRelatedField(
+                queryset=Tag.objects.all(),
+            ),
+        ),
+    )
+    def get_children(self, obj):
+        serializer = TagSerializer(
+            obj.get_children(),
+            many=True,
+            context=self.context,
+        )
+        return serializer.data
+
+    # children as nested Tag objects
+    children = serializers.SerializerMethodField()
+
     class Meta:
         model = Tag
         fields = (
@@ -557,6 +584,8 @@ class TagSerializer(MatchingModelSerializer, OwnedObjectSerializer):
             "permissions",
             "user_can_change",
             "set_permissions",
+            "parent",
+            "children",
         )

     def validate_color(self, color):
@@ -565,6 +594,36 @@ class TagSerializer(MatchingModelSerializer, OwnedObjectSerializer):
             raise serializers.ValidationError(_("Invalid color."))
         return color

+    def validate(self, attrs):
+        # Validate when changing parent
+        parent = attrs.get(
+            "tn_parent",
+            self.instance.get_parent() if self.instance else None,
+        )
+
+        if self.instance:
+            # Temporarily set parent on the instance if updating and use model clean()
+            original_parent = self.instance.get_parent()
+            try:
+                # Temporarily set tn_parent in-memory to validate clean()
+                self.instance.tn_parent = parent
+                self.instance.clean()
+            except ValidationError as e:
+                logger.debug("Tag parent validation failed: %s", e)
+                raise e
+            finally:
+                self.instance.tn_parent = original_parent
+        else:
+            # For new instances, create a transient Tag and validate
+            temp = Tag(tn_parent=parent)
+            try:
+                temp.clean()
+            except ValidationError as e:
+                logger.debug("Tag parent validation failed: %s", e)
+                raise serializers.ValidationError({"parent": _("Invalid parent tag.")})
+
+        return super().validate(attrs)
+

 class CorrespondentField(serializers.PrimaryKeyRelatedField):
     def get_queryset(self):
@@ -1028,6 +1087,28 @@ class DocumentSerializer(
                     custom_field_instance.field,
                     doc_id,
                 )
+        if "tags" in validated_data:
+            # Respect tag hierarchy on updates:
+            # - Adding a child adds its ancestors
+            # - Removing a parent removes all its descendants
+            prev_tags = set(instance.tags.all())
+            requested_tags = set(validated_data["tags"])
+
+            # Tags being removed in this update and all descendants
+            removed_tags = prev_tags - requested_tags
+            blocked_tags = set(removed_tags)
+            for t in removed_tags:
+                blocked_tags.update(t.get_descendants())
+
+            # Add all parent tags
+            final_tags = set(requested_tags)
+            for t in requested_tags:
+                final_tags.update(t.get_ancestors())
+
+            # Drop removed parents and their descendants
+            final_tags.difference_update(blocked_tags)
+
+            validated_data["tags"] = list(final_tags)
         if validated_data.get("remove_inbox_tags"):
             tag_ids_being_added = (
                 [
@@ -1668,9 +1749,8 @@ class PostDocumentSerializer(serializers.Serializer):
         max_value=Document.ARCHIVE_SERIAL_NUMBER_MAX,
     )

-    custom_fields = serializers.PrimaryKeyRelatedField(
-        many=True,
-        queryset=CustomField.objects.all(),
+    # Accept either a list of custom field ids or a dict mapping id -> value
+    custom_fields = serializers.JSONField(
         label="Custom fields",
         write_only=True,
         required=False,
@@ -1727,11 +1807,60 @@ class PostDocumentSerializer(serializers.Serializer):
         return None

     def validate_custom_fields(self, custom_fields):
-        if custom_fields:
-            return [custom_field.id for custom_field in custom_fields]
-        else:
+        if not custom_fields:
             return None
+
+        # Normalize single values to a list
+        if isinstance(custom_fields, int):
+            custom_fields = [custom_fields]
+        if isinstance(custom_fields, dict):
+            custom_field_serializer = CustomFieldInstanceSerializer()
+            normalized = {}
+            for field_id, value in custom_fields.items():
+                try:
+                    field_id_int = int(field_id)
+                except (TypeError, ValueError):
+                    raise serializers.ValidationError(
+                        _("Custom field id must be an integer: %(id)s")
+                        % {"id": field_id},
+                    )
+                try:
+                    field = CustomField.objects.get(id=field_id_int)
+                except CustomField.DoesNotExist:
+                    raise serializers.ValidationError(
+                        _("Custom field with id %(id)s does not exist")
+                        % {"id": field_id_int},
+                    )
+                custom_field_serializer.validate(
+                    {
+                        "field": field,
+                        "value": value,
+                    },
+                )
+                normalized[field_id_int] = value
+            return normalized
+        elif isinstance(custom_fields, list):
+            try:
+                ids = [int(i) for i in custom_fields]
+            except (TypeError, ValueError):
+                raise serializers.ValidationError(
+                    _(
+                        "Custom fields must be a list of integers or an object mapping ids to values.",
+                    ),
+                )
+            if CustomField.objects.filter(id__in=ids).count() != len(set(ids)):
+                raise serializers.ValidationError(
+                    _("Some custom fields don't exist or were specified twice."),
+                )
+            return ids
+        raise serializers.ValidationError(
+            _(
+                "Custom fields must be a list of integers or an object mapping ids to values.",
+            ),
+        )
+
+    # custom_fields_w_values handled via validate_custom_fields

     def validate_created(self, created):
         # support datetime format for created for backwards compatibility
         if isinstance(created, datetime):
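For reference, the two payload shapes the rewritten validator accepts on upload; this is a hypothetical client call (the token, file name, and field ids are placeholders, not part of the change):

    import json
    import requests

    url = "http://localhost:8000/api/documents/post_document/"
    headers = {"Authorization": "Token <token>"}

    with open("simple.pdf", "rb") as f:
        # 1) a plain list of custom field ids (fields attached without values)
        requests.post(url, headers=headers, files={"document": f},
                      data={"custom_fields": json.dumps([1, 2])})

    with open("simple.pdf", "rb") as f:
        # 2) a mapping of id -> value, each value validated against the field's data type
        requests.post(url, headers=headers, files={"document": f},
                      data={"custom_fields": json.dumps({"1": "a string", "2": 123})})
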
@@ -73,7 +73,7 @@ def add_inbox_tags(sender, document: Document, logging_group=None, **kwargs):
     else:
         tags = Tag.objects.all()
         inbox_tags = tags.filter(is_inbox_tag=True)
-        document.tags.add(*inbox_tags)
+        document.add_nested_tags(inbox_tags)


 def _suggestion_printer(
@@ -262,7 +262,7 @@ def set_tags(
         extra={"group": logging_group},
     )

-    document.tags.add(*relevant_tags)
+    document.add_nested_tags(relevant_tags)


 def set_storage_path(
@@ -778,14 +778,17 @@ def run_workflows(

     def assignment_action():
         if action.assign_tags.exists():
+            tag_ids_to_add: set[int] = set()
+            for tag in action.assign_tags.all():
+                tag_ids_to_add.add(tag.pk)
+                tag_ids_to_add.update(int(pk) for pk in tag.get_ancestors_pks())
+
             if not use_overrides:
-                doc_tag_ids.extend(action.assign_tags.values_list("pk", flat=True))
+                doc_tag_ids[:] = list(set(doc_tag_ids) | tag_ids_to_add)
             else:
                 if overrides.tag_ids is None:
                     overrides.tag_ids = []
-                overrides.tag_ids.extend(
-                    action.assign_tags.values_list("pk", flat=True),
-                )
+                overrides.tag_ids = list(set(overrides.tag_ids) | tag_ids_to_add)

         if action.assign_correspondent:
             if not use_overrides:
@@ -928,14 +931,17 @@ def run_workflows(
             else:
                 overrides.tag_ids = None
         else:
+            tag_ids_to_remove: set[int] = set()
+            for tag in action.remove_tags.all():
+                tag_ids_to_remove.add(tag.pk)
+                tag_ids_to_remove.update(int(pk) for pk in tag.get_descendants_pks())
+
             if not use_overrides:
-                for tag in action.remove_tags.filter(
-                    pk__in=document.tags.values_list("pk", flat=True),
-                ):
-                    doc_tag_ids.remove(tag.pk)
+                doc_tag_ids[:] = [t for t in doc_tag_ids if t not in tag_ids_to_remove]
             elif overrides.tag_ids:
-                for tag in action.remove_tags.filter(pk__in=overrides.tag_ids):
-                    overrides.tag_ids.remove(tag.pk)
+                overrides.tag_ids = [
+                    t for t in overrides.tag_ids if t not in tag_ids_to_remove
+                ]

         if not use_overrides and (
             action.remove_all_correspondents
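In effect, an assignment action that lists only a child tag now propagates its ancestors, and a removal aimed at a parent also drops its descendants. A sketch mirroring the new test file further below:

    assign_action.assign_tags.add(child)          # child has tn_parent=parent
    run_workflows(trigger.type, document)
    # document now carries both parent and child

    removal_action.remove_tags.add(parent)
    run_workflows(trigger.type, document)
    # both tags are removed again
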
@@ -532,6 +532,54 @@ def check_scheduled_workflows():
     )


+def update_document_parent_tags(tag: Tag, new_parent: Tag) -> None:
+    """
+    When a tag's parent changes, ensure all documents containing the tag also have
+    the parent tag (and its ancestors) applied.
+    """
+    doc_tag_relationship = Document.tags.through
+
+    doc_ids: list[int] = list(
+        Document.objects.filter(tags=tag).values_list("pk", flat=True),
+    )
+
+    if not doc_ids:
+        return
+
+    parent_ids = [new_parent.id, *new_parent.get_ancestors_pks()]
+
+    parent_ids = list(dict.fromkeys(parent_ids))
+
+    existing_pairs = set(
+        doc_tag_relationship.objects.filter(
+            document_id__in=doc_ids,
+            tag_id__in=parent_ids,
+        ).values_list("document_id", "tag_id"),
+    )
+
+    to_create: list = []
+    affected: set[int] = set()
+
+    for doc_id in doc_ids:
+        for parent_id in parent_ids:
+            if (doc_id, parent_id) in existing_pairs:
+                continue
+
+            to_create.append(
+                doc_tag_relationship(document_id=doc_id, tag_id=parent_id),
+            )
+            affected.add(doc_id)
+
+    if to_create:
+        doc_tag_relationship.objects.bulk_create(
+            to_create,
+            ignore_conflicts=True,
+        )
+
+    if affected:
+        bulk_update_documents.delay(document_ids=list(affected))
+
+
 @shared_task
 def llmindex_index(
     *,
@@ -1,4 +1,5 @@
 import types
+from unittest.mock import patch

 from django.contrib.admin.sites import AdminSite
 from django.contrib.auth.models import User
@@ -7,7 +8,9 @@ from django.utils import timezone

 from documents import index
 from documents.admin import DocumentAdmin
+from documents.admin import TagAdmin
 from documents.models import Document
+from documents.models import Tag
 from documents.tests.utils import DirectoriesMixin
 from paperless.admin import PaperlessUserAdmin

@@ -70,6 +73,24 @@ class TestDocumentAdmin(DirectoriesMixin, TestCase):
         self.assertEqual(self.doc_admin.created_(doc), "2020-04-12")


+class TestTagAdmin(DirectoriesMixin, TestCase):
+    def setUp(self) -> None:
+        super().setUp()
+        self.tag_admin = TagAdmin(model=Tag, admin_site=AdminSite())
+
+    @patch("documents.tasks.bulk_update_documents")
+    def test_parent_tags_get_added(self, mock_bulk_update):
+        document = Document.objects.create(title="test")
+        parent = Tag.objects.create(name="parent")
+        child = Tag.objects.create(name="child")
+        document.tags.add(child)
+
+        child.tn_parent = parent
+        self.tag_admin.save_model(None, child, None, change=True)
+        document.refresh_from_db()
+        self.assertIn(parent, document.tags.all())
+
+
 class TestPaperlessAdmin(DirectoriesMixin, TestCase):
     def setUp(self) -> None:
         super().setUp()
@@ -839,7 +839,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)

         m.assert_called()
-        args, kwargs = m.call_args
+        _, kwargs = m.call_args
         self.assertEqual(kwargs["merge"], False)

         response = self.client.post(
@@ -857,7 +857,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
         self.assertEqual(response.status_code, status.HTTP_200_OK)

         m.assert_called()
-        args, kwargs = m.call_args
+        _, kwargs = m.call_args
         self.assertEqual(kwargs["merge"], True)

     @mock.patch("documents.serialisers.bulk_edit.set_storage_path")
@@ -1,4 +1,5 @@
 import datetime
+import json
 import shutil
 import tempfile
 import uuid
@@ -1528,7 +1529,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):

         input_doc, overrides = self.get_last_consume_delay_call_args()

-        new_overrides, msg = run_workflows(
+        new_overrides, _ = run_workflows(
             trigger_type=WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
             document=input_doc,
             logging_group=None,
@@ -1537,6 +1538,86 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
         overrides.update(new_overrides)
         self.assertEqual(overrides.custom_fields, {cf.id: None, cf2.id: 123})

+    def test_upload_with_custom_field_values(self):
+        """
+        GIVEN: A document with a source file
+        WHEN: Upload the document with custom fields and values
+        THEN: Metadata is set correctly
+        """
+        self.consume_file_mock.return_value = celery.result.AsyncResult(
+            id=str(uuid.uuid4()),
+        )
+
+        cf_string = CustomField.objects.create(
+            name="stringfield",
+            data_type=CustomField.FieldDataType.STRING,
+        )
+        cf_int = CustomField.objects.create(
+            name="intfield",
+            data_type=CustomField.FieldDataType.INT,
+        )
+
+        with (Path(__file__).parent / "samples" / "simple.pdf").open("rb") as f:
+            response = self.client.post(
+                "/api/documents/post_document/",
+                {
+                    "document": f,
+                    "custom_fields": json.dumps(
+                        {
+                            str(cf_string.id): "a string",
+                            str(cf_int.id): 123,
+                        },
+                    ),
+                },
+            )
+
+        self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+        self.consume_file_mock.assert_called_once()
+
+        input_doc, overrides = self.get_last_consume_delay_call_args()
+
+        self.assertEqual(input_doc.original_file.name, "simple.pdf")
+        self.assertEqual(overrides.filename, "simple.pdf")
+        self.assertEqual(
+            overrides.custom_fields,
+            {cf_string.id: "a string", cf_int.id: 123},
+        )
+
+    def test_upload_with_custom_fields_errors(self):
+        """
+        GIVEN: A document with a source file
+        WHEN: Upload the document with invalid custom fields payloads
+        THEN: The upload is rejected
+        """
+        self.consume_file_mock.return_value = celery.result.AsyncResult(
+            id=str(uuid.uuid4()),
+        )
+
+        error_payloads = [
+            # Non-integer key in mapping
+            {"custom_fields": json.dumps({"abc": "a string"})},
+            # List with non-integer entry
+            {"custom_fields": json.dumps(["abc"])},
+            # Nonexistent id in mapping
+            {"custom_fields": json.dumps({99999999: "a string"})},
+            # Nonexistent id in list
+            {"custom_fields": json.dumps([99999999])},
+            # Invalid type (JSON string, not list/dict/int)
+            {"custom_fields": json.dumps("not-a-supported-structure")},
+        ]
+
+        for payload in error_payloads:
+            with (Path(__file__).parent / "samples" / "simple.pdf").open("rb") as f:
+                data = {"document": f, **payload}
+                response = self.client.post(
+                    "/api/documents/post_document/",
+                    data,
+                )
+                self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
+            self.consume_file_mock.assert_not_called()
+
     def test_upload_with_webui_source(self):
         """
         GIVEN: A document with a source file
@@ -1557,7 +1638,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):

         self.consume_file_mock.assert_called_once()

-        input_doc, overrides = self.get_last_consume_delay_call_args()
+        input_doc, _ = self.get_last_consume_delay_call_args()

         self.assertEqual(input_doc.source, WorkflowTrigger.DocumentSourceChoices.WEB_UI)

@@ -74,7 +74,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         )
         self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 3)
         self.async_task.assert_called_once()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])

     def test_unset_correspondent(self):
@@ -82,7 +82,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         bulk_edit.set_correspondent([self.doc1.id, self.doc2.id, self.doc3.id], None)
         self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 0)
         self.async_task.assert_called_once()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc2.id, self.doc3.id])

     def test_set_document_type(self):
@@ -93,7 +93,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         )
         self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 3)
         self.async_task.assert_called_once()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])

     def test_unset_document_type(self):
@@ -101,7 +101,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         bulk_edit.set_document_type([self.doc1.id, self.doc2.id, self.doc3.id], None)
         self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 0)
         self.async_task.assert_called_once()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc2.id, self.doc3.id])

     def test_set_document_storage_path(self):
@@ -123,7 +123,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         self.assertEqual(Document.objects.filter(storage_path=None).count(), 4)

         self.async_task.assert_called_once()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args

         self.assertCountEqual(kwargs["document_ids"], [self.doc1.id])

@@ -154,7 +154,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         self.assertEqual(Document.objects.filter(storage_path=None).count(), 5)

         self.async_task.assert_called()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args

         self.assertCountEqual(kwargs["document_ids"], [self.doc1.id])

@@ -166,7 +166,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         )
         self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 4)
         self.async_task.assert_called_once()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc3.id])

     def test_remove_tag(self):
@@ -174,7 +174,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         bulk_edit.remove_tag([self.doc1.id, self.doc3.id, self.doc4.id], self.t1.id)
         self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 1)
         self.async_task.assert_called_once()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc4.id])

     def test_modify_tags(self):
@@ -191,7 +191,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         self.assertCountEqual(list(self.doc3.tags.all()), [self.t2, tag_unrelated])

         self.async_task.assert_called_once()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args
         # TODO: doc3 should not be affected, but the query for that is rather complicated
         self.assertCountEqual(kwargs["document_ids"], [self.doc2.id, self.doc3.id])

@@ -248,7 +248,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         )

         self.async_task.assert_called_once()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])

     def test_modify_custom_fields_with_values(self):
@@ -325,7 +325,7 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
         )

         self.async_task.assert_called_once()
-        args, kwargs = self.async_task.call_args
+        _, kwargs = self.async_task.call_args
         self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])

         # removal of document link cf, should also remove symmetric link
@@ -123,14 +123,14 @@ class TestRetagger(DirectoriesMixin, TestCase):

     def test_add_type(self):
         call_command("document_retagger", "--document_type")
-        d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
+        d_first, d_second, _, _ = self.get_updated_docs()

         self.assertEqual(d_first.document_type, self.doctype_first)
         self.assertEqual(d_second.document_type, self.doctype_second)

     def test_add_correspondent(self):
         call_command("document_retagger", "--correspondent")
-        d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
+        d_first, d_second, _, _ = self.get_updated_docs()

         self.assertEqual(d_first.correspondent, self.correspondent_first)
         self.assertEqual(d_second.correspondent, self.correspondent_second)
@@ -160,7 +160,7 @@ class TestRetagger(DirectoriesMixin, TestCase):

     def test_add_tags_suggest(self):
         call_command("document_retagger", "--tags", "--suggest")
-        d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
+        d_first, d_second, _, d_auto = self.get_updated_docs()

         self.assertEqual(d_first.tags.count(), 0)
         self.assertEqual(d_second.tags.count(), 0)
@@ -168,14 +168,14 @@ class TestRetagger(DirectoriesMixin, TestCase):

     def test_add_type_suggest(self):
         call_command("document_retagger", "--document_type", "--suggest")
-        d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
+        d_first, d_second, _, _ = self.get_updated_docs()

         self.assertIsNone(d_first.document_type)
         self.assertIsNone(d_second.document_type)

     def test_add_correspondent_suggest(self):
         call_command("document_retagger", "--correspondent", "--suggest")
-        d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
+        d_first, d_second, _, _ = self.get_updated_docs()

         self.assertIsNone(d_first.correspondent)
         self.assertIsNone(d_second.correspondent)
@@ -187,7 +187,7 @@ class TestRetagger(DirectoriesMixin, TestCase):
             "--suggest",
             "--base-url=http://localhost",
         )
-        d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
+        d_first, d_second, _, d_auto = self.get_updated_docs()

         self.assertEqual(d_first.tags.count(), 0)
         self.assertEqual(d_second.tags.count(), 0)
@@ -200,7 +200,7 @@ class TestRetagger(DirectoriesMixin, TestCase):
             "--suggest",
             "--base-url=http://localhost",
         )
-        d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
+        d_first, d_second, _, _ = self.get_updated_docs()

         self.assertIsNone(d_first.document_type)
         self.assertIsNone(d_second.document_type)
@@ -212,7 +212,7 @@ class TestRetagger(DirectoriesMixin, TestCase):
             "--suggest",
             "--base-url=http://localhost",
         )
-        d_first, d_second, d_unrelated, d_auto = self.get_updated_docs()
+        d_first, d_second, _, _ = self.get_updated_docs()

         self.assertIsNone(d_first.correspondent)
         self.assertIsNone(d_second.correspondent)
@@ -4,6 +4,7 @@ import shutil
 from pathlib import Path
 from unittest import mock

+import pytest
 from django.conf import settings
 from django.test import override_settings

@@ -281,6 +282,7 @@ class TestMigrateArchiveFilesErrors(DirectoriesMixin, TestMigrations):
     migrate_to = "1012_fix_archive_files"
     auto_migrate = False

+    @pytest.mark.skip(reason="Fails with migration tearDown util. Needs investigation.")
     def test_archive_missing(self):
         Document = self.apps.get_model("documents", "Document")

@@ -300,6 +302,7 @@ class TestMigrateArchiveFilesErrors(DirectoriesMixin, TestMigrations):
             self.performMigration,
         )

+    @pytest.mark.skip(reason="Fails with migration tearDown util. Needs investigation.")
     def test_parser_missing(self):
         Document = self.apps.get_model("documents", "Document")

src/documents/tests/test_tag_hierarchy.py (new file, 205 lines)
@@ -0,0 +1,205 @@
+from unittest import mock
+
+from django.contrib.auth.models import User
+from rest_framework.test import APITestCase
+
+from documents import bulk_edit
+from documents.models import Document
+from documents.models import Tag
+from documents.models import Workflow
+from documents.models import WorkflowAction
+from documents.models import WorkflowTrigger
+from documents.signals.handlers import run_workflows
+
+
+class TestTagHierarchy(APITestCase):
+    def setUp(self):
+        self.user = User.objects.create_superuser(username="admin")
+        self.client.force_authenticate(user=self.user)
+
+        self.parent = Tag.objects.create(name="Parent")
+        self.child = Tag.objects.create(name="Child", tn_parent=self.parent)
+
+        patcher = mock.patch("documents.bulk_edit.bulk_update_documents.delay")
+        self.async_task = patcher.start()
+        self.addCleanup(patcher.stop)
+
+        self.document = Document.objects.create(
+            title="doc",
+            content="",
+            checksum="1",
+            mime_type="application/pdf",
+        )
+
+    def test_document_api_add_child_adds_parent(self):
+        self.client.patch(
+            f"/api/documents/{self.document.pk}/",
+            {"tags": [self.child.pk]},
+            format="json",
+        )
+        self.document.refresh_from_db()
+        tags = set(self.document.tags.values_list("pk", flat=True))
+        assert tags == {self.parent.pk, self.child.pk}
+
+    def test_document_api_remove_parent_removes_children(self):
+        self.document.add_nested_tags([self.parent, self.child])
+        self.client.patch(
+            f"/api/documents/{self.document.pk}/",
+            {"tags": [self.child.pk]},
+            format="json",
+        )
+        self.document.refresh_from_db()
+        assert self.document.tags.count() == 0
+
+    def test_document_api_remove_parent_removes_child(self):
+        self.document.add_nested_tags([self.child])
+        self.client.patch(
+            f"/api/documents/{self.document.pk}/",
+            {"tags": []},
+            format="json",
+        )
+        self.document.refresh_from_db()
+        assert self.document.tags.count() == 0
+
+    def test_bulk_edit_respects_hierarchy(self):
+        bulk_edit.add_tag([self.document.pk], self.child.pk)
+        self.document.refresh_from_db()
+        tags = set(self.document.tags.values_list("pk", flat=True))
+        assert tags == {self.parent.pk, self.child.pk}
+
+        bulk_edit.remove_tag([self.document.pk], self.parent.pk)
+        self.document.refresh_from_db()
+        assert self.document.tags.count() == 0
+
+        bulk_edit.modify_tags([self.document.pk], [self.child.pk], [])
+        self.document.refresh_from_db()
+        tags = set(self.document.tags.values_list("pk", flat=True))
+        assert tags == {self.parent.pk, self.child.pk}
+
+        bulk_edit.modify_tags([self.document.pk], [], [self.parent.pk])
+        self.document.refresh_from_db()
+        assert self.document.tags.count() == 0
+
+    def test_workflow_actions(self):
+        workflow = Workflow.objects.create(name="wf", order=0)
+        trigger = WorkflowTrigger.objects.create(
+            type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
+        )
+        assign_action = WorkflowAction.objects.create()
+        assign_action.assign_tags.add(self.child)
+        workflow.triggers.add(trigger)
+        workflow.actions.add(assign_action)
+
+        run_workflows(trigger.type, self.document)
+        self.document.refresh_from_db()
+        tags = set(self.document.tags.values_list("pk", flat=True))
+        assert tags == {self.parent.pk, self.child.pk}
+
+        # removal
+        removal_action = WorkflowAction.objects.create(
+            type=WorkflowAction.WorkflowActionType.REMOVAL,
+        )
+        removal_action.remove_tags.add(self.parent)
+        workflow.actions.clear()
+        workflow.actions.add(removal_action)
+
+        run_workflows(trigger.type, self.document)
+        self.document.refresh_from_db()
+        assert self.document.tags.count() == 0
+
+    def test_tag_view_parent_update_adds_parent_to_docs(self):
+        orphan = Tag.objects.create(name="Orphan")
+        self.document.tags.add(orphan)
+
+        self.client.patch(
+            f"/api/tags/{orphan.pk}/",
+            {"parent": self.parent.pk},
+            format="json",
+        )
+
+        self.document.refresh_from_db()
+        tags = set(self.document.tags.values_list("pk", flat=True))
+        assert tags == {self.parent.pk, orphan.pk}
+
+    def test_cannot_set_parent_to_self(self):
+        tag = Tag.objects.create(name="Selfie")
+        resp = self.client.patch(
+            f"/api/tags/{tag.pk}/",
+            {"parent": tag.pk},
+            format="json",
+        )
+        assert resp.status_code == 400
+        assert "Cannot set itself as parent" in str(resp.data["parent"])
+
+    def test_cannot_set_parent_to_descendant(self):
+        a = Tag.objects.create(name="A")
+        b = Tag.objects.create(name="B", tn_parent=a)
+        c = Tag.objects.create(name="C", tn_parent=b)
+
+        # Attempt to set A's parent to C (descendant) should fail
+        resp = self.client.patch(
+            f"/api/tags/{a.pk}/",
+            {"parent": c.pk},
+            format="json",
+        )
+        assert resp.status_code == 400
+        assert "Cannot set parent to a descendant" in str(resp.data["parent"])
+
+    def test_max_depth_on_create(self):
+        a = Tag.objects.create(name="A1")
+        b = Tag.objects.create(name="B1", tn_parent=a)
+        c = Tag.objects.create(name="C1", tn_parent=b)
+        d = Tag.objects.create(name="D1", tn_parent=c)
+
+        # Creating E under D yields depth 5: allowed
+        resp_ok = self.client.post(
+            "/api/tags/",
+            {"name": "E1", "parent": d.pk},
+            format="json",
+        )
+        assert resp_ok.status_code in (200, 201)
+        e_id = (
+            resp_ok.data["id"] if resp_ok.status_code == 201 else resp_ok.data.get("id")
+        )
+        assert e_id is not None
+
+        # Creating F under E would yield depth 6: rejected
+        resp_fail = self.client.post(
+            "/api/tags/",
+            {"name": "F1", "parent": e_id},
+            format="json",
+        )
+        assert resp_fail.status_code == 400
+        assert "parent" in resp_fail.data
+        assert "Invalid" in str(resp_fail.data["parent"])
+
+    def test_max_depth_on_move_subtree(self):
+        a = Tag.objects.create(name="A2")
+        b = Tag.objects.create(name="B2", tn_parent=a)
+        c = Tag.objects.create(name="C2", tn_parent=b)
+        d = Tag.objects.create(name="D2", tn_parent=c)
+
+        x = Tag.objects.create(name="X2")
+        y = Tag.objects.create(name="Y2", tn_parent=x)
+        assert y.parent_pk == x.pk
+
+        # Moving X under D would make deepest node Y exceed depth 5 -> reject
+        resp_fail = self.client.patch(
+            f"/api/tags/{x.pk}/",
+            {"parent": d.pk},
+            format="json",
+        )
+        assert resp_fail.status_code == 400
+        assert "Maximum nesting depth exceeded" in str(
+            resp_fail.data["non_field_errors"],
+        )
+
+        # Moving X under C (depth 3) should be allowed (deepest becomes 5)
+        resp_ok = self.client.patch(
+            f"/api/tags/{x.pk}/",
+            {"parent": c.pk},
+            format="json",
+        )
+        assert resp_ok.status_code in (200, 202)
+        x.refresh_from_db()
+        assert x.parent_pk == c.id
@@ -327,6 +327,19 @@ class TestMigrations(TransactionTestCase):
     def setUpBeforeMigration(self, apps):
         pass

+    def tearDown(self):
+        """
+        Ensure the database schema is restored to the latest migration after
+        each migration test, so subsequent tests run against HEAD.
+        """
+        try:
+            executor = MigrationExecutor(connection)
+            executor.loader.build_graph()
+            targets = executor.loader.graph.leaf_nodes()
+            executor.migrate(targets)
+        finally:
+            super().tearDown()
+

 class SampleDirMixin:
     SAMPLE_DIR = Path(__file__).parent / "samples"
@@ -175,6 +175,7 @@ from documents.tasks import empty_trash
 from documents.tasks import index_optimize
 from documents.tasks import sanity_check
 from documents.tasks import train_classifier
+from documents.tasks import update_document_parent_tags
 from documents.templating.filepath import validate_filepath_template_and_render
 from documents.utils import get_boolean
 from paperless import version
@@ -356,6 +357,13 @@ class TagViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
     filterset_class = TagFilterSet
     ordering_fields = ("color", "name", "matching_algorithm", "match", "document_count")
 
+    def perform_update(self, serializer):
+        old_parent = self.get_object().get_parent()
+        tag = serializer.save()
+        new_parent = tag.get_parent()
+        if new_parent and old_parent != new_parent:
+            update_document_parent_tags(tag, new_parent)
+
 
 @extend_schema_view(**generate_object_with_permissions_schema(DocumentTypeSerializer))
 class DocumentTypeViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
@@ -1624,7 +1632,7 @@ class PostDocumentView(GenericAPIView):
         title = serializer.validated_data.get("title")
         created = serializer.validated_data.get("created")
         archive_serial_number = serializer.validated_data.get("archive_serial_number")
-        custom_field_ids = serializer.validated_data.get("custom_fields")
+        cf = serializer.validated_data.get("custom_fields")
         from_webui = serializer.validated_data.get("from_webui")
 
         t = int(mktime(datetime.now().timetuple()))
@@ -1643,6 +1651,11 @@ class PostDocumentView(GenericAPIView):
             source=DocumentSource.WebUI if from_webui else DocumentSource.ApiUpload,
             original_file=temp_file_path,
         )
+        custom_fields = None
+        if isinstance(cf, dict) and cf:
+            custom_fields = cf
+        elif isinstance(cf, list) and cf:
+            custom_fields = dict.fromkeys(cf, None)
         input_doc_overrides = DocumentMetadataOverrides(
             filename=doc_name,
             title=title,
@@ -1653,10 +1666,7 @@ class PostDocumentView(GenericAPIView):
             created=created,
             asn=archive_serial_number,
             owner_id=request.user.id,
-            # TODO: set values
-            custom_fields={cf_id: None for cf_id in custom_field_ids}
-            if custom_field_ids
-            else None,
+            custom_fields=custom_fields,
         )
 
         async_task = consume_file.delay(
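The hunk above lets the upload endpoint accept `custom_fields` either as a list of field ids or as a mapping of field id to value. A minimal standalone sketch of that normalization, with illustrative ids (1, 2) that are not taken from the diff:

    # Sketch only: mirrors the normalization added in the hunk above.
    def normalize_custom_fields(cf):
        if isinstance(cf, dict) and cf:
            return cf  # {field_id: value} mappings are passed through unchanged
        if isinstance(cf, list) and cf:
            return dict.fromkeys(cf, None)  # [field_id, ...] becomes {field_id: None}
        return None  # empty or missing input leaves the override unset

    assert normalize_custom_fields([1, 2]) == {1: None, 2: None}
    assert normalize_custom_fields({1: "abc"}) == {1: "abc"}
    assert normalize_custom_fields(None) is None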
File diff suppressed because it is too large
@@ -347,6 +347,7 @@ INSTALLED_APPS = [
     "allauth.mfa",
     "drf_spectacular",
     "drf_spectacular_sidecar",
+    "treenode",
     *env_apps,
 ]
 
@@ -21,7 +21,7 @@ TEST_CHANNEL_LAYERS = {
 class TestWebSockets(TestCase):
     async def test_no_auth(self):
         communicator = WebsocketCommunicator(application, "/ws/status/")
-        connected, subprotocol = await communicator.connect()
+        connected, _ = await communicator.connect()
         self.assertFalse(connected)
         await communicator.disconnect()
 
@@ -31,7 +31,7 @@ class TestWebSockets(TestCase):
         _authenticated.return_value = True
 
         communicator = WebsocketCommunicator(application, "/ws/status/")
-        connected, subprotocol = await communicator.connect()
+        connected, _ = await communicator.connect()
         self.assertTrue(connected)
 
         message = {"type": "status_update", "data": {"task_id": "test"}}
@@ -63,7 +63,7 @@ class TestWebSockets(TestCase):
         _authenticated.return_value = True
 
         communicator = WebsocketCommunicator(application, "/ws/status/")
-        connected, subprotocol = await communicator.connect()
+        connected, _ = await communicator.connect()
         self.assertTrue(connected)
 
         await communicator.disconnect()
@@ -73,7 +73,7 @@ class TestWebSockets(TestCase):
         _authenticated.return_value = True
 
         communicator = WebsocketCommunicator(application, "/ws/status/")
-        connected, subprotocol = await communicator.connect()
+        connected, _ = await communicator.connect()
         self.assertTrue(connected)
 
         message = {"type": "status_update", "data": {"task_id": "test"}}
@@ -98,7 +98,7 @@ class TestWebSockets(TestCase):
         communicator.scope["user"].is_superuser = False
         communicator.scope["user"].id = 1
 
-        connected, subprotocol = await communicator.connect()
+        connected, _ = await communicator.connect()
         self.assertTrue(connected)
 
         # Test as owner
@@ -141,7 +141,7 @@ class TestWebSockets(TestCase):
         _authenticated.return_value = True
 
         communicator = WebsocketCommunicator(application, "/ws/status/")
-        connected, subprotocol = await communicator.connect()
+        connected, _ = await communicator.connect()
         self.assertTrue(connected)
 
         message = {"type": "documents_deleted", "data": {"documents": [1, 2, 3]}}
@@ -132,7 +132,7 @@ class RasterisedDocumentParser(DocumentParser):
     def get_dpi(self, image) -> int | None:
         try:
             with Image.open(image) as im:
-                x, y = im.info["dpi"]
+                x, _ = im.info["dpi"]
                 return round(x)
         except Exception as e:
             self.log.warning(f"Error while getting DPI from image {image}: {e}")
@@ -141,7 +141,7 @@ class RasterisedDocumentParser(DocumentParser):
     def calculate_a4_dpi(self, image) -> int | None:
         try:
             with Image.open(image) as im:
-                width, height = im.size
+                width, _ = im.size
                 # divide image width by A4 width (210mm) in inches.
                 dpi = int(width / (21 / 2.54))
                 self.log.debug(f"Estimated DPI {dpi} based on image width {width}")
|
69
uv.lock
generated
69
uv.lock
generated
@@ -934,15 +934,15 @@ wheels = [
 
 [[package]]
 name = "django-guardian"
-version = "3.1.2"
+version = "3.1.3"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "django", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "typing-extensions", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux')" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/28/ac/5a8f7301c0181ee9020e020a4fa519f9851726b8fd3c1177656c1f5a1be0/django_guardian-3.1.2.tar.gz", hash = "sha256:6fc93b55e5eacd1a062a959c5578b433d999a286742aa3e7e713c71046813538", size = 93422, upload-time = "2025-09-08T15:43:51.361Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/81/d3/436a44c7688fce1a978224c349ba66c95bf9103d548596b7a2694fd58c03/django_guardian-3.1.3.tar.gz", hash = "sha256:12b5e66c18c97088b0adfa033ab14be68c321c170fd3ec438898271f00a71699", size = 93571, upload-time = "2025-09-10T08:36:23.928Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/20/50/3a4a891f809c9865d30864f59f993f9535b18e3935dfad278c9682fc537f/django_guardian-3.1.2-py3-none-any.whl", hash = "sha256:6c10f88d0b7efd171ae65d7ac487a666f41eb373c6f94d80016b1a19bdfbf212", size = 127451, upload-time = "2025-09-08T15:43:49.987Z" },
+    { url = "https://files.pythonhosted.org/packages/83/fc/6fd7b8bc7c52cbbfd1714673cfd28ff0b3fae32265c52d492ec0dee22cb8/django_guardian-3.1.3-py3-none-any.whl", hash = "sha256:90e28b40eea65c326a3a961908cc300f9e1cd69b74e88d38317a9befa167b71c", size = 127687, upload-time = "2025-09-10T08:36:22.533Z" },
 ]
 
 [[package]]
@@ -1004,6 +1004,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/cc/52/50125afcf29382b7f9d88a992e44835108dd2f1694d6d17d6d3d6fe06c81/django_stubs_ext-5.1.3-py3-none-any.whl", hash = "sha256:64561fbc53e963cc1eed2c8eb27e18b8e48dcb90771205180fe29fc8a59e55fd", size = 9034, upload-time = "2025-02-07T09:56:19.51Z" },
 ]
 
+[[package]]
+name = "django-treenode"
+version = "0.23.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/75/f3/274b84607fd64c0844e98659985f964190a46c2460f2523a446c4a946216/django_treenode-0.23.2.tar.gz", hash = "sha256:3c5a6ff5e0c83e34da88749f602b3013dd1ab0527f51952c616e3c21bf265d52", size = 26700, upload-time = "2025-09-04T21:16:53.497Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/00/61/e17d3dee5c6bb24b8faf0c101e17f9a8cafeba6384166176e066c80e8cbb/django_treenode-0.23.2-py3-none-any.whl", hash = "sha256:9363cb50f753654a9acfad6ec4df2a664a5f89dfdf8b55ffd964f27461bef85e", size = 21879, upload-time = "2025-09-04T21:16:51.811Z" },
+]
+
 [[package]]
 name = "djangorestframework"
 version = "3.16.0"
@@ -2218,7 +2227,7 @@ wheels = [
 
 [[package]]
 name = "mkdocs-material"
-version = "9.6.19"
+version = "9.6.20"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "babel", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -2234,9 +2243,9 @@ dependencies = [
     { name = "pymdown-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/44/94/eb0fca39b19c2251b16bc759860a50f232655c4377116fa9c0e7db11b82c/mkdocs_material-9.6.19.tar.gz", hash = "sha256:80e7b3f9acabfee9b1f68bd12c26e59c865b3d5bbfb505fd1344e970db02c4aa", size = 4038202, upload-time = "2025-09-07T17:46:40.468Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/ba/ee/6ed7fc739bd7591485c8bec67d5984508d3f2733e708f32714c21593341a/mkdocs_material-9.6.20.tar.gz", hash = "sha256:e1f84d21ec5fb730673c4259b2e0d39f8d32a3fef613e3a8e7094b012d43e790", size = 4037822, upload-time = "2025-09-15T08:48:01.816Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/02/23/a2551d1038bedc2771366f65ff3680bb3a89674cd7ca6140850c859f1f71/mkdocs_material-9.6.19-py3-none-any.whl", hash = "sha256:7492d2ac81952a467ca8a10cac915d6ea5c22876932f44b5a0f4f8e7d68ac06f", size = 9240205, upload-time = "2025-09-07T17:46:36.484Z" },
+    { url = "https://files.pythonhosted.org/packages/67/d8/a31dd52e657bf12b20574706d07df8d767e1ab4340f9bfb9ce73950e5e59/mkdocs_material-9.6.20-py3-none-any.whl", hash = "sha256:b8d8c8b0444c7c06dd984b55ba456ce731f0035c5a1533cc86793618eb1e6c82", size = 9193367, upload-time = "2025-09-15T08:47:58.722Z" },
 ]
 
 [[package]]
@@ -2674,7 +2683,7 @@ wheels = [
 
 [[package]]
 name = "ocrmypdf"
-version = "16.10.4"
+version = "16.11.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "deprecation", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -2687,9 +2696,9 @@ dependencies = [
     { name = "pluggy", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "rich", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/cd/40/cb85e6260e5a20d08195d03541b31db4296f8f4d3442ee595686f47a75b0/ocrmypdf-16.10.4.tar.gz", hash = "sha256:de749ef5f554b63d57e68d032e7cba5500cbd5030835bf24f658f7b7a04f3dc1", size = 7003649, upload-time = "2025-07-07T20:55:01.735Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/44/af/947d6abb0cb41f99971a7a4bd33684d3cee20c9e32c8f9dc90e8c5dcf21c/ocrmypdf-16.11.0.tar.gz", hash = "sha256:d89077e503238dac35c6e565925edc8d98b71e5289853c02cacbc1d0901f1be7", size = 7015068, upload-time = "2025-09-12T08:36:53.507Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/8e/6a/53bb2b0e57f8ca8d4a021194202cc772d1ce049269e9b0cb88d1fa87a0ef/ocrmypdf-16.10.4-py3-none-any.whl", hash = "sha256:061f3165d09ffafac975cea00803802b8a75551ada9965292ea86ea382673688", size = 162559, upload-time = "2025-07-07T20:55:00.061Z" },
+    { url = "https://files.pythonhosted.org/packages/d9/b2/eda3bb0939bf81d889812dd82cf37fa6f8769af8e31008bd586ba12fae09/ocrmypdf-16.11.0-py3-none-any.whl", hash = "sha256:13628294a309c85b21947b5c7bc7fcd202464517c14b71a050adc9dde85c48f7", size = 162883, upload-time = "2025-09-12T08:36:51.611Z" },
 ]
 
 [[package]]
@@ -2766,6 +2775,7 @@ dependencies = [
     { name = "django-guardian", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "django-multiselectfield", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "django-soft-delete", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
+    { name = "django-treenode", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "djangorestframework", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "djangorestframework-guardian", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "drf-spectacular", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -2911,6 +2921,7 @@ requires-dist = [
     { name = "django-guardian", specifier = "~=3.1.2" },
     { name = "django-multiselectfield", specifier = "~=1.0.1" },
     { name = "django-soft-delete", specifier = "~=1.0.18" },
+    { name = "django-treenode", specifier = ">=0.23.2" },
     { name = "djangorestframework", specifier = "~=3.16" },
     { name = "djangorestframework-guardian", specifier = "~=0.4.0" },
     { name = "drf-spectacular", specifier = "~=0.28" },
@@ -2934,7 +2945,7 @@ requires-dist = [
     { name = "llama-index-vector-stores-faiss", specifier = ">=0.3" },
     { name = "mysqlclient", marker = "extra == 'mariadb'", specifier = "~=2.2.7" },
     { name = "nltk", specifier = "~=3.9.1" },
-    { name = "ocrmypdf", specifier = "~=16.10.0" },
+    { name = "ocrmypdf", specifier = "~=16.11.0" },
     { name = "openai", specifier = ">=1.76" },
     { name = "pathvalidate", specifier = "~=3.3.1" },
     { name = "pdf2image", specifier = "~=1.17.0" },
@@ -2984,7 +2995,7 @@ dev = [
     { name = "pytest-rerunfailures" },
     { name = "pytest-sugar" },
     { name = "pytest-xdist" },
-    { name = "ruff", specifier = "~=0.12.2" },
+    { name = "ruff", specifier = "~=0.13.0" },
 ]
 docs = [
     { name = "mkdocs-glightbox", specifier = "~=0.5.1" },
@@ -2993,7 +3004,7 @@ docs = [
 lint = [
     { name = "pre-commit", specifier = "~=4.3.0" },
     { name = "pre-commit-uv", specifier = "~=4.1.3" },
-    { name = "ruff", specifier = "~=0.12.2" },
+    { name = "ruff", specifier = "~=0.13.0" },
 ]
 testing = [
     { name = "daphne" },
@@ -4190,25 +4201,25 @@ wheels = [
 
 [[package]]
 name = "ruff"
-version = "0.12.12"
+version = "0.13.0"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/a8/f0/e0965dd709b8cabe6356811c0ee8c096806bb57d20b5019eb4e48a117410/ruff-0.12.12.tar.gz", hash = "sha256:b86cd3415dbe31b3b46a71c598f4c4b2f550346d1ccf6326b347cc0c8fd063d6", size = 5359915, upload-time = "2025-09-04T16:50:18.273Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/6e/1a/1f4b722862840295bcaba8c9e5261572347509548faaa99b2d57ee7bfe6a/ruff-0.13.0.tar.gz", hash = "sha256:5b4b1ee7eb35afae128ab94459b13b2baaed282b1fb0f472a73c82c996c8ae60", size = 5372863, upload-time = "2025-09-10T16:25:37.917Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/09/79/8d3d687224d88367b51c7974cec1040c4b015772bfbeffac95face14c04a/ruff-0.12.12-py3-none-linux_armv6l.whl", hash = "sha256:de1c4b916d98ab289818e55ce481e2cacfaad7710b01d1f990c497edf217dafc", size = 12116602, upload-time = "2025-09-04T16:49:18.892Z" },
+    { url = "https://files.pythonhosted.org/packages/ac/fe/6f87b419dbe166fd30a991390221f14c5b68946f389ea07913e1719741e0/ruff-0.13.0-py3-none-linux_armv6l.whl", hash = "sha256:137f3d65d58ee828ae136a12d1dc33d992773d8f7644bc6b82714570f31b2004", size = 12187826, upload-time = "2025-09-10T16:24:39.5Z" },
-    { url = "https://files.pythonhosted.org/packages/c3/c3/6e599657fe192462f94861a09aae935b869aea8a1da07f47d6eae471397c/ruff-0.12.12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7acd6045e87fac75a0b0cdedacf9ab3e1ad9d929d149785903cff9bb69ad9727", size = 12868393, upload-time = "2025-09-04T16:49:23.043Z" },
+    { url = "https://files.pythonhosted.org/packages/e4/25/c92296b1fc36d2499e12b74a3fdb230f77af7bdf048fad7b0a62e94ed56a/ruff-0.13.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:21ae48151b66e71fd111b7d79f9ad358814ed58c339631450c66a4be33cc28b9", size = 12933428, upload-time = "2025-09-10T16:24:43.866Z" },
-    { url = "https://files.pythonhosted.org/packages/e8/d2/9e3e40d399abc95336b1843f52fc0daaceb672d0e3c9290a28ff1a96f79d/ruff-0.12.12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:abf4073688d7d6da16611f2f126be86523a8ec4343d15d276c614bda8ec44edb", size = 12036967, upload-time = "2025-09-04T16:49:26.04Z" },
+    { url = "https://files.pythonhosted.org/packages/44/cf/40bc7221a949470307d9c35b4ef5810c294e6cfa3caafb57d882731a9f42/ruff-0.13.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:64de45f4ca5441209e41742d527944635a05a6e7c05798904f39c85bafa819e3", size = 12095543, upload-time = "2025-09-10T16:24:46.638Z" },
-    { url = "https://files.pythonhosted.org/packages/e9/03/6816b2ed08836be272e87107d905f0908be5b4a40c14bfc91043e76631b8/ruff-0.12.12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:968e77094b1d7a576992ac078557d1439df678a34c6fe02fd979f973af167577", size = 12276038, upload-time = "2025-09-04T16:49:29.056Z" },
+    { url = "https://files.pythonhosted.org/packages/f1/03/8b5ff2a211efb68c63a1d03d157e924997ada87d01bebffbd13a0f3fcdeb/ruff-0.13.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b2c653ae9b9d46e0ef62fc6fbf5b979bda20a0b1d2b22f8f7eb0cde9f4963b8", size = 12312489, upload-time = "2025-09-10T16:24:49.556Z" },
-    { url = "https://files.pythonhosted.org/packages/9f/d5/707b92a61310edf358a389477eabd8af68f375c0ef858194be97ca5b6069/ruff-0.12.12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42a67d16e5b1ffc6d21c5f67851e0e769517fb57a8ebad1d0781b30888aa704e", size = 11901110, upload-time = "2025-09-04T16:49:32.07Z" },
+    { url = "https://files.pythonhosted.org/packages/37/fc/2336ef6d5e9c8d8ea8305c5f91e767d795cd4fc171a6d97ef38a5302dadc/ruff-0.13.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cec632534332062bc9eb5884a267b689085a1afea9801bf94e3ba7498a2d207", size = 11991631, upload-time = "2025-09-10T16:24:53.439Z" },
-    { url = "https://files.pythonhosted.org/packages/9d/3d/f8b1038f4b9822e26ec3d5b49cf2bc313e3c1564cceb4c1a42820bf74853/ruff-0.12.12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b216ec0a0674e4b1214dcc998a5088e54eaf39417327b19ffefba1c4a1e4971e", size = 13668352, upload-time = "2025-09-04T16:49:35.148Z" },
+    { url = "https://files.pythonhosted.org/packages/39/7f/f6d574d100fca83d32637d7f5541bea2f5e473c40020bbc7fc4a4d5b7294/ruff-0.13.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcd628101d9f7d122e120ac7c17e0a0f468b19bc925501dbe03c1cb7f5415b24", size = 13720602, upload-time = "2025-09-10T16:24:56.392Z" },
-    { url = "https://files.pythonhosted.org/packages/98/0e/91421368ae6c4f3765dd41a150f760c5f725516028a6be30e58255e3c668/ruff-0.12.12-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:59f909c0fdd8f1dcdbfed0b9569b8bf428cf144bec87d9de298dcd4723f5bee8", size = 14638365, upload-time = "2025-09-04T16:49:38.892Z" },
+    { url = "https://files.pythonhosted.org/packages/fd/c8/a8a5b81d8729b5d1f663348d11e2a9d65a7a9bd3c399763b1a51c72be1ce/ruff-0.13.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:afe37db8e1466acb173bb2a39ca92df00570e0fd7c94c72d87b51b21bb63efea", size = 14697751, upload-time = "2025-09-10T16:24:59.89Z" },
-    { url = "https://files.pythonhosted.org/packages/74/5d/88f3f06a142f58ecc8ecb0c2fe0b82343e2a2b04dcd098809f717cf74b6c/ruff-0.12.12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ac93d87047e765336f0c18eacad51dad0c1c33c9df7484c40f98e1d773876f5", size = 14060812, upload-time = "2025-09-04T16:49:42.732Z" },
+    { url = "https://files.pythonhosted.org/packages/57/f5/183ec292272ce7ec5e882aea74937f7288e88ecb500198b832c24debc6d3/ruff-0.13.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f96a8d90bb258d7d3358b372905fe7333aaacf6c39e2408b9f8ba181f4b6ef2", size = 14095317, upload-time = "2025-09-10T16:25:03.025Z" },
-    { url = "https://files.pythonhosted.org/packages/13/fc/8962e7ddd2e81863d5c92400820f650b86f97ff919c59836fbc4c1a6d84c/ruff-0.12.12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:01543c137fd3650d322922e8b14cc133b8ea734617c4891c5a9fccf4bfc9aa92", size = 13050208, upload-time = "2025-09-04T16:49:46.434Z" },
+    { url = "https://files.pythonhosted.org/packages/9f/8d/7f9771c971724701af7926c14dab31754e7b303d127b0d3f01116faef456/ruff-0.13.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b5e3d883e4f924c5298e3f2ee0f3085819c14f68d1e5b6715597681433f153", size = 13144418, upload-time = "2025-09-10T16:25:06.272Z" },
-    { url = "https://files.pythonhosted.org/packages/53/06/8deb52d48a9a624fd37390555d9589e719eac568c020b27e96eed671f25f/ruff-0.12.12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afc2fa864197634e549d87fb1e7b6feb01df0a80fd510d6489e1ce8c0b1cc45", size = 13311444, upload-time = "2025-09-04T16:49:49.931Z" },
+    { url = "https://files.pythonhosted.org/packages/a8/a6/7985ad1778e60922d4bef546688cd8a25822c58873e9ff30189cfe5dc4ab/ruff-0.13.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03447f3d18479df3d24917a92d768a89f873a7181a064858ea90a804a7538991", size = 13370843, upload-time = "2025-09-10T16:25:09.965Z" },
-    { url = "https://files.pythonhosted.org/packages/2a/81/de5a29af7eb8f341f8140867ffb93f82e4fde7256dadee79016ac87c2716/ruff-0.12.12-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:0c0945246f5ad776cb8925e36af2438e66188d2b57d9cf2eed2c382c58b371e5", size = 13279474, upload-time = "2025-09-04T16:49:53.465Z" },
+    { url = "https://files.pythonhosted.org/packages/64/1c/bafdd5a7a05a50cc51d9f5711da704942d8dd62df3d8c70c311e98ce9f8a/ruff-0.13.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:fbc6b1934eb1c0033da427c805e27d164bb713f8e273a024a7e86176d7f462cf", size = 13321891, upload-time = "2025-09-10T16:25:12.969Z" },
-    { url = "https://files.pythonhosted.org/packages/7f/14/d9577fdeaf791737ada1b4f5c6b59c21c3326f3f683229096cccd7674e0c/ruff-0.12.12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a0fbafe8c58e37aae28b84a80ba1817f2ea552e9450156018a478bf1fa80f4e4", size = 12070204, upload-time = "2025-09-04T16:49:56.882Z" },
+    { url = "https://files.pythonhosted.org/packages/bc/3e/7817f989cb9725ef7e8d2cee74186bf90555279e119de50c750c4b7a72fe/ruff-0.13.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a8ab6a3e03665d39d4a25ee199d207a488724f022db0e1fe4002968abdb8001b", size = 12119119, upload-time = "2025-09-10T16:25:16.621Z" },
-    { url = "https://files.pythonhosted.org/packages/77/04/a910078284b47fad54506dc0af13839c418ff704e341c176f64e1127e461/ruff-0.12.12-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b9c456fb2fc8e1282affa932c9e40f5ec31ec9cbb66751a316bd131273b57c23", size = 11880347, upload-time = "2025-09-04T16:49:59.729Z" },
+    { url = "https://files.pythonhosted.org/packages/58/07/9df080742e8d1080e60c426dce6e96a8faf9a371e2ce22eef662e3839c95/ruff-0.13.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d2a5c62f8ccc6dd2fe259917482de7275cecc86141ee10432727c4816235bc41", size = 11961594, upload-time = "2025-09-10T16:25:19.49Z" },
-    { url = "https://files.pythonhosted.org/packages/df/58/30185fcb0e89f05e7ea82e5817b47798f7fa7179863f9d9ba6fd4fe1b098/ruff-0.12.12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5f12856123b0ad0147d90b3961f5c90e7427f9acd4b40050705499c98983f489", size = 12891844, upload-time = "2025-09-04T16:50:02.591Z" },
+    { url = "https://files.pythonhosted.org/packages/6a/f4/ae1185349197d26a2316840cb4d6c3fba61d4ac36ed728bf0228b222d71f/ruff-0.13.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b7b85ca27aeeb1ab421bc787009831cffe6048faae08ad80867edab9f2760945", size = 12933377, upload-time = "2025-09-10T16:25:22.371Z" },
-    { url = "https://files.pythonhosted.org/packages/21/9c/28a8dacce4855e6703dcb8cdf6c1705d0b23dd01d60150786cd55aa93b16/ruff-0.12.12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:26a1b5a2bf7dd2c47e3b46d077cd9c0fc3b93e6c6cc9ed750bd312ae9dc302ee", size = 13360687, upload-time = "2025-09-04T16:50:05.8Z" },
+    { url = "https://files.pythonhosted.org/packages/b6/39/e776c10a3b349fc8209a905bfb327831d7516f6058339a613a8d2aaecacd/ruff-0.13.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:79ea0c44a3032af768cabfd9616e44c24303af49d633b43e3a5096e009ebe823", size = 13418555, upload-time = "2025-09-10T16:25:25.681Z" },
 ]
 
 [[package]]