Merge branch 'dev' into dev

Trenton H 2024-12-02 16:04:46 -08:00 committed by GitHub
commit 5ab6a94b07
82 changed files with 2649 additions and 1011 deletions


@ -283,7 +283,7 @@ jobs:
merge-multiple: true
-
name: Upload frontend coverage to Codecov
uses: codecov/codecov-action@v4
uses: codecov/codecov-action@v5
with:
# not required for public repos, but intermittently fails otherwise
token: ${{ secrets.CODECOV_TOKEN }}
@ -299,7 +299,7 @@ jobs:
path: src/
-
name: Upload coverage to Codecov
uses: codecov/codecov-action@v4
uses: codecov/codecov-action@v5
with:
# not required for public repos, but intermittently fails otherwise
token: ${{ secrets.CODECOV_TOKEN }}
@ -406,7 +406,7 @@ jobs:
-
name: Login to Docker Hub
uses: docker/login-action@v3
# Don't attempt to login is not pushing to Docker Hub
# Don't attempt to login if not pushing to Docker Hub
if: steps.push-other-places.outputs.enable == 'true'
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
@ -414,7 +414,7 @@ jobs:
-
name: Login to Quay.io
uses: docker/login-action@v3
# Don't attempt to login is not pushing to Quay.io
# Don't attempt to login if not pushing to Quay.io
if: steps.push-other-places.outputs.enable == 'true'
with:
registry: quay.io


@ -48,7 +48,7 @@ repos:
exclude: "(^Pipfile\\.lock$)"
# Python hooks
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: 'v0.7.3'
rev: 'v0.8.1'
hooks:
- id: ruff
- id: ruff-format


@ -23,7 +23,7 @@ djangorestframework-guardian = "*"
drf-writable-nested = "*"
bleach = "*"
celery = {extras = ["redis"], version = "*"}
channels = "~=4.1"
channels = "~=4.2"
channels-redis = "*"
concurrent-log-handler = "*"
filelock = "*"

Pipfile.lock (generated), 803 lines changed

File diff suppressed because it is too large.


@ -331,8 +331,10 @@ Currently, there are three events that correspond to workflow trigger 'types':
be used for filtering.
3. **Document Updated**: when a document is updated. Similar to 'added' events, triggers can include filtering by content matching,
tags, doc type, or correspondent.
4. **Scheduled**: a scheduled trigger that can be used to run workflows at a specific time. The date used can be the document's
added, created or updated date, or a (date) custom field. You can also specify a day offset from that date (see the sketch below).
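A minimal sketch of the fields a scheduled trigger carries, assuming the `WorkflowTrigger` and `ScheduleDateField` definitions added to `src/app/data/workflow-trigger.ts` later in this commit; the concrete values are illustrative only:
```typescript
import {
  ScheduleDateField,
  WorkflowTrigger,
  WorkflowTriggerType,
} from 'src/app/data/workflow-trigger'

// Fire 30 days after the document's created date, then repeat every 7 days.
// schedule_date_custom_field would only be set when schedule_date_field is
// ScheduleDateField.CustomField.
const scheduledTrigger: Partial<WorkflowTrigger> = {
  type: WorkflowTriggerType.Scheduled,
  schedule_offset_days: 30,
  schedule_date_field: ScheduleDateField.Created,
  schedule_is_recurring: true,
  schedule_recurring_interval_days: 7,
}
```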
The following flow diagram illustrates the three trigger types:
The following flow diagram illustrates the three document trigger types:
```mermaid
flowchart TD


@ -330,8 +330,13 @@ SECRET_KEY=$(LC_ALL=C tr -dc 'a-zA-Z0-9!#$%&()*+,-./:;<=>?@[\]^_`{|}~' < /dev/ur
DEFAULT_LANGUAGES=("deu eng fra ita spa")
_split_langs="${OCR_LANGUAGE//+/ }"
read -r -a OCR_LANGUAGES_ARRAY <<< "${_split_langs}"
# OCR_LANG requires underscores; replace any dashes the user gave with underscores
readonly ocr_langs=${OCR_LANGUAGE//-/_}
# OCR_LANGS (the install version) uses dashes, not underscores, so convert underscore to dash and plus to space
install_langs=${OCR_LANGUAGE//_/-} # First convert any underscores to dashes
install_langs=${install_langs//+/ } # Then convert plus signs to spaces
read -r -a install_langs_array <<< "${install_langs}"
{
if [[ ! $URL == "" ]] ; then
@ -344,10 +349,10 @@ read -r -a OCR_LANGUAGES_ARRAY <<< "${_split_langs}"
echo "USERMAP_GID=$USERMAP_GID"
fi
echo "PAPERLESS_TIME_ZONE=$TIME_ZONE"
echo "PAPERLESS_OCR_LANGUAGE=$OCR_LANGUAGE"
echo "PAPERLESS_OCR_LANGUAGE=$ocr_langs"
echo "PAPERLESS_SECRET_KEY='$SECRET_KEY'"
if [[ ! ${DEFAULT_LANGUAGES[*]} =~ ${OCR_LANGUAGES_ARRAY[*]} ]] ; then
echo "PAPERLESS_OCR_LANGUAGES=${OCR_LANGUAGES_ARRAY[*]}"
if [[ ! ${DEFAULT_LANGUAGES[*]} =~ ${install_langs_array[*]} ]] ; then
echo "PAPERLESS_OCR_LANGUAGES=${install_langs_array[*]}"
fi
} > docker-compose.env
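As a rough, illustrative sketch (not part of the diff) of what the two conversions produce, assuming a user-supplied `OCR_LANGUAGE` of `chi-sim+eng`:
```typescript
// Mirrors the bash parameter expansions in the updated script.
const OCR_LANGUAGE = 'chi-sim+eng'

// PAPERLESS_OCR_LANGUAGE wants underscores: ${OCR_LANGUAGE//-/_}
const ocrLang = OCR_LANGUAGE.replace(/-/g, '_') // 'chi_sim+eng'

// PAPERLESS_OCR_LANGUAGES wants dashes and space-separated entries:
// ${OCR_LANGUAGE//_/-} followed by ${install_langs//+/ }
const installLangs = OCR_LANGUAGE.replace(/_/g, '-').replace(/\+/g, ' ') // 'chi-sim eng'
```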

File diff suppressed because it is too large.


@ -33,6 +33,7 @@
"ngx-ui-tour-ng-bootstrap": "^15.0.0",
"rxjs": "^7.8.1",
"tslib": "^2.8.1",
"utif": "^3.1.0",
"uuid": "^11.0.2",
"zone.js": "^0.14.8"
},
@ -13758,6 +13759,12 @@
"node": "^16.14.0 || >=18.0.0"
}
},
"node_modules/pako": {
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz",
"integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==",
"license": "(MIT AND Zlib)"
},
"node_modules/parent-module": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
@ -16563,6 +16570,15 @@
"requires-port": "^1.0.0"
}
},
"node_modules/utif": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/utif/-/utif-3.1.0.tgz",
"integrity": "sha512-WEo4D/xOvFW53K5f5QTaTbbiORcm2/pCL9P6qmJnup+17eYfKaEhDeX9PeQkuyEoIxlbGklDuGl8xwuXYMrrXQ==",
"license": "MIT",
"dependencies": {
"pako": "^1.0.5"
}
},
"node_modules/util-deprecate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",


@ -35,6 +35,7 @@
"ngx-ui-tour-ng-bootstrap": "^15.0.0",
"rxjs": "^7.8.1",
"tslib": "^2.8.1",
"utif": "^3.1.0",
"uuid": "^11.0.2",
"zone.js": "^0.14.8"
},


@ -47,14 +47,19 @@
</tr>
}
@for (document of documentsInTrash; track document.id) {
<tr (click)="toggleSelected(document); $event.stopPropagation();">
<tr (click)="toggleSelected(document); $event.stopPropagation();" (mouseleave)="popupPreview.close()">
<td>
<div class="form-check m-0 ms-2 me-n2">
<input type="checkbox" class="form-check-input" id="{{document.id}}" [checked]="selectedDocuments.has(document.id)" (click)="toggleSelected(document); $event.stopPropagation();">
<label class="form-check-label" for="{{document.id}}"></label>
</div>
</td>
<td scope="row">{{ document.title }}</td>
<td scope="row">
{{ document.title }}
<pngx-preview-popup [document]="document" linkClasses="btn btn-sm btn-link" #popupPreview>
<i-bs name="eye"></i-bs>
</pngx-preview-popup>
</td>
<td scope="row" i18n>{{ getDaysRemaining(document) }} days</td>
<td scope="row">
<div class="btn-group d-block d-sm-none">


@ -1,6 +1,6 @@
.pdf-viewer-container {
background-color: gray;
height: 350px;
height: 550px;
pdf-viewer {
width: 100%;


@ -6,7 +6,7 @@
<div class="modal-body">
<p>{{message}}</p>
<div class="row mb-2">
<div class="col-8">
<div class="col-7">
<div class="input-group input-group-sm">
<div class="input-group-text" i18n>Page</div>
<input class="form-control" type="number" min="1" [(ngModel)]="page" />
@ -21,7 +21,7 @@
</pdf-viewer>
</div>
</div>
<div class="col-4">
<div class="col-5">
<div class="d-grid">
<button class="btn btn-sm btn-primary" (click)="addSplit()" [disabled]="!canSplit">
<i-bs name="plus-circle"></i-bs>&nbsp;
@ -44,12 +44,12 @@
</ul>
</div>
</div>
<div class="form-check form-switch mt-4">
</div>
<div class="modal-footer">
<div class="form-check form-switch me-auto">
<input class="form-check-input" type="checkbox" role="switch" id="deleteOriginalSwitch" [(ngModel)]="deleteOriginal" [disabled]="!userOwnsDocument">
<label class="form-check-label" for="deleteOriginalSwitch" i18n>Delete original document after successful split</label>
</div>
</div>
<div class="modal-footer">
<button type="button" class="btn" [class]="cancelBtnClass" (click)="cancel()" [disabled]="!buttonsEnabled">
<span class="d-inline-block" style="padding-bottom: 1px;">{{cancelBtnCaption}}</span>
</button>


@ -1,6 +1,6 @@
.pdf-viewer-container {
background-color: gray;
height: 350px;
height: 500px;
pdf-viewer {
width: 100%;


@ -17,7 +17,11 @@ const customFields: CustomField[] = [
name: 'Field 4',
data_type: CustomFieldDataType.Select,
extra_data: {
select_options: ['Option 1', 'Option 2', 'Option 3'],
select_options: [
{ label: 'Option 1', id: 'abc-123' },
{ label: 'Option 2', id: 'def-456' },
{ label: 'Option 3', id: 'ghi-789' },
],
},
},
{
@ -131,6 +135,8 @@ describe('CustomFieldDisplayComponent', () => {
})
it('should show select value', () => {
expect(component.getSelectValue(customFields[3], 2)).toEqual('Option 3')
expect(component.getSelectValue(customFields[3], 'ghi-789')).toEqual(
'Option 3'
)
})
})


@ -117,8 +117,8 @@ export class CustomFieldDisplayComponent implements OnInit, OnDestroy {
return this.docLinkDocuments?.find((d) => d.id === docId)?.title
}
public getSelectValue(field: CustomField, index: number): string {
return field.extra_data.select_options[index]
public getSelectValue(field: CustomField, id: string): string {
return field.extra_data.select_options?.find((o) => o.id === id)?.label
}
ngOnDestroy(): void {


@ -44,6 +44,8 @@
<ng-select #fieldSelects
class="paperless-input-select rounded-end"
[items]="getSelectOptionsForField(atom.field)"
bindLabel="label"
bindValue="id"
[(ngModel)]="atom.value"
[disabled]="disabled"
(mousedown)="$event.stopImmediatePropagation()"
@ -99,6 +101,8 @@
<ng-select
class="paperless-input-select rounded-end"
[items]="getSelectOptionsForField(atom.field)"
bindLabel="label"
bindValue="id"
[(ngModel)]="atom.value"
[disabled]="disabled"
[multiple]="true"


@ -39,7 +39,12 @@ const customFields = [
id: 2,
name: 'Test Select Field',
data_type: CustomFieldDataType.Select,
extra_data: { select_options: ['Option 1', 'Option 2'] },
extra_data: {
select_options: [
{ label: 'Option 1', id: 'abc-123' },
{ label: 'Option 2', id: 'def-456' },
],
},
},
]
@ -128,11 +133,19 @@ describe('CustomFieldsQueryDropdownComponent', () => {
id: 1,
name: 'Test Field',
data_type: CustomFieldDataType.Select,
extra_data: { select_options: ['Option 1', 'Option 2'] },
extra_data: {
select_options: [
{ label: 'Option 1', id: 'abc-123' },
{ label: 'Option 2', id: 'def-456' },
],
},
}
component.customFields = [field]
const options = component.getSelectOptionsForField(1)
expect(options).toEqual(['Option 1', 'Option 2'])
expect(options).toEqual([
{ label: 'Option 1', id: 'abc-123' },
{ label: 'Option 2', id: 'def-456' },
])
// Fallback to empty array if field is not found
const options2 = component.getSelectOptionsForField(2)


@ -311,7 +311,9 @@ export class CustomFieldsQueryDropdownComponent implements OnDestroy {
}))
}
getSelectOptionsForField(fieldID: number): string[] {
getSelectOptionsForField(
fieldID: number
): Array<{ label: string; id: string }> {
const field = this.customFields.find((field) => field.id === fieldID)
if (field) {
return field.extra_data['select_options']


@ -21,8 +21,9 @@
</button>
<div formArrayName="select_options">
@for (option of objectForm.controls.extra_data.controls.select_options.controls; track option; let i = $index) {
<div class="input-group input-group-sm my-2">
<input #selectOption type="text" class="form-control" [formControl]="option" autocomplete="off">
<div class="input-group input-group-sm my-2" [formGroup]="objectForm.controls.extra_data.controls.select_options.controls[i]">
<input #selectOption type="text" class="form-control" formControlName="label" autocomplete="off">
<input type="hidden" formControlName="id">
<button type="button" class="btn btn-outline-danger" (click)="removeSelectOption(i)" i18n>Delete</button>
</div>
}


@ -80,7 +80,11 @@ describe('CustomFieldEditDialogComponent', () => {
name: 'Field 1',
data_type: CustomFieldDataType.Select,
extra_data: {
select_options: ['Option 1', 'Option 2', 'Option 3'],
select_options: [
{ label: 'Option 1', id: '123-xyz' },
{ label: 'Option 2', id: '456-abc' },
{ label: 'Option 3', id: '789-123' },
],
},
}
fixture.detectChanges()
@ -94,6 +98,10 @@ describe('CustomFieldEditDialogComponent', () => {
component.dialogMode = EditDialogMode.CREATE
fixture.detectChanges()
component.ngOnInit()
expect(
component.objectForm.get('extra_data').get('select_options').value.length
).toBe(0)
component.addSelectOption()
expect(
component.objectForm.get('extra_data').get('select_options').value.length
).toBe(1)
@ -101,14 +109,10 @@ describe('CustomFieldEditDialogComponent', () => {
expect(
component.objectForm.get('extra_data').get('select_options').value.length
).toBe(2)
component.addSelectOption()
expect(
component.objectForm.get('extra_data').get('select_options').value.length
).toBe(3)
component.removeSelectOption(0)
expect(
component.objectForm.get('extra_data').get('select_options').value.length
).toBe(2)
).toBe(1)
})
it('should focus on last select option input', () => {


@ -57,8 +57,15 @@ export class CustomFieldEditDialogComponent
}
if (this.object?.data_type === CustomFieldDataType.Select) {
this.selectOptions.clear()
this.object.extra_data.select_options.forEach((option) =>
this.selectOptions.push(new FormControl(option))
this.object.extra_data.select_options
.filter((option) => option)
.forEach((option) =>
this.selectOptions.push(
new FormGroup({
label: new FormControl(option.label),
id: new FormControl(option.id),
})
)
)
}
}
@ -89,7 +96,7 @@ export class CustomFieldEditDialogComponent
name: new FormControl(null),
data_type: new FormControl(null),
extra_data: new FormGroup({
select_options: new FormArray([new FormControl(null)]),
select_options: new FormArray([]),
default_currency: new FormControl(null),
}),
})
@ -104,7 +111,9 @@ export class CustomFieldEditDialogComponent
}
public addSelectOption() {
this.selectOptions.push(new FormControl(''))
this.selectOptions.push(
new FormGroup({ label: new FormControl(null), id: new FormControl(null) })
)
}
public removeSelectOption(index: number) {


@ -119,6 +119,32 @@
<div [formGroup]="formGroup">
<input type="hidden" formControlName="id" />
<pngx-input-select i18n-title title="Trigger type" [horizontal]="true" [items]="triggerTypeOptions" formControlName="type"></pngx-input-select>
@if (formGroup.get('type').value === WorkflowTriggerType.Scheduled) {
<p class="small" i18n>Set scheduled trigger offset and which field to use.</p>
<div class="row">
<div class="col-4">
<pngx-input-number i18n-title title="Offset days" formControlName="schedule_offset_days" i18n-hint hint="Use 0 for immediate." [showAdd]="false" [error]="error?.schedule_offset_days"></pngx-input-number>
</div>
<div class="col-4">
<pngx-input-select i18n-title title="Relative to" formControlName="schedule_date_field" [items]="scheduleDateFieldOptions" [error]="error?.schedule_date_field"></pngx-input-select>
</div>
@if (formGroup.get('schedule_date_field').value === 'custom_field') {
<div class="col-4">
<pngx-input-select i18n-title title="Delay custom field" formControlName="schedule_date_custom_field" [items]="dateCustomFields" i18n-hint hint="Custom field to use for date." [error]="error?.schedule_date_custom_field"></pngx-input-select>
</div>
}
</div>
<div class="row">
<div class="col-4">
<pngx-input-check i18n-title title="Recurring" formControlName="schedule_is_recurring" i18n-hint hint="Trigger is recurring." [error]="error?.schedule_is_recurring"></pngx-input-check>
</div>
<div class="col-4">
@if (formGroup.get('schedule_is_recurring').value === true) {
<pngx-input-number i18n-title title="Recurring interval days" formControlName="schedule_recurring_interval_days" i18n-hint hint="Repeat the trigger every n days." [showAdd]="false" [error]="error?.schedule_recurring_interval_days"></pngx-input-number>
}
</div>
</div>
}
<p class="small" i18n>Trigger for documents that match <em>all</em> filters specified below.</p>
<div class="row">
<div class="col">
@ -128,7 +154,7 @@
<pngx-input-text i18n-title title="Filter path" formControlName="filter_path" i18n-hint hint="Apply to documents that match this path. Wildcards specified as * are allowed. Case-normalized.</a>" [error]="error?.filter_path"></pngx-input-text>
<pngx-input-select i18n-title title="Filter mail rule" [items]="mailRules" [allowNull]="true" formControlName="filter_mailrule" i18n-hint hint="Apply to documents consumed via this mail rule." [error]="error?.filter_mailrule"></pngx-input-select>
}
@if (formGroup.get('type').value === WorkflowTriggerType.DocumentAdded || formGroup.get('type').value === WorkflowTriggerType.DocumentUpdated) {
@if (formGroup.get('type').value === WorkflowTriggerType.DocumentAdded || formGroup.get('type').value === WorkflowTriggerType.DocumentUpdated || formGroup.get('type').value === WorkflowTriggerType.Scheduled) {
<pngx-input-select i18n-title title="Content matching algorithm" [items]="getMatchingAlgorithms()" formControlName="matching_algorithm"></pngx-input-select>
@if (patternRequired) {
<pngx-input-text i18n-title title="Content matching pattern" formControlName="match" [error]="error?.match"></pngx-input-text>
@ -138,7 +164,7 @@
}
}
</div>
@if (formGroup.get('type').value === WorkflowTriggerType.DocumentAdded || formGroup.get('type').value === WorkflowTriggerType.DocumentUpdated) {
@if (formGroup.get('type').value === WorkflowTriggerType.DocumentAdded || formGroup.get('type').value === WorkflowTriggerType.DocumentUpdated || formGroup.get('type').value === WorkflowTriggerType.Scheduled) {
<div class="col-md-6">
<pngx-input-tags [allowCreate]="false" i18n-title title="Has any of tags" formControlName="filter_has_tags"></pngx-input-tags>
<pngx-input-select i18n-title title="Has correspondent" [items]="correspondents" [allowNull]="true" formControlName="filter_has_correspondent"></pngx-input-select>


@ -22,6 +22,7 @@ import { SwitchComponent } from '../../input/switch/switch.component'
import { EditDialogMode } from '../edit-dialog.component'
import {
DOCUMENT_SOURCE_OPTIONS,
SCHEDULE_DATE_FIELD_OPTIONS,
WORKFLOW_ACTION_OPTIONS,
WORKFLOW_TYPE_OPTIONS,
WorkflowEditDialogComponent,
@ -40,6 +41,7 @@ import {
import { MATCHING_ALGORITHMS, MATCH_AUTO } from 'src/app/data/matching-model'
import { ConfirmButtonComponent } from '../../confirm-button/confirm-button.component'
import { provideHttpClient, withInterceptorsFromDi } from '@angular/common/http'
import { CustomFieldDataType } from 'src/app/data/custom-field'
const workflow: Workflow = {
name: 'Workflow 1',
@ -148,7 +150,18 @@ describe('WorkflowEditDialogComponent', () => {
useValue: {
listAll: () =>
of({
results: [],
results: [
{
id: 1,
name: 'cf1',
data_type: CustomFieldDataType.String,
},
{
id: 2,
name: 'cf2',
data_type: CustomFieldDataType.Date,
},
],
}),
},
},
@ -186,7 +199,7 @@ describe('WorkflowEditDialogComponent', () => {
expect(editTitleSpy).toHaveBeenCalled()
})
it('should return source options, type options, type name', () => {
it('should return source options, type options, type name, schedule date field options', () => {
// coverage
expect(component.sourceOptions).toEqual(DOCUMENT_SOURCE_OPTIONS)
expect(component.triggerTypeOptions).toEqual(WORKFLOW_TYPE_OPTIONS)
@ -200,6 +213,9 @@ describe('WorkflowEditDialogComponent', () => {
component.getActionTypeOptionName(WorkflowActionType.Assignment)
).toEqual('Assignment')
expect(component.getActionTypeOptionName(null)).toEqual('')
expect(component.scheduleDateFieldOptions).toEqual(
SCHEDULE_DATE_FIELD_OPTIONS
)
})
it('should support add and remove triggers and actions', () => {


@ -16,9 +16,10 @@ import { EditDialogComponent } from '../edit-dialog.component'
import { MailRuleService } from 'src/app/services/rest/mail-rule.service'
import { MailRule } from 'src/app/data/mail-rule'
import { CustomFieldsService } from 'src/app/services/rest/custom-fields.service'
import { CustomField } from 'src/app/data/custom-field'
import { CustomField, CustomFieldDataType } from 'src/app/data/custom-field'
import {
DocumentSource,
ScheduleDateField,
WorkflowTrigger,
WorkflowTriggerType,
} from 'src/app/data/workflow-trigger'
@ -48,6 +49,25 @@ export const DOCUMENT_SOURCE_OPTIONS = [
},
]
export const SCHEDULE_DATE_FIELD_OPTIONS = [
{
id: ScheduleDateField.Added,
name: $localize`Added`,
},
{
id: ScheduleDateField.Created,
name: $localize`Created`,
},
{
id: ScheduleDateField.Modified,
name: $localize`Modified`,
},
{
id: ScheduleDateField.CustomField,
name: $localize`Custom Field`,
},
]
export const WORKFLOW_TYPE_OPTIONS = [
{
id: WorkflowTriggerType.Consumption,
@ -61,6 +81,10 @@ export const WORKFLOW_TYPE_OPTIONS = [
id: WorkflowTriggerType.DocumentUpdated,
name: $localize`Document Updated`,
},
{
id: WorkflowTriggerType.Scheduled,
name: $localize`Scheduled`,
},
]
export const WORKFLOW_ACTION_OPTIONS = [
@ -96,6 +120,7 @@ export class WorkflowEditDialogComponent
storagePaths: StoragePath[]
mailRules: MailRule[]
customFields: CustomField[]
dateCustomFields: CustomField[]
expandedItem: number = null
@ -135,7 +160,12 @@ export class WorkflowEditDialogComponent
customFieldsService
.listAll()
.pipe(first())
.subscribe((result) => (this.customFields = result.results))
.subscribe((result) => {
this.customFields = result.results
this.dateCustomFields = this.customFields?.filter(
(f) => f.data_type === CustomFieldDataType.Date
)
})
}
getCreateTitle() {
@ -314,6 +344,15 @@ export class WorkflowEditDialogComponent
filter_has_document_type: new FormControl(
trigger.filter_has_document_type
),
schedule_offset_days: new FormControl(trigger.schedule_offset_days),
schedule_is_recurring: new FormControl(trigger.schedule_is_recurring),
schedule_recurring_interval_days: new FormControl(
trigger.schedule_recurring_interval_days
),
schedule_date_field: new FormControl(trigger.schedule_date_field),
schedule_date_custom_field: new FormControl(
trigger.schedule_date_custom_field
),
}),
{ emitEvent }
)
@ -388,6 +427,10 @@ export class WorkflowEditDialogComponent
return WORKFLOW_TYPE_OPTIONS
}
get scheduleDateFieldOptions() {
return SCHEDULE_DATE_FIELD_OPTIONS
}
getTriggerTypeOptionName(type: WorkflowTriggerType): string {
return this.triggerTypeOptions.find((t) => t.id === type)?.name ?? ''
}
@ -408,6 +451,11 @@ export class WorkflowEditDialogComponent
matching_algorithm: MATCH_NONE,
match: '',
is_insensitive: true,
schedule_offset_days: 0,
schedule_is_recurring: false,
schedule_recurring_interval_days: 1,
schedule_date_field: ScheduleDateField.Added,
schedule_date_custom_field: null,
}
this.object.triggers.push(trigger)
this.createTriggerField(trigger)


@ -35,23 +35,31 @@
</div>
@if (selectionModel.items) {
<div class="items" #buttonItems>
@for (item of selectionModel.itemsSorted | filter: filterText:'name'; track item; let i = $index) {
@for (item of selectionModel.items | filter: filterText:'name'; track item; let i = $index) {
@if (allowSelectNone || item.id) {
<pngx-toggleable-dropdown-button
[item]="item" [hideCount]="hideCount(item)" [state]="selectionModel.get(item.id)" [count]="getUpdatedDocumentCount(item.id)" (toggled)="selectionModel.toggle(item.id)" (exclude)="excludeClicked(item.id)" (click)="setButtonItemIndex(i - 1)" [disabled]="disabled">
[item]="item"
[hideCount]="hideCount(item)"
[opacifyCount]="!editing"
[state]="selectionModel.get(item.id)"
[count]="getUpdatedDocumentCount(item.id)"
(toggled)="selectionModel.toggle(item.id)"
(exclude)="excludeClicked(item.id)"
(click)="setButtonItemIndex(i - 1)"
[disabled]="disabled">
</pngx-toggleable-dropdown-button>
}
}
</div>
}
@if (editing) {
@if ((selectionModel.itemsSorted | filter: filterText:'name').length === 0 && createRef !== undefined) {
@if ((selectionModel.items | filter: filterText:'name').length === 0 && createRef !== undefined) {
<button class="list-group-item list-group-item-action bg-light" (click)="createClicked()" [disabled]="disabled">
<small class="ms-2"><ng-container i18n>Create</ng-container> "{{filterText}}"</small>
<i-bs width="1.5em" height="1em" name="plus"></i-bs>
</button>
}
@if ((selectionModel.itemsSorted | filter: filterText:'name').length > 0) {
@if ((selectionModel.items | filter: filterText:'name').length > 0) {
<button class="list-group-item list-group-item-action bg-light" (click)="applyClicked()" [disabled]="!modelIsDirty || disabled">
<small class="ms-2" [ngClass]="{'fw-bold': modelIsDirty}" i18n>Apply</small>
<i-bs width="1.5em" height="1em" name="arrow-right"></i-bs>


@ -501,7 +501,7 @@ describe('FilterableDropdownComponent & FilterableDropdownSelectionModel', () =>
component.selectionModel = selectionModel
selectionModel.toggle(items[1].id)
selectionModel.apply()
expect(selectionModel.itemsSorted).toEqual([
expect(selectionModel.items).toEqual([
nullItem,
{ id: null, name: 'Null B' },
items[1],
@ -509,6 +509,37 @@ describe('FilterableDropdownComponent & FilterableDropdownSelectionModel', () =>
])
})
it('selection model should sort items by state and document counts, if set', () => {
component.items = items.concat([{ id: 4, name: 'Item D' }])
component.selectionModel = selectionModel
component.documentCounts = [
{ id: 1, document_count: 0 }, // Tag1
{ id: 2, document_count: 1 }, // Tag2
{ id: 4, document_count: 2 },
]
component.selectionModel.apply()
expect(selectionModel.items).toEqual([
nullItem,
{ id: 4, name: 'Item D' },
items[1], // Tag2
items[0], // Tag1
])
selectionModel.toggle(items[1].id)
component.documentCounts = [
{ id: 1, document_count: 0 },
{ id: 2, document_count: 1 },
{ id: 4, document_count: 0 },
]
selectionModel.apply()
expect(selectionModel.items).toEqual([
nullItem,
items[1], // Tag2
{ id: 4, name: 'Item D' },
items[0], // Tag1
])
})
it('should set support create, keep open model and call createRef method', fakeAsync(() => {
component.items = items
component.icon = 'tag-fill'


@ -43,11 +43,23 @@ export class FilterableDropdownSelectionModel {
private _intersection: Intersection = Intersection.Include
temporaryIntersection: Intersection = this._intersection
items: MatchingModel[] = []
private _documentCounts: SelectionDataItem[] = []
public set documentCounts(counts: SelectionDataItem[]) {
this._documentCounts = counts
}
get itemsSorted(): MatchingModel[] {
// TODO: this is getting called very often
return this.items.sort((a, b) => {
private _items: MatchingModel[] = []
get items(): MatchingModel[] {
return this._items
}
set items(items: MatchingModel[]) {
this._items = items
this.sortItems()
}
private sortItems() {
this._items.sort((a, b) => {
if (a.id == null && b.id != null) {
return -1
} else if (a.id != null && b.id == null) {
@ -62,6 +74,16 @@ export class FilterableDropdownSelectionModel {
this.getNonTemporary(b.id) == ToggleableItemState.NotSelected
) {
return -1
} else if (
this._documentCounts.length &&
this.getDocumentCount(a.id) > this.getDocumentCount(b.id)
) {
return -1
} else if (
this._documentCounts.length &&
this.getDocumentCount(a.id) < this.getDocumentCount(b.id)
) {
return 1
} else {
return a.name.localeCompare(b.name)
}
@ -279,6 +301,10 @@ export class FilterableDropdownSelectionModel {
)
}
getDocumentCount(id: number) {
return this._documentCounts.find((c) => c.id === id)?.document_count
}
init(map: Map<number, ToggleableItemState>) {
this.temporarySelectionStates = map
this.apply()
@ -291,6 +317,7 @@ export class FilterableDropdownSelectionModel {
})
this._logicalOperator = this.temporaryLogicalOperator
this._intersection = this.temporaryIntersection
this.sortItems()
}
reset(complete: boolean = false) {
@ -423,7 +450,11 @@ export class FilterableDropdownComponent implements OnDestroy, OnInit {
}
@Input()
documentCounts: SelectionDataItem[]
set documentCounts(counts: SelectionDataItem[]) {
if (counts) {
this.selectionModel.documentCounts = counts
}
}
@Input()
shortcutKey: string
@ -536,9 +567,7 @@ export class FilterableDropdownComponent implements OnDestroy, OnInit {
}
getUpdatedDocumentCount(id: number) {
if (this.documentCounts) {
return this.documentCounts.find((c) => c.id === id)?.document_count
}
return this.selectionModel.getDocumentCount(id)
}
listKeyDown(event: KeyboardEvent) {


@ -1,4 +1,9 @@
<button class="list-group-item list-group-item-action d-flex align-items-center p-2 border-top-0 border-start-0 border-end-0 border-bottom" role="menuitem" (click)="toggleItem($event)" [disabled]="disabled">
<button
class="list-group-item list-group-item-action d-flex align-items-center p-2 border-top-0 border-start-0 border-end-0 border-bottom"
[class.opacity-50]="opacifyCount && !hideCount && currentCount === 0"
role="menuitem"
(click)="toggleItem($event)"
[disabled]="disabled">
<div class="selected-icon me-1">
@if (isChecked()) {
<i-bs width="1em" height="1em" name="check"></i-bs>
@ -18,6 +23,6 @@
}
</div>
@if (!hideCount) {
<div class="badge bg-light text-dark rounded-pill ms-auto me-1">{{count ?? item.document_count}}</div>
<div class="badge bg-light text-dark rounded-pill ms-auto me-1">{{currentCount}}</div>
}
</button>


@ -29,6 +29,9 @@ export class ToggleableDropdownButtonComponent {
@Input()
hideCount: boolean = false
@Input()
opacifyCount: boolean = true
@Output()
toggled = new EventEmitter()
@ -39,6 +42,10 @@ export class ToggleableDropdownButtonComponent {
return 'is_inbox_tag' in this.item
}
get currentCount(): number {
return this.count ?? this.item.document_count
}
toggleItem(event: MouseEvent): void {
if (this.state == ToggleableItemState.Selected) {
this.exclude.emit()


@ -132,12 +132,4 @@ describe('SelectComponent', () => {
const expectedTitle = `Filter documents with this ${component.title}`
expect(component.filterButtonTitle).toEqual(expectedTitle)
})
it('should support setting items as a plain array', () => {
component.itemsArray = ['foo', 'bar']
expect(component.items).toEqual([
{ id: 0, name: 'foo' },
{ id: 1, name: 'bar' },
])
})
})


@ -34,11 +34,6 @@ export class SelectComponent extends AbstractInputComponent<number> {
if (items && this.value) this.checkForPrivateItems(this.value)
}
@Input()
set itemsArray(items: any[]) {
this._items = items.map((item, index) => ({ id: index, name: item }))
}
writeValue(newValue: any): void {
if (newValue && this._items) {
this.checkForPrivateItems(newValue)


@ -1,4 +1,10 @@
<div class="preview-popup-container">
<a [href]="link ?? previewUrl" class="{{linkClasses}}" [target]="linkTarget" [title]="linkTitle"
[ngbPopover]="previewContent" [popoverTitle]="document.title | documentTitle" container="body"
autoClose="true" [popoverClass]="popoverClass" (mouseenter)="mouseEnterPreview()" (mouseleave)="mouseLeavePreview()" #popover="ngbPopover">
<ng-content></ng-content>
</a>
<ng-template #previewContent>
<div class="preview-popup-container">
@if (error) {
<div class="w-100 h-100 position-relative">
<p class="fst-italic position-absolute top-50 start-50 translate-middle" i18n>Error loading preview</p>
@ -27,4 +33,5 @@
}
}
}
</div>
</div>
</ng-template>


@ -1,4 +1,9 @@
import { ComponentFixture, TestBed } from '@angular/core/testing'
import {
ComponentFixture,
fakeAsync,
TestBed,
tick,
} from '@angular/core/testing'
import { PreviewPopupComponent } from './preview-popup.component'
import { By } from '@angular/platform-browser'
@ -15,6 +20,8 @@ import {
withInterceptorsFromDi,
} from '@angular/common/http'
import { of, throwError } from 'rxjs'
import { NgbPopoverModule } from '@ng-bootstrap/ng-bootstrap'
import { DocumentTitlePipe } from 'src/app/pipes/document-title.pipe'
const doc = {
id: 10,
@ -34,8 +41,12 @@ describe('PreviewPopupComponent', () => {
beforeEach(() => {
TestBed.configureTestingModule({
declarations: [PreviewPopupComponent, SafeUrlPipe],
imports: [NgxBootstrapIconsModule.pick(allIcons), PdfViewerModule],
declarations: [PreviewPopupComponent, SafeUrlPipe, DocumentTitlePipe],
imports: [
NgxBootstrapIconsModule.pick(allIcons),
PdfViewerModule,
NgbPopoverModule,
],
providers: [
provideHttpClient(withInterceptorsFromDi()),
provideHttpClientTesting(),
@ -70,12 +81,14 @@ describe('PreviewPopupComponent', () => {
it('should render object if native PDF viewer enabled', () => {
settingsService.set(SETTINGS_KEYS.USE_NATIVE_PDF_VIEWER, true)
component.popover.open()
fixture.detectChanges()
expect(fixture.debugElement.query(By.css('object'))).not.toBeNull()
})
it('should render pngx viewer if native PDF viewer disabled', () => {
settingsService.set(SETTINGS_KEYS.USE_NATIVE_PDF_VIEWER, false)
component.popover.open()
fixture.detectChanges()
expect(fixture.debugElement.query(By.css('object'))).toBeNull()
expect(fixture.debugElement.query(By.css('pdf-viewer'))).not.toBeNull()
@ -83,6 +96,7 @@ describe('PreviewPopupComponent', () => {
it('should show lock icon on password error', () => {
settingsService.set(SETTINGS_KEYS.USE_NATIVE_PDF_VIEWER, false)
component.popover.open()
component.onError({ name: 'PasswordException' })
fixture.detectChanges()
expect(component.requiresPassword).toBeTruthy()
@ -93,16 +107,18 @@ describe('PreviewPopupComponent', () => {
component.document.original_file_name = 'sample.png'
component.document.mime_type = 'image/png'
component.document.archived_file_name = undefined
component.popover.open()
fixture.detectChanges()
expect(fixture.debugElement.query(By.css('object'))).not.toBeNull()
})
it('should show message on error', () => {
component.popover.open()
component.onError({})
fixture.detectChanges()
expect(fixture.debugElement.nativeElement.textContent).toContain(
'Error loading preview'
)
expect(
fixture.debugElement.query(By.css('.popover')).nativeElement.textContent
).toContain('Error loading preview')
})
it('should get text content from http if appropriate', () => {
@ -122,4 +138,17 @@ describe('PreviewPopupComponent', () => {
component.init()
expect(component.previewText).toEqual('Preview text')
})
it('should show preview on mouseover after delay to preload content', fakeAsync(() => {
component.mouseEnterPreview()
expect(component.popover.isOpen()).toBeTruthy()
tick(600)
component.close()
component.mouseEnterPreview()
tick(100)
component.mouseLeavePreview()
tick(600)
expect(component.popover.isOpen()).toBeFalsy()
}))
})


@ -1,5 +1,6 @@
import { HttpClient } from '@angular/common/http'
import { Component, Input, OnDestroy } from '@angular/core'
import { Component, Input, OnDestroy, ViewChild } from '@angular/core'
import { NgbPopover } from '@ng-bootstrap/ng-bootstrap'
import { first, Subject, takeUntil } from 'rxjs'
import { Document } from 'src/app/data/document'
import { SETTINGS_KEYS } from 'src/app/data/ui-settings'
@ -23,6 +24,18 @@ export class PreviewPopupComponent implements OnDestroy {
return this._document
}
@Input()
link: string
@Input()
linkClasses: string = 'btn btn-sm btn-outline-secondary'
@Input()
linkTarget: string = '_blank'
@Input()
linkTitle: string = $localize`Open preview`
unsubscribeNotifier: Subject<any> = new Subject()
error = false
@ -31,6 +44,12 @@ export class PreviewPopupComponent implements OnDestroy {
previewText: string
@ViewChild('popover') popover: NgbPopover
mouseOnPreview: boolean
popoverClass: string = 'shadow popover-preview'
get renderAsObject(): boolean {
return (this.isPdf && this.useNativePdfViewer) || !this.isPdf
}
@ -83,4 +102,33 @@ export class PreviewPopupComponent implements OnDestroy {
this.error = true
}
}
get previewUrl() {
return this.documentService.getPreviewUrl(this.document.id)
}
mouseEnterPreview() {
this.mouseOnPreview = true
if (!this.popover.isOpen()) {
// we're going to open but hide to pre-load content during hover delay
this.popover.open()
this.popoverClass = 'shadow popover-preview pe-none opacity-0'
setTimeout(() => {
if (this.mouseOnPreview) {
// show popover
this.popoverClass = this.popoverClass.replace('pe-none opacity-0', '')
} else {
this.popover.close()
}
}, 600)
}
}
mouseLeavePreview() {
this.mouseOnPreview = false
}
public close() {
this.popover.close(false)
}
}


@ -190,7 +190,8 @@
@case (CustomFieldDataType.Select) {
<pngx-input-select formControlName="value"
[title]="getCustomFieldFromInstance(fieldInstance)?.name"
[itemsArray]="getCustomFieldFromInstance(fieldInstance)?.extra_data.select_options"
[items]="getCustomFieldFromInstance(fieldInstance)?.extra_data.select_options"
bindLabel="label"
[allowNull]="true"
[horizontal]="true"
[removable]="userIsOwner"
@ -388,6 +389,15 @@
<img [src]="previewUrl | safeUrl" width="100%" height="100%" alt="{{title}}" />
</div>
}
@case (ContentRenderType.TIFF) {
@if (!tiffError) {
<div class="preview-sticky">
<img [src]="tiffURL" width="100%" height="100%" alt="{{title}}" />
</div>
} @else {
<div class="preview-sticky bg-light p-3 overflow-auto whitespace-preserve" width="100%">{{tiffError}}</div>
}
}
@case (ContentRenderType.Other) {
<object [data]="previewUrl | safeUrl" class="preview-sticky" width="100%"></object>
}


@ -61,6 +61,7 @@ textarea.rtl {
width: 100%;
height: 100%;
object-fit: contain;
object-position: top;
}
.thumb-preview {


@ -1270,4 +1270,46 @@ describe('DocumentDetailComponent', () => {
expect(component.createDisabled(DataType.StoragePath)).toBeFalsy()
expect(component.createDisabled(DataType.Tag)).toBeFalsy()
})
it('should call tryRenderTiff when no archive and file is tiff', () => {
initNormally()
const tiffRenderSpy = jest.spyOn(
DocumentDetailComponent.prototype as any,
'tryRenderTiff'
)
const doc = Object.assign({}, component.document)
doc.archived_file_name = null
doc.mime_type = 'image/tiff'
jest
.spyOn(documentService, 'getMetadata')
.mockReturnValue(
of({ has_archive_version: false, original_mime_type: 'image/tiff' })
)
component.updateComponent(doc)
fixture.detectChanges()
expect(component.archiveContentRenderType).toEqual(
component.ContentRenderType.TIFF
)
expect(tiffRenderSpy).toHaveBeenCalled()
})
it('should try to render tiff and show error if failed', () => {
initNormally()
// just the text request
httpTestingController.expectOne(component.previewUrl)
// invalid tiff
component['tryRenderTiff']()
httpTestingController
.expectOne(component.previewUrl)
.flush(new ArrayBuffer(100)) // arraybuffer
expect(component.tiffError).not.toBeUndefined()
// http error
component['tryRenderTiff']()
httpTestingController
.expectOne(component.previewUrl)
.error(new ErrorEvent('failed'))
expect(component.tiffError).not.toBeUndefined()
})
})


@ -72,6 +72,7 @@ import { DeletePagesConfirmDialogComponent } from '../common/confirm-dialog/dele
import { HotKeyService } from 'src/app/services/hot-key.service'
import { PDFDocumentProxy } from 'ng2-pdf-viewer'
import { DataType } from 'src/app/data/datatype'
import * as UTIF from 'utif'
enum DocumentDetailNavIDs {
Details = 1,
@ -89,6 +90,7 @@ enum ContentRenderType {
Text = 'text',
Other = 'other',
Unknown = 'unknown',
TIFF = 'tiff',
}
enum ZoomSetting {
@ -136,6 +138,8 @@ export class DocumentDetailComponent
downloadUrl: string
downloadOriginalUrl: string
previewLoaded: boolean = false
tiffURL: string
tiffError: string
correspondents: Correspondent[]
documentTypes: DocumentType[]
@ -244,6 +248,8 @@ export class DocumentDetailComponent
['text/plain', 'application/csv', 'text/csv'].includes(mimeType)
) {
return ContentRenderType.Text
} else if (mimeType.indexOf('tiff') >= 0) {
return ContentRenderType.TIFF
} else if (mimeType?.indexOf('image/') === 0) {
return ContentRenderType.Image
}
@ -542,6 +548,9 @@ export class DocumentDetailComponent
this.document = doc
this.requiresPassword = false
this.updateFormForCustomFields()
if (this.archiveContentRenderType === ContentRenderType.TIFF) {
this.tryRenderTiff()
}
this.documentsService
.getMetadata(doc.id)
.pipe(
@ -721,6 +730,7 @@ export class DocumentDetailComponent
save(close: boolean = false) {
this.networkActive = true
;(document.activeElement as HTMLElement)?.dispatchEvent(new Event('change'))
this.documentsService
.update(this.document)
.pipe(first())
@ -1163,6 +1173,7 @@ export class DocumentDetailComponent
splitDocument() {
let modal = this.modalService.open(SplitConfirmDialogComponent, {
backdrop: 'static',
size: 'lg',
})
modal.componentInstance.title = $localize`Split confirm`
modal.componentInstance.messageBold = $localize`This operation will split the selected document(s) into new documents.`
@ -1201,6 +1212,7 @@ export class DocumentDetailComponent
rotateDocument() {
let modal = this.modalService.open(RotateConfirmDialogComponent, {
backdrop: 'static',
size: 'lg',
})
modal.componentInstance.title = $localize`Rotate confirm`
modal.componentInstance.messageBold = $localize`This operation will permanently rotate the original version of the current document.`
@ -1275,4 +1287,45 @@ export class DocumentDetailComponent
})
})
}
private tryRenderTiff() {
this.http.get(this.previewUrl, { responseType: 'arraybuffer' }).subscribe({
next: (res) => {
/* istanbul ignore next */
try {
// See UTIF.js > _imgLoaded
const tiffIfds: any[] = UTIF.decode(res)
var vsns = tiffIfds,
ma = 0,
page = vsns[0]
if (tiffIfds[0].subIFD) vsns = vsns.concat(tiffIfds[0].subIFD)
for (var i = 0; i < vsns.length; i++) {
var img = vsns[i]
if (img['t258'] == null || img['t258'].length < 3) continue
var ar = img['t256'] * img['t257']
if (ar > ma) {
ma = ar
page = img
}
}
UTIF.decodeImage(res, page, tiffIfds)
const rgba = UTIF.toRGBA8(page)
const { width: w, height: h } = page
var cnv = document.createElement('canvas')
cnv.width = w
cnv.height = h
var ctx = cnv.getContext('2d'),
imgd = ctx.createImageData(w, h)
for (var i = 0; i < rgba.length; i++) imgd.data[i] = rgba[i]
ctx.putImageData(imgd, 0, 0)
this.tiffURL = cnv.toDataURL()
} catch (err) {
this.tiffError = $localize`An error occurred loading tiff: ${err.toString()}`
}
},
error: (err) => {
this.tiffError = $localize`An error occurred loading tiff: ${err.toString()}`
},
})
}
}


@ -782,11 +782,11 @@ export class BulkEditorComponent
rotateSelected() {
let modal = this.modalService.open(RotateConfirmDialogComponent, {
backdrop: 'static',
size: 'lg',
})
const rotateDialog = modal.componentInstance as RotateConfirmDialogComponent
rotateDialog.title = $localize`Rotate confirm`
rotateDialog.messageBold = $localize`This operation will permanently rotate the original version of ${this.list.selected.size} document(s).`
rotateDialog.message = $localize`This will alter the original copy.`
rotateDialog.btnClass = 'btn-danger'
rotateDialog.btnCaption = $localize`Proceed`
rotateDialog.documentID = Array.from(this.list.selected)[0]


@ -1,4 +1,4 @@
<div class="card mb-3 shadow-sm bg-light" [class.card-selected]="selected" [class.document-card]="selectable" [class.popover-hidden]="popoverHidden" (mouseleave)="mouseLeaveCard()">
<div class="card mb-3 shadow-sm bg-light" [class.card-selected]="selected" [class.document-card]="selectable" (mouseleave)="mouseLeaveCard()">
<div class="row g-0">
<div class="col-md-2 doc-img-container rounded-start" (click)="this.toggleSelected.emit($event)" (dblclick)="dblClickDocument.emit()">
<img [src]="getThumbUrl()" class="card-img doc-img border-end rounded-start" [class.inverted]="getIsThumbInverted()">
@ -56,14 +56,9 @@
<a routerLink="/documents/{{document.id}}" class="btn btn-sm btn-outline-secondary" *pngxIfPermissions="{ action: PermissionAction.Change, type: PermissionType.Document }">
<i-bs name="file-earmark-richtext"></i-bs>&nbsp;<span class="d-none d-md-inline" i18n>Open</span>
</a>
<a class="btn btn-sm btn-outline-secondary" target="_blank" [href]="previewUrl"
[ngbPopover]="previewContent" [popoverTitle]="document.title | documentTitle"
autoClose="true" popoverClass="shadow popover-preview" (mouseenter)="mouseEnterPreview()" (mouseleave)="mouseLeavePreview()" #popover="ngbPopover">
<pngx-preview-popup [document]="document" #popupPreview>
<i-bs name="eye"></i-bs>&nbsp;<span class="d-none d-md-inline" i18n>View</span>
</a>
<ng-template #previewContent>
<pngx-preview-popup [document]="document"></pngx-preview-popup>
</ng-template>
</pngx-preview-popup>
<a class="btn btn-sm btn-outline-secondary" [href]="getDownloadUrl()">
<i-bs name="download"></i-bs>&nbsp;<span class="d-none d-md-inline" i18n>Download</span>
</a>


@ -1,11 +1,6 @@
import { DatePipe } from '@angular/common'
import { provideHttpClientTesting } from '@angular/common/http/testing'
import {
ComponentFixture,
TestBed,
fakeAsync,
tick,
} from '@angular/core/testing'
import { ComponentFixture, TestBed } from '@angular/core/testing'
import { By } from '@angular/platform-browser'
import { RouterTestingModule } from '@angular/router/testing'
import {
@ -84,21 +79,6 @@ describe('DocumentCardLargeComponent', () => {
expect(fixture.nativeElement.textContent).toContain('8 pages')
})
it('should show preview on mouseover after delay to preload content', fakeAsync(() => {
component.mouseEnterPreview()
expect(component.popover.isOpen()).toBeTruthy()
expect(component.popoverHidden).toBeTruthy()
tick(600)
expect(component.popoverHidden).toBeFalsy()
component.mouseLeaveCard()
component.mouseEnterPreview()
tick(100)
component.mouseLeavePreview()
tick(600)
expect(component.popover.isOpen()).toBeFalsy()
}))
it('should trim content', () => {
expect(component.contentTrimmed).toHaveLength(503) // includes ...
})


@ -12,9 +12,9 @@ import {
} from 'src/app/data/document'
import { DocumentService } from 'src/app/services/rest/document.service'
import { SettingsService } from 'src/app/services/settings.service'
import { NgbPopover } from '@ng-bootstrap/ng-bootstrap'
import { SETTINGS_KEYS } from 'src/app/data/ui-settings'
import { ComponentWithPermissions } from '../../with-permissions/with-permissions.component'
import { PreviewPopupComponent } from '../../common/preview-popup/preview-popup.component'
@Component({
selector: 'pngx-document-card-large',
@ -65,7 +65,7 @@ export class DocumentCardLargeComponent extends ComponentWithPermissions {
@Output()
clickMoreLike = new EventEmitter()
@ViewChild('popover') popover: NgbPopover
@ViewChild('popupPreview') popupPreview: PreviewPopupComponent
mouseOnPreview = false
popoverHidden = true
@ -112,29 +112,8 @@ export class DocumentCardLargeComponent extends ComponentWithPermissions {
return this.documentService.getPreviewUrl(this.document.id)
}
mouseEnterPreview() {
this.mouseOnPreview = true
if (!this.popover.isOpen()) {
// we're going to open but hide to pre-load content during hover delay
this.popover.open()
this.popoverHidden = true
setTimeout(() => {
if (this.mouseOnPreview) {
// show popover
this.popoverHidden = false
} else {
this.popover.close()
}
}, 600)
}
}
mouseLeavePreview() {
this.mouseOnPreview = false
}
mouseLeaveCard() {
this.popover.close()
this.popupPreview.close()
}
get contentTrimmed() {


@ -1,5 +1,5 @@
<div class="col p-2 h-100">
<div class="card h-100 shadow-sm document-card" [class.card-selected]="selected" [class.popover-hidden]="popoverHidden" (mouseleave)="mouseLeaveCard()">
<div class="card h-100 shadow-sm document-card" [class.card-selected]="selected" (mouseleave)="mouseLeaveCard()">
<div class="border-bottom doc-img-container rounded-top" (click)="this.toggleSelected.emit($event)" (dblclick)="dblClickDocument.emit(this)">
<img class="card-img doc-img" [class.inverted]="getIsThumbInverted()" [src]="getThumbUrl()">
@ -129,14 +129,9 @@
<a routerLink="/documents/{{document.id}}" class="btn btn-sm btn-outline-secondary" title="Open" i18n-title *pngxIfPermissions="{ action: PermissionAction.Change, type: PermissionType.Document }" i18n-title>
<i-bs name="file-earmark-richtext"></i-bs>
</a>
<a [href]="previewUrl" target="_blank" class="btn btn-sm btn-outline-secondary"
[ngbPopover]="previewContent" [popoverTitle]="document.title | documentTitle"
autoClose="true" popoverClass="shadow popover-preview" (mouseenter)="mouseEnterPreview()" (mouseleave)="mouseLeavePreview()" #popover="ngbPopover">
<pngx-preview-popup [document]="document" #popupPreview>
<i-bs name="eye"></i-bs>
</a>
<ng-template #previewContent>
<pngx-preview-popup [document]="document"></pngx-preview-popup>
</ng-template>
</pngx-preview-popup>
<a [href]="getDownloadUrl()" class="btn btn-sm btn-outline-secondary" title="Download" i18n-title (click)="$event.stopPropagation()">
<i-bs name="download"></i-bs>
</a>


@ -1,11 +1,6 @@
import { DatePipe } from '@angular/common'
import { provideHttpClientTesting } from '@angular/common/http/testing'
import {
ComponentFixture,
TestBed,
fakeAsync,
tick,
} from '@angular/core/testing'
import { ComponentFixture, TestBed } from '@angular/core/testing'
import { RouterTestingModule } from '@angular/router/testing'
import {
NgbPopoverModule,
@ -116,19 +111,4 @@ describe('DocumentCardSmallComponent', () => {
fixture.debugElement.queryAll(By.directive(TagComponent))
).toHaveLength(6)
})
it('should show preview on mouseover after delay to preload content', fakeAsync(() => {
component.mouseEnterPreview()
expect(component.popover.isOpen()).toBeTruthy()
expect(component.popoverHidden).toBeTruthy()
tick(600)
expect(component.popoverHidden).toBeFalsy()
component.mouseLeaveCard()
component.mouseEnterPreview()
tick(100)
component.mouseLeavePreview()
tick(600)
expect(component.popover.isOpen()).toBeFalsy()
}))
})


@ -13,9 +13,9 @@ import {
} from 'src/app/data/document'
import { DocumentService } from 'src/app/services/rest/document.service'
import { SettingsService } from 'src/app/services/settings.service'
import { NgbPopover } from '@ng-bootstrap/ng-bootstrap'
import { SETTINGS_KEYS } from 'src/app/data/ui-settings'
import { ComponentWithPermissions } from '../../with-permissions/with-permissions.component'
import { PreviewPopupComponent } from '../../common/preview-popup/preview-popup.component'
@Component({
selector: 'pngx-document-card-small',
@ -61,10 +61,7 @@ export class DocumentCardSmallComponent extends ComponentWithPermissions {
moreTags: number = null
@ViewChild('popover') popover: NgbPopover
mouseOnPreview = false
popoverHidden = true
@ViewChild('popupPreview') popupPreview: PreviewPopupComponent
getIsThumbInverted() {
return this.settingsService.get(SETTINGS_KEYS.DARK_MODE_THUMB_INVERTED)
@ -78,10 +75,6 @@ export class DocumentCardSmallComponent extends ComponentWithPermissions {
return this.documentService.getDownloadUrl(this.document.id)
}
get previewUrl() {
return this.documentService.getPreviewUrl(this.document.id)
}
get privateName() {
return $localize`Private`
}
@ -100,29 +93,8 @@ export class DocumentCardSmallComponent extends ComponentWithPermissions {
)
}
mouseEnterPreview() {
this.mouseOnPreview = true
if (!this.popover.isOpen()) {
// we're going to open but hide to pre-load content during hover delay
this.popover.open()
this.popoverHidden = true
setTimeout(() => {
if (this.mouseOnPreview) {
// show popover
this.popoverHidden = false
} else {
this.popover.close()
}
}, 600)
}
}
mouseLeavePreview() {
this.mouseOnPreview = false
}
mouseLeaveCard() {
this.popover.close()
this.popupPreview.close()
}
get notesEnabled(): boolean {


@ -292,7 +292,12 @@
@if (activeDisplayFields.includes(DisplayField.TITLE) || activeDisplayFields.includes(DisplayField.TAGS)) {
<td width="30%">
@if (activeDisplayFields.includes(DisplayField.TITLE)) {
<div class="d-inline-block" (mouseleave)="popupPreview.close()">
<a routerLink="/documents/{{d.id}}" title="Edit document" i18n-title style="overflow-wrap: anywhere;">{{d.title | documentTitle}}</a>
<pngx-preview-popup [document]="d" linkClasses="btn btn-sm btn-link text-secondary" linkTitle="Preview document" (click)="$event.stopPropagation()" i18n-linkTitle #popupPreview>
<i-bs name="eye"></i-bs>
</pngx-preview-popup>
</div>
}
@if (activeDisplayFields.includes(DisplayField.TAGS)) {
@for (t of d.tags$ | async; track t) {


@ -72,6 +72,7 @@ import { IsNumberPipe } from 'src/app/pipes/is-number.pipe'
import { NgxBootstrapIconsModule, allIcons } from 'ngx-bootstrap-icons'
import { PermissionsService } from 'src/app/services/permissions.service'
import { NgSelectModule } from '@ng-select/ng-select'
import { PreviewPopupComponent } from '../common/preview-popup/preview-popup.component'
const docs: Document[] = [
{
@ -137,6 +138,7 @@ describe('DocumentListComponent', () => {
UsernamePipe,
SafeHtmlPipe,
IsNumberPipe,
PreviewPopupComponent,
],
imports: [
RouterTestingModule.withRoutes(routes),


@ -77,14 +77,19 @@ describe('CorrespondentListComponent', () => {
it('should support very old date strings', () => {
jest.spyOn(correspondentsService, 'listFiltered').mockReturnValue(
of({
count: 1,
all: [1],
count: 2,
all: [1, 2],
results: [
{
id: 1,
name: 'Correspondent1',
last_correspondence: '1832-12-31T15:32:54-07:52:58',
},
{
id: 2,
name: 'Correspondent2',
last_correspondence: '1901-07-01T00:00:00+00:09:21',
},
],
})
)


@ -52,7 +52,7 @@ export class CorrespondentListComponent extends ManagementListComponent<Correspo
date = new Date(
c.last_correspondence
?.toString()
.replace(/-(\d\d):\d\d:\d\d/gm, `-$1:00`)
.replace(/([-+])(\d\d):\d\d:\d\d/gm, `$1$2:00`)
)
}
return this.datePipe.transform(date)
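For context (this snippet is not part of the diff), the broadened regex now truncates both negative and positive historic UTC offsets that carry a seconds component, e.g. the values exercised by the updated test above:
```typescript
// Worked example of the offset normalization used above.
const normalize = (s: string) =>
  s.replace(/([-+])(\d\d):\d\d:\d\d/gm, '$1$2:00')

normalize('1832-12-31T15:32:54-07:52:58') // '1832-12-31T15:32:54-07:00'
normalize('1901-07-01T00:00:00+00:09:21') // '1901-07-01T00:00:00+00:00'
```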


@ -56,7 +56,7 @@ export interface CustomField extends ObjectWithId {
name: string
created?: Date
extra_data?: {
select_options?: string[]
select_options?: Array<{ label: string; id: string }>
default_currency?: string
}
document_count?: number
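A small sketch of the new option shape and of how a stored option id now resolves to its label (mirroring the `getSelectValue()` change earlier in this diff); the option labels and ids are borrowed from the updated tests:
```typescript
import { CustomField, CustomFieldDataType } from 'src/app/data/custom-field'

const field: Partial<CustomField> = {
  name: 'Test Select Field',
  data_type: CustomFieldDataType.Select,
  extra_data: {
    select_options: [
      { label: 'Option 1', id: 'abc-123' },
      { label: 'Option 2', id: 'def-456' },
    ],
  },
}

// Custom field instances now store the option id rather than a list index;
// the label is looked up at display time.
const label = field.extra_data?.select_options?.find((o) => o.id === 'def-456')?.label
// label === 'Option 2'
```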


@ -17,6 +17,8 @@ export enum GlobalSearchType {
TITLE_CONTENT = 'title-content',
}
export const PAPERLESS_GREEN_HEX = '#17541f'
export const SETTINGS_KEYS = {
LANGUAGE: 'language',
APP_LOGO: 'app_logo',


@ -10,6 +10,14 @@ export enum WorkflowTriggerType {
Consumption = 1,
DocumentAdded = 2,
DocumentUpdated = 3,
Scheduled = 4,
}
export enum ScheduleDateField {
Added = 'added',
Created = 'created',
Modified = 'modified',
CustomField = 'custom_field',
}
export interface WorkflowTrigger extends ObjectWithId {
@ -34,4 +42,14 @@ export interface WorkflowTrigger extends ObjectWithId {
filter_has_correspondent?: number // Correspondent.id
filter_has_document_type?: number // DocumentType.id
schedule_offset_days?: number
schedule_is_recurring?: boolean
schedule_recurring_interval_days?: number
schedule_date_field?: ScheduleDateField
schedule_date_custom_field?: number // CustomField.id
}


@ -17,7 +17,12 @@ import {
hexToHsl,
} from 'src/app/utils/color'
import { environment } from 'src/environments/environment'
import { UiSettings, SETTINGS, SETTINGS_KEYS } from '../data/ui-settings'
import {
UiSettings,
SETTINGS,
SETTINGS_KEYS,
PAPERLESS_GREEN_HEX,
} from '../data/ui-settings'
import { User } from '../data/user'
import {
PermissionAction,
@ -420,7 +425,7 @@ export class SettingsService {
)
}
if (themeColor) {
if (themeColor?.length) {
const hsl = hexToHsl(themeColor)
const bgBrightnessEstimate = estimateBrightnessForColor(themeColor)
@ -445,6 +450,11 @@ export class SettingsService {
document.documentElement.style.removeProperty('--pngx-primary')
document.documentElement.style.removeProperty('--pngx-primary-lightness')
}
this.meta.updateTag({
name: 'theme-color',
content: themeColor?.length ? themeColor : PAPERLESS_GREEN_HEX,
})
}
getLanguageOptions(): LanguageOption[] {


@ -564,11 +564,6 @@ table.table {
}
}
.popover-hidden .popover {
opacity: 0;
pointer-events: none;
}
// Tour
.tour-active .popover {
min-width: 360px;
@ -728,3 +723,27 @@ i-bs svg {
vertical-align: middle;
}
}
// fixes for buttons in preview popup
.btn-group pngx-preview-popup:not(:last-child) {
// Prevent double borders when buttons are next to each other
> .btn {
margin-left: calc(#{$btn-border-width} * -1);
}
> .btn {
@include border-end-radius(0);
}
}
.btn-group pngx-preview-popup:not(:first-child) {
> .btn {
@include border-start-radius(0);
}
}
.btn-group pngx-preview-popup {
position: relative;
flex: 1 1 auto;
> .btn {
display: block;
}
}


@ -14,7 +14,7 @@ def settings(request):
app_logo = (
django_settings.APP_LOGO
if general_config.app_logo is None or len(general_config.app_logo) == 0
else general_config.app_logo
else django_settings.BASE_URL + general_config.app_logo.lstrip("/")
)
return {


@ -176,9 +176,9 @@ class CustomFieldsFilter(Filter):
if fields_with_matching_selects.count() > 0:
for field in fields_with_matching_selects:
options = field.extra_data.get("select_options", [])
for index, option in enumerate(options):
if option.lower().find(value.lower()) != -1:
option_ids.extend([index])
for _, option in enumerate(options):
if option.get("label").lower().find(value.lower()) != -1:
option_ids.extend([option.get("id")])
return (
qs.filter(custom_fields__field__name__icontains=value)
| qs.filter(custom_fields__value_text__icontains=value)
@ -195,18 +195,20 @@ class CustomFieldsFilter(Filter):
return qs
class SelectField(serializers.IntegerField):
class SelectField(serializers.CharField):
def __init__(self, custom_field: CustomField):
self._options = custom_field.extra_data["select_options"]
super().__init__(min_value=0, max_value=len(self._options))
super().__init__(max_length=16)
def to_internal_value(self, data):
if not isinstance(data, int):
# If the supplied value is not an integer,
# we will try to map it to an option index.
# If the supplied value is the option label instead of the ID
try:
data = self._options.index(data)
except ValueError:
data = next(
option.get("id")
for option in self._options
if option.get("label") == data
)
except StopIteration:
pass
return super().to_internal_value(data)


@ -317,10 +317,8 @@ class Command(BaseCommand):
# Check the files against the timeout
still_waiting = {}
for filepath in notified_files:
# Time of the last inotify event for this file
last_event_time = notified_files[filepath]
# last_event_time is time of the last inotify event for this file
for filepath, last_event_time in notified_files.items():
# Current time - last time over the configured timeout
waited_long_enough = (
monotonic() - last_event_time

View File

@ -294,9 +294,9 @@ class Command(CryptMixin, BaseCommand):
manifest_dict = {}
# Build an overall manifest
for key in manifest_key_to_object_query:
for key, object_query in manifest_key_to_object_query.items():
manifest_dict[key] = json.loads(
serializers.serialize("json", manifest_key_to_object_query[key]),
serializers.serialize("json", object_query),
)
self.encrypt_secret_fields(manifest_dict)
@ -370,8 +370,8 @@ class Command(CryptMixin, BaseCommand):
# 4.1 write primary manifest to target folder
manifest = []
for key in manifest_dict:
manifest.extend(manifest_dict[key])
for key, item in manifest_dict.items():
manifest.extend(item)
manifest_path = (self.target / "manifest.json").resolve()
self.check_and_write_json(
manifest,

View File

@ -34,7 +34,7 @@ from documents.settings import EXPORTER_ARCHIVE_NAME
from documents.settings import EXPORTER_CRYPTO_SETTINGS_NAME
from documents.settings import EXPORTER_FILE_NAME
from documents.settings import EXPORTER_THUMBNAIL_NAME
from documents.signals.handlers import update_cf_instance_documents
from documents.signals.handlers import check_paths_and_prune_custom_fields
from documents.signals.handlers import update_filename_and_move_files
from documents.utils import copy_file_with_basic_stats
from paperless import version
@ -262,7 +262,7 @@ class Command(CryptMixin, BaseCommand):
),
disable_signal(
post_save,
receiver=update_cf_instance_documents,
receiver=check_paths_and_prune_custom_fields,
sender=CustomField,
),
):

View File

@ -409,6 +409,7 @@ def document_matches_workflow(
elif (
trigger_type == WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED
or trigger_type == WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED
or trigger_type == WorkflowTrigger.WorkflowTriggerType.SCHEDULED
):
trigger_matched, reason = existing_document_matches_workflow(
document,

View File

@ -0,0 +1,143 @@
# Generated by Django 5.1.1 on 2024-11-05 05:19
import django.core.validators
import django.db.models.deletion
import django.utils.timezone
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "1057_paperlesstask_owner"),
]
operations = [
migrations.AddField(
model_name="workflowtrigger",
name="schedule_date_custom_field",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="documents.customfield",
verbose_name="schedule date custom field",
),
),
migrations.AddField(
model_name="workflowtrigger",
name="schedule_date_field",
field=models.CharField(
choices=[
("added", "Added"),
("created", "Created"),
("modified", "Modified"),
("custom_field", "Custom Field"),
],
default="added",
help_text="The field to check for a schedule trigger.",
max_length=20,
verbose_name="schedule date field",
),
),
migrations.AddField(
model_name="workflowtrigger",
name="schedule_is_recurring",
field=models.BooleanField(
default=False,
help_text="If the schedule should be recurring.",
verbose_name="schedule is recurring",
),
),
migrations.AddField(
model_name="workflowtrigger",
name="schedule_offset_days",
field=models.PositiveIntegerField(
default=0,
help_text="The number of days to offset the schedule trigger by.",
verbose_name="schedule offset days",
),
),
migrations.AddField(
model_name="workflowtrigger",
name="schedule_recurring_interval_days",
field=models.PositiveIntegerField(
default=1,
help_text="The number of days between recurring schedule triggers.",
validators=[django.core.validators.MinValueValidator(1)],
verbose_name="schedule recurring delay in days",
),
),
migrations.AlterField(
model_name="workflowtrigger",
name="type",
field=models.PositiveIntegerField(
choices=[
(1, "Consumption Started"),
(2, "Document Added"),
(3, "Document Updated"),
(4, "Scheduled"),
],
default=1,
verbose_name="Workflow Trigger Type",
),
),
migrations.CreateModel(
name="WorkflowRun",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"type",
models.PositiveIntegerField(
choices=[
(1, "Consumption Started"),
(2, "Document Added"),
(3, "Document Updated"),
(4, "Scheduled"),
],
null=True,
verbose_name="workflow trigger type",
),
),
(
"run_at",
models.DateTimeField(
db_index=True,
default=django.utils.timezone.now,
verbose_name="date run",
),
),
(
"document",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="workflow_runs",
to="documents.document",
verbose_name="document",
),
),
(
"workflow",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="runs",
to="documents.workflow",
verbose_name="workflow",
),
),
],
options={
"verbose_name": "workflow run",
"verbose_name_plural": "workflow runs",
},
),
]

View File

@ -0,0 +1,79 @@
# Generated by Django 5.1.1 on 2024-11-13 05:14
from django.db import migrations
from django.db import models
from django.db import transaction
from django.utils.crypto import get_random_string
def migrate_customfield_selects(apps, schema_editor):
"""
Migrate the custom field selects from a simple list of strings to a list of dictionaries with
label and id. Then update all instances of the custom field to use the new format.
"""
CustomFieldInstance = apps.get_model("documents", "CustomFieldInstance")
CustomField = apps.get_model("documents", "CustomField")
with transaction.atomic():
for custom_field in CustomField.objects.filter(
data_type="select",
): # CustomField.FieldDataType.SELECT
old_select_options = custom_field.extra_data["select_options"]
custom_field.extra_data["select_options"] = [
{"id": get_random_string(16), "label": value}
for value in old_select_options
]
custom_field.save()
for instance in CustomFieldInstance.objects.filter(field=custom_field):
if instance.value_select:
instance.value_select = custom_field.extra_data["select_options"][
int(instance.value_select)
]["id"]
instance.save()
def reverse_migrate_customfield_selects(apps, schema_editor):
"""
Reverse the migration of the custom field selects from a list of dictionaries with label and id
to a simple list of strings. Then update all instances of the custom field to use the old format,
which is just the index of the selected option.
"""
CustomFieldInstance = apps.get_model("documents", "CustomFieldInstance")
CustomField = apps.get_model("documents", "CustomField")
with transaction.atomic():
for custom_field in CustomField.objects.all():
if custom_field.data_type == "select": # CustomField.FieldDataType.SELECT
old_select_options = custom_field.extra_data["select_options"]
custom_field.extra_data["select_options"] = [
option["label"]
for option in custom_field.extra_data["select_options"]
]
custom_field.save()
for instance in CustomFieldInstance.objects.filter(field=custom_field):
instance.value_select = next(
index
for index, option in enumerate(old_select_options)
if option.get("id") == instance.value_select
)
instance.save()
class Migration(migrations.Migration):
dependencies = [
("documents", "1058_workflowtrigger_schedule_date_custom_field_and_more"),
]
operations = [
migrations.AlterField(
model_name="customfieldinstance",
name="value_select",
field=models.CharField(max_length=16, null=True),
),
migrations.RunPython(
migrate_customfield_selects,
reverse_migrate_customfield_selects,
),
]
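To make the data change concrete, here is a minimal sketch of the `select_options` shape before and after this migration; the ids shown are hypothetical stand-ins for the random 16-character strings the migration generates.

```python
# Before: options were plain strings and instances stored the option index.
old_format = ["Option 1", "Option 2"]

# After: options are dicts with a generated id, and instances store that id.
new_format = [
    {"id": "k3jd92nfa81lq0xz", "label": "Option 1"},
    {"id": "pq7wm2vd58rtyu1c", "label": "Option 2"},
]

old_value_select = 1                                   # pointed at "Option 2"
new_value_select = new_format[old_value_select]["id"]  # now stores the id
print(new_value_select)  # -> pq7wm2vd58rtyu1c
```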

View File

@ -947,7 +947,7 @@ class CustomFieldInstance(SoftDeleteModel):
value_document_ids = models.JSONField(null=True)
value_select = models.PositiveSmallIntegerField(null=True)
value_select = models.CharField(null=True, max_length=16)
class Meta:
ordering = ("created",)
@ -962,7 +962,11 @@ class CustomFieldInstance(SoftDeleteModel):
def __str__(self) -> str:
value = (
self.field.extra_data["select_options"][self.value_select]
next(
option.get("label")
for option in self.field.extra_data["select_options"]
if option.get("id") == self.value_select
)
if (
self.field.data_type == CustomField.FieldDataType.SELECT
and self.value_select is not None
@ -1016,12 +1020,19 @@ class WorkflowTrigger(models.Model):
CONSUMPTION = 1, _("Consumption Started")
DOCUMENT_ADDED = 2, _("Document Added")
DOCUMENT_UPDATED = 3, _("Document Updated")
SCHEDULED = 4, _("Scheduled")
class DocumentSourceChoices(models.IntegerChoices):
CONSUME_FOLDER = DocumentSource.ConsumeFolder.value, _("Consume Folder")
API_UPLOAD = DocumentSource.ApiUpload.value, _("Api Upload")
MAIL_FETCH = DocumentSource.MailFetch.value, _("Mail Fetch")
class ScheduleDateField(models.TextChoices):
ADDED = "added", _("Added")
CREATED = "created", _("Created")
MODIFIED = "modified", _("Modified")
CUSTOM_FIELD = "custom_field", _("Custom Field")
type = models.PositiveIntegerField(
_("Workflow Trigger Type"),
choices=WorkflowTriggerType.choices,
@ -1098,6 +1109,49 @@ class WorkflowTrigger(models.Model):
verbose_name=_("has this correspondent"),
)
schedule_offset_days = models.PositiveIntegerField(
_("schedule offset days"),
default=0,
help_text=_(
"The number of days to offset the schedule trigger by.",
),
)
schedule_is_recurring = models.BooleanField(
_("schedule is recurring"),
default=False,
help_text=_(
"If the schedule should be recurring.",
),
)
schedule_recurring_interval_days = models.PositiveIntegerField(
_("schedule recurring delay in days"),
default=1,
validators=[MinValueValidator(1)],
help_text=_(
"The number of days between recurring schedule triggers.",
),
)
schedule_date_field = models.CharField(
_("schedule date field"),
max_length=20,
choices=ScheduleDateField.choices,
default=ScheduleDateField.ADDED,
help_text=_(
"The field to check for a schedule trigger.",
),
)
schedule_date_custom_field = models.ForeignKey(
CustomField,
null=True,
blank=True,
on_delete=models.SET_NULL,
verbose_name=_("schedule date custom field"),
)
class Meta:
verbose_name = _("workflow trigger")
verbose_name_plural = _("workflow triggers")
@ -1348,3 +1402,39 @@ class Workflow(models.Model):
def __str__(self):
return f"Workflow: {self.name}"
class WorkflowRun(models.Model):
workflow = models.ForeignKey(
Workflow,
on_delete=models.CASCADE,
related_name="runs",
verbose_name=_("workflow"),
)
type = models.PositiveIntegerField(
_("workflow trigger type"),
choices=WorkflowTrigger.WorkflowTriggerType.choices,
null=True,
)
document = models.ForeignKey(
Document,
null=True,
on_delete=models.CASCADE,
related_name="workflow_runs",
verbose_name=_("document"),
)
run_at = models.DateTimeField(
_("date run"),
default=timezone.now,
db_index=True,
)
class Meta:
verbose_name = _("workflow run")
verbose_name_plural = _("workflow runs")
def __str__(self):
return f"WorkflowRun of {self.workflow} at {self.run_at} on {self.document}"

View File

@ -160,7 +160,7 @@ class SetPermissionsMixin:
},
}
if set_permissions is not None:
for action in permissions_dict:
for action, _ in permissions_dict.items():
if action in set_permissions:
users = set_permissions[action]["users"]
permissions_dict[action]["users"] = self._validate_user_ids(users)
@ -533,20 +533,27 @@ class CustomFieldSerializer(serializers.ModelSerializer):
if (
"data_type" in attrs
and attrs["data_type"] == CustomField.FieldDataType.SELECT
and (
) or (
self.instance
and self.instance.data_type == CustomField.FieldDataType.SELECT
):
if (
"extra_data" not in attrs
or "select_options" not in attrs["extra_data"]
or not isinstance(attrs["extra_data"]["select_options"], list)
or len(attrs["extra_data"]["select_options"]) == 0
or not all(
isinstance(option, str) and len(option) > 0
len(option.get("label", "")) > 0
for option in attrs["extra_data"]["select_options"]
)
)
):
raise serializers.ValidationError(
{"error": "extra_data.select_options must be a valid list"},
)
# labels are valid, generate ids if not present
for option in attrs["extra_data"]["select_options"]:
if option.get("id") is None:
option["id"] = get_random_string(length=16)
elif (
"data_type" in attrs
and attrs["data_type"] == CustomField.FieldDataType.MONETARY
@ -646,10 +653,14 @@ class CustomFieldInstanceSerializer(serializers.ModelSerializer):
elif field.data_type == CustomField.FieldDataType.SELECT:
select_options = field.extra_data["select_options"]
try:
select_options[data["value"]]
next(
option
for option in select_options
if option["id"] == data["value"]
)
except Exception:
raise serializers.ValidationError(
f"Value must be index of an element in {select_options}",
f"Value must be an id of an element in {select_options}",
)
elif field.data_type == CustomField.FieldDataType.DOCUMENTLINK:
doc_ids = data["value"]
@ -1772,6 +1783,11 @@ class WorkflowTriggerSerializer(serializers.ModelSerializer):
"filter_has_tags",
"filter_has_correspondent",
"filter_has_document_type",
"schedule_offset_days",
"schedule_is_recurring",
"schedule_recurring_interval_days",
"schedule_date_field",
"schedule_date_custom_field",
]
def validate(self, attrs):

View File

@ -37,6 +37,7 @@ from documents.models import PaperlessTask
from documents.models import Tag
from documents.models import Workflow
from documents.models import WorkflowAction
from documents.models import WorkflowRun
from documents.models import WorkflowTrigger
from documents.permissions import get_objects_for_user_owner_aware
from documents.permissions import set_permissions_for_object
@ -367,21 +368,6 @@ class CannotMoveFilesException(Exception):
pass
# should be disabled in /src/documents/management/commands/document_importer.py handle
@receiver(models.signals.post_save, sender=CustomField)
def update_cf_instance_documents(sender, instance: CustomField, **kwargs):
"""
'Select' custom field instances get their end-user value (e.g. in file names) from the select_options in extra_data,
which is contained in the custom field itself. So when the field is changed, we (may) need to update the file names
of all documents that have this custom field.
"""
if (
instance.data_type == CustomField.FieldDataType.SELECT
): # Only select fields, for now
for cf_instance in instance.fields.all():
update_filename_and_move_files(sender, cf_instance)
# should be disabled in /src/documents/management/commands/document_importer.py handle
@receiver(models.signals.post_save, sender=CustomFieldInstance)
@receiver(models.signals.m2m_changed, sender=Document.tags.through)
@ -520,6 +506,34 @@ def update_filename_and_move_files(
)
# should be disabled in /src/documents/management/commands/document_importer.py handle
@receiver(models.signals.post_save, sender=CustomField)
def check_paths_and_prune_custom_fields(sender, instance: CustomField, **kwargs):
"""
When a custom field is updated:
1. 'Select' custom field instances get their end-user value (e.g. in file names) from the select_options in extra_data,
which is contained in the custom field itself. So when the field is changed, we (may) need to update the file names
of all documents that have this custom field.
2. If a 'Select' field option was removed, we need to nullify the custom field instances that reference the removed option.
"""
if (
instance.data_type == CustomField.FieldDataType.SELECT
): # Only select fields, for now
for cf_instance in instance.fields.all():
options = instance.extra_data.get("select_options", [])
try:
next(
option["label"]
for option in options
if option["id"] == cf_instance.value
)
except StopIteration:
# The value of this custom field instance is not in the select options anymore
cf_instance.value_select = None
cf_instance.save()
update_filename_and_move_files(sender, cf_instance)
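A minimal sketch of the pruning check the new handler performs, using a made-up option list; the helper below is illustrative only and not part of the handler.

```python
# "Option 2" (def-456) has been removed from the field's options.
select_options = [
    {"id": "abc-123", "label": "Option 1"},
    {"id": "ghi-789", "label": "Option 3"},
]

def still_valid(value_select, options):
    # Mirrors the next(...) lookup above: StopIteration means the stored id no
    # longer exists in the field's options, so the instance value is nulled out.
    try:
        next(o["label"] for o in options if o["id"] == value_select)
        return True
    except StopIteration:
        return False

print(still_valid("abc-123", select_options))  # -> True, value kept
print(still_valid("def-456", select_options))  # -> False, value_select set to None
```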
def set_log_entry(sender, document: Document, logging_group=None, **kwargs):
ct = ContentType.objects.get(model="document")
user = User.objects.get(username="consumer")
@ -917,6 +931,12 @@ def run_workflows(
document.save()
document.tags.set(doc_tag_ids)
WorkflowRun.objects.create(
workflow=workflow,
type=trigger_type,
document=document if not use_overrides else None,
)
if use_overrides:
return overrides, "\n".join(messages)

View File

@ -31,10 +31,14 @@ from documents.double_sided import CollatePlugin
from documents.file_handling import create_source_path_directory
from documents.file_handling import generate_unique_filename
from documents.models import Correspondent
from documents.models import CustomFieldInstance
from documents.models import Document
from documents.models import DocumentType
from documents.models import StoragePath
from documents.models import Tag
from documents.models import Workflow
from documents.models import WorkflowRun
from documents.models import WorkflowTrigger
from documents.parsers import DocumentParser
from documents.parsers import get_parser_class_for_mime_type
from documents.plugins.base import ConsumeTaskPlugin
@ -44,6 +48,7 @@ from documents.plugins.helpers import ProgressStatusOptions
from documents.sanity_checker import SanityCheckFailedException
from documents.signals import document_updated
from documents.signals.handlers import cleanup_document_deletion
from documents.signals.handlers import run_workflows
if settings.AUDIT_LOG_ENABLED:
from auditlog.models import LogEntry
@ -337,3 +342,85 @@ def empty_trash(doc_ids=None):
cleanup_document_deletion,
sender=Document,
)
@shared_task
def check_scheduled_workflows():
scheduled_workflows: list[Workflow] = (
Workflow.objects.filter(
triggers__type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
enabled=True,
)
.distinct()
.prefetch_related("triggers")
)
if scheduled_workflows.count() > 0:
logger.debug(f"Checking {len(scheduled_workflows)} scheduled workflows")
for workflow in scheduled_workflows:
schedule_triggers = workflow.triggers.filter(
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
)
trigger: WorkflowTrigger
for trigger in schedule_triggers:
documents = Document.objects.none()
offset_td = timedelta(days=trigger.schedule_offset_days)
logger.debug(
f"Checking trigger {trigger} with offset {offset_td} against field: {trigger.schedule_date_field}",
)
match trigger.schedule_date_field:
case WorkflowTrigger.ScheduleDateField.ADDED:
documents = Document.objects.filter(
added__lt=timezone.now() - offset_td,
)
case WorkflowTrigger.ScheduleDateField.CREATED:
documents = Document.objects.filter(
created__lt=timezone.now() - offset_td,
)
case WorkflowTrigger.ScheduleDateField.MODIFIED:
documents = Document.objects.filter(
modified__lt=timezone.now() - offset_td,
)
case WorkflowTrigger.ScheduleDateField.CUSTOM_FIELD:
cf_instances = CustomFieldInstance.objects.filter(
field=trigger.schedule_date_custom_field,
value_date__lt=timezone.now() - offset_td,
)
documents = Document.objects.filter(
id__in=cf_instances.values_list("document", flat=True),
)
if documents.count() > 0:
logger.debug(
f"Found {documents.count()} documents for trigger {trigger}",
)
for document in documents:
workflow_runs = WorkflowRun.objects.filter(
document=document,
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
workflow=workflow,
).order_by("-run_at")
if not trigger.schedule_is_recurring and workflow_runs.exists():
# schedule is non-recurring and the workflow has already been run
logger.debug(
f"Skipping document {document} for non-recurring workflow {workflow} as it has already been run",
)
continue
elif (
trigger.schedule_is_recurring
and workflow_runs.exists()
and (
workflow_runs.last().run_at
> timezone.now()
- timedelta(
days=trigger.schedule_recurring_interval_days,
)
)
):
# schedule is recurring but the last run was within the recurring interval
logger.debug(
f"Skipping document {document} for recurring workflow {workflow} as the last run was within the recurring interval",
)
continue
run_workflows(
WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
document,
)
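A rough numeric walk-through of the skip logic above under assumed values (30-day offset, 7-day recurring interval, last run 6 days ago, matching the "too early" test later in this diff); the dates and variable names are illustrative only.

```python
from datetime import datetime, timedelta

now = datetime(2024, 11, 20)
offset = timedelta(days=30)              # schedule_offset_days
recurring_interval = timedelta(days=7)   # schedule_recurring_interval_days

created = now - timedelta(days=40)       # document created 40 days ago
last_run_at = now - timedelta(days=6)    # previous WorkflowRun for this document

document_due = created < now - offset                      # document qualifies
interval_elapsed = last_run_at <= now - recurring_interval  # last run old enough?

# The trigger fires only when the document is due AND the interval has elapsed.
print(document_due, interval_elapsed)    # -> True False  => skipped this cycle
```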

View File

@ -253,7 +253,11 @@ def get_custom_fields_context(
):
options = field_instance.field.extra_data["select_options"]
value = pathvalidate.sanitize_filename(
options[int(field_instance.value)],
next(
option["label"]
for option in options
if option["id"] == field_instance.value
),
replacement_text="-",
)
else:

View File

@ -1,7 +1,9 @@
import json
from unittest import mock
from auditlog.models import LogEntry
from django.contrib.auth.models import User
from django.test import override_settings
from guardian.shortcuts import assign_perm
from rest_framework import status
from rest_framework.test import APITestCase
@ -51,8 +53,12 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
self.doc3.tags.add(self.t2)
self.doc4.tags.add(self.t1, self.t2)
self.sp1 = StoragePath.objects.create(name="sp1", path="Something/{checksum}")
self.cf1 = CustomField.objects.create(name="cf1", data_type="text")
self.cf2 = CustomField.objects.create(name="cf2", data_type="text")
self.cf1 = CustomField.objects.create(name="cf1", data_type="string")
self.cf2 = CustomField.objects.create(name="cf2", data_type="string")
def setup_mock(self, m, method_name, return_value="OK"):
m.return_value = return_value
m.__name__ = method_name
@mock.patch("documents.bulk_edit.bulk_update_documents.delay")
def test_api_set_correspondent(self, bulk_update_task_mock):
@ -178,7 +184,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
@mock.patch("documents.serialisers.bulk_edit.modify_tags")
def test_api_modify_tags(self, m):
m.return_value = "OK"
self.setup_mock(m, "modify_tags")
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
@ -211,7 +217,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
- API returns HTTP 400
- modify_tags is not called
"""
m.return_value = "OK"
self.setup_mock(m, "modify_tags")
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
@ -230,7 +236,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
@mock.patch("documents.serialisers.bulk_edit.modify_custom_fields")
def test_api_modify_custom_fields(self, m):
m.return_value = "OK"
self.setup_mock(m, "modify_custom_fields")
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
@ -263,8 +269,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
- API returns HTTP 400
- modify_custom_fields is not called
"""
m.return_value = "OK"
self.setup_mock(m, "modify_custom_fields")
# Missing add_custom_fields
response = self.client.post(
"/api/documents/bulk_edit/",
@ -359,7 +364,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
@mock.patch("documents.serialisers.bulk_edit.delete")
def test_api_delete(self, m):
m.return_value = "OK"
self.setup_mock(m, "delete")
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
@ -383,8 +388,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
THEN:
- set_storage_path is called with correct document IDs and storage_path ID
"""
m.return_value = "OK"
self.setup_mock(m, "set_storage_path")
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
@ -414,8 +418,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
THEN:
- set_storage_path is called with correct document IDs and None storage_path
"""
m.return_value = "OK"
self.setup_mock(m, "set_storage_path")
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
@ -728,7 +731,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
@mock.patch("documents.serialisers.bulk_edit.set_permissions")
def test_set_permissions(self, m):
m.return_value = "OK"
self.setup_mock(m, "set_permissions")
user1 = User.objects.create(username="user1")
user2 = User.objects.create(username="user2")
permissions = {
@ -763,7 +766,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
@mock.patch("documents.serialisers.bulk_edit.set_permissions")
def test_set_permissions_merge(self, m):
m.return_value = "OK"
self.setup_mock(m, "set_permissions")
user1 = User.objects.create(username="user1")
user2 = User.objects.create(username="user2")
permissions = {
@ -823,7 +826,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
THEN:
- User is not able to change permissions
"""
m.return_value = "OK"
self.setup_mock(m, "set_permissions")
self.doc1.owner = User.objects.get(username="temp_admin")
self.doc1.save()
user1 = User.objects.create(username="user1")
@ -875,7 +878,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
THEN:
- set_storage_path only called if user can edit all docs
"""
m.return_value = "OK"
self.setup_mock(m, "set_storage_path")
self.doc1.owner = User.objects.get(username="temp_admin")
self.doc1.save()
user1 = User.objects.create(username="user1")
@ -919,8 +922,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
@mock.patch("documents.serialisers.bulk_edit.rotate")
def test_rotate(self, m):
m.return_value = "OK"
self.setup_mock(m, "rotate")
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
@ -974,8 +976,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
@mock.patch("documents.serialisers.bulk_edit.merge")
def test_merge(self, m):
m.return_value = "OK"
self.setup_mock(m, "merge")
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
@ -1003,8 +1004,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
user1 = User.objects.create(username="user1")
self.client.force_authenticate(user=user1)
m.return_value = "OK"
self.setup_mock(m, "merge")
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
@ -1053,8 +1053,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
THEN:
- The API fails with a correct error code
"""
m.return_value = "OK"
self.setup_mock(m, "merge")
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
@ -1074,8 +1073,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
@mock.patch("documents.serialisers.bulk_edit.split")
def test_split(self, m):
m.return_value = "OK"
self.setup_mock(m, "split")
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
@ -1165,8 +1163,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
@mock.patch("documents.serialisers.bulk_edit.delete_pages")
def test_delete_pages(self, m):
m.return_value = "OK"
self.setup_mock(m, "delete_pages")
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
@ -1254,3 +1251,87 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b"pages must be a list of integers", response.content)
@override_settings(AUDIT_LOG_ENABLED=True)
def test_bulk_edit_audit_log_enabled_simple_field(self):
"""
GIVEN:
- Audit log is enabled
WHEN:
- API to bulk edit documents is called
THEN:
- Audit log is created
"""
LogEntry.objects.all().delete()
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id],
"method": "set_correspondent",
"parameters": {"correspondent": self.c2.id},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(LogEntry.objects.filter(object_pk=self.doc1.id).count(), 1)
@override_settings(AUDIT_LOG_ENABLED=True)
def test_bulk_edit_audit_log_enabled_tags(self):
"""
GIVEN:
- Audit log is enabled
WHEN:
- API to bulk edit tags is called
THEN:
- Audit log is created
"""
LogEntry.objects.all().delete()
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id],
"method": "modify_tags",
"parameters": {
"add_tags": [self.t1.id],
"remove_tags": [self.t2.id],
},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(LogEntry.objects.filter(object_pk=self.doc1.id).count(), 1)
@override_settings(AUDIT_LOG_ENABLED=True)
def test_bulk_edit_audit_log_enabled_custom_fields(self):
"""
GIVEN:
- Audit log is enabled
WHEN:
- API to bulk edit custom fields is called
THEN:
- Audit log is created
"""
LogEntry.objects.all().delete()
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id],
"method": "modify_custom_fields",
"parameters": {
"add_custom_fields": [self.cf1.id],
"remove_custom_fields": [],
},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(LogEntry.objects.filter(object_pk=self.doc1.id).count(), 2)

View File

@ -1,5 +1,6 @@
import json
from datetime import date
from unittest.mock import ANY
from django.contrib.auth.models import Permission
from django.contrib.auth.models import User
@ -61,7 +62,10 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
"data_type": "select",
"name": "Select Field",
"extra_data": {
"select_options": ["Option 1", "Option 2"],
"select_options": [
{"label": "Option 1", "id": "abc-123"},
{"label": "Option 2", "id": "def-456"},
],
},
},
),
@ -73,7 +77,10 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
self.assertCountEqual(
data["extra_data"]["select_options"],
["Option 1", "Option 2"],
[
{"label": "Option 1", "id": "abc-123"},
{"label": "Option 2", "id": "def-456"},
],
)
def test_create_custom_field_nonunique_name(self):
@ -138,6 +145,133 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
)
self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
def test_custom_field_select_unique_ids(self):
"""
GIVEN:
- Nothing
- Existing custom field
WHEN:
- API request to create custom field with select options without id
THEN:
- Unique ids are generated for each option
"""
resp = self.client.post(
self.ENDPOINT,
json.dumps(
{
"data_type": "select",
"name": "Select Field",
"extra_data": {
"select_options": [
{"label": "Option 1"},
{"label": "Option 2"},
],
},
},
),
content_type="application/json",
)
self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
data = resp.json()
self.assertCountEqual(
data["extra_data"]["select_options"],
[
{"label": "Option 1", "id": ANY},
{"label": "Option 2", "id": ANY},
],
)
# Add a new option
resp = self.client.patch(
f"{self.ENDPOINT}{data['id']}/",
json.dumps(
{
"extra_data": {
"select_options": data["extra_data"]["select_options"]
+ [{"label": "Option 3"}],
},
},
),
content_type="application/json",
)
self.assertEqual(resp.status_code, status.HTTP_200_OK)
data = resp.json()
self.assertCountEqual(
data["extra_data"]["select_options"],
[
{"label": "Option 1", "id": ANY},
{"label": "Option 2", "id": ANY},
{"label": "Option 3", "id": ANY},
],
)
def test_custom_field_select_options_pruned(self):
"""
GIVEN:
- Select custom field exists and document instance with one of the options
WHEN:
- API request to remove an option from the select field
THEN:
- The option is removed from the field
- The option is removed from the document instance
"""
custom_field_select = CustomField.objects.create(
name="Select Field",
data_type=CustomField.FieldDataType.SELECT,
extra_data={
"select_options": [
{"label": "Option 1", "id": "abc-123"},
{"label": "Option 2", "id": "def-456"},
{"label": "Option 3", "id": "ghi-789"},
],
},
)
doc = Document.objects.create(
title="WOW",
content="the content",
checksum="123",
mime_type="application/pdf",
)
CustomFieldInstance.objects.create(
document=doc,
field=custom_field_select,
value_text="abc-123",
)
resp = self.client.patch(
f"{self.ENDPOINT}{custom_field_select.id}/",
json.dumps(
{
"extra_data": {
"select_options": [
{"label": "Option 1", "id": "abc-123"},
{"label": "Option 3", "id": "ghi-789"},
],
},
},
),
content_type="application/json",
)
self.assertEqual(resp.status_code, status.HTTP_200_OK)
data = resp.json()
self.assertCountEqual(
data["extra_data"]["select_options"],
[
{"label": "Option 1", "id": "abc-123"},
{"label": "Option 3", "id": "ghi-789"},
],
)
doc.refresh_from_db()
self.assertEqual(doc.custom_fields.first().value, None)
def test_create_custom_field_monetary_validation(self):
"""
GIVEN:
@ -261,7 +395,10 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
name="Test Custom Field Select",
data_type=CustomField.FieldDataType.SELECT,
extra_data={
"select_options": ["Option 1", "Option 2"],
"select_options": [
{"label": "Option 1", "id": "abc-123"},
{"label": "Option 2", "id": "def-456"},
],
},
)
@ -309,7 +446,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
},
{
"field": custom_field_select.id,
"value": 0,
"value": "abc-123",
},
],
},
@ -332,7 +469,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
{"field": custom_field_monetary.id, "value": "EUR11.10"},
{"field": custom_field_monetary2.id, "value": "11.1"},
{"field": custom_field_documentlink.id, "value": [doc2.id]},
{"field": custom_field_select.id, "value": 0},
{"field": custom_field_select.id, "value": "abc-123"},
],
)
@ -722,7 +859,10 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
name="Test Custom Field SELECT",
data_type=CustomField.FieldDataType.SELECT,
extra_data={
"select_options": ["Option 1", "Option 2"],
"select_options": [
{"label": "Option 1", "id": "abc-123"},
{"label": "Option 2", "id": "def-456"},
],
},
)
@ -730,7 +870,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
f"/api/documents/{doc.id}/",
data={
"custom_fields": [
{"field": custom_field_select.id, "value": 3},
{"field": custom_field_select.id, "value": "not an option"},
],
},
format="json",

View File

@ -657,13 +657,16 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
name="Test Custom Field Select",
data_type=CustomField.FieldDataType.SELECT,
extra_data={
"select_options": ["Option 1", "Choice 2"],
"select_options": [
{"label": "Option 1", "id": "abc123"},
{"label": "Choice 2", "id": "def456"},
],
},
)
CustomFieldInstance.objects.create(
document=doc1,
field=custom_field_select,
value_select=1,
value_select="def456",
)
r = self.client.get("/api/documents/?custom_fields__icontains=choice")

View File

@ -46,7 +46,13 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
# Add some options to the select_field
select = self.custom_fields["select_field"]
select.extra_data = {"select_options": ["A", "B", "C"]}
select.extra_data = {
"select_options": [
{"label": "A", "id": "abc-123"},
{"label": "B", "id": "def-456"},
{"label": "C", "id": "ghi-789"},
],
}
select.save()
# Now we will create some test documents
@ -122,9 +128,9 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
# CustomField.FieldDataType.SELECT
self._create_document(select_field=None)
self._create_document(select_field=0)
self._create_document(select_field=1)
self._create_document(select_field=2)
self._create_document(select_field="abc-123")
self._create_document(select_field="def-456")
self._create_document(select_field="ghi-789")
def _create_document(self, **kwargs):
title = str(kwargs)
@ -296,18 +302,18 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
)
def test_select(self):
# For select fields, you can either specify the index
# For select fields, you can either specify the id of the option
# or the name of the option. They function exactly the same.
self._assert_query_match_predicate(
["select_field", "exact", 1],
["select_field", "exact", "def-456"],
lambda document: "select_field" in document
and document["select_field"] == 1,
and document["select_field"] == "def-456",
)
# This is the same as:
self._assert_query_match_predicate(
["select_field", "exact", "B"],
lambda document: "select_field" in document
and document["select_field"] == 1,
and document["select_field"] == "def-456",
)
# ==========================================================#
@ -522,9 +528,9 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
def test_invalid_value(self):
self._assert_validation_error(
json.dumps(["select_field", "exact", "not an option"]),
json.dumps(["select_field", "exact", []]),
["custom_field_query", "2"],
"integer",
"string",
)
def test_invalid_logical_operator(self):

View File

@ -544,7 +544,11 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
name="test",
data_type=CustomField.FieldDataType.SELECT,
extra_data={
"select_options": ["apple", "banana", "cherry"],
"select_options": [
{"label": "apple", "id": "abc123"},
{"label": "banana", "id": "def456"},
{"label": "cherry", "id": "ghi789"},
],
},
)
doc = Document.objects.create(
@ -555,14 +559,22 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
archive_checksum="B",
mime_type="application/pdf",
)
CustomFieldInstance.objects.create(field=cf, document=doc, value_select=0)
CustomFieldInstance.objects.create(
field=cf,
document=doc,
value_select="abc123",
)
self.assertEqual(generate_filename(doc), "document_apple.pdf")
# handler should not have been called
self.assertEqual(m.call_count, 0)
cf.extra_data = {
"select_options": ["aubergine", "banana", "cherry"],
"select_options": [
{"label": "aubergine", "id": "abc123"},
{"label": "banana", "id": "def456"},
{"label": "cherry", "id": "ghi789"},
],
}
cf.save()
self.assertEqual(generate_filename(doc), "document_aubergine.pdf")
@ -1373,13 +1385,18 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
cf2 = CustomField.objects.create(
name="Select Field",
data_type=CustomField.FieldDataType.SELECT,
extra_data={"select_options": ["ChoiceOne", "ChoiceTwo"]},
extra_data={
"select_options": [
{"label": "ChoiceOne", "id": "abc=123"},
{"label": "ChoiceTwo", "id": "def-456"},
],
},
)
cfi1 = CustomFieldInstance.objects.create(
document=doc_a,
field=cf2,
value_select=0,
value_select="abc=123",
)
cfi = CustomFieldInstance.objects.create(

View File

@ -0,0 +1,87 @@
from unittest.mock import ANY
from documents.tests.utils import TestMigrations
class TestMigrateCustomFieldSelects(TestMigrations):
migrate_from = "1058_workflowtrigger_schedule_date_custom_field_and_more"
migrate_to = "1059_alter_customfieldinstance_value_select"
def setUpBeforeMigration(self, apps):
CustomField = apps.get_model("documents.CustomField")
self.old_format = CustomField.objects.create(
name="cf1",
data_type="select",
extra_data={"select_options": ["Option 1", "Option 2", "Option 3"]},
)
Document = apps.get_model("documents.Document")
doc = Document.objects.create(title="doc1")
CustomFieldInstance = apps.get_model("documents.CustomFieldInstance")
self.old_instance = CustomFieldInstance.objects.create(
field=self.old_format,
value_select=0,
document=doc,
)
def test_migrate_old_to_new_select_fields(self):
self.old_format.refresh_from_db()
self.old_instance.refresh_from_db()
self.assertEqual(
self.old_format.extra_data["select_options"],
[
{"label": "Option 1", "id": ANY},
{"label": "Option 2", "id": ANY},
{"label": "Option 3", "id": ANY},
],
)
self.assertEqual(
self.old_instance.value_select,
self.old_format.extra_data["select_options"][0]["id"],
)
class TestMigrationCustomFieldSelectsReverse(TestMigrations):
migrate_from = "1059_alter_customfieldinstance_value_select"
migrate_to = "1058_workflowtrigger_schedule_date_custom_field_and_more"
def setUpBeforeMigration(self, apps):
CustomField = apps.get_model("documents.CustomField")
self.new_format = CustomField.objects.create(
name="cf1",
data_type="select",
extra_data={
"select_options": [
{"label": "Option 1", "id": "id1"},
{"label": "Option 2", "id": "id2"},
{"label": "Option 3", "id": "id3"},
],
},
)
Document = apps.get_model("documents.Document")
doc = Document.objects.create(title="doc1")
CustomFieldInstance = apps.get_model("documents.CustomFieldInstance")
self.new_instance = CustomFieldInstance.objects.create(
field=self.new_format,
value_select="id1",
document=doc,
)
def test_migrate_new_to_old_select_fields(self):
self.new_format.refresh_from_db()
self.new_instance.refresh_from_db()
self.assertEqual(
self.new_format.extra_data["select_options"],
[
"Option 1",
"Option 2",
"Option 3",
],
)
self.assertEqual(
self.new_instance.value_select,
0,
)

View File

@ -6,12 +6,14 @@ from django.conf import settings
from django.contrib.auth.models import Permission
from django.contrib.auth.models import User
from django.test import TestCase
from django.test import override_settings
from django.utils import timezone
from rest_framework import status
from documents.models import Document
from documents.models import ShareLink
from documents.tests.utils import DirectoriesMixin
from paperless.models import ApplicationConfiguration
class TestViews(DirectoriesMixin, TestCase):
@ -67,6 +69,26 @@ class TestViews(DirectoriesMixin, TestCase):
f"frontend/{language_actual}/main.js",
)
@override_settings(BASE_URL="/paperless/")
def test_index_app_logo_with_base_url(self):
"""
GIVEN:
- Existing config with app_logo specified
WHEN:
- Index page is loaded
THEN:
- app_logo is prefixed with BASE_URL
"""
config = ApplicationConfiguration.objects.first()
config.app_logo = "/logo/example.jpg"
config.save()
self.client.force_login(self.user)
response = self.client.get("/")
self.assertEqual(
response.context["APP_LOGO"],
f"/paperless{config.app_logo}",
)
def test_share_link_views(self):
"""
GIVEN:

View File

@ -29,6 +29,7 @@ from documents.models import StoragePath
from documents.models import Tag
from documents.models import Workflow
from documents.models import WorkflowAction
from documents.models import WorkflowRun
from documents.models import WorkflowTrigger
from documents.signals import document_consumption_finished
from documents.tests.utils import DirectoriesMixin
@ -1306,6 +1307,275 @@ class TestWorkflows(DirectoriesMixin, FileSystemAssertsMixin, APITestCase):
# group2 should have been added
self.assertIn(self.group2, group_perms)
def test_workflow_scheduled_trigger_created(self):
"""
GIVEN:
- Existing workflow with SCHEDULED trigger against the created field and action that assigns owner
- Existing doc that matches the trigger
WHEN:
- Scheduled workflows are checked
THEN:
- Workflow runs, document owner is updated
"""
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
schedule_offset_days=1,
schedule_date_field="created",
)
action = WorkflowAction.objects.create(
assign_title="Doc assign owner",
assign_owner=self.user2,
)
w = Workflow.objects.create(
name="Workflow 1",
order=0,
)
w.triggers.add(trigger)
w.actions.add(action)
w.save()
now = timezone.localtime(timezone.now())
created = now - timedelta(weeks=520)
doc = Document.objects.create(
title="sample test",
correspondent=self.c,
original_filename="sample.pdf",
created=created,
)
tasks.check_scheduled_workflows()
doc.refresh_from_db()
self.assertEqual(doc.owner, self.user2)
def test_workflow_scheduled_trigger_added(self):
"""
GIVEN:
- Existing workflow with SCHEDULED trigger against the added field and action that assigns owner
- Existing doc that matches the trigger
WHEN:
- Scheduled workflows are checked
THEN:
- Workflow runs, document owner is updated
"""
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
schedule_offset_days=1,
schedule_date_field=WorkflowTrigger.ScheduleDateField.ADDED,
)
action = WorkflowAction.objects.create(
assign_title="Doc assign owner",
assign_owner=self.user2,
)
w = Workflow.objects.create(
name="Workflow 1",
order=0,
)
w.triggers.add(trigger)
w.actions.add(action)
w.save()
added = timezone.now() - timedelta(days=365)
doc = Document.objects.create(
title="sample test",
correspondent=self.c,
original_filename="sample.pdf",
added=added,
)
tasks.check_scheduled_workflows()
doc.refresh_from_db()
self.assertEqual(doc.owner, self.user2)
@mock.patch("documents.models.Document.objects.filter", autospec=True)
def test_workflow_scheduled_trigger_modified(self, mock_filter):
"""
GIVEN:
- Existing workflow with SCHEDULED trigger against the modified field and action that assigns owner
- Existing doc that matches the trigger
WHEN:
- Scheduled workflows are checked
THEN:
- Workflow runs, document owner is updated
"""
# we have to mock because the modified field is auto_now
mock_filter.return_value = Document.objects.all()
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
schedule_offset_days=1,
schedule_date_field=WorkflowTrigger.ScheduleDateField.MODIFIED,
)
action = WorkflowAction.objects.create(
assign_title="Doc assign owner",
assign_owner=self.user2,
)
w = Workflow.objects.create(
name="Workflow 1",
order=0,
)
w.triggers.add(trigger)
w.actions.add(action)
w.save()
doc = Document.objects.create(
title="sample test",
correspondent=self.c,
original_filename="sample.pdf",
)
tasks.check_scheduled_workflows()
doc.refresh_from_db()
self.assertEqual(doc.owner, self.user2)
def test_workflow_scheduled_trigger_custom_field(self):
"""
GIVEN:
- Existing workflow with SCHEDULED trigger against a custom field and action that assigns owner
- Existing doc that matches the trigger
WHEN:
- Scheduled workflows are checked
THEN:
- Workflow runs, document owner is updated
"""
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
schedule_offset_days=1,
schedule_date_field=WorkflowTrigger.ScheduleDateField.CUSTOM_FIELD,
schedule_date_custom_field=self.cf1,
)
action = WorkflowAction.objects.create(
assign_title="Doc assign owner",
assign_owner=self.user2,
)
w = Workflow.objects.create(
name="Workflow 1",
order=0,
)
w.triggers.add(trigger)
w.actions.add(action)
w.save()
doc = Document.objects.create(
title="sample test",
correspondent=self.c,
original_filename="sample.pdf",
)
CustomFieldInstance.objects.create(
document=doc,
field=self.cf1,
value_date=timezone.now() - timedelta(days=2),
)
tasks.check_scheduled_workflows()
doc.refresh_from_db()
self.assertEqual(doc.owner, self.user2)
def test_workflow_scheduled_already_run(self):
"""
GIVEN:
- Existing workflow with SCHEDULED trigger
- Existing doc that has already had the workflow run
WHEN:
- Scheduled workflows are checked
THEN:
- Workflow does not run again
"""
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
schedule_offset_days=1,
schedule_date_field=WorkflowTrigger.ScheduleDateField.CREATED,
)
action = WorkflowAction.objects.create(
assign_title="Doc assign owner",
assign_owner=self.user2,
)
w = Workflow.objects.create(
name="Workflow 1",
order=0,
)
w.triggers.add(trigger)
w.actions.add(action)
w.save()
doc = Document.objects.create(
title="sample test",
correspondent=self.c,
original_filename="sample.pdf",
created=timezone.now() - timedelta(days=2),
)
wr = WorkflowRun.objects.create(
workflow=w,
document=doc,
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
run_at=timezone.now(),
)
self.assertEqual(
str(wr),
f"WorkflowRun of {w} at {wr.run_at} on {doc}",
) # coverage
tasks.check_scheduled_workflows()
doc.refresh_from_db()
self.assertIsNone(doc.owner)
def test_workflow_scheduled_trigger_too_early(self):
"""
GIVEN:
- Existing workflow with SCHEDULED trigger and recurring interval of 7 days
- A previous workflow run exists from 6 days ago
WHEN:
- Scheduled workflows are checked
THEN:
- Workflow does not run because the recurring interval has not elapsed
"""
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
schedule_offset_days=30,
schedule_date_field=WorkflowTrigger.ScheduleDateField.CREATED,
schedule_is_recurring=True,
schedule_recurring_interval_days=7,
)
action = WorkflowAction.objects.create(
assign_title="Doc assign owner",
assign_owner=self.user2,
)
w = Workflow.objects.create(
name="Workflow 1",
order=0,
)
w.triggers.add(trigger)
w.actions.add(action)
w.save()
doc = Document.objects.create(
title="sample test",
correspondent=self.c,
original_filename="sample.pdf",
created=timezone.now() - timedelta(days=40),
)
WorkflowRun.objects.create(
workflow=w,
document=doc,
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
run_at=timezone.now() - timedelta(days=6),
)
with self.assertLogs(level="DEBUG") as cm:
tasks.check_scheduled_workflows()
self.assertIn(
"last run was within the recurring interval",
" ".join(cm.output),
)
doc.refresh_from_db()
self.assertIsNone(doc.owner)
def test_workflow_enabled_disabled(self):
trigger = WorkflowTrigger.objects.create(
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
@ -1354,7 +1624,7 @@ class TestWorkflows(DirectoriesMixin, FileSystemAssertsMixin, APITestCase):
def test_new_trigger_type_raises_exception(self):
trigger = WorkflowTrigger.objects.create(
type=4,
type=99,
)
action = WorkflowAction.objects.create(
assign_title="Doc assign owner",
@ -1370,7 +1640,7 @@ class TestWorkflows(DirectoriesMixin, FileSystemAssertsMixin, APITestCase):
doc = Document.objects.create(
title="test",
)
self.assertRaises(Exception, document_matches_workflow, doc, w, 4)
self.assertRaises(Exception, document_matches_workflow, doc, w, 99)
def test_removal_action_document_updated_workflow(self):
"""

View File

@ -26,11 +26,13 @@ from django.db.models import Case
from django.db.models import Count
from django.db.models import IntegerField
from django.db.models import Max
from django.db.models import Model
from django.db.models import Q
from django.db.models import Sum
from django.db.models import When
from django.db.models.functions import Length
from django.db.models.functions import Lower
from django.db.models.manager import Manager
from django.http import Http404
from django.http import HttpResponse
from django.http import HttpResponseBadRequest
@ -426,7 +428,7 @@ class DocumentViewSet(
)
def file_response(self, pk, request, disposition):
doc = Document.objects.select_related("owner").get(id=pk)
doc = Document.global_objects.select_related("owner").get(id=pk)
if request.user is not None and not has_perms_owner_aware(
request.user,
"view_document",
@ -961,6 +963,22 @@ class SavedViewViewSet(ModelViewSet, PassUserMixin):
class BulkEditView(PassUserMixin):
MODIFIED_FIELD_BY_METHOD = {
"set_correspondent": "correspondent",
"set_document_type": "document_type",
"set_storage_path": "storage_path",
"add_tag": "tags",
"remove_tag": "tags",
"modify_tags": "tags",
"modify_custom_fields": "custom_fields",
"set_permissions": None,
"delete": "deleted_at",
"rotate": "checksum",
"delete_pages": "checksum",
"split": None,
"merge": None,
}
permission_classes = (IsAuthenticated,)
serializer_class = BulkEditSerializer
parser_classes = (parsers.JSONParser,)
@ -1013,8 +1031,53 @@ class BulkEditView(PassUserMixin):
return HttpResponseForbidden("Insufficient permissions")
try:
modified_field = self.MODIFIED_FIELD_BY_METHOD[method.__name__]
if settings.AUDIT_LOG_ENABLED and modified_field:
old_documents = {
obj["pk"]: obj
for obj in Document.objects.filter(pk__in=documents).values(
"pk",
"correspondent",
"document_type",
"storage_path",
"tags",
"custom_fields",
"deleted_at",
"checksum",
)
}
# TODO: parameter validation
result = method(documents, **parameters)
if settings.AUDIT_LOG_ENABLED and modified_field:
new_documents = Document.objects.filter(pk__in=documents)
for doc in new_documents:
old_value = old_documents[doc.pk][modified_field]
new_value = getattr(doc, modified_field)
if isinstance(new_value, Model):
# correspondent, document type, etc.
new_value = new_value.pk
elif isinstance(new_value, Manager):
# tags, custom fields
new_value = list(new_value.values_list("pk", flat=True))
LogEntry.objects.log_create(
instance=doc,
changes={
modified_field: [
old_value,
new_value,
],
},
action=LogEntry.Action.UPDATE,
actor=user,
additional_data={
"reason": f"Bulk edit: {method.__name__}",
},
)
return Response({"result": result})
except Exception as e:
logger.warning(f"An error occurred performing bulk edit: {e!s}")
@ -1546,6 +1609,12 @@ class StoragePathViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
filterset_class = StoragePathFilterSet
ordering_fields = ("name", "path", "matching_algorithm", "match", "document_count")
def get_permissions(self):
if self.action == "test":
# Test action does not require object level permissions
self.permission_classes = (IsAuthenticated,)
return super().get_permissions()
def destroy(self, request, *args, **kwargs):
"""
When a storage path is deleted, see if documents
@ -1562,17 +1631,12 @@ class StoragePathViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
return response
class StoragePathTestView(GenericAPIView):
@action(methods=["post"], detail=False)
def test(self, request):
"""
Test storage path against a document
"""
permission_classes = [IsAuthenticated]
serializer_class = StoragePathTestSerializer
def post(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
serializer = StoragePathTestSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
document = serializer.validated_data.get("document")

View File

@ -5,9 +5,9 @@ from paperless.checks import paths_check
from paperless.checks import settings_values_check
__all__ = [
"celery_app",
"audit_log_check",
"binaries_check",
"celery_app",
"paths_check",
"settings_values_check",
"audit_log_check",
]

View File

@ -216,6 +216,17 @@ def _parse_beat_schedule() -> dict:
"expires": 23.0 * 60.0 * 60.0,
},
},
{
"name": "Check and run scheduled workflows",
"env_key": "PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON",
# Default hourly at 5 minutes past the hour
"env_default": "5 */1 * * *",
"task": "documents.tasks.check_scheduled_workflows",
"options": {
# 1 minute before default schedule sends again
"expires": 59.0 * 60.0,
},
},
]
for task in tasks:
# Either get the environment setting or use the default

View File

@ -157,6 +157,7 @@ class TestCeleryScheduleParsing(TestCase):
INDEX_EXPIRE_TIME = 23.0 * 60.0 * 60.0
SANITY_EXPIRE_TIME = ((7.0 * 24.0) - 1.0) * 60.0 * 60.0
EMPTY_TRASH_EXPIRE_TIME = 23.0 * 60.0 * 60.0
RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME = 59.0 * 60.0
def test_schedule_configuration_default(self):
"""
@ -196,6 +197,11 @@ class TestCeleryScheduleParsing(TestCase):
"schedule": crontab(minute=0, hour="1"),
"options": {"expires": self.EMPTY_TRASH_EXPIRE_TIME},
},
"Check and run scheduled workflows": {
"task": "documents.tasks.check_scheduled_workflows",
"schedule": crontab(minute="5", hour="*/1"),
"options": {"expires": self.RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME},
},
},
schedule,
)
@ -243,6 +249,11 @@ class TestCeleryScheduleParsing(TestCase):
"schedule": crontab(minute=0, hour="1"),
"options": {"expires": self.EMPTY_TRASH_EXPIRE_TIME},
},
"Check and run scheduled workflows": {
"task": "documents.tasks.check_scheduled_workflows",
"schedule": crontab(minute="5", hour="*/1"),
"options": {"expires": self.RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME},
},
},
schedule,
)
@ -282,6 +293,11 @@ class TestCeleryScheduleParsing(TestCase):
"schedule": crontab(minute=0, hour="1"),
"options": {"expires": self.EMPTY_TRASH_EXPIRE_TIME},
},
"Check and run scheduled workflows": {
"task": "documents.tasks.check_scheduled_workflows",
"schedule": crontab(minute="5", hour="*/1"),
"options": {"expires": self.RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME},
},
},
schedule,
)
@ -303,6 +319,7 @@ class TestCeleryScheduleParsing(TestCase):
"PAPERLESS_SANITY_TASK_CRON": "disable",
"PAPERLESS_INDEX_TASK_CRON": "disable",
"PAPERLESS_EMPTY_TRASH_TASK_CRON": "disable",
"PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON": "disable",
},
):
schedule = _parse_beat_schedule()

View File

@ -11,7 +11,6 @@ from django.contrib.auth.decorators import login_required
from django.urls import path
from django.urls import re_path
from django.utils.translation import gettext_lazy as _
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.csrf import ensure_csrf_cookie
from django.views.generic import RedirectView
from django.views.static import serve
@ -35,7 +34,6 @@ from documents.views import SelectionDataView
from documents.views import SharedLinkView
from documents.views import ShareLinkViewSet
from documents.views import StatisticsView
from documents.views import StoragePathTestView
from documents.views import StoragePathViewSet
from documents.views import SystemStatusView
from documents.views import TagViewSet
@ -56,7 +54,6 @@ from paperless.views import ProfileView
from paperless.views import SocialAccountProvidersView
from paperless.views import TOTPView
from paperless.views import UserViewSet
from paperless_mail.views import MailAccountTestView
from paperless_mail.views import MailAccountViewSet
from paperless_mail.views import MailRuleViewSet
from paperless_mail.views import OauthCallbackView
@ -94,58 +91,83 @@ urlpatterns = [
namespace="rest_framework",
),
),
re_path(
"^search/autocomplete/",
SearchAutoCompleteView.as_view(),
name="autocomplete",
),
re_path(
"^search/",
include(
[
re_path(
"^$",
GlobalSearchView.as_view(),
name="global_search",
),
re_path("^statistics/", StatisticsView.as_view(), name="statistics"),
re_path(
"^documents/post_document/",
"^autocomplete/",
SearchAutoCompleteView.as_view(),
name="autocomplete",
),
],
),
),
re_path(
"^statistics/",
StatisticsView.as_view(),
name="statistics",
),
re_path(
"^documents/",
include(
[
re_path(
"^post_document/",
PostDocumentView.as_view(),
name="post_document",
),
re_path(
"^documents/bulk_edit/",
"^bulk_edit/",
BulkEditView.as_view(),
name="bulk_edit",
),
re_path(
"^documents/selection_data/",
SelectionDataView.as_view(),
name="selection_data",
),
re_path(
"^documents/bulk_download/",
"^bulk_download/",
BulkDownloadView.as_view(),
name="bulk_download",
),
re_path(
"^remote_version/",
RemoteVersionView.as_view(),
name="remoteversion",
"^selection_data/",
SelectionDataView.as_view(),
name="selection_data",
),
],
),
re_path("^ui_settings/", UiSettingsView.as_view(), name="ui_settings"),
re_path(
"^mail_accounts/test/",
MailAccountTestView.as_view(),
name="mail_accounts_test",
),
path("token/", views.obtain_auth_token),
re_path(
"^bulk_edit_objects/",
BulkEditObjectsView.as_view(),
name="bulk_edit_objects",
),
re_path(
"^remote_version/",
RemoteVersionView.as_view(),
name="remoteversion",
),
re_path(
"^ui_settings/",
UiSettingsView.as_view(),
name="ui_settings",
),
path(
"token/",
views.obtain_auth_token,
),
re_path(
"^profile/",
include(
[
re_path(
"^$",
ProfileView.as_view(),
name="profile_view",
),
path(
"generate_auth_token/",
GenerateAuthTokenView.as_view(),
@ -158,11 +180,6 @@ urlpatterns = [
"social_account_providers/",
SocialAccountProvidersView.as_view(),
),
re_path(
"^$",
ProfileView.as_view(),
name="profile_view",
),
path(
"totp/",
TOTPView.as_view(),
@ -181,11 +198,6 @@ urlpatterns = [
TrashView.as_view(),
name="trash",
),
re_path(
"^storage_paths/test/",
StoragePathTestView.as_view(),
name="storage_paths_test",
),
re_path(
r"^oauth/callback/",
OauthCallbackView.as_view(),
@ -223,14 +235,6 @@ urlpatterns = [
],
),
),
re_path(
r"^push$",
csrf_exempt(
RedirectView.as_view(
url=settings.BASE_URL + "api/documents/post_document/",
),
),
),
# Frontend assets TODO: this is pretty bad, but it works.
path(
"assets/<path:path>",

View File

@ -6,6 +6,7 @@ from django.http import HttpResponseBadRequest
from django.http import HttpResponseRedirect
from django.utils import timezone
from httpx_oauth.oauth2 import GetAccessTokenError
from rest_framework.decorators import action
from rest_framework.generics import GenericAPIView
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
@ -34,22 +35,14 @@ class MailAccountViewSet(ModelViewSet, PassUserMixin):
permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
filter_backends = (ObjectOwnedOrGrantedPermissionsFilter,)
def get_permissions(self):
if self.action == "test":
# Test action does not require object level permissions
self.permission_classes = (IsAuthenticated,)
return super().get_permissions()
class MailRuleViewSet(ModelViewSet, PassUserMixin):
model = MailRule
queryset = MailRule.objects.all().order_by("order")
serializer_class = MailRuleSerializer
pagination_class = StandardPagination
permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
filter_backends = (ObjectOwnedOrGrantedPermissionsFilter,)
class MailAccountTestView(GenericAPIView):
permission_classes = (IsAuthenticated,)
serializer_class = MailAccountSerializer
def post(self, request, *args, **kwargs):
@action(methods=["post"], detail=False)
def test(self, request):
logger = logging.getLogger("paperless_mail")
request.data["name"] = datetime.datetime.now().isoformat()
serializer = self.get_serializer(data=request.data)
@ -95,6 +88,16 @@ class MailAccountTestView(GenericAPIView):
return HttpResponseBadRequest("Unable to connect to server")
class MailRuleViewSet(ModelViewSet, PassUserMixin):
model = MailRule
queryset = MailRule.objects.all().order_by("order")
serializer_class = MailRuleSerializer
pagination_class = StandardPagination
permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
filter_backends = (ObjectOwnedOrGrantedPermissionsFilter,)
class OauthCallbackView(GenericAPIView):
permission_classes = (IsAuthenticated,)

View File

@ -2,4 +2,4 @@
from paperless_tesseract.checks import check_default_language_available
from paperless_tesseract.checks import get_tesseract_langs
__all__ = ["get_tesseract_langs", "check_default_language_available"]
__all__ = ["check_default_language_available", "get_tesseract_langs"]