Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2025-05-01 11:19:32 -05:00)

Commit 5ab6a94b07: Merge branch 'dev' into dev

.github/workflows/ci.yml (vendored, 8 lines changed)
@@ -283,7 +283,7 @@ jobs:
 merge-multiple: true
 -
 name: Upload frontend coverage to Codecov
-uses: codecov/codecov-action@v4
+uses: codecov/codecov-action@v5
 with:
 # not required for public repos, but intermittently fails otherwise
 token: ${{ secrets.CODECOV_TOKEN }}

@@ -299,7 +299,7 @@ jobs:
 path: src/
 -
 name: Upload coverage to Codecov
-uses: codecov/codecov-action@v4
+uses: codecov/codecov-action@v5
 with:
 # not required for public repos, but intermittently fails otherwise
 token: ${{ secrets.CODECOV_TOKEN }}

@@ -406,7 +406,7 @@ jobs:
 -
 name: Login to Docker Hub
 uses: docker/login-action@v3
-# Don't attempt to login is not pushing to Docker Hub
+# Don't attempt to login if not pushing to Docker Hub
 if: steps.push-other-places.outputs.enable == 'true'
 with:
 username: ${{ secrets.DOCKERHUB_USERNAME }}

@@ -414,7 +414,7 @@ jobs:
 -
 name: Login to Quay.io
 uses: docker/login-action@v3
-# Don't attempt to login is not pushing to Quay.io
+# Don't attempt to login if not pushing to Quay.io
 if: steps.push-other-places.outputs.enable == 'true'
 with:
 registry: quay.io

@@ -48,7 +48,7 @@ repos:
 exclude: "(^Pipfile\\.lock$)"
 # Python hooks
 - repo: https://github.com/astral-sh/ruff-pre-commit
-rev: 'v0.7.3'
+rev: 'v0.8.1'
 hooks:
 - id: ruff
 - id: ruff-format

Pipfile (2 lines changed)

@@ -23,7 +23,7 @@ djangorestframework-guardian = "*"
 drf-writable-nested = "*"
 bleach = "*"
 celery = {extras = ["redis"], version = "*"}
-channels = "~=4.1"
+channels = "~=4.2"
 channels-redis = "*"
 concurrent-log-handler = "*"
 filelock = "*"

Pipfile.lock (generated, 803 lines changed): diff suppressed because it is too large.

@@ -331,8 +331,10 @@ Currently, there are three events that correspond to workflow trigger 'types':
 be used for filtering.
 3. **Document Updated**: when a document is updated. Similar to 'added' events, triggers can include filtering by content matching,
 tags, doc type, or correspondent.
+4. **Scheduled**: a scheduled trigger that can be used to run workflows at a specific time. The date used can be either the document
+added, created, updated date or you can specify a (date) custom field. You can also specify a day offset from the date.

-The following flow diagram illustrates the three trigger types:
+The following flow diagram illustrates the three document trigger types:

 ```mermaid
 flowchart TD
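For orientation, a minimal sketch of the data the new scheduled trigger carries on the frontend. The `schedule_*` field names and the `ScheduleDateField` values come from the form defaults added elsewhere in this commit; the full `WorkflowTrigger` shape is an assumption, not shown in this diff.

```typescript
// Hypothetical scheduled trigger value; field names match the defaults added in
// workflow-edit-dialog.component.ts below, everything else is assumed.
const scheduledTrigger = {
  type: WorkflowTriggerType.Scheduled,
  schedule_date_field: ScheduleDateField.Created, // or Added, Modified, CustomField
  schedule_date_custom_field: null, // only used when the field above is CustomField
  schedule_offset_days: 7, // fire 7 days after the document's created date
  schedule_is_recurring: true,
  schedule_recurring_interval_days: 30, // then repeat every 30 days
}
```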

@@ -330,8 +330,13 @@ SECRET_KEY=$(LC_ALL=C tr -dc 'a-zA-Z0-9!#$%&()*+,-./:;<=>?@[\]^_`{|}~' < /dev/ur

 DEFAULT_LANGUAGES=("deu eng fra ita spa")

-_split_langs="${OCR_LANGUAGE//+/ }"
-read -r -a OCR_LANGUAGES_ARRAY <<< "${_split_langs}"
+# OCR_LANG requires underscores, replace dashes if the user gave them with underscores
+readonly ocr_langs=${OCR_LANGUAGE//-/_}
+# OCR_LANGS (the install version) uses dashes, not underscores, so convert underscore to dash and plus to space
+install_langs=${OCR_LANGUAGE//_/-} # First convert any underscores to dashes
+install_langs=${install_langs//+/ } # Then convert plus signs to spaces
+
+read -r -a install_langs_array <<< "${install_langs}"

 {
 if [[ ! $URL == "" ]] ; then

@@ -344,10 +349,10 @@ read -r -a OCR_LANGUAGES_ARRAY <<< "${_split_langs}"
 echo "USERMAP_GID=$USERMAP_GID"
 fi
 echo "PAPERLESS_TIME_ZONE=$TIME_ZONE"
-echo "PAPERLESS_OCR_LANGUAGE=$OCR_LANGUAGE"
+echo "PAPERLESS_OCR_LANGUAGE=$ocr_langs"
 echo "PAPERLESS_SECRET_KEY='$SECRET_KEY'"
-if [[ ! ${DEFAULT_LANGUAGES[*]} =~ ${OCR_LANGUAGES_ARRAY[*]} ]] ; then
+if [[ ! ${DEFAULT_LANGUAGES[*]} =~ ${install_langs_array[*]} ]] ; then
-echo "PAPERLESS_OCR_LANGUAGES=${OCR_LANGUAGES_ARRAY[*]}"
+echo "PAPERLESS_OCR_LANGUAGES=${install_langs_array[*]}"
 fi
 } > docker-compose.env

File diff suppressed because it is too large.

src-ui/package-lock.json (generated, 16 lines changed)

@@ -33,6 +33,7 @@
 "ngx-ui-tour-ng-bootstrap": "^15.0.0",
 "rxjs": "^7.8.1",
 "tslib": "^2.8.1",
+"utif": "^3.1.0",
 "uuid": "^11.0.2",
 "zone.js": "^0.14.8"
 },

@@ -13758,6 +13759,12 @@
 "node": "^16.14.0 || >=18.0.0"
 }
 },
+"node_modules/pako": {
+"version": "1.0.11",
+"resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz",
+"integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==",
+"license": "(MIT AND Zlib)"
+},
 "node_modules/parent-module": {
 "version": "1.0.1",
 "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",

@@ -16563,6 +16570,15 @@
 "requires-port": "^1.0.0"
 }
 },
+"node_modules/utif": {
+"version": "3.1.0",
+"resolved": "https://registry.npmjs.org/utif/-/utif-3.1.0.tgz",
+"integrity": "sha512-WEo4D/xOvFW53K5f5QTaTbbiORcm2/pCL9P6qmJnup+17eYfKaEhDeX9PeQkuyEoIxlbGklDuGl8xwuXYMrrXQ==",
+"license": "MIT",
+"dependencies": {
+"pako": "^1.0.5"
+}
+},
 "node_modules/util-deprecate": {
 "version": "1.0.2",
 "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",

@@ -35,6 +35,7 @@
 "ngx-ui-tour-ng-bootstrap": "^15.0.0",
 "rxjs": "^7.8.1",
 "tslib": "^2.8.1",
+"utif": "^3.1.0",
 "uuid": "^11.0.2",
 "zone.js": "^0.14.8"
 },

@@ -47,14 +47,19 @@
 </tr>
 }
 @for (document of documentsInTrash; track document.id) {
-<tr (click)="toggleSelected(document); $event.stopPropagation();">
+<tr (click)="toggleSelected(document); $event.stopPropagation();" (mouseleave)="popupPreview.close()">
 <td>
 <div class="form-check m-0 ms-2 me-n2">
 <input type="checkbox" class="form-check-input" id="{{document.id}}" [checked]="selectedDocuments.has(document.id)" (click)="toggleSelected(document); $event.stopPropagation();">
 <label class="form-check-label" for="{{document.id}}"></label>
 </div>
 </td>
-<td scope="row">{{ document.title }}</td>
+<td scope="row">
+{{ document.title }}
+<pngx-preview-popup [document]="document" linkClasses="btn btn-sm btn-link" #popupPreview>
+<i-bs name="eye"></i-bs>
+</pngx-preview-popup>
+</td>
 <td scope="row" i18n>{{ getDaysRemaining(document) }} days</td>
 <td scope="row">
 <div class="btn-group d-block d-sm-none">

@@ -1,6 +1,6 @@
 .pdf-viewer-container {
 background-color: gray;
-height: 350px;
+height: 550px;

 pdf-viewer {
 width: 100%;

@@ -6,7 +6,7 @@
 <div class="modal-body">
 <p>{{message}}</p>
 <div class="row mb-2">
-<div class="col-8">
+<div class="col-7">
 <div class="input-group input-group-sm">
 <div class="input-group-text" i18n>Page</div>
 <input class="form-control" type="number" min="1" [(ngModel)]="page" />

@@ -21,7 +21,7 @@
 </pdf-viewer>
 </div>
 </div>
-<div class="col-4">
+<div class="col-5">
 <div class="d-grid">
 <button class="btn btn-sm btn-primary" (click)="addSplit()" [disabled]="!canSplit">
 <i-bs name="plus-circle"></i-bs>

@@ -44,12 +44,12 @@
 </ul>
 </div>
 </div>
-<div class="form-check form-switch mt-4">
+</div>
+<div class="modal-footer">
+<div class="form-check form-switch me-auto">
 <input class="form-check-input" type="checkbox" role="switch" id="deleteOriginalSwitch" [(ngModel)]="deleteOriginal" [disabled]="!userOwnsDocument">
 <label class="form-check-label" for="deleteOriginalSwitch" i18n>Delete original document after successful split</label>
 </div>
-</div>
-<div class="modal-footer">
 <button type="button" class="btn" [class]="cancelBtnClass" (click)="cancel()" [disabled]="!buttonsEnabled">
 <span class="d-inline-block" style="padding-bottom: 1px;">{{cancelBtnCaption}}</span>
 </button>

@@ -1,6 +1,6 @@
 .pdf-viewer-container {
 background-color: gray;
-height: 350px;
+height: 500px;

 pdf-viewer {
 width: 100%;

@@ -17,7 +17,11 @@ const customFields: CustomField[] = [
 name: 'Field 4',
 data_type: CustomFieldDataType.Select,
 extra_data: {
-select_options: ['Option 1', 'Option 2', 'Option 3'],
+select_options: [
+{ label: 'Option 1', id: 'abc-123' },
+{ label: 'Option 2', id: 'def-456' },
+{ label: 'Option 3', id: 'ghi-789' },
+],
 },
 },
 {

@@ -131,6 +135,8 @@ describe('CustomFieldDisplayComponent', () => {
 })

 it('should show select value', () => {
-expect(component.getSelectValue(customFields[3], 2)).toEqual('Option 3')
+expect(component.getSelectValue(customFields[3], 'ghi-789')).toEqual(
+'Option 3'
+)
 })
 })

@@ -117,8 +117,8 @@ export class CustomFieldDisplayComponent implements OnInit, OnDestroy {
 return this.docLinkDocuments?.find((d) => d.id === docId)?.title
 }

-public getSelectValue(field: CustomField, index: number): string {
-return field.extra_data.select_options[index]
+public getSelectValue(field: CustomField, id: string): string {
+return field.extra_data.select_options?.find((o) => o.id === id)?.label
 }

 ngOnDestroy(): void {
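To make the shape change concrete, a small sketch of the new option objects and the id-based lookup that `getSelectValue()` now performs; the ids are the made-up values used in the spec above.

```typescript
// Select options are now objects rather than plain strings.
const field = {
  name: 'Field 4',
  data_type: CustomFieldDataType.Select,
  extra_data: {
    select_options: [
      { label: 'Option 1', id: 'abc-123' },
      { label: 'Option 2', id: 'def-456' },
      { label: 'Option 3', id: 'ghi-789' },
    ],
  },
}

// Stored custom-field values now hold an option id, which is resolved to its label:
const label = field.extra_data.select_options.find((o) => o.id === 'ghi-789')?.label // 'Option 3'
```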

@@ -44,6 +44,8 @@
 <ng-select #fieldSelects
 class="paperless-input-select rounded-end"
 [items]="getSelectOptionsForField(atom.field)"
+bindLabel="label"
+bindValue="id"
 [(ngModel)]="atom.value"
 [disabled]="disabled"
 (mousedown)="$event.stopImmediatePropagation()"

@@ -99,6 +101,8 @@
 <ng-select
 class="paperless-input-select rounded-end"
 [items]="getSelectOptionsForField(atom.field)"
+bindLabel="label"
+bindValue="id"
 [(ngModel)]="atom.value"
 [disabled]="disabled"
 [multiple]="true"
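The added `bindLabel`/`bindValue` attributes are standard ng-select inputs; with object-valued options they control what is displayed versus what is written to `atom.value`. A rough sketch of the effect, reusing the option values from the specs in this commit:

```typescript
// What the template above amounts to: options are objects, the model stores only
// the option id (bindValue="id"), while the label is what gets displayed (bindLabel="label").
const options = [
  { label: 'Option 1', id: 'abc-123' },
  { label: 'Option 2', id: 'def-456' },
]

// Selecting "Option 2" in the dropdown leaves atom.value === 'def-456',
// which is what the updated getSelectValue()/getSelectOptionsForField() expect.
const atom = { field: 1, value: 'def-456' }
const selectedLabel = options.find((o) => o.id === atom.value)?.label // 'Option 2'
```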

@@ -39,7 +39,12 @@ const customFields = [
 id: 2,
 name: 'Test Select Field',
 data_type: CustomFieldDataType.Select,
-extra_data: { select_options: ['Option 1', 'Option 2'] },
+extra_data: {
+select_options: [
+{ label: 'Option 1', id: 'abc-123' },
+{ label: 'Option 2', id: 'def-456' },
+],
+},
 },
 ]

@@ -128,11 +133,19 @@ describe('CustomFieldsQueryDropdownComponent', () => {
 id: 1,
 name: 'Test Field',
 data_type: CustomFieldDataType.Select,
-extra_data: { select_options: ['Option 1', 'Option 2'] },
+extra_data: {
+select_options: [
+{ label: 'Option 1', id: 'abc-123' },
+{ label: 'Option 2', id: 'def-456' },
+],
+},
 }
 component.customFields = [field]
 const options = component.getSelectOptionsForField(1)
-expect(options).toEqual(['Option 1', 'Option 2'])
+expect(options).toEqual([
+{ label: 'Option 1', id: 'abc-123' },
+{ label: 'Option 2', id: 'def-456' },
+])

 // Fallback to empty array if field is not found
 const options2 = component.getSelectOptionsForField(2)

@@ -311,7 +311,9 @@ export class CustomFieldsQueryDropdownComponent implements OnDestroy {
 }))
 }

-getSelectOptionsForField(fieldID: number): string[] {
+getSelectOptionsForField(
+fieldID: number
+): Array<{ label: string; id: string }> {
 const field = this.customFields.find((field) => field.id === fieldID)
 if (field) {
 return field.extra_data['select_options']

@@ -21,8 +21,9 @@
 </button>
 <div formArrayName="select_options">
 @for (option of objectForm.controls.extra_data.controls.select_options.controls; track option; let i = $index) {
-<div class="input-group input-group-sm my-2">
-<input #selectOption type="text" class="form-control" [formControl]="option" autocomplete="off">
+<div class="input-group input-group-sm my-2" [formGroup]="objectForm.controls.extra_data.controls.select_options.controls[i]">
+<input #selectOption type="text" class="form-control" formControlName="label" autocomplete="off">
+<input type="hidden" formControlName="id">
 <button type="button" class="btn btn-outline-danger" (click)="removeSelectOption(i)" i18n>Delete</button>
 </div>
 }

@@ -80,7 +80,11 @@ describe('CustomFieldEditDialogComponent', () => {
 name: 'Field 1',
 data_type: CustomFieldDataType.Select,
 extra_data: {
-select_options: ['Option 1', 'Option 2', 'Option 3'],
+select_options: [
+{ label: 'Option 1', id: '123-xyz' },
+{ label: 'Option 2', id: '456-abc' },
+{ label: 'Option 3', id: '789-123' },
+],
 },
 }
 fixture.detectChanges()

@@ -94,6 +98,10 @@ describe('CustomFieldEditDialogComponent', () => {
 component.dialogMode = EditDialogMode.CREATE
 fixture.detectChanges()
 component.ngOnInit()
+expect(
+component.objectForm.get('extra_data').get('select_options').value.length
+).toBe(0)
+component.addSelectOption()
 expect(
 component.objectForm.get('extra_data').get('select_options').value.length
 ).toBe(1)

@@ -101,14 +109,10 @@ describe('CustomFieldEditDialogComponent', () => {
 expect(
 component.objectForm.get('extra_data').get('select_options').value.length
 ).toBe(2)
-component.addSelectOption()
-expect(
-component.objectForm.get('extra_data').get('select_options').value.length
-).toBe(3)
 component.removeSelectOption(0)
 expect(
 component.objectForm.get('extra_data').get('select_options').value.length
-).toBe(2)
+).toBe(1)
 })

 it('should focus on last select option input', () => {

@@ -57,9 +57,16 @@ export class CustomFieldEditDialogComponent
 }
 if (this.object?.data_type === CustomFieldDataType.Select) {
 this.selectOptions.clear()
-this.object.extra_data.select_options.forEach((option) =>
-this.selectOptions.push(new FormControl(option))
-)
+this.object.extra_data.select_options
+.filter((option) => option)
+.forEach((option) =>
+this.selectOptions.push(
+new FormGroup({
+label: new FormControl(option.label),
+id: new FormControl(option.id),
+})
+)
+)
 }
 }

@@ -89,7 +96,7 @@ export class CustomFieldEditDialogComponent
 name: new FormControl(null),
 data_type: new FormControl(null),
 extra_data: new FormGroup({
-select_options: new FormArray([new FormControl(null)]),
+select_options: new FormArray([]),
 default_currency: new FormControl(null),
 }),
 })

@@ -104,7 +111,9 @@ export class CustomFieldEditDialogComponent
 }

 public addSelectOption() {
-this.selectOptions.push(new FormControl(''))
+this.selectOptions.push(
+new FormGroup({ label: new FormControl(null), id: new FormControl(null) })
+)
 }

 public removeSelectOption(index: number) {
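A minimal sketch of the resulting form model, assuming Angular's reactive forms API already imported by this component: each select option is now a FormGroup holding a label and an id, where new options start with a null id (presumably assigned by the backend on save).

```typescript
import { FormArray, FormControl, FormGroup } from '@angular/forms'

// Shape of the select_options FormArray after this change.
const selectOptions = new FormArray([
  // an existing option keeps the id it already has
  new FormGroup({
    label: new FormControl('Option 1'),
    id: new FormControl('abc-123'),
  }),
  // addSelectOption() pushes a group with null label/id for a brand-new option
  new FormGroup({
    label: new FormControl(null),
    id: new FormControl(null),
  }),
])

console.log(selectOptions.value) // [{ label: 'Option 1', id: 'abc-123' }, { label: null, id: null }]
```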

@@ -119,6 +119,32 @@
 <div [formGroup]="formGroup">
 <input type="hidden" formControlName="id" />
 <pngx-input-select i18n-title title="Trigger type" [horizontal]="true" [items]="triggerTypeOptions" formControlName="type"></pngx-input-select>
+@if (formGroup.get('type').value === WorkflowTriggerType.Scheduled) {
+<p class="small" i18n>Set scheduled trigger offset and which field to use.</p>
+<div class="row">
+<div class="col-4">
+<pngx-input-number i18n-title title="Offset days" formControlName="schedule_offset_days" i18n-hint hint="Use 0 for immediate." [showAdd]="false" [error]="error?.schedule_offset_days"></pngx-input-number>
+</div>
+<div class="col-4">
+<pngx-input-select i18n-title title="Relative to" formControlName="schedule_date_field" [items]="scheduleDateFieldOptions" [error]="error?.schedule_date_field"></pngx-input-select>
+</div>
+@if (formGroup.get('schedule_date_field').value === 'custom_field') {
+<div class="col-4">
+<pngx-input-select i18n-title title="Delay custom field" formControlName="schedule_date_custom_field" [items]="dateCustomFields" i18n-hint hint="Custom field to use for date." [error]="error?.schedule_date_custom_field"></pngx-input-select>
+</div>
+}
+</div>
+<div class="row">
+<div class="col-4">
+<pngx-input-check i18n-title title="Recurring" formControlName="schedule_is_recurring" i18n-hint hint="Trigger is recurring." [error]="error?.schedule_is_recurring"></pngx-input-check>
+</div>
+<div class="col-4">
+@if (formGroup.get('schedule_is_recurring').value === true) {
+<pngx-input-number i18n-title title="Recurring interval days" formControlName="schedule_recurring_interval_days" i18n-hint hint="Repeat the trigger every n days." [showAdd]="false" [error]="error?.schedule_recurring_interval_days"></pngx-input-number>
+}
+</div>
+</div>
+}
 <p class="small" i18n>Trigger for documents that match <em>all</em> filters specified below.</p>
 <div class="row">
 <div class="col">

@@ -128,7 +154,7 @@
 <pngx-input-text i18n-title title="Filter path" formControlName="filter_path" i18n-hint hint="Apply to documents that match this path. Wildcards specified as * are allowed. Case-normalized.</a>" [error]="error?.filter_path"></pngx-input-text>
 <pngx-input-select i18n-title title="Filter mail rule" [items]="mailRules" [allowNull]="true" formControlName="filter_mailrule" i18n-hint hint="Apply to documents consumed via this mail rule." [error]="error?.filter_mailrule"></pngx-input-select>
 }
-@if (formGroup.get('type').value === WorkflowTriggerType.DocumentAdded || formGroup.get('type').value === WorkflowTriggerType.DocumentUpdated) {
+@if (formGroup.get('type').value === WorkflowTriggerType.DocumentAdded || formGroup.get('type').value === WorkflowTriggerType.DocumentUpdated || formGroup.get('type').value === WorkflowTriggerType.Scheduled) {
 <pngx-input-select i18n-title title="Content matching algorithm" [items]="getMatchingAlgorithms()" formControlName="matching_algorithm"></pngx-input-select>
 @if (patternRequired) {
 <pngx-input-text i18n-title title="Content matching pattern" formControlName="match" [error]="error?.match"></pngx-input-text>

@@ -138,7 +164,7 @@
 }
 }
 </div>
-@if (formGroup.get('type').value === WorkflowTriggerType.DocumentAdded || formGroup.get('type').value === WorkflowTriggerType.DocumentUpdated) {
+@if (formGroup.get('type').value === WorkflowTriggerType.DocumentAdded || formGroup.get('type').value === WorkflowTriggerType.DocumentUpdated || formGroup.get('type').value === WorkflowTriggerType.Scheduled) {
 <div class="col-md-6">
 <pngx-input-tags [allowCreate]="false" i18n-title title="Has any of tags" formControlName="filter_has_tags"></pngx-input-tags>
 <pngx-input-select i18n-title title="Has correspondent" [items]="correspondents" [allowNull]="true" formControlName="filter_has_correspondent"></pngx-input-select>

@@ -22,6 +22,7 @@ import { SwitchComponent } from '../../input/switch/switch.component'
 import { EditDialogMode } from '../edit-dialog.component'
 import {
 DOCUMENT_SOURCE_OPTIONS,
+SCHEDULE_DATE_FIELD_OPTIONS,
 WORKFLOW_ACTION_OPTIONS,
 WORKFLOW_TYPE_OPTIONS,
 WorkflowEditDialogComponent,

@@ -40,6 +41,7 @@ import {
 import { MATCHING_ALGORITHMS, MATCH_AUTO } from 'src/app/data/matching-model'
 import { ConfirmButtonComponent } from '../../confirm-button/confirm-button.component'
 import { provideHttpClient, withInterceptorsFromDi } from '@angular/common/http'
+import { CustomFieldDataType } from 'src/app/data/custom-field'

 const workflow: Workflow = {
 name: 'Workflow 1',

@@ -148,7 +150,18 @@ describe('WorkflowEditDialogComponent', () => {
 useValue: {
 listAll: () =>
 of({
-results: [],
+results: [
+{
+id: 1,
+name: 'cf1',
+data_type: CustomFieldDataType.String,
+},
+{
+id: 2,
+name: 'cf2',
+data_type: CustomFieldDataType.Date,
+},
+],
 }),
 },
 },

@@ -186,7 +199,7 @@ describe('WorkflowEditDialogComponent', () => {
 expect(editTitleSpy).toHaveBeenCalled()
 })

-it('should return source options, type options, type name', () => {
+it('should return source options, type options, type name, schedule date field options', () => {
 // coverage
 expect(component.sourceOptions).toEqual(DOCUMENT_SOURCE_OPTIONS)
 expect(component.triggerTypeOptions).toEqual(WORKFLOW_TYPE_OPTIONS)

@@ -200,6 +213,9 @@ describe('WorkflowEditDialogComponent', () => {
 component.getActionTypeOptionName(WorkflowActionType.Assignment)
 ).toEqual('Assignment')
 expect(component.getActionTypeOptionName(null)).toEqual('')
+expect(component.scheduleDateFieldOptions).toEqual(
+SCHEDULE_DATE_FIELD_OPTIONS
+)
 })

 it('should support add and remove triggers and actions', () => {

@@ -16,9 +16,10 @@ import { EditDialogComponent } from '../edit-dialog.component'
 import { MailRuleService } from 'src/app/services/rest/mail-rule.service'
 import { MailRule } from 'src/app/data/mail-rule'
 import { CustomFieldsService } from 'src/app/services/rest/custom-fields.service'
-import { CustomField } from 'src/app/data/custom-field'
+import { CustomField, CustomFieldDataType } from 'src/app/data/custom-field'
 import {
 DocumentSource,
+ScheduleDateField,
 WorkflowTrigger,
 WorkflowTriggerType,
 } from 'src/app/data/workflow-trigger'

@@ -48,6 +49,25 @@ export const DOCUMENT_SOURCE_OPTIONS = [
 },
 ]

+export const SCHEDULE_DATE_FIELD_OPTIONS = [
+{
+id: ScheduleDateField.Added,
+name: $localize`Added`,
+},
+{
+id: ScheduleDateField.Created,
+name: $localize`Created`,
+},
+{
+id: ScheduleDateField.Modified,
+name: $localize`Modified`,
+},
+{
+id: ScheduleDateField.CustomField,
+name: $localize`Custom Field`,
+},
+]
+
 export const WORKFLOW_TYPE_OPTIONS = [
 {
 id: WorkflowTriggerType.Consumption,

@@ -61,6 +81,10 @@ export const WORKFLOW_TYPE_OPTIONS = [
 id: WorkflowTriggerType.DocumentUpdated,
 name: $localize`Document Updated`,
 },
+{
+id: WorkflowTriggerType.Scheduled,
+name: $localize`Scheduled`,
+},
 ]

 export const WORKFLOW_ACTION_OPTIONS = [

@@ -96,6 +120,7 @@ export class WorkflowEditDialogComponent
 storagePaths: StoragePath[]
 mailRules: MailRule[]
 customFields: CustomField[]
+dateCustomFields: CustomField[]

 expandedItem: number = null

@@ -135,7 +160,12 @@ export class WorkflowEditDialogComponent
 customFieldsService
 .listAll()
 .pipe(first())
-.subscribe((result) => (this.customFields = result.results))
+.subscribe((result) => {
+this.customFields = result.results
+this.dateCustomFields = this.customFields?.filter(
+(f) => f.data_type === CustomFieldDataType.Date
+)
+})
 }

 getCreateTitle() {

@@ -314,6 +344,15 @@ export class WorkflowEditDialogComponent
 filter_has_document_type: new FormControl(
 trigger.filter_has_document_type
 ),
+schedule_offset_days: new FormControl(trigger.schedule_offset_days),
+schedule_is_recurring: new FormControl(trigger.schedule_is_recurring),
+schedule_recurring_interval_days: new FormControl(
+trigger.schedule_recurring_interval_days
+),
+schedule_date_field: new FormControl(trigger.schedule_date_field),
+schedule_date_custom_field: new FormControl(
+trigger.schedule_date_custom_field
+),
 }),
 { emitEvent }
 )

@@ -388,6 +427,10 @@ export class WorkflowEditDialogComponent
 return WORKFLOW_TYPE_OPTIONS
 }

+get scheduleDateFieldOptions() {
+return SCHEDULE_DATE_FIELD_OPTIONS
+}
+
 getTriggerTypeOptionName(type: WorkflowTriggerType): string {
 return this.triggerTypeOptions.find((t) => t.id === type)?.name ?? ''
 }

@@ -408,6 +451,11 @@ export class WorkflowEditDialogComponent
 matching_algorithm: MATCH_NONE,
 match: '',
 is_insensitive: true,
+schedule_offset_days: 0,
+schedule_is_recurring: false,
+schedule_recurring_interval_days: 1,
+schedule_date_field: ScheduleDateField.Added,
+schedule_date_custom_field: null,
 }
 this.object.triggers.push(trigger)
 this.createTriggerField(trigger)

@@ -35,23 +35,31 @@
 </div>
 @if (selectionModel.items) {
 <div class="items" #buttonItems>
-@for (item of selectionModel.itemsSorted | filter: filterText:'name'; track item; let i = $index) {
+@for (item of selectionModel.items | filter: filterText:'name'; track item; let i = $index) {
 @if (allowSelectNone || item.id) {
 <pngx-toggleable-dropdown-button
-[item]="item" [hideCount]="hideCount(item)" [state]="selectionModel.get(item.id)" [count]="getUpdatedDocumentCount(item.id)" (toggled)="selectionModel.toggle(item.id)" (exclude)="excludeClicked(item.id)" (click)="setButtonItemIndex(i - 1)" [disabled]="disabled">
+[item]="item"
+[hideCount]="hideCount(item)"
+[opacifyCount]="!editing"
+[state]="selectionModel.get(item.id)"
+[count]="getUpdatedDocumentCount(item.id)"
+(toggled)="selectionModel.toggle(item.id)"
+(exclude)="excludeClicked(item.id)"
+(click)="setButtonItemIndex(i - 1)"
+[disabled]="disabled">
 </pngx-toggleable-dropdown-button>
 }
 }
 </div>
 }
 @if (editing) {
-@if ((selectionModel.itemsSorted | filter: filterText:'name').length === 0 && createRef !== undefined) {
+@if ((selectionModel.items | filter: filterText:'name').length === 0 && createRef !== undefined) {
 <button class="list-group-item list-group-item-action bg-light" (click)="createClicked()" [disabled]="disabled">
 <small class="ms-2"><ng-container i18n>Create</ng-container> "{{filterText}}"</small>
 <i-bs width="1.5em" height="1em" name="plus"></i-bs>
 </button>
 }
-@if ((selectionModel.itemsSorted | filter: filterText:'name').length > 0) {
+@if ((selectionModel.items | filter: filterText:'name').length > 0) {
 <button class="list-group-item list-group-item-action bg-light" (click)="applyClicked()" [disabled]="!modelIsDirty || disabled">
 <small class="ms-2" [ngClass]="{'fw-bold': modelIsDirty}" i18n>Apply</small>
 <i-bs width="1.5em" height="1em" name="arrow-right"></i-bs>

@@ -501,7 +501,7 @@ describe('FilterableDropdownComponent & FilterableDropdownSelectionModel', () =>
 component.selectionModel = selectionModel
 selectionModel.toggle(items[1].id)
 selectionModel.apply()
-expect(selectionModel.itemsSorted).toEqual([
+expect(selectionModel.items).toEqual([
 nullItem,
 { id: null, name: 'Null B' },
 items[1],

@@ -509,6 +509,37 @@ describe('FilterableDropdownComponent & FilterableDropdownSelectionModel', () =>
 ])
 })

+it('selection model should sort items by state and document counts, if set', () => {
+component.items = items.concat([{ id: 4, name: 'Item D' }])
+component.selectionModel = selectionModel
+component.documentCounts = [
+{ id: 1, document_count: 0 }, // Tag1
+{ id: 2, document_count: 1 }, // Tag2
+{ id: 4, document_count: 2 },
+]
+component.selectionModel.apply()
+expect(selectionModel.items).toEqual([
+nullItem,
+{ id: 4, name: 'Item D' },
+items[1], // Tag2
+items[0], // Tag1
+])
+
+selectionModel.toggle(items[1].id)
+component.documentCounts = [
+{ id: 1, document_count: 0 },
+{ id: 2, document_count: 1 },
+{ id: 4, document_count: 0 },
+]
+selectionModel.apply()
+expect(selectionModel.items).toEqual([
+nullItem,
+items[1], // Tag2
+{ id: 4, name: 'Item D' },
+items[0], // Tag1
+])
+})
+
 it('should set support create, keep open model and call createRef method', fakeAsync(() => {
 component.items = items
 component.icon = 'tag-fill'

@@ -43,11 +43,23 @@ export class FilterableDropdownSelectionModel {
 private _intersection: Intersection = Intersection.Include
 temporaryIntersection: Intersection = this._intersection

-items: MatchingModel[] = []
+private _documentCounts: SelectionDataItem[] = []
+public set documentCounts(counts: SelectionDataItem[]) {
+this._documentCounts = counts
+}

-get itemsSorted(): MatchingModel[] {
-// TODO: this is getting called very often
-return this.items.sort((a, b) => {
+private _items: MatchingModel[] = []
+get items(): MatchingModel[] {
+return this._items
+}
+
+set items(items: MatchingModel[]) {
+this._items = items
+this.sortItems()
+}
+
+private sortItems() {
+this._items.sort((a, b) => {
 if (a.id == null && b.id != null) {
 return -1
 } else if (a.id != null && b.id == null) {

@@ -62,6 +74,16 @@ export class FilterableDropdownSelectionModel {
 this.getNonTemporary(b.id) == ToggleableItemState.NotSelected
 ) {
 return -1
+} else if (
+this._documentCounts.length &&
+this.getDocumentCount(a.id) > this.getDocumentCount(b.id)
+) {
+return -1
+} else if (
+this._documentCounts.length &&
+this.getDocumentCount(a.id) < this.getDocumentCount(b.id)
+) {
+return 1
 } else {
 return a.name.localeCompare(b.name)
 }

@@ -279,6 +301,10 @@ export class FilterableDropdownSelectionModel {
 )
 }

+getDocumentCount(id: number) {
+return this._documentCounts.find((c) => c.id === id)?.document_count
+}
+
 init(map: Map<number, ToggleableItemState>) {
 this.temporarySelectionStates = map
 this.apply()

@@ -291,6 +317,7 @@ export class FilterableDropdownSelectionModel {
 })
 this._logicalOperator = this.temporaryLogicalOperator
 this._intersection = this.temporaryIntersection
+this.sortItems()
 }

 reset(complete: boolean = false) {
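To illustrate the new ordering, a small sketch using the same values as the spec above: selected items still sort first, then (when counts are supplied) items with higher document counts, then alphabetical order.

```typescript
// Values mirror the spec above; sortItems() runs when items are assigned and again on apply().
selectionModel.documentCounts = [
  { id: 1, document_count: 0 }, // Tag1
  { id: 2, document_count: 1 }, // Tag2
  { id: 4, document_count: 2 }, // Item D
]
selectionModel.items = [
  { id: 1, name: 'Tag1' },
  { id: 2, name: 'Tag2' },
  { id: 4, name: 'Item D' },
]
// With nothing selected, items come back ordered by count: Item D, Tag2, Tag1.
// Toggling Tag2 and calling apply() then moves it ahead of the higher-count Item D.
```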

@@ -423,7 +450,11 @@ export class FilterableDropdownComponent implements OnDestroy, OnInit {
 }

 @Input()
-documentCounts: SelectionDataItem[]
+set documentCounts(counts: SelectionDataItem[]) {
+if (counts) {
+this.selectionModel.documentCounts = counts
+}
+}

 @Input()
 shortcutKey: string

@@ -536,9 +567,7 @@ export class FilterableDropdownComponent implements OnDestroy, OnInit {
 }

 getUpdatedDocumentCount(id: number) {
-if (this.documentCounts) {
-return this.documentCounts.find((c) => c.id === id)?.document_count
-}
+return this.selectionModel.getDocumentCount(id)
 }

 listKeyDown(event: KeyboardEvent) {

@@ -1,4 +1,9 @@
-<button class="list-group-item list-group-item-action d-flex align-items-center p-2 border-top-0 border-start-0 border-end-0 border-bottom" role="menuitem" (click)="toggleItem($event)" [disabled]="disabled">
+<button
+class="list-group-item list-group-item-action d-flex align-items-center p-2 border-top-0 border-start-0 border-end-0 border-bottom"
+[class.opacity-50]="opacifyCount && !hideCount && currentCount === 0"
+role="menuitem"
+(click)="toggleItem($event)"
+[disabled]="disabled">
 <div class="selected-icon me-1">
 @if (isChecked()) {
 <i-bs width="1em" height="1em" name="check"></i-bs>

@@ -18,6 +23,6 @@
 }
 </div>
 @if (!hideCount) {
-<div class="badge bg-light text-dark rounded-pill ms-auto me-1">{{count ?? item.document_count}}</div>
+<div class="badge bg-light text-dark rounded-pill ms-auto me-1">{{currentCount}}</div>
 }
 </button>

@@ -29,6 +29,9 @@ export class ToggleableDropdownButtonComponent {
 @Input()
 hideCount: boolean = false

+@Input()
+opacifyCount: boolean = true
+
 @Output()
 toggled = new EventEmitter()

@@ -39,6 +42,10 @@ export class ToggleableDropdownButtonComponent {
 return 'is_inbox_tag' in this.item
 }

+get currentCount(): number {
+return this.count ?? this.item.document_count
+}
+
 toggleItem(event: MouseEvent): void {
 if (this.state == ToggleableItemState.Selected) {
 this.exclude.emit()

@@ -132,12 +132,4 @@ describe('SelectComponent', () => {
 const expectedTitle = `Filter documents with this ${component.title}`
 expect(component.filterButtonTitle).toEqual(expectedTitle)
 })
-
-it('should support setting items as a plain array', () => {
-component.itemsArray = ['foo', 'bar']
-expect(component.items).toEqual([
-{ id: 0, name: 'foo' },
-{ id: 1, name: 'bar' },
-])
-})
 })

@@ -34,11 +34,6 @@ export class SelectComponent extends AbstractInputComponent<number> {
 if (items && this.value) this.checkForPrivateItems(this.value)
 }
-
-@Input()
-set itemsArray(items: any[]) {
-this._items = items.map((item, index) => ({ id: index, name: item }))
-}

 writeValue(newValue: any): void {
 if (newValue && this._items) {
 this.checkForPrivateItems(newValue)

@@ -1,30 +1,37 @@
-<div class="preview-popup-container">
-@if (error) {
-<div class="w-100 h-100 position-relative">
-<p class="fst-italic position-absolute top-50 start-50 translate-middle" i18n>Error loading preview</p>
-</div>
-} @else {
-@if (renderAsObject) {
-@if (previewText) {
-<div class="bg-light p-3 overflow-auto whitespace-preserve" width="100%">{{previewText}}</div>
-} @else {
-<object [data]="previewURL | safeUrl" width="100%" class="bg-light" [class.p-2]="!isPdf"></object>
-}
+<a [href]="link ?? previewUrl" class="{{linkClasses}}" [target]="linkTarget" [title]="linkTitle"
+[ngbPopover]="previewContent" [popoverTitle]="document.title | documentTitle" container="body"
+autoClose="true" [popoverClass]="popoverClass" (mouseenter)="mouseEnterPreview()" (mouseleave)="mouseLeavePreview()" #popover="ngbPopover">
+<ng-content></ng-content>
+</a>
+<ng-template #previewContent>
+<div class="preview-popup-container">
+@if (error) {
+<div class="w-100 h-100 position-relative">
+<p class="fst-italic position-absolute top-50 start-50 translate-middle" i18n>Error loading preview</p>
+</div>
 } @else {
-@if (requiresPassword) {
-<div class="w-100 h-100 position-relative">
-<i-bs width="2em" height="2em" class="position-absolute top-50 start-50 translate-middle" name="file-earmark-lock"></i-bs>
-</div>
-}
-@if (!requiresPassword) {
-<pdf-viewer
-[src]="previewURL"
-[original-size]="false"
-[show-borders]="false"
-[show-all]="true"
-(error)="onError($event)">
-</pdf-viewer>
+@if (renderAsObject) {
+@if (previewText) {
+<div class="bg-light p-3 overflow-auto whitespace-preserve" width="100%">{{previewText}}</div>
+} @else {
+<object [data]="previewURL | safeUrl" width="100%" class="bg-light" [class.p-2]="!isPdf"></object>
+}
+} @else {
+@if (requiresPassword) {
+<div class="w-100 h-100 position-relative">
+<i-bs width="2em" height="2em" class="position-absolute top-50 start-50 translate-middle" name="file-earmark-lock"></i-bs>
+</div>
+}
+@if (!requiresPassword) {
+<pdf-viewer
+[src]="previewURL"
+[original-size]="false"
+[show-borders]="false"
+[show-all]="true"
+(error)="onError($event)">
+</pdf-viewer>
+}
 }
 }
-}
-</div>
+</div>
+</ng-template>

@@ -1,4 +1,9 @@
-import { ComponentFixture, TestBed } from '@angular/core/testing'
+import {
+ComponentFixture,
+fakeAsync,
+TestBed,
+tick,
+} from '@angular/core/testing'

 import { PreviewPopupComponent } from './preview-popup.component'
 import { By } from '@angular/platform-browser'

@@ -15,6 +20,8 @@ import {
 withInterceptorsFromDi,
 } from '@angular/common/http'
 import { of, throwError } from 'rxjs'
+import { NgbPopoverModule } from '@ng-bootstrap/ng-bootstrap'
+import { DocumentTitlePipe } from 'src/app/pipes/document-title.pipe'

 const doc = {
 id: 10,

@@ -34,8 +41,12 @@ describe('PreviewPopupComponent', () => {

 beforeEach(() => {
 TestBed.configureTestingModule({
-declarations: [PreviewPopupComponent, SafeUrlPipe],
-imports: [NgxBootstrapIconsModule.pick(allIcons), PdfViewerModule],
+declarations: [PreviewPopupComponent, SafeUrlPipe, DocumentTitlePipe],
+imports: [
+NgxBootstrapIconsModule.pick(allIcons),
+PdfViewerModule,
+NgbPopoverModule,
+],
 providers: [
 provideHttpClient(withInterceptorsFromDi()),
 provideHttpClientTesting(),

@@ -70,12 +81,14 @@ describe('PreviewPopupComponent', () => {

 it('should render object if native PDF viewer enabled', () => {
 settingsService.set(SETTINGS_KEYS.USE_NATIVE_PDF_VIEWER, true)
+component.popover.open()
 fixture.detectChanges()
 expect(fixture.debugElement.query(By.css('object'))).not.toBeNull()
 })

 it('should render pngx viewer if native PDF viewer disabled', () => {
 settingsService.set(SETTINGS_KEYS.USE_NATIVE_PDF_VIEWER, false)
+component.popover.open()
 fixture.detectChanges()
 expect(fixture.debugElement.query(By.css('object'))).toBeNull()
 expect(fixture.debugElement.query(By.css('pdf-viewer'))).not.toBeNull()

@@ -83,6 +96,7 @@ describe('PreviewPopupComponent', () => {

 it('should show lock icon on password error', () => {
 settingsService.set(SETTINGS_KEYS.USE_NATIVE_PDF_VIEWER, false)
+component.popover.open()
 component.onError({ name: 'PasswordException' })
 fixture.detectChanges()
 expect(component.requiresPassword).toBeTruthy()

@@ -93,16 +107,18 @@ describe('PreviewPopupComponent', () => {
 component.document.original_file_name = 'sample.png'
 component.document.mime_type = 'image/png'
 component.document.archived_file_name = undefined
+component.popover.open()
 fixture.detectChanges()
 expect(fixture.debugElement.query(By.css('object'))).not.toBeNull()
 })

 it('should show message on error', () => {
+component.popover.open()
 component.onError({})
 fixture.detectChanges()
-expect(fixture.debugElement.nativeElement.textContent).toContain(
-'Error loading preview'
-)
+expect(
+fixture.debugElement.query(By.css('.popover')).nativeElement.textContent
+).toContain('Error loading preview')
 })

 it('should get text content from http if appropriate', () => {

@@ -122,4 +138,17 @@ describe('PreviewPopupComponent', () => {
 component.init()
 expect(component.previewText).toEqual('Preview text')
 })
+
+it('should show preview on mouseover after delay to preload content', fakeAsync(() => {
+component.mouseEnterPreview()
+expect(component.popover.isOpen()).toBeTruthy()
+tick(600)
+component.close()
+
+component.mouseEnterPreview()
+tick(100)
+component.mouseLeavePreview()
+tick(600)
+expect(component.popover.isOpen()).toBeFalsy()
+}))
 })
@ -1,5 +1,6 @@
|
|||||||
import { HttpClient } from '@angular/common/http'
|
import { HttpClient } from '@angular/common/http'
|
||||||
import { Component, Input, OnDestroy } from '@angular/core'
|
import { Component, Input, OnDestroy, ViewChild } from '@angular/core'
|
||||||
|
import { NgbPopover } from '@ng-bootstrap/ng-bootstrap'
|
||||||
import { first, Subject, takeUntil } from 'rxjs'
|
import { first, Subject, takeUntil } from 'rxjs'
|
||||||
import { Document } from 'src/app/data/document'
|
import { Document } from 'src/app/data/document'
|
||||||
import { SETTINGS_KEYS } from 'src/app/data/ui-settings'
|
import { SETTINGS_KEYS } from 'src/app/data/ui-settings'
|
||||||
@ -23,6 +24,18 @@ export class PreviewPopupComponent implements OnDestroy {
|
|||||||
return this._document
|
return this._document
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Input()
|
||||||
|
link: string
|
||||||
|
|
||||||
|
@Input()
|
||||||
|
linkClasses: string = 'btn btn-sm btn-outline-secondary'
|
||||||
|
|
||||||
|
@Input()
|
||||||
|
linkTarget: string = '_blank'
|
||||||
|
|
||||||
|
@Input()
|
||||||
|
linkTitle: string = $localize`Open preview`
|
||||||
|
|
||||||
unsubscribeNotifier: Subject<any> = new Subject()
|
unsubscribeNotifier: Subject<any> = new Subject()
|
||||||
|
|
||||||
error = false
|
error = false
|
||||||
@ -31,6 +44,12 @@ export class PreviewPopupComponent implements OnDestroy {
|
|||||||
|
|
||||||
previewText: string
|
previewText: string
|
||||||
|
|
||||||
|
@ViewChild('popover') popover: NgbPopover
|
||||||
|
|
||||||
|
mouseOnPreview: boolean
|
||||||
|
|
||||||
|
popoverClass: string = 'shadow popover-preview'
|
||||||
|
|
||||||
get renderAsObject(): boolean {
|
get renderAsObject(): boolean {
|
||||||
return (this.isPdf && this.useNativePdfViewer) || !this.isPdf
|
return (this.isPdf && this.useNativePdfViewer) || !this.isPdf
|
||||||
}
|
}
|
||||||
@ -83,4 +102,33 @@ export class PreviewPopupComponent implements OnDestroy {
|
|||||||
this.error = true
|
this.error = true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
get previewUrl() {
|
||||||
|
return this.documentService.getPreviewUrl(this.document.id)
|
||||||
|
}
|
||||||
|
|
||||||
|
mouseEnterPreview() {
|
||||||
|
this.mouseOnPreview = true
|
||||||
|
if (!this.popover.isOpen()) {
|
||||||
|
// we're going to open but hide to pre-load content during hover delay
|
||||||
|
this.popover.open()
|
||||||
|
this.popoverClass = 'shadow popover-preview pe-none opacity-0'
|
||||||
|
setTimeout(() => {
|
||||||
|
if (this.mouseOnPreview) {
|
||||||
|
// show popover
|
||||||
|
this.popoverClass = this.popoverClass.replace('pe-none opacity-0', '')
|
||||||
|
} else {
|
||||||
|
this.popover.close()
|
||||||
|
}
|
||||||
|
}, 600)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
mouseLeavePreview() {
|
||||||
|
this.mouseOnPreview = false
|
||||||
|
}
|
||||||
|
|
||||||
|
public close() {
|
||||||
|
this.popover.close(false)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
@ -190,7 +190,8 @@
|
|||||||
@case (CustomFieldDataType.Select) {
|
@case (CustomFieldDataType.Select) {
|
||||||
<pngx-input-select formControlName="value"
|
<pngx-input-select formControlName="value"
|
||||||
[title]="getCustomFieldFromInstance(fieldInstance)?.name"
|
[title]="getCustomFieldFromInstance(fieldInstance)?.name"
|
||||||
[itemsArray]="getCustomFieldFromInstance(fieldInstance)?.extra_data.select_options"
|
[items]="getCustomFieldFromInstance(fieldInstance)?.extra_data.select_options"
|
||||||
|
bindLabel="label"
|
||||||
[allowNull]="true"
|
[allowNull]="true"
|
||||||
[horizontal]="true"
|
[horizontal]="true"
|
||||||
[removable]="userIsOwner"
|
[removable]="userIsOwner"
|
||||||
@ -388,6 +389,15 @@
|
|||||||
<img [src]="previewUrl | safeUrl" width="100%" height="100%" alt="{{title}}" />
|
<img [src]="previewUrl | safeUrl" width="100%" height="100%" alt="{{title}}" />
|
||||||
</div>
|
</div>
|
||||||
}
|
}
|
||||||
|
@case (ContentRenderType.TIFF) {
|
||||||
|
@if (!tiffError) {
|
||||||
|
<div class="preview-sticky">
|
||||||
|
<img [src]="tiffURL" width="100%" height="100%" alt="{{title}}" />
|
||||||
|
</div>
|
||||||
|
} @else {
|
||||||
|
<div class="preview-sticky bg-light p-3 overflow-auto whitespace-preserve" width="100%">{{tiffError}}</div>
|
||||||
|
}
|
||||||
|
}
|
||||||
@case (ContentRenderType.Other) {
|
@case (ContentRenderType.Other) {
|
||||||
<object [data]="previewUrl | safeUrl" class="preview-sticky" width="100%"></object>
|
<object [data]="previewUrl | safeUrl" class="preview-sticky" width="100%"></object>
|
||||||
}
|
}
|
||||||
|
@ -61,6 +61,7 @@ textarea.rtl {
|
|||||||
width: 100%;
|
width: 100%;
|
||||||
height: 100%;
|
height: 100%;
|
||||||
object-fit: contain;
|
object-fit: contain;
|
||||||
|
object-position: top;
|
||||||
}
|
}
|
||||||
|
|
||||||
.thumb-preview {
|
.thumb-preview {
|
||||||
|
@ -1270,4 +1270,46 @@ describe('DocumentDetailComponent', () => {
|
|||||||
expect(component.createDisabled(DataType.StoragePath)).toBeFalsy()
|
expect(component.createDisabled(DataType.StoragePath)).toBeFalsy()
|
||||||
expect(component.createDisabled(DataType.Tag)).toBeFalsy()
|
expect(component.createDisabled(DataType.Tag)).toBeFalsy()
|
||||||
})
|
})
|
||||||
|
|
||||||
|
it('should call tryRenderTiff when no archive and file is tiff', () => {
|
||||||
|
initNormally()
|
||||||
|
const tiffRenderSpy = jest.spyOn(
|
||||||
|
DocumentDetailComponent.prototype as any,
|
||||||
|
'tryRenderTiff'
|
||||||
|
)
|
||||||
|
const doc = Object.assign({}, component.document)
|
||||||
|
doc.archived_file_name = null
|
||||||
|
doc.mime_type = 'image/tiff'
|
||||||
|
jest
|
||||||
|
.spyOn(documentService, 'getMetadata')
|
||||||
|
.mockReturnValue(
|
||||||
|
of({ has_archive_version: false, original_mime_type: 'image/tiff' })
|
||||||
|
)
|
||||||
|
component.updateComponent(doc)
|
||||||
|
fixture.detectChanges()
|
||||||
|
expect(component.archiveContentRenderType).toEqual(
|
||||||
|
component.ContentRenderType.TIFF
|
||||||
|
)
|
||||||
|
expect(tiffRenderSpy).toHaveBeenCalled()
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should try to render tiff and show error if failed', () => {
|
||||||
|
initNormally()
|
||||||
|
// just the text request
|
||||||
|
httpTestingController.expectOne(component.previewUrl)
|
||||||
|
|
||||||
|
// invalid tiff
|
||||||
|
component['tryRenderTiff']()
|
||||||
|
httpTestingController
|
||||||
|
.expectOne(component.previewUrl)
|
||||||
|
.flush(new ArrayBuffer(100)) // arraybuffer
|
||||||
|
expect(component.tiffError).not.toBeUndefined()
|
||||||
|
|
||||||
|
// http error
|
||||||
|
component['tryRenderTiff']()
|
||||||
|
httpTestingController
|
||||||
|
.expectOne(component.previewUrl)
|
||||||
|
.error(new ErrorEvent('failed'))
|
||||||
|
expect(component.tiffError).not.toBeUndefined()
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
@ -72,6 +72,7 @@ import { DeletePagesConfirmDialogComponent } from '../common/confirm-dialog/dele
|
|||||||
import { HotKeyService } from 'src/app/services/hot-key.service'
|
import { HotKeyService } from 'src/app/services/hot-key.service'
|
||||||
import { PDFDocumentProxy } from 'ng2-pdf-viewer'
|
import { PDFDocumentProxy } from 'ng2-pdf-viewer'
|
||||||
import { DataType } from 'src/app/data/datatype'
|
import { DataType } from 'src/app/data/datatype'
|
||||||
|
import * as UTIF from 'utif'
|
||||||
|
|
||||||
enum DocumentDetailNavIDs {
|
enum DocumentDetailNavIDs {
|
||||||
Details = 1,
|
Details = 1,
|
||||||
@ -89,6 +90,7 @@ enum ContentRenderType {
|
|||||||
Text = 'text',
|
Text = 'text',
|
||||||
Other = 'other',
|
Other = 'other',
|
||||||
Unknown = 'unknown',
|
Unknown = 'unknown',
|
||||||
|
TIFF = 'tiff',
|
||||||
}
|
}
|
||||||
|
|
||||||
enum ZoomSetting {
|
enum ZoomSetting {
|
||||||
@ -136,6 +138,8 @@ export class DocumentDetailComponent
|
|||||||
downloadUrl: string
|
downloadUrl: string
|
||||||
downloadOriginalUrl: string
|
downloadOriginalUrl: string
|
||||||
previewLoaded: boolean = false
|
previewLoaded: boolean = false
|
||||||
|
tiffURL: string
|
||||||
|
tiffError: string
|
||||||
|
|
||||||
correspondents: Correspondent[]
|
correspondents: Correspondent[]
|
||||||
documentTypes: DocumentType[]
|
documentTypes: DocumentType[]
|
||||||
@ -244,6 +248,8 @@ export class DocumentDetailComponent
|
|||||||
['text/plain', 'application/csv', 'text/csv'].includes(mimeType)
|
['text/plain', 'application/csv', 'text/csv'].includes(mimeType)
|
||||||
) {
|
) {
|
||||||
return ContentRenderType.Text
|
return ContentRenderType.Text
|
||||||
|
} else if (mimeType.indexOf('tiff') >= 0) {
|
||||||
|
return ContentRenderType.TIFF
|
||||||
} else if (mimeType?.indexOf('image/') === 0) {
|
} else if (mimeType?.indexOf('image/') === 0) {
|
||||||
return ContentRenderType.Image
|
return ContentRenderType.Image
|
||||||
}
|
}
|
||||||
@ -542,6 +548,9 @@ export class DocumentDetailComponent
|
|||||||
this.document = doc
|
this.document = doc
|
||||||
this.requiresPassword = false
|
this.requiresPassword = false
|
||||||
this.updateFormForCustomFields()
|
this.updateFormForCustomFields()
|
||||||
|
if (this.archiveContentRenderType === ContentRenderType.TIFF) {
|
||||||
|
this.tryRenderTiff()
|
||||||
|
}
|
||||||
this.documentsService
|
this.documentsService
|
||||||
.getMetadata(doc.id)
|
.getMetadata(doc.id)
|
||||||
.pipe(
|
.pipe(
|
||||||
@ -721,6 +730,7 @@ export class DocumentDetailComponent
|
|||||||
|
|
||||||
save(close: boolean = false) {
|
save(close: boolean = false) {
|
||||||
this.networkActive = true
|
this.networkActive = true
|
||||||
|
;(document.activeElement as HTMLElement)?.dispatchEvent(new Event('change'))
|
||||||
this.documentsService
|
this.documentsService
|
||||||
.update(this.document)
|
.update(this.document)
|
||||||
.pipe(first())
|
.pipe(first())
|
||||||
@ -1163,6 +1173,7 @@ export class DocumentDetailComponent
|
|||||||
splitDocument() {
|
splitDocument() {
|
||||||
let modal = this.modalService.open(SplitConfirmDialogComponent, {
|
let modal = this.modalService.open(SplitConfirmDialogComponent, {
|
||||||
backdrop: 'static',
|
backdrop: 'static',
|
||||||
|
size: 'lg',
|
||||||
})
|
})
|
||||||
modal.componentInstance.title = $localize`Split confirm`
|
modal.componentInstance.title = $localize`Split confirm`
|
||||||
modal.componentInstance.messageBold = $localize`This operation will split the selected document(s) into new documents.`
|
modal.componentInstance.messageBold = $localize`This operation will split the selected document(s) into new documents.`
|
||||||
@ -1201,6 +1212,7 @@ export class DocumentDetailComponent
|
|||||||
rotateDocument() {
|
rotateDocument() {
|
||||||
let modal = this.modalService.open(RotateConfirmDialogComponent, {
|
let modal = this.modalService.open(RotateConfirmDialogComponent, {
|
||||||
backdrop: 'static',
|
backdrop: 'static',
|
||||||
|
size: 'lg',
|
||||||
})
|
})
|
||||||
modal.componentInstance.title = $localize`Rotate confirm`
|
modal.componentInstance.title = $localize`Rotate confirm`
|
||||||
modal.componentInstance.messageBold = $localize`This operation will permanently rotate the original version of the current document.`
|
modal.componentInstance.messageBold = $localize`This operation will permanently rotate the original version of the current document.`
|
||||||
@ -1275,4 +1287,45 @@ export class DocumentDetailComponent
})
})
}

private tryRenderTiff() {
this.http.get(this.previewUrl, { responseType: 'arraybuffer' }).subscribe({
next: (res) => {
/* istanbul ignore next */
try {
// See UTIF.js > _imgLoaded
const tiffIfds: any[] = UTIF.decode(res)
var vsns = tiffIfds,
ma = 0,
page = vsns[0]
if (tiffIfds[0].subIFD) vsns = vsns.concat(tiffIfds[0].subIFD)
for (var i = 0; i < vsns.length; i++) {
var img = vsns[i]
if (img['t258'] == null || img['t258'].length < 3) continue
var ar = img['t256'] * img['t257']
if (ar > ma) {
ma = ar
page = img
}
}
UTIF.decodeImage(res, page, tiffIfds)
const rgba = UTIF.toRGBA8(page)
const { width: w, height: h } = page
var cnv = document.createElement('canvas')
cnv.width = w
cnv.height = h
var ctx = cnv.getContext('2d'),
imgd = ctx.createImageData(w, h)
for (var i = 0; i < rgba.length; i++) imgd.data[i] = rgba[i]
ctx.putImageData(imgd, 0, 0)
this.tiffURL = cnv.toDataURL()
} catch (err) {
this.tiffError = $localize`An error occurred loading tiff: ${err.toString()}`
}
},
error: (err) => {
this.tiffError = $localize`An error occurred loading tiff: ${err.toString()}`
},
})
}
}
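tryRenderTiff() above decodes the raw TIFF in the browser with UTIF.js, picks the largest page, and paints it onto a canvas to get a data URL. For comparison only, here is a server-side Python sketch of the same "largest frame to RGBA image" idea using Pillow; it is not part of this commit and Pillow is an assumed dependency here:

import base64
import io

from PIL import Image

def tiff_to_data_url(path: str) -> str:
    with Image.open(path) as tiff:
        # Pick the largest page, mirroring the "largest IFD wins" loop above.
        best, best_area = 0, 0
        for i in range(getattr(tiff, "n_frames", 1)):
            tiff.seek(i)
            area = tiff.width * tiff.height
            if area > best_area:
                best, best_area = i, area
        tiff.seek(best)
        buf = io.BytesIO()
        tiff.convert("RGBA").save(buf, format="PNG")
    return "data:image/png;base64," + base64.b64encode(buf.getvalue()).decode()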
@ -782,11 +782,11 @@ export class BulkEditorComponent
|
|||||||
rotateSelected() {
|
rotateSelected() {
|
||||||
let modal = this.modalService.open(RotateConfirmDialogComponent, {
|
let modal = this.modalService.open(RotateConfirmDialogComponent, {
|
||||||
backdrop: 'static',
|
backdrop: 'static',
|
||||||
|
size: 'lg',
|
||||||
})
|
})
|
||||||
const rotateDialog = modal.componentInstance as RotateConfirmDialogComponent
|
const rotateDialog = modal.componentInstance as RotateConfirmDialogComponent
|
||||||
rotateDialog.title = $localize`Rotate confirm`
|
rotateDialog.title = $localize`Rotate confirm`
|
||||||
rotateDialog.messageBold = $localize`This operation will permanently rotate the original version of ${this.list.selected.size} document(s).`
|
rotateDialog.messageBold = $localize`This operation will permanently rotate the original version of ${this.list.selected.size} document(s).`
|
||||||
rotateDialog.message = $localize`This will alter the original copy.`
|
|
||||||
rotateDialog.btnClass = 'btn-danger'
|
rotateDialog.btnClass = 'btn-danger'
|
||||||
rotateDialog.btnCaption = $localize`Proceed`
|
rotateDialog.btnCaption = $localize`Proceed`
|
||||||
rotateDialog.documentID = Array.from(this.list.selected)[0]
|
rotateDialog.documentID = Array.from(this.list.selected)[0]
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
<div class="card mb-3 shadow-sm bg-light" [class.card-selected]="selected" [class.document-card]="selectable" [class.popover-hidden]="popoverHidden" (mouseleave)="mouseLeaveCard()">
|
<div class="card mb-3 shadow-sm bg-light" [class.card-selected]="selected" [class.document-card]="selectable" (mouseleave)="mouseLeaveCard()">
|
||||||
<div class="row g-0">
|
<div class="row g-0">
|
||||||
<div class="col-md-2 doc-img-container rounded-start" (click)="this.toggleSelected.emit($event)" (dblclick)="dblClickDocument.emit()">
|
<div class="col-md-2 doc-img-container rounded-start" (click)="this.toggleSelected.emit($event)" (dblclick)="dblClickDocument.emit()">
|
||||||
<img [src]="getThumbUrl()" class="card-img doc-img border-end rounded-start" [class.inverted]="getIsThumbInverted()">
|
<img [src]="getThumbUrl()" class="card-img doc-img border-end rounded-start" [class.inverted]="getIsThumbInverted()">
|
||||||
@ -56,14 +56,9 @@
|
|||||||
<a routerLink="/documents/{{document.id}}" class="btn btn-sm btn-outline-secondary" *pngxIfPermissions="{ action: PermissionAction.Change, type: PermissionType.Document }">
|
<a routerLink="/documents/{{document.id}}" class="btn btn-sm btn-outline-secondary" *pngxIfPermissions="{ action: PermissionAction.Change, type: PermissionType.Document }">
|
||||||
<i-bs name="file-earmark-richtext"></i-bs> <span class="d-none d-md-inline" i18n>Open</span>
|
<i-bs name="file-earmark-richtext"></i-bs> <span class="d-none d-md-inline" i18n>Open</span>
|
||||||
</a>
|
</a>
|
||||||
<a class="btn btn-sm btn-outline-secondary" target="_blank" [href]="previewUrl"
|
<pngx-preview-popup [document]="document" #popupPreview>
|
||||||
[ngbPopover]="previewContent" [popoverTitle]="document.title | documentTitle"
|
|
||||||
autoClose="true" popoverClass="shadow popover-preview" (mouseenter)="mouseEnterPreview()" (mouseleave)="mouseLeavePreview()" #popover="ngbPopover">
|
|
||||||
<i-bs name="eye"></i-bs> <span class="d-none d-md-inline" i18n>View</span>
|
<i-bs name="eye"></i-bs> <span class="d-none d-md-inline" i18n>View</span>
|
||||||
</a>
|
</pngx-preview-popup>
|
||||||
<ng-template #previewContent>
|
|
||||||
<pngx-preview-popup [document]="document"></pngx-preview-popup>
|
|
||||||
</ng-template>
|
|
||||||
<a class="btn btn-sm btn-outline-secondary" [href]="getDownloadUrl()">
|
<a class="btn btn-sm btn-outline-secondary" [href]="getDownloadUrl()">
|
||||||
<i-bs name="download"></i-bs> <span class="d-none d-md-inline" i18n>Download</span>
|
<i-bs name="download"></i-bs> <span class="d-none d-md-inline" i18n>Download</span>
|
||||||
</a>
|
</a>
|
||||||
|
@ -1,11 +1,6 @@
|
|||||||
import { DatePipe } from '@angular/common'
|
import { DatePipe } from '@angular/common'
|
||||||
import { provideHttpClientTesting } from '@angular/common/http/testing'
|
import { provideHttpClientTesting } from '@angular/common/http/testing'
|
||||||
import {
|
import { ComponentFixture, TestBed } from '@angular/core/testing'
|
||||||
ComponentFixture,
|
|
||||||
TestBed,
|
|
||||||
fakeAsync,
|
|
||||||
tick,
|
|
||||||
} from '@angular/core/testing'
|
|
||||||
import { By } from '@angular/platform-browser'
|
import { By } from '@angular/platform-browser'
|
||||||
import { RouterTestingModule } from '@angular/router/testing'
|
import { RouterTestingModule } from '@angular/router/testing'
|
||||||
import {
|
import {
|
||||||
@ -84,21 +79,6 @@ describe('DocumentCardLargeComponent', () => {
|
|||||||
expect(fixture.nativeElement.textContent).toContain('8 pages')
|
expect(fixture.nativeElement.textContent).toContain('8 pages')
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should show preview on mouseover after delay to preload content', fakeAsync(() => {
|
|
||||||
component.mouseEnterPreview()
|
|
||||||
expect(component.popover.isOpen()).toBeTruthy()
|
|
||||||
expect(component.popoverHidden).toBeTruthy()
|
|
||||||
tick(600)
|
|
||||||
expect(component.popoverHidden).toBeFalsy()
|
|
||||||
component.mouseLeaveCard()
|
|
||||||
|
|
||||||
component.mouseEnterPreview()
|
|
||||||
tick(100)
|
|
||||||
component.mouseLeavePreview()
|
|
||||||
tick(600)
|
|
||||||
expect(component.popover.isOpen()).toBeFalsy()
|
|
||||||
}))
|
|
||||||
|
|
||||||
it('should trim content', () => {
|
it('should trim content', () => {
|
||||||
expect(component.contentTrimmed).toHaveLength(503) // includes ...
|
expect(component.contentTrimmed).toHaveLength(503) // includes ...
|
||||||
})
|
})
|
||||||
|
@ -12,9 +12,9 @@ import {
|
|||||||
} from 'src/app/data/document'
|
} from 'src/app/data/document'
|
||||||
import { DocumentService } from 'src/app/services/rest/document.service'
|
import { DocumentService } from 'src/app/services/rest/document.service'
|
||||||
import { SettingsService } from 'src/app/services/settings.service'
|
import { SettingsService } from 'src/app/services/settings.service'
|
||||||
import { NgbPopover } from '@ng-bootstrap/ng-bootstrap'
|
|
||||||
import { SETTINGS_KEYS } from 'src/app/data/ui-settings'
|
import { SETTINGS_KEYS } from 'src/app/data/ui-settings'
|
||||||
import { ComponentWithPermissions } from '../../with-permissions/with-permissions.component'
|
import { ComponentWithPermissions } from '../../with-permissions/with-permissions.component'
|
||||||
|
import { PreviewPopupComponent } from '../../common/preview-popup/preview-popup.component'
|
||||||
|
|
||||||
@Component({
|
@Component({
|
||||||
selector: 'pngx-document-card-large',
|
selector: 'pngx-document-card-large',
|
||||||
@ -65,7 +65,7 @@ export class DocumentCardLargeComponent extends ComponentWithPermissions {
|
|||||||
@Output()
|
@Output()
|
||||||
clickMoreLike = new EventEmitter()
|
clickMoreLike = new EventEmitter()
|
||||||
|
|
||||||
@ViewChild('popover') popover: NgbPopover
|
@ViewChild('popupPreview') popupPreview: PreviewPopupComponent
|
||||||
|
|
||||||
mouseOnPreview = false
|
mouseOnPreview = false
|
||||||
popoverHidden = true
|
popoverHidden = true
|
||||||
@ -112,29 +112,8 @@ export class DocumentCardLargeComponent extends ComponentWithPermissions {
|
|||||||
return this.documentService.getPreviewUrl(this.document.id)
|
return this.documentService.getPreviewUrl(this.document.id)
|
||||||
}
|
}
|
||||||
|
|
||||||
mouseEnterPreview() {
|
|
||||||
this.mouseOnPreview = true
|
|
||||||
if (!this.popover.isOpen()) {
|
|
||||||
// we're going to open but hide to pre-load content during hover delay
|
|
||||||
this.popover.open()
|
|
||||||
this.popoverHidden = true
|
|
||||||
setTimeout(() => {
|
|
||||||
if (this.mouseOnPreview) {
|
|
||||||
// show popover
|
|
||||||
this.popoverHidden = false
|
|
||||||
} else {
|
|
||||||
this.popover.close()
|
|
||||||
}
|
|
||||||
}, 600)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
mouseLeavePreview() {
|
|
||||||
this.mouseOnPreview = false
|
|
||||||
}
|
|
||||||
|
|
||||||
mouseLeaveCard() {
|
mouseLeaveCard() {
|
||||||
this.popover.close()
|
this.popupPreview.close()
|
||||||
}
|
}
|
||||||
|
|
||||||
get contentTrimmed() {
|
get contentTrimmed() {
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
<div class="col p-2 h-100">
|
<div class="col p-2 h-100">
|
||||||
<div class="card h-100 shadow-sm document-card" [class.card-selected]="selected" [class.popover-hidden]="popoverHidden" (mouseleave)="mouseLeaveCard()">
|
<div class="card h-100 shadow-sm document-card" [class.card-selected]="selected" (mouseleave)="mouseLeaveCard()">
|
||||||
<div class="border-bottom doc-img-container rounded-top" (click)="this.toggleSelected.emit($event)" (dblclick)="dblClickDocument.emit(this)">
|
<div class="border-bottom doc-img-container rounded-top" (click)="this.toggleSelected.emit($event)" (dblclick)="dblClickDocument.emit(this)">
|
||||||
<img class="card-img doc-img" [class.inverted]="getIsThumbInverted()" [src]="getThumbUrl()">
|
<img class="card-img doc-img" [class.inverted]="getIsThumbInverted()" [src]="getThumbUrl()">
|
||||||
|
|
||||||
@ -129,14 +129,9 @@
|
|||||||
<a routerLink="/documents/{{document.id}}" class="btn btn-sm btn-outline-secondary" title="Open" i18n-title *pngxIfPermissions="{ action: PermissionAction.Change, type: PermissionType.Document }" i18n-title>
|
<a routerLink="/documents/{{document.id}}" class="btn btn-sm btn-outline-secondary" title="Open" i18n-title *pngxIfPermissions="{ action: PermissionAction.Change, type: PermissionType.Document }" i18n-title>
|
||||||
<i-bs name="file-earmark-richtext"></i-bs>
|
<i-bs name="file-earmark-richtext"></i-bs>
|
||||||
</a>
|
</a>
|
||||||
<a [href]="previewUrl" target="_blank" class="btn btn-sm btn-outline-secondary"
|
<pngx-preview-popup [document]="document" #popupPreview>
|
||||||
[ngbPopover]="previewContent" [popoverTitle]="document.title | documentTitle"
|
|
||||||
autoClose="true" popoverClass="shadow popover-preview" (mouseenter)="mouseEnterPreview()" (mouseleave)="mouseLeavePreview()" #popover="ngbPopover">
|
|
||||||
<i-bs name="eye"></i-bs>
|
<i-bs name="eye"></i-bs>
|
||||||
</a>
|
</pngx-preview-popup>
|
||||||
<ng-template #previewContent>
|
|
||||||
<pngx-preview-popup [document]="document"></pngx-preview-popup>
|
|
||||||
</ng-template>
|
|
||||||
<a [href]="getDownloadUrl()" class="btn btn-sm btn-outline-secondary" title="Download" i18n-title (click)="$event.stopPropagation()">
|
<a [href]="getDownloadUrl()" class="btn btn-sm btn-outline-secondary" title="Download" i18n-title (click)="$event.stopPropagation()">
|
||||||
<i-bs name="download"></i-bs>
|
<i-bs name="download"></i-bs>
|
||||||
</a>
|
</a>
|
||||||
|
@ -1,11 +1,6 @@
|
|||||||
import { DatePipe } from '@angular/common'
|
import { DatePipe } from '@angular/common'
|
||||||
import { provideHttpClientTesting } from '@angular/common/http/testing'
|
import { provideHttpClientTesting } from '@angular/common/http/testing'
|
||||||
import {
|
import { ComponentFixture, TestBed } from '@angular/core/testing'
|
||||||
ComponentFixture,
|
|
||||||
TestBed,
|
|
||||||
fakeAsync,
|
|
||||||
tick,
|
|
||||||
} from '@angular/core/testing'
|
|
||||||
import { RouterTestingModule } from '@angular/router/testing'
|
import { RouterTestingModule } from '@angular/router/testing'
|
||||||
import {
|
import {
|
||||||
NgbPopoverModule,
|
NgbPopoverModule,
|
||||||
@ -116,19 +111,4 @@ describe('DocumentCardSmallComponent', () => {
|
|||||||
fixture.debugElement.queryAll(By.directive(TagComponent))
|
fixture.debugElement.queryAll(By.directive(TagComponent))
|
||||||
).toHaveLength(6)
|
).toHaveLength(6)
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should show preview on mouseover after delay to preload content', fakeAsync(() => {
|
|
||||||
component.mouseEnterPreview()
|
|
||||||
expect(component.popover.isOpen()).toBeTruthy()
|
|
||||||
expect(component.popoverHidden).toBeTruthy()
|
|
||||||
tick(600)
|
|
||||||
expect(component.popoverHidden).toBeFalsy()
|
|
||||||
component.mouseLeaveCard()
|
|
||||||
|
|
||||||
component.mouseEnterPreview()
|
|
||||||
tick(100)
|
|
||||||
component.mouseLeavePreview()
|
|
||||||
tick(600)
|
|
||||||
expect(component.popover.isOpen()).toBeFalsy()
|
|
||||||
}))
|
|
||||||
})
|
})
|
||||||
|
@ -13,9 +13,9 @@ import {
|
|||||||
} from 'src/app/data/document'
|
} from 'src/app/data/document'
|
||||||
import { DocumentService } from 'src/app/services/rest/document.service'
|
import { DocumentService } from 'src/app/services/rest/document.service'
|
||||||
import { SettingsService } from 'src/app/services/settings.service'
|
import { SettingsService } from 'src/app/services/settings.service'
|
||||||
import { NgbPopover } from '@ng-bootstrap/ng-bootstrap'
|
|
||||||
import { SETTINGS_KEYS } from 'src/app/data/ui-settings'
|
import { SETTINGS_KEYS } from 'src/app/data/ui-settings'
|
||||||
import { ComponentWithPermissions } from '../../with-permissions/with-permissions.component'
|
import { ComponentWithPermissions } from '../../with-permissions/with-permissions.component'
|
||||||
|
import { PreviewPopupComponent } from '../../common/preview-popup/preview-popup.component'
|
||||||
|
|
||||||
@Component({
|
@Component({
|
||||||
selector: 'pngx-document-card-small',
|
selector: 'pngx-document-card-small',
|
||||||
@ -61,10 +61,7 @@ export class DocumentCardSmallComponent extends ComponentWithPermissions {
|
|||||||
|
|
||||||
moreTags: number = null
|
moreTags: number = null
|
||||||
|
|
||||||
@ViewChild('popover') popover: NgbPopover
|
@ViewChild('popupPreview') popupPreview: PreviewPopupComponent
|
||||||
|
|
||||||
mouseOnPreview = false
|
|
||||||
popoverHidden = true
|
|
||||||
|
|
||||||
getIsThumbInverted() {
|
getIsThumbInverted() {
|
||||||
return this.settingsService.get(SETTINGS_KEYS.DARK_MODE_THUMB_INVERTED)
|
return this.settingsService.get(SETTINGS_KEYS.DARK_MODE_THUMB_INVERTED)
|
||||||
@ -78,10 +75,6 @@ export class DocumentCardSmallComponent extends ComponentWithPermissions {
|
|||||||
return this.documentService.getDownloadUrl(this.document.id)
|
return this.documentService.getDownloadUrl(this.document.id)
|
||||||
}
|
}
|
||||||
|
|
||||||
get previewUrl() {
|
|
||||||
return this.documentService.getPreviewUrl(this.document.id)
|
|
||||||
}
|
|
||||||
|
|
||||||
get privateName() {
|
get privateName() {
|
||||||
return $localize`Private`
|
return $localize`Private`
|
||||||
}
|
}
|
||||||
@ -100,29 +93,8 @@ export class DocumentCardSmallComponent extends ComponentWithPermissions {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
mouseEnterPreview() {
|
|
||||||
this.mouseOnPreview = true
|
|
||||||
if (!this.popover.isOpen()) {
|
|
||||||
// we're going to open but hide to pre-load content during hover delay
|
|
||||||
this.popover.open()
|
|
||||||
this.popoverHidden = true
|
|
||||||
setTimeout(() => {
|
|
||||||
if (this.mouseOnPreview) {
|
|
||||||
// show popover
|
|
||||||
this.popoverHidden = false
|
|
||||||
} else {
|
|
||||||
this.popover.close()
|
|
||||||
}
|
|
||||||
}, 600)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
mouseLeavePreview() {
|
|
||||||
this.mouseOnPreview = false
|
|
||||||
}
|
|
||||||
|
|
||||||
mouseLeaveCard() {
|
mouseLeaveCard() {
|
||||||
this.popover.close()
|
this.popupPreview.close()
|
||||||
}
|
}
|
||||||
|
|
||||||
get notesEnabled(): boolean {
|
get notesEnabled(): boolean {
|
||||||
|
@ -292,7 +292,12 @@
|
|||||||
@if (activeDisplayFields.includes(DisplayField.TITLE) || activeDisplayFields.includes(DisplayField.TAGS)) {
|
@if (activeDisplayFields.includes(DisplayField.TITLE) || activeDisplayFields.includes(DisplayField.TAGS)) {
|
||||||
<td width="30%">
|
<td width="30%">
|
||||||
@if (activeDisplayFields.includes(DisplayField.TITLE)) {
|
@if (activeDisplayFields.includes(DisplayField.TITLE)) {
|
||||||
<a routerLink="/documents/{{d.id}}" title="Edit document" i18n-title style="overflow-wrap: anywhere;">{{d.title | documentTitle}}</a>
|
<div class="d-inline-block" (mouseleave)="popupPreview.close()">
|
||||||
|
<a routerLink="/documents/{{d.id}}" title="Edit document" i18n-title style="overflow-wrap: anywhere;">{{d.title | documentTitle}}</a>
|
||||||
|
<pngx-preview-popup [document]="d" linkClasses="btn btn-sm btn-link text-secondary" linkTitle="Preview document" (click)="$event.stopPropagation()" i18n-linkTitle #popupPreview>
|
||||||
|
<i-bs name="eye"></i-bs>
|
||||||
|
</pngx-preview-popup>
|
||||||
|
</div>
|
||||||
}
|
}
|
||||||
@if (activeDisplayFields.includes(DisplayField.TAGS)) {
|
@if (activeDisplayFields.includes(DisplayField.TAGS)) {
|
||||||
@for (t of d.tags$ | async; track t) {
|
@for (t of d.tags$ | async; track t) {
|
||||||
|
@ -72,6 +72,7 @@ import { IsNumberPipe } from 'src/app/pipes/is-number.pipe'
|
|||||||
import { NgxBootstrapIconsModule, allIcons } from 'ngx-bootstrap-icons'
|
import { NgxBootstrapIconsModule, allIcons } from 'ngx-bootstrap-icons'
|
||||||
import { PermissionsService } from 'src/app/services/permissions.service'
|
import { PermissionsService } from 'src/app/services/permissions.service'
|
||||||
import { NgSelectModule } from '@ng-select/ng-select'
|
import { NgSelectModule } from '@ng-select/ng-select'
|
||||||
|
import { PreviewPopupComponent } from '../common/preview-popup/preview-popup.component'
|
||||||
|
|
||||||
const docs: Document[] = [
|
const docs: Document[] = [
|
||||||
{
|
{
|
||||||
@ -137,6 +138,7 @@ describe('DocumentListComponent', () => {
|
|||||||
UsernamePipe,
|
UsernamePipe,
|
||||||
SafeHtmlPipe,
|
SafeHtmlPipe,
|
||||||
IsNumberPipe,
|
IsNumberPipe,
|
||||||
|
PreviewPopupComponent,
|
||||||
],
|
],
|
||||||
imports: [
|
imports: [
|
||||||
RouterTestingModule.withRoutes(routes),
|
RouterTestingModule.withRoutes(routes),
|
||||||
|
@ -77,14 +77,19 @@ describe('CorrespondentListComponent', () => {
|
|||||||
it('should support very old date strings', () => {
|
it('should support very old date strings', () => {
|
||||||
jest.spyOn(correspondentsService, 'listFiltered').mockReturnValue(
|
jest.spyOn(correspondentsService, 'listFiltered').mockReturnValue(
|
||||||
of({
|
of({
|
||||||
count: 1,
|
count: 2,
|
||||||
all: [1],
|
all: [1, 2],
|
||||||
results: [
|
results: [
|
||||||
{
|
{
|
||||||
id: 1,
|
id: 1,
|
||||||
name: 'Correspondent1',
|
name: 'Correspondent1',
|
||||||
last_correspondence: '1832-12-31T15:32:54-07:52:58',
|
last_correspondence: '1832-12-31T15:32:54-07:52:58',
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
id: 2,
|
||||||
|
name: 'Correspondent2',
|
||||||
|
last_correspondence: '1901-07-01T00:00:00+00:09:21',
|
||||||
|
},
|
||||||
],
|
],
|
||||||
})
|
})
|
||||||
)
|
)
|
||||||
|
@ -52,7 +52,7 @@ export class CorrespondentListComponent extends ManagementListComponent<Correspo
date = new Date(
c.last_correspondence
?.toString()
.replace(/-(\d\d):\d\d:\d\d/gm, `-$1:00`)
.replace(/([-+])(\d\d):\d\d:\d\d/gm, `$1$2:00`)
)
}
return this.datePipe.transform(date)
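The widened regex above truncates both negative and positive historic UTC offsets with a seconds component (as produced for pre-1900 local mean time) to whole minutes before the string is handed to Date. A minimal Python sketch of the same normalization idea, using the two sample timestamps from the test above; the shipped code is the TypeScript replace() call in the hunk, and the helper name here is illustrative:

import re

def normalize_offset(timestamp: str) -> str:
    # Truncate a trailing offset like "-07:52:58" or "+00:09:21"
    # to whole minutes so date parsers accept it.
    return re.sub(r"([-+])(\d\d):\d\d:\d\d$", r"\1\2:00", timestamp)

print(normalize_offset("1832-12-31T15:32:54-07:52:58"))  # 1832-12-31T15:32:54-07:00
print(normalize_offset("1901-07-01T00:00:00+00:09:21"))  # 1901-07-01T00:00:00+00:00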
@ -56,7 +56,7 @@ export interface CustomField extends ObjectWithId {
name: string
created?: Date
extra_data?: {
select_options?: string[]
select_options?: Array<{ label: string; id: string }>
default_currency?: string
}
document_count?: number
@ -17,6 +17,8 @@ export enum GlobalSearchType {
|
|||||||
TITLE_CONTENT = 'title-content',
|
TITLE_CONTENT = 'title-content',
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export const PAPERLESS_GREEN_HEX = '#17541f'
|
||||||
|
|
||||||
export const SETTINGS_KEYS = {
|
export const SETTINGS_KEYS = {
|
||||||
LANGUAGE: 'language',
|
LANGUAGE: 'language',
|
||||||
APP_LOGO: 'app_logo',
|
APP_LOGO: 'app_logo',
|
||||||
|
@ -10,6 +10,14 @@ export enum WorkflowTriggerType {
|
|||||||
Consumption = 1,
|
Consumption = 1,
|
||||||
DocumentAdded = 2,
|
DocumentAdded = 2,
|
||||||
DocumentUpdated = 3,
|
DocumentUpdated = 3,
|
||||||
|
Scheduled = 4,
|
||||||
|
}
|
||||||
|
|
||||||
|
export enum ScheduleDateField {
|
||||||
|
Added = 'added',
|
||||||
|
Created = 'created',
|
||||||
|
Modified = 'modified',
|
||||||
|
CustomField = 'custom_field',
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface WorkflowTrigger extends ObjectWithId {
|
export interface WorkflowTrigger extends ObjectWithId {
|
||||||
@ -34,4 +42,14 @@ export interface WorkflowTrigger extends ObjectWithId {
|
|||||||
filter_has_correspondent?: number // Correspondent.id
|
filter_has_correspondent?: number // Correspondent.id
|
||||||
|
|
||||||
filter_has_document_type?: number // DocumentType.id
|
filter_has_document_type?: number // DocumentType.id
|
||||||
|
|
||||||
|
schedule_offset_days?: number
|
||||||
|
|
||||||
|
schedule_is_recurring?: boolean
|
||||||
|
|
||||||
|
schedule_recurring_interval_days?: number
|
||||||
|
|
||||||
|
schedule_date_field?: ScheduleDateField
|
||||||
|
|
||||||
|
schedule_date_custom_field?: number // CustomField.id
|
||||||
}
|
}
|
||||||
|
@ -17,7 +17,12 @@ import {
|
|||||||
hexToHsl,
|
hexToHsl,
|
||||||
} from 'src/app/utils/color'
|
} from 'src/app/utils/color'
|
||||||
import { environment } from 'src/environments/environment'
|
import { environment } from 'src/environments/environment'
|
||||||
import { UiSettings, SETTINGS, SETTINGS_KEYS } from '../data/ui-settings'
|
import {
|
||||||
|
UiSettings,
|
||||||
|
SETTINGS,
|
||||||
|
SETTINGS_KEYS,
|
||||||
|
PAPERLESS_GREEN_HEX,
|
||||||
|
} from '../data/ui-settings'
|
||||||
import { User } from '../data/user'
|
import { User } from '../data/user'
|
||||||
import {
|
import {
|
||||||
PermissionAction,
|
PermissionAction,
|
||||||
@ -420,7 +425,7 @@ export class SettingsService {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
if (themeColor) {
|
if (themeColor?.length) {
|
||||||
const hsl = hexToHsl(themeColor)
|
const hsl = hexToHsl(themeColor)
|
||||||
const bgBrightnessEstimate = estimateBrightnessForColor(themeColor)
|
const bgBrightnessEstimate = estimateBrightnessForColor(themeColor)
|
||||||
|
|
||||||
@ -445,6 +450,11 @@ export class SettingsService {
|
|||||||
document.documentElement.style.removeProperty('--pngx-primary')
|
document.documentElement.style.removeProperty('--pngx-primary')
|
||||||
document.documentElement.style.removeProperty('--pngx-primary-lightness')
|
document.documentElement.style.removeProperty('--pngx-primary-lightness')
|
||||||
}
|
}
|
||||||
|
|
||||||
|
this.meta.updateTag({
|
||||||
|
name: 'theme-color',
|
||||||
|
content: themeColor?.length ? themeColor : PAPERLESS_GREEN_HEX,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
getLanguageOptions(): LanguageOption[] {
|
getLanguageOptions(): LanguageOption[] {
|
||||||
|
@ -564,11 +564,6 @@ table.table {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
.popover-hidden .popover {
|
|
||||||
opacity: 0;
|
|
||||||
pointer-events: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Tour
|
// Tour
|
||||||
.tour-active .popover {
|
.tour-active .popover {
|
||||||
min-width: 360px;
|
min-width: 360px;
|
||||||
@ -728,3 +723,27 @@ i-bs svg {
|
|||||||
vertical-align: middle;
|
vertical-align: middle;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// fixes for buttons in preview popup
|
||||||
|
.btn-group pngx-preview-popup:not(:last-child) {
|
||||||
|
// Prevent double borders when buttons are next to each other
|
||||||
|
> .btn {
|
||||||
|
margin-left: calc(#{$btn-border-width} * -1);
|
||||||
|
}
|
||||||
|
> .btn {
|
||||||
|
@include border-end-radius(0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.btn-group pngx-preview-popup:not(:first-child) {
|
||||||
|
> .btn {
|
||||||
|
@include border-start-radius(0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.btn-group pngx-preview-popup {
|
||||||
|
position: relative;
|
||||||
|
flex: 1 1 auto;
|
||||||
|
|
||||||
|
> .btn {
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
@ -14,7 +14,7 @@ def settings(request):
app_logo = (
django_settings.APP_LOGO
if general_config.app_logo is None or len(general_config.app_logo) == 0
else general_config.app_logo
else django_settings.BASE_URL + general_config.app_logo.lstrip("/")
)

return {
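The new expression prefixes a configured logo path with the application base URL while avoiding a doubled slash. A small illustration of the string handling; the values below are invented for the example, only BASE_URL, APP_LOGO and app_logo come from the hunk above:

BASE_URL = "/paperless/"        # assumed example value
app_logo = "/logo/custom.png"   # assumed value stored by the general config

# Stripping the leading slash before concatenation yields one clean URL.
print(BASE_URL + app_logo.lstrip("/"))  # -> /paperless/logo/custom.png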
@ -176,9 +176,9 @@ class CustomFieldsFilter(Filter):
if fields_with_matching_selects.count() > 0:
for field in fields_with_matching_selects:
options = field.extra_data.get("select_options", [])
for index, option in enumerate(options):
for _, option in enumerate(options):
if option.lower().find(value.lower()) != -1:
if option.get("label").lower().find(value.lower()) != -1:
option_ids.extend([index])
option_ids.extend([option.get("id")])
return (
qs.filter(custom_fields__field__name__icontains=value)
| qs.filter(custom_fields__value_text__icontains=value)
|
|||||||
return qs
|
return qs
|
||||||
|
|
||||||
|
|
||||||
class SelectField(serializers.IntegerField):
|
class SelectField(serializers.CharField):
|
||||||
def __init__(self, custom_field: CustomField):
|
def __init__(self, custom_field: CustomField):
|
||||||
self._options = custom_field.extra_data["select_options"]
|
self._options = custom_field.extra_data["select_options"]
|
||||||
super().__init__(min_value=0, max_value=len(self._options))
|
super().__init__(max_length=16)
|
||||||
|
|
||||||
def to_internal_value(self, data):
|
def to_internal_value(self, data):
|
||||||
if not isinstance(data, int):
|
# If the supplied value is the option label instead of the ID
|
||||||
# If the supplied value is not an integer,
|
try:
|
||||||
# we will try to map it to an option index.
|
data = next(
|
||||||
try:
|
option.get("id")
|
||||||
data = self._options.index(data)
|
for option in self._options
|
||||||
except ValueError:
|
if option.get("label") == data
|
||||||
pass
|
)
|
||||||
|
except StopIteration:
|
||||||
|
pass
|
||||||
return super().to_internal_value(data)
|
return super().to_internal_value(data)
|
||||||
|
|
||||||
|
|
||||||
|
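With SelectField now a CharField, the API accepts either the stored option ID or the human-readable label and maps the latter to its ID before validation. A minimal, self-contained sketch of that lookup; the option data below is an assumed example, and the real field additionally enforces max_length=16 as shown above:

def label_to_id(value: str, options: list[dict]) -> str:
    # Mirror of the generator in to_internal_value(): fall back to the raw
    # value when no option label matches (it may already be an ID).
    try:
        return next(o["id"] for o in options if o["label"] == value)
    except StopIteration:
        return value

options = [{"id": "abc123", "label": "Inbox"}, {"id": "def456", "label": "Archive"}]
print(label_to_id("Archive", options))  # -> def456
print(label_to_id("def456", options))   # -> def456 (already an ID)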
@ -317,10 +317,8 @@ class Command(BaseCommand):
|
|||||||
|
|
||||||
# Check the files against the timeout
|
# Check the files against the timeout
|
||||||
still_waiting = {}
|
still_waiting = {}
|
||||||
for filepath in notified_files:
|
# last_event_time is time of the last inotify event for this file
|
||||||
# Time of the last inotify event for this file
|
for filepath, last_event_time in notified_files.items():
|
||||||
last_event_time = notified_files[filepath]
|
|
||||||
|
|
||||||
# Current time - last time over the configured timeout
|
# Current time - last time over the configured timeout
|
||||||
waited_long_enough = (
|
waited_long_enough = (
|
||||||
monotonic() - last_event_time
|
monotonic() - last_event_time
|
||||||
|
@ -294,9 +294,9 @@ class Command(CryptMixin, BaseCommand):
|
|||||||
manifest_dict = {}
|
manifest_dict = {}
|
||||||
|
|
||||||
# Build an overall manifest
|
# Build an overall manifest
|
||||||
for key in manifest_key_to_object_query:
|
for key, object_query in manifest_key_to_object_query.items():
|
||||||
manifest_dict[key] = json.loads(
|
manifest_dict[key] = json.loads(
|
||||||
serializers.serialize("json", manifest_key_to_object_query[key]),
|
serializers.serialize("json", object_query),
|
||||||
)
|
)
|
||||||
|
|
||||||
self.encrypt_secret_fields(manifest_dict)
|
self.encrypt_secret_fields(manifest_dict)
|
||||||
@ -370,8 +370,8 @@ class Command(CryptMixin, BaseCommand):
|
|||||||
|
|
||||||
# 4.1 write primary manifest to target folder
|
# 4.1 write primary manifest to target folder
|
||||||
manifest = []
|
manifest = []
|
||||||
for key in manifest_dict:
|
for key, item in manifest_dict.items():
|
||||||
manifest.extend(manifest_dict[key])
|
manifest.extend(item)
|
||||||
manifest_path = (self.target / "manifest.json").resolve()
|
manifest_path = (self.target / "manifest.json").resolve()
|
||||||
self.check_and_write_json(
|
self.check_and_write_json(
|
||||||
manifest,
|
manifest,
|
||||||
|
@ -34,7 +34,7 @@ from documents.settings import EXPORTER_ARCHIVE_NAME
|
|||||||
from documents.settings import EXPORTER_CRYPTO_SETTINGS_NAME
|
from documents.settings import EXPORTER_CRYPTO_SETTINGS_NAME
|
||||||
from documents.settings import EXPORTER_FILE_NAME
|
from documents.settings import EXPORTER_FILE_NAME
|
||||||
from documents.settings import EXPORTER_THUMBNAIL_NAME
|
from documents.settings import EXPORTER_THUMBNAIL_NAME
|
||||||
from documents.signals.handlers import update_cf_instance_documents
|
from documents.signals.handlers import check_paths_and_prune_custom_fields
|
||||||
from documents.signals.handlers import update_filename_and_move_files
|
from documents.signals.handlers import update_filename_and_move_files
|
||||||
from documents.utils import copy_file_with_basic_stats
|
from documents.utils import copy_file_with_basic_stats
|
||||||
from paperless import version
|
from paperless import version
|
||||||
@ -262,7 +262,7 @@ class Command(CryptMixin, BaseCommand):
|
|||||||
),
|
),
|
||||||
disable_signal(
|
disable_signal(
|
||||||
post_save,
|
post_save,
|
||||||
receiver=update_cf_instance_documents,
|
receiver=check_paths_and_prune_custom_fields,
|
||||||
sender=CustomField,
|
sender=CustomField,
|
||||||
),
|
),
|
||||||
):
|
):
|
||||||
|
@ -409,6 +409,7 @@ def document_matches_workflow(
|
|||||||
elif (
|
elif (
|
||||||
trigger_type == WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED
|
trigger_type == WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED
|
||||||
or trigger_type == WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED
|
or trigger_type == WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED
|
||||||
|
or trigger_type == WorkflowTrigger.WorkflowTriggerType.SCHEDULED
|
||||||
):
|
):
|
||||||
trigger_matched, reason = existing_document_matches_workflow(
|
trigger_matched, reason = existing_document_matches_workflow(
|
||||||
document,
|
document,
|
||||||
|
@ -0,0 +1,143 @@
|
|||||||
|
# Generated by Django 5.1.1 on 2024-11-05 05:19
|
||||||
|
|
||||||
|
import django.core.validators
|
||||||
|
import django.db.models.deletion
|
||||||
|
import django.utils.timezone
|
||||||
|
from django.db import migrations
|
||||||
|
from django.db import models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
dependencies = [
|
||||||
|
("documents", "1057_paperlesstask_owner"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="workflowtrigger",
|
||||||
|
name="schedule_date_custom_field",
|
||||||
|
field=models.ForeignKey(
|
||||||
|
blank=True,
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
|
to="documents.customfield",
|
||||||
|
verbose_name="schedule date custom field",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="workflowtrigger",
|
||||||
|
name="schedule_date_field",
|
||||||
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
("added", "Added"),
|
||||||
|
("created", "Created"),
|
||||||
|
("modified", "Modified"),
|
||||||
|
("custom_field", "Custom Field"),
|
||||||
|
],
|
||||||
|
default="added",
|
||||||
|
help_text="The field to check for a schedule trigger.",
|
||||||
|
max_length=20,
|
||||||
|
verbose_name="schedule date field",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="workflowtrigger",
|
||||||
|
name="schedule_is_recurring",
|
||||||
|
field=models.BooleanField(
|
||||||
|
default=False,
|
||||||
|
help_text="If the schedule should be recurring.",
|
||||||
|
verbose_name="schedule is recurring",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="workflowtrigger",
|
||||||
|
name="schedule_offset_days",
|
||||||
|
field=models.PositiveIntegerField(
|
||||||
|
default=0,
|
||||||
|
help_text="The number of days to offset the schedule trigger by.",
|
||||||
|
verbose_name="schedule offset days",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="workflowtrigger",
|
||||||
|
name="schedule_recurring_interval_days",
|
||||||
|
field=models.PositiveIntegerField(
|
||||||
|
default=1,
|
||||||
|
help_text="The number of days between recurring schedule triggers.",
|
||||||
|
validators=[django.core.validators.MinValueValidator(1)],
|
||||||
|
verbose_name="schedule recurring delay in days",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="workflowtrigger",
|
||||||
|
name="type",
|
||||||
|
field=models.PositiveIntegerField(
|
||||||
|
choices=[
|
||||||
|
(1, "Consumption Started"),
|
||||||
|
(2, "Document Added"),
|
||||||
|
(3, "Document Updated"),
|
||||||
|
(4, "Scheduled"),
|
||||||
|
],
|
||||||
|
default=1,
|
||||||
|
verbose_name="Workflow Trigger Type",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="WorkflowRun",
|
||||||
|
fields=[
|
||||||
|
(
|
||||||
|
"id",
|
||||||
|
models.AutoField(
|
||||||
|
auto_created=True,
|
||||||
|
primary_key=True,
|
||||||
|
serialize=False,
|
||||||
|
verbose_name="ID",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"type",
|
||||||
|
models.PositiveIntegerField(
|
||||||
|
choices=[
|
||||||
|
(1, "Consumption Started"),
|
||||||
|
(2, "Document Added"),
|
||||||
|
(3, "Document Updated"),
|
||||||
|
(4, "Scheduled"),
|
||||||
|
],
|
||||||
|
null=True,
|
||||||
|
verbose_name="workflow trigger type",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"run_at",
|
||||||
|
models.DateTimeField(
|
||||||
|
db_index=True,
|
||||||
|
default=django.utils.timezone.now,
|
||||||
|
verbose_name="date run",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"document",
|
||||||
|
models.ForeignKey(
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name="workflow_runs",
|
||||||
|
to="documents.document",
|
||||||
|
verbose_name="document",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"workflow",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name="runs",
|
||||||
|
to="documents.workflow",
|
||||||
|
verbose_name="workflow",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"verbose_name": "workflow run",
|
||||||
|
"verbose_name_plural": "workflow runs",
|
||||||
|
},
|
||||||
|
),
|
||||||
|
]
|
@ -0,0 +1,79 @@
|
|||||||
|
# Generated by Django 5.1.1 on 2024-11-13 05:14
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
from django.db import models
|
||||||
|
from django.db import transaction
|
||||||
|
from django.utils.crypto import get_random_string
|
||||||
|
|
||||||
|
|
||||||
|
def migrate_customfield_selects(apps, schema_editor):
|
||||||
|
"""
|
||||||
|
Migrate the custom field selects from a simple list of strings to a list of dictionaries with
|
||||||
|
label and id. Then update all instances of the custom field to use the new format.
|
||||||
|
"""
|
||||||
|
CustomFieldInstance = apps.get_model("documents", "CustomFieldInstance")
|
||||||
|
CustomField = apps.get_model("documents", "CustomField")
|
||||||
|
|
||||||
|
with transaction.atomic():
|
||||||
|
for custom_field in CustomField.objects.filter(
|
||||||
|
data_type="select",
|
||||||
|
): # CustomField.FieldDataType.SELECT
|
||||||
|
old_select_options = custom_field.extra_data["select_options"]
|
||||||
|
custom_field.extra_data["select_options"] = [
|
||||||
|
{"id": get_random_string(16), "label": value}
|
||||||
|
for value in old_select_options
|
||||||
|
]
|
||||||
|
custom_field.save()
|
||||||
|
|
||||||
|
for instance in CustomFieldInstance.objects.filter(field=custom_field):
|
||||||
|
if instance.value_select:
|
||||||
|
instance.value_select = custom_field.extra_data["select_options"][
|
||||||
|
int(instance.value_select)
|
||||||
|
]["id"]
|
||||||
|
instance.save()
|
||||||
|
|
||||||
|
|
||||||
|
def reverse_migrate_customfield_selects(apps, schema_editor):
|
||||||
|
"""
|
||||||
|
Reverse the migration of the custom field selects from a list of dictionaries with label and id
|
||||||
|
to a simple list of strings. Then update all instances of the custom field to use the old format,
|
||||||
|
which is just the index of the selected option.
|
||||||
|
"""
|
||||||
|
CustomFieldInstance = apps.get_model("documents", "CustomFieldInstance")
|
||||||
|
CustomField = apps.get_model("documents", "CustomField")
|
||||||
|
|
||||||
|
with transaction.atomic():
|
||||||
|
for custom_field in CustomField.objects.all():
|
||||||
|
if custom_field.data_type == "select": # CustomField.FieldDataType.SELECT
|
||||||
|
old_select_options = custom_field.extra_data["select_options"]
|
||||||
|
custom_field.extra_data["select_options"] = [
|
||||||
|
option["label"]
|
||||||
|
for option in custom_field.extra_data["select_options"]
|
||||||
|
]
|
||||||
|
custom_field.save()
|
||||||
|
|
||||||
|
for instance in CustomFieldInstance.objects.filter(field=custom_field):
|
||||||
|
instance.value_select = next(
|
||||||
|
index
|
||||||
|
for index, option in enumerate(old_select_options)
|
||||||
|
if option.get("id") == instance.value_select
|
||||||
|
)
|
||||||
|
instance.save()
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
dependencies = [
|
||||||
|
("documents", "1058_workflowtrigger_schedule_date_custom_field_and_more"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="customfieldinstance",
|
||||||
|
name="value_select",
|
||||||
|
field=models.CharField(max_length=16, null=True),
|
||||||
|
),
|
||||||
|
migrations.RunPython(
|
||||||
|
migrate_customfield_selects,
|
||||||
|
reverse_migrate_customfield_selects,
|
||||||
|
),
|
||||||
|
]
|
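The data migration above rewrites each select field's options from a plain list of labels to objects with a generated 16-character ID, and remaps every stored instance value from a list index to the matching ID. A rough before/after illustration; the IDs are random in practice and the ones below are invented for the example:

# Before the migration: extra_data and a stored instance value
extra_data = {"select_options": ["Red", "Blue"]}
value_select = "1"  # index into the list, i.e. "Blue"

# After the migration: labels gain stable ids, the instance stores the id
extra_data = {
    "select_options": [
        {"id": "q1w2e3r4t5y6u7i8", "label": "Red"},
        {"id": "a1s2d3f4g5h6j7k8", "label": "Blue"},
    ]
}
value_select = "a1s2d3f4g5h6j7k8"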
@ -947,7 +947,7 @@ class CustomFieldInstance(SoftDeleteModel):

value_document_ids = models.JSONField(null=True)

value_select = models.PositiveSmallIntegerField(null=True)
value_select = models.CharField(null=True, max_length=16)

class Meta:
ordering = ("created",)
@ -962,7 +962,11 @@ class CustomFieldInstance(SoftDeleteModel):

def __str__(self) -> str:
value = (
self.field.extra_data["select_options"][self.value_select]
next(
option.get("label")
for option in self.field.extra_data["select_options"]
if option.get("id") == self.value_select
)
if (
self.field.data_type == CustomField.FieldDataType.SELECT
and self.value_select is not None
@ -1016,12 +1020,19 @@ class WorkflowTrigger(models.Model):
|
|||||||
CONSUMPTION = 1, _("Consumption Started")
|
CONSUMPTION = 1, _("Consumption Started")
|
||||||
DOCUMENT_ADDED = 2, _("Document Added")
|
DOCUMENT_ADDED = 2, _("Document Added")
|
||||||
DOCUMENT_UPDATED = 3, _("Document Updated")
|
DOCUMENT_UPDATED = 3, _("Document Updated")
|
||||||
|
SCHEDULED = 4, _("Scheduled")
|
||||||
|
|
||||||
class DocumentSourceChoices(models.IntegerChoices):
|
class DocumentSourceChoices(models.IntegerChoices):
|
||||||
CONSUME_FOLDER = DocumentSource.ConsumeFolder.value, _("Consume Folder")
|
CONSUME_FOLDER = DocumentSource.ConsumeFolder.value, _("Consume Folder")
|
||||||
API_UPLOAD = DocumentSource.ApiUpload.value, _("Api Upload")
|
API_UPLOAD = DocumentSource.ApiUpload.value, _("Api Upload")
|
||||||
MAIL_FETCH = DocumentSource.MailFetch.value, _("Mail Fetch")
|
MAIL_FETCH = DocumentSource.MailFetch.value, _("Mail Fetch")
|
||||||
|
|
||||||
|
class ScheduleDateField(models.TextChoices):
|
||||||
|
ADDED = "added", _("Added")
|
||||||
|
CREATED = "created", _("Created")
|
||||||
|
MODIFIED = "modified", _("Modified")
|
||||||
|
CUSTOM_FIELD = "custom_field", _("Custom Field")
|
||||||
|
|
||||||
type = models.PositiveIntegerField(
|
type = models.PositiveIntegerField(
|
||||||
_("Workflow Trigger Type"),
|
_("Workflow Trigger Type"),
|
||||||
choices=WorkflowTriggerType.choices,
|
choices=WorkflowTriggerType.choices,
|
||||||
@ -1098,6 +1109,49 @@ class WorkflowTrigger(models.Model):
|
|||||||
verbose_name=_("has this correspondent"),
|
verbose_name=_("has this correspondent"),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
schedule_offset_days = models.PositiveIntegerField(
|
||||||
|
_("schedule offset days"),
|
||||||
|
default=0,
|
||||||
|
help_text=_(
|
||||||
|
"The number of days to offset the schedule trigger by.",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
schedule_is_recurring = models.BooleanField(
|
||||||
|
_("schedule is recurring"),
|
||||||
|
default=False,
|
||||||
|
help_text=_(
|
||||||
|
"If the schedule should be recurring.",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
schedule_recurring_interval_days = models.PositiveIntegerField(
|
||||||
|
_("schedule recurring delay in days"),
|
||||||
|
default=1,
|
||||||
|
validators=[MinValueValidator(1)],
|
||||||
|
help_text=_(
|
||||||
|
"The number of days between recurring schedule triggers.",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
schedule_date_field = models.CharField(
|
||||||
|
_("schedule date field"),
|
||||||
|
max_length=20,
|
||||||
|
choices=ScheduleDateField.choices,
|
||||||
|
default=ScheduleDateField.ADDED,
|
||||||
|
help_text=_(
|
||||||
|
"The field to check for a schedule trigger.",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
schedule_date_custom_field = models.ForeignKey(
|
||||||
|
CustomField,
|
||||||
|
null=True,
|
||||||
|
blank=True,
|
||||||
|
on_delete=models.SET_NULL,
|
||||||
|
verbose_name=_("schedule date custom field"),
|
||||||
|
)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
verbose_name = _("workflow trigger")
|
verbose_name = _("workflow trigger")
|
||||||
verbose_name_plural = _("workflow triggers")
|
verbose_name_plural = _("workflow triggers")
|
||||||
@ -1348,3 +1402,39 @@ class Workflow(models.Model):

def __str__(self):
return f"Workflow: {self.name}"


class WorkflowRun(models.Model):
workflow = models.ForeignKey(
Workflow,
on_delete=models.CASCADE,
related_name="runs",
verbose_name=_("workflow"),
)

type = models.PositiveIntegerField(
_("workflow trigger type"),
choices=WorkflowTrigger.WorkflowTriggerType.choices,
null=True,
)

document = models.ForeignKey(
Document,
null=True,
on_delete=models.CASCADE,
related_name="workflow_runs",
verbose_name=_("document"),
)

run_at = models.DateTimeField(
_("date run"),
default=timezone.now,
db_index=True,
)

class Meta:
verbose_name = _("workflow run")
verbose_name_plural = _("workflow runs")

def __str__(self):
return f"WorkflowRun of {self.workflow} at {self.run_at} on {self.document}"
@@ -160,7 +160,7 @@ class SetPermissionsMixin:
             },
         }
         if set_permissions is not None:
-            for action in permissions_dict:
+            for action, _ in permissions_dict.items():
                 if action in set_permissions:
                     users = set_permissions[action]["users"]
                     permissions_dict[action]["users"] = self._validate_user_ids(users)
@ -533,20 +533,27 @@ class CustomFieldSerializer(serializers.ModelSerializer):
|
|||||||
if (
|
if (
|
||||||
"data_type" in attrs
|
"data_type" in attrs
|
||||||
and attrs["data_type"] == CustomField.FieldDataType.SELECT
|
and attrs["data_type"] == CustomField.FieldDataType.SELECT
|
||||||
and (
|
) or (
|
||||||
|
self.instance
|
||||||
|
and self.instance.data_type == CustomField.FieldDataType.SELECT
|
||||||
|
):
|
||||||
|
if (
|
||||||
"extra_data" not in attrs
|
"extra_data" not in attrs
|
||||||
or "select_options" not in attrs["extra_data"]
|
or "select_options" not in attrs["extra_data"]
|
||||||
or not isinstance(attrs["extra_data"]["select_options"], list)
|
or not isinstance(attrs["extra_data"]["select_options"], list)
|
||||||
or len(attrs["extra_data"]["select_options"]) == 0
|
or len(attrs["extra_data"]["select_options"]) == 0
|
||||||
or not all(
|
or not all(
|
||||||
isinstance(option, str) and len(option) > 0
|
len(option.get("label", "")) > 0
|
||||||
for option in attrs["extra_data"]["select_options"]
|
for option in attrs["extra_data"]["select_options"]
|
||||||
)
|
)
|
||||||
)
|
):
|
||||||
):
|
raise serializers.ValidationError(
|
||||||
raise serializers.ValidationError(
|
{"error": "extra_data.select_options must be a valid list"},
|
||||||
{"error": "extra_data.select_options must be a valid list"},
|
)
|
||||||
)
|
# labels are valid, generate ids if not present
|
||||||
|
for option in attrs["extra_data"]["select_options"]:
|
||||||
|
if option.get("id") is None:
|
||||||
|
option["id"] = get_random_string(length=16)
|
||||||
elif (
|
elif (
|
||||||
"data_type" in attrs
|
"data_type" in attrs
|
||||||
and attrs["data_type"] == CustomField.FieldDataType.MONETARY
|
and attrs["data_type"] == CustomField.FieldDataType.MONETARY
|
||||||
@@ -646,10 +653,14 @@ class CustomFieldInstanceSerializer(serializers.ModelSerializer):
        elif field.data_type == CustomField.FieldDataType.SELECT:
            select_options = field.extra_data["select_options"]
            try:
-               select_options[data["value"]]
+               next(
+                   option
+                   for option in select_options
+                   if option["id"] == data["value"]
+               )
            except Exception:
                raise serializers.ValidationError(
-                   f"Value must be index of an element in {select_options}",
+                   f"Value must be an id of an element in {select_options}",
                )
        elif field.data_type == CustomField.FieldDataType.DOCUMENTLINK:
            doc_ids = data["value"]
@@ -1772,6 +1783,11 @@ class WorkflowTriggerSerializer(serializers.ModelSerializer):
             "filter_has_tags",
             "filter_has_correspondent",
             "filter_has_document_type",
+            "schedule_offset_days",
+            "schedule_is_recurring",
+            "schedule_recurring_interval_days",
+            "schedule_date_field",
+            "schedule_date_custom_field",
         ]

     def validate(self, attrs):
@@ -37,6 +37,7 @@ from documents.models import PaperlessTask
 from documents.models import Tag
 from documents.models import Workflow
 from documents.models import WorkflowAction
+from documents.models import WorkflowRun
 from documents.models import WorkflowTrigger
 from documents.permissions import get_objects_for_user_owner_aware
 from documents.permissions import set_permissions_for_object
@@ -367,21 +368,6 @@ class CannotMoveFilesException(Exception):
     pass


-# should be disabled in /src/documents/management/commands/document_importer.py handle
-@receiver(models.signals.post_save, sender=CustomField)
-def update_cf_instance_documents(sender, instance: CustomField, **kwargs):
-    """
-    'Select' custom field instances get their end-user value (e.g. in file names) from the select_options in extra_data,
-    which is contained in the custom field itself. So when the field is changed, we (may) need to update the file names
-    of all documents that have this custom field.
-    """
-    if (
-        instance.data_type == CustomField.FieldDataType.SELECT
-    ): # Only select fields, for now
-        for cf_instance in instance.fields.all():
-            update_filename_and_move_files(sender, cf_instance)
-
-
 # should be disabled in /src/documents/management/commands/document_importer.py handle
 @receiver(models.signals.post_save, sender=CustomFieldInstance)
 @receiver(models.signals.m2m_changed, sender=Document.tags.through)
@@ -520,6 +506,34 @@ def update_filename_and_move_files(
     )


+# should be disabled in /src/documents/management/commands/document_importer.py handle
+@receiver(models.signals.post_save, sender=CustomField)
+def check_paths_and_prune_custom_fields(sender, instance: CustomField, **kwargs):
+    """
+    When a custom field is updated:
+    1. 'Select' custom field instances get their end-user value (e.g. in file names) from the select_options in extra_data,
+    which is contained in the custom field itself. So when the field is changed, we (may) need to update the file names
+    of all documents that have this custom field.
+    2. If a 'Select' field option was removed, we need to nullify the custom field instances that have the option.
+    """
+    if (
+        instance.data_type == CustomField.FieldDataType.SELECT
+    ): # Only select fields, for now
+        for cf_instance in instance.fields.all():
+            options = instance.extra_data.get("select_options", [])
+            try:
+                next(
+                    option["label"]
+                    for option in options
+                    if option["id"] == cf_instance.value
+                )
+            except StopIteration:
+                # The value of this custom field instance is not in the select options anymore
+                cf_instance.value_select = None
+                cf_instance.save()
+            update_filename_and_move_files(sender, cf_instance)
+
+
 def set_log_entry(sender, document: Document, logging_group=None, **kwargs):
     ct = ContentType.objects.get(model="document")
     user = User.objects.get(username="consumer")
@@ -917,6 +931,12 @@ def run_workflows(
                 document.save()
                 document.tags.set(doc_tag_ids)

+    WorkflowRun.objects.create(
+        workflow=workflow,
+        type=trigger_type,
+        document=document if not use_overrides else None,
+    )
+
     if use_overrides:
         return overrides, "\n".join(messages)

@@ -31,10 +31,14 @@ from documents.double_sided import CollatePlugin
 from documents.file_handling import create_source_path_directory
 from documents.file_handling import generate_unique_filename
 from documents.models import Correspondent
+from documents.models import CustomFieldInstance
 from documents.models import Document
 from documents.models import DocumentType
 from documents.models import StoragePath
 from documents.models import Tag
+from documents.models import Workflow
+from documents.models import WorkflowRun
+from documents.models import WorkflowTrigger
 from documents.parsers import DocumentParser
 from documents.parsers import get_parser_class_for_mime_type
 from documents.plugins.base import ConsumeTaskPlugin
@@ -44,6 +48,7 @@ from documents.plugins.helpers import ProgressStatusOptions
 from documents.sanity_checker import SanityCheckFailedException
 from documents.signals import document_updated
 from documents.signals.handlers import cleanup_document_deletion
+from documents.signals.handlers import run_workflows

 if settings.AUDIT_LOG_ENABLED:
     from auditlog.models import LogEntry
@ -337,3 +342,85 @@ def empty_trash(doc_ids=None):
|
|||||||
cleanup_document_deletion,
|
cleanup_document_deletion,
|
||||||
sender=Document,
|
sender=Document,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@shared_task
|
||||||
|
def check_scheduled_workflows():
|
||||||
|
scheduled_workflows: list[Workflow] = (
|
||||||
|
Workflow.objects.filter(
|
||||||
|
triggers__type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
|
||||||
|
enabled=True,
|
||||||
|
)
|
||||||
|
.distinct()
|
||||||
|
.prefetch_related("triggers")
|
||||||
|
)
|
||||||
|
if scheduled_workflows.count() > 0:
|
||||||
|
logger.debug(f"Checking {len(scheduled_workflows)} scheduled workflows")
|
||||||
|
for workflow in scheduled_workflows:
|
||||||
|
schedule_triggers = workflow.triggers.filter(
|
||||||
|
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
|
||||||
|
)
|
||||||
|
trigger: WorkflowTrigger
|
||||||
|
for trigger in schedule_triggers:
|
||||||
|
documents = Document.objects.none()
|
||||||
|
offset_td = timedelta(days=trigger.schedule_offset_days)
|
||||||
|
logger.debug(
|
||||||
|
f"Checking trigger {trigger} with offset {offset_td} against field: {trigger.schedule_date_field}",
|
||||||
|
)
|
||||||
|
match trigger.schedule_date_field:
|
||||||
|
case WorkflowTrigger.ScheduleDateField.ADDED:
|
||||||
|
documents = Document.objects.filter(
|
||||||
|
added__lt=timezone.now() - offset_td,
|
||||||
|
)
|
||||||
|
case WorkflowTrigger.ScheduleDateField.CREATED:
|
||||||
|
documents = Document.objects.filter(
|
||||||
|
created__lt=timezone.now() - offset_td,
|
||||||
|
)
|
||||||
|
case WorkflowTrigger.ScheduleDateField.MODIFIED:
|
||||||
|
documents = Document.objects.filter(
|
||||||
|
modified__lt=timezone.now() - offset_td,
|
||||||
|
)
|
||||||
|
case WorkflowTrigger.ScheduleDateField.CUSTOM_FIELD:
|
||||||
|
cf_instances = CustomFieldInstance.objects.filter(
|
||||||
|
field=trigger.schedule_date_custom_field,
|
||||||
|
value_date__lt=timezone.now() - offset_td,
|
||||||
|
)
|
||||||
|
documents = Document.objects.filter(
|
||||||
|
id__in=cf_instances.values_list("document", flat=True),
|
||||||
|
)
|
||||||
|
if documents.count() > 0:
|
||||||
|
logger.debug(
|
||||||
|
f"Found {documents.count()} documents for trigger {trigger}",
|
||||||
|
)
|
||||||
|
for document in documents:
|
||||||
|
workflow_runs = WorkflowRun.objects.filter(
|
||||||
|
document=document,
|
||||||
|
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
|
||||||
|
workflow=workflow,
|
||||||
|
).order_by("-run_at")
|
||||||
|
if not trigger.schedule_is_recurring and workflow_runs.exists():
|
||||||
|
# schedule is non-recurring and the workflow has already been run
|
||||||
|
logger.debug(
|
||||||
|
f"Skipping document {document} for non-recurring workflow {workflow} as it has already been run",
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
elif (
|
||||||
|
trigger.schedule_is_recurring
|
||||||
|
and workflow_runs.exists()
|
||||||
|
and (
|
||||||
|
workflow_runs.last().run_at
|
||||||
|
> timezone.now()
|
||||||
|
- timedelta(
|
||||||
|
days=trigger.schedule_recurring_interval_days,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
):
|
||||||
|
# schedule is recurring but the last run was within the number of recurring interval days
|
||||||
|
logger.debug(
|
||||||
|
f"Skipping document {document} for recurring workflow {workflow} as the last run was within the recurring interval",
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
run_workflows(
|
||||||
|
WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
|
||||||
|
document,
|
||||||
|
)
|
||||||
|
@@ -253,7 +253,11 @@ def get_custom_fields_context(
        ):
            options = field_instance.field.extra_data["select_options"]
            value = pathvalidate.sanitize_filename(
-               options[int(field_instance.value)],
+               next(
+                   option["label"]
+                   for option in options
+                   if option["id"] == field_instance.value
+               ),
                replacement_text="-",
            )
        else:
@ -1,7 +1,9 @@
|
|||||||
import json
|
import json
|
||||||
from unittest import mock
|
from unittest import mock
|
||||||
|
|
||||||
|
from auditlog.models import LogEntry
|
||||||
from django.contrib.auth.models import User
|
from django.contrib.auth.models import User
|
||||||
|
from django.test import override_settings
|
||||||
from guardian.shortcuts import assign_perm
|
from guardian.shortcuts import assign_perm
|
||||||
from rest_framework import status
|
from rest_framework import status
|
||||||
from rest_framework.test import APITestCase
|
from rest_framework.test import APITestCase
|
||||||
@ -51,8 +53,12 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
|||||||
self.doc3.tags.add(self.t2)
|
self.doc3.tags.add(self.t2)
|
||||||
self.doc4.tags.add(self.t1, self.t2)
|
self.doc4.tags.add(self.t1, self.t2)
|
||||||
self.sp1 = StoragePath.objects.create(name="sp1", path="Something/{checksum}")
|
self.sp1 = StoragePath.objects.create(name="sp1", path="Something/{checksum}")
|
||||||
self.cf1 = CustomField.objects.create(name="cf1", data_type="text")
|
self.cf1 = CustomField.objects.create(name="cf1", data_type="string")
|
||||||
self.cf2 = CustomField.objects.create(name="cf2", data_type="text")
|
self.cf2 = CustomField.objects.create(name="cf2", data_type="string")
|
||||||
|
|
||||||
|
def setup_mock(self, m, method_name, return_value="OK"):
|
||||||
|
m.return_value = return_value
|
||||||
|
m.__name__ = method_name
|
||||||
|
|
||||||
@mock.patch("documents.bulk_edit.bulk_update_documents.delay")
|
@mock.patch("documents.bulk_edit.bulk_update_documents.delay")
|
||||||
def test_api_set_correspondent(self, bulk_update_task_mock):
|
def test_api_set_correspondent(self, bulk_update_task_mock):
|
||||||
@ -178,7 +184,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
|||||||
|
|
||||||
@mock.patch("documents.serialisers.bulk_edit.modify_tags")
|
@mock.patch("documents.serialisers.bulk_edit.modify_tags")
|
||||||
def test_api_modify_tags(self, m):
|
def test_api_modify_tags(self, m):
|
||||||
m.return_value = "OK"
|
self.setup_mock(m, "modify_tags")
|
||||||
response = self.client.post(
|
response = self.client.post(
|
||||||
"/api/documents/bulk_edit/",
|
"/api/documents/bulk_edit/",
|
||||||
json.dumps(
|
json.dumps(
|
||||||
@ -211,7 +217,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
|||||||
- API returns HTTP 400
|
- API returns HTTP 400
|
||||||
- modify_tags is not called
|
- modify_tags is not called
|
||||||
"""
|
"""
|
||||||
m.return_value = "OK"
|
self.setup_mock(m, "modify_tags")
|
||||||
response = self.client.post(
|
response = self.client.post(
|
||||||
"/api/documents/bulk_edit/",
|
"/api/documents/bulk_edit/",
|
||||||
json.dumps(
|
json.dumps(
|
||||||
@ -230,7 +236,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
|||||||
|
|
||||||
@mock.patch("documents.serialisers.bulk_edit.modify_custom_fields")
|
@mock.patch("documents.serialisers.bulk_edit.modify_custom_fields")
|
||||||
def test_api_modify_custom_fields(self, m):
|
def test_api_modify_custom_fields(self, m):
|
||||||
m.return_value = "OK"
|
self.setup_mock(m, "modify_custom_fields")
|
||||||
response = self.client.post(
|
response = self.client.post(
|
||||||
"/api/documents/bulk_edit/",
|
"/api/documents/bulk_edit/",
|
||||||
json.dumps(
|
json.dumps(
|
||||||
@ -263,8 +269,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
|||||||
- API returns HTTP 400
|
- API returns HTTP 400
|
||||||
- modify_custom_fields is not called
|
- modify_custom_fields is not called
|
||||||
"""
|
"""
|
||||||
m.return_value = "OK"
|
self.setup_mock(m, "modify_custom_fields")
|
||||||
|
|
||||||
# Missing add_custom_fields
|
# Missing add_custom_fields
|
||||||
response = self.client.post(
|
response = self.client.post(
|
||||||
"/api/documents/bulk_edit/",
|
"/api/documents/bulk_edit/",
|
||||||
@ -359,7 +364,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
|||||||
|
|
||||||
@mock.patch("documents.serialisers.bulk_edit.delete")
|
@mock.patch("documents.serialisers.bulk_edit.delete")
|
||||||
def test_api_delete(self, m):
|
def test_api_delete(self, m):
|
||||||
m.return_value = "OK"
|
self.setup_mock(m, "delete")
|
||||||
response = self.client.post(
|
response = self.client.post(
|
||||||
"/api/documents/bulk_edit/",
|
"/api/documents/bulk_edit/",
|
||||||
json.dumps(
|
json.dumps(
|
||||||
@ -383,8 +388,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
|||||||
THEN:
|
THEN:
|
||||||
- set_storage_path is called with correct document IDs and storage_path ID
|
- set_storage_path is called with correct document IDs and storage_path ID
|
||||||
"""
|
"""
|
||||||
m.return_value = "OK"
|
self.setup_mock(m, "set_storage_path")
|
||||||
|
|
||||||
response = self.client.post(
|
response = self.client.post(
|
||||||
"/api/documents/bulk_edit/",
|
"/api/documents/bulk_edit/",
|
||||||
json.dumps(
|
json.dumps(
|
||||||
@ -414,8 +418,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
|||||||
THEN:
|
THEN:
|
||||||
- set_storage_path is called with correct document IDs and None storage_path
|
- set_storage_path is called with correct document IDs and None storage_path
|
||||||
"""
|
"""
|
||||||
m.return_value = "OK"
|
self.setup_mock(m, "set_storage_path")
|
||||||
|
|
||||||
response = self.client.post(
|
response = self.client.post(
|
||||||
"/api/documents/bulk_edit/",
|
"/api/documents/bulk_edit/",
|
||||||
json.dumps(
|
json.dumps(
|
||||||
@ -728,7 +731,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
|||||||
|
|
||||||
@mock.patch("documents.serialisers.bulk_edit.set_permissions")
|
@mock.patch("documents.serialisers.bulk_edit.set_permissions")
|
||||||
def test_set_permissions(self, m):
|
def test_set_permissions(self, m):
|
||||||
m.return_value = "OK"
|
self.setup_mock(m, "set_permissions")
|
||||||
user1 = User.objects.create(username="user1")
|
user1 = User.objects.create(username="user1")
|
||||||
user2 = User.objects.create(username="user2")
|
user2 = User.objects.create(username="user2")
|
||||||
permissions = {
|
permissions = {
|
||||||
@ -763,7 +766,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
|||||||
|
|
||||||
@mock.patch("documents.serialisers.bulk_edit.set_permissions")
|
@mock.patch("documents.serialisers.bulk_edit.set_permissions")
|
||||||
def test_set_permissions_merge(self, m):
|
def test_set_permissions_merge(self, m):
|
||||||
m.return_value = "OK"
|
self.setup_mock(m, "set_permissions")
|
||||||
user1 = User.objects.create(username="user1")
|
user1 = User.objects.create(username="user1")
|
||||||
user2 = User.objects.create(username="user2")
|
user2 = User.objects.create(username="user2")
|
||||||
permissions = {
|
permissions = {
|
||||||
@ -823,7 +826,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
|||||||
THEN:
|
THEN:
|
||||||
- User is not able to change permissions
|
- User is not able to change permissions
|
||||||
"""
|
"""
|
||||||
m.return_value = "OK"
|
self.setup_mock(m, "set_permissions")
|
||||||
self.doc1.owner = User.objects.get(username="temp_admin")
|
self.doc1.owner = User.objects.get(username="temp_admin")
|
||||||
self.doc1.save()
|
self.doc1.save()
|
||||||
user1 = User.objects.create(username="user1")
|
user1 = User.objects.create(username="user1")
|
||||||
@ -875,7 +878,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
|||||||
THEN:
|
THEN:
|
||||||
- set_storage_path only called if user can edit all docs
|
- set_storage_path only called if user can edit all docs
|
||||||
"""
|
"""
|
||||||
m.return_value = "OK"
|
self.setup_mock(m, "set_storage_path")
|
||||||
self.doc1.owner = User.objects.get(username="temp_admin")
|
self.doc1.owner = User.objects.get(username="temp_admin")
|
||||||
self.doc1.save()
|
self.doc1.save()
|
||||||
user1 = User.objects.create(username="user1")
|
user1 = User.objects.create(username="user1")
|
||||||
@ -919,8 +922,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
|||||||
|
|
||||||
@mock.patch("documents.serialisers.bulk_edit.rotate")
|
@mock.patch("documents.serialisers.bulk_edit.rotate")
|
||||||
def test_rotate(self, m):
|
def test_rotate(self, m):
|
||||||
m.return_value = "OK"
|
self.setup_mock(m, "rotate")
|
||||||
|
|
||||||
response = self.client.post(
|
response = self.client.post(
|
||||||
"/api/documents/bulk_edit/",
|
"/api/documents/bulk_edit/",
|
||||||
json.dumps(
|
json.dumps(
|
||||||
@ -974,8 +976,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
|||||||
|
|
||||||
@mock.patch("documents.serialisers.bulk_edit.merge")
|
@mock.patch("documents.serialisers.bulk_edit.merge")
|
||||||
def test_merge(self, m):
|
def test_merge(self, m):
|
||||||
m.return_value = "OK"
|
self.setup_mock(m, "merge")
|
||||||
|
|
||||||
response = self.client.post(
|
response = self.client.post(
|
||||||
"/api/documents/bulk_edit/",
|
"/api/documents/bulk_edit/",
|
||||||
json.dumps(
|
json.dumps(
|
||||||
@ -1003,8 +1004,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
|||||||
user1 = User.objects.create(username="user1")
|
user1 = User.objects.create(username="user1")
|
||||||
self.client.force_authenticate(user=user1)
|
self.client.force_authenticate(user=user1)
|
||||||
|
|
||||||
m.return_value = "OK"
|
self.setup_mock(m, "merge")
|
||||||
|
|
||||||
response = self.client.post(
|
response = self.client.post(
|
||||||
"/api/documents/bulk_edit/",
|
"/api/documents/bulk_edit/",
|
||||||
json.dumps(
|
json.dumps(
|
||||||
@ -1053,8 +1053,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
|||||||
THEN:
|
THEN:
|
||||||
- The API fails with a correct error code
|
- The API fails with a correct error code
|
||||||
"""
|
"""
|
||||||
m.return_value = "OK"
|
self.setup_mock(m, "merge")
|
||||||
|
|
||||||
response = self.client.post(
|
response = self.client.post(
|
||||||
"/api/documents/bulk_edit/",
|
"/api/documents/bulk_edit/",
|
||||||
json.dumps(
|
json.dumps(
|
||||||
@ -1074,8 +1073,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
|||||||
|
|
||||||
@mock.patch("documents.serialisers.bulk_edit.split")
|
@mock.patch("documents.serialisers.bulk_edit.split")
|
||||||
def test_split(self, m):
|
def test_split(self, m):
|
||||||
m.return_value = "OK"
|
self.setup_mock(m, "split")
|
||||||
|
|
||||||
response = self.client.post(
|
response = self.client.post(
|
||||||
"/api/documents/bulk_edit/",
|
"/api/documents/bulk_edit/",
|
||||||
json.dumps(
|
json.dumps(
|
||||||
@ -1165,8 +1163,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
|||||||
|
|
||||||
@mock.patch("documents.serialisers.bulk_edit.delete_pages")
|
@mock.patch("documents.serialisers.bulk_edit.delete_pages")
|
||||||
def test_delete_pages(self, m):
|
def test_delete_pages(self, m):
|
||||||
m.return_value = "OK"
|
self.setup_mock(m, "delete_pages")
|
||||||
|
|
||||||
response = self.client.post(
|
response = self.client.post(
|
||||||
"/api/documents/bulk_edit/",
|
"/api/documents/bulk_edit/",
|
||||||
json.dumps(
|
json.dumps(
|
||||||
@ -1254,3 +1251,87 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
|||||||
|
|
||||||
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
||||||
self.assertIn(b"pages must be a list of integers", response.content)
|
self.assertIn(b"pages must be a list of integers", response.content)
|
||||||
|
|
||||||
|
@override_settings(AUDIT_LOG_ENABLED=True)
|
||||||
|
def test_bulk_edit_audit_log_enabled_simple_field(self):
|
||||||
|
"""
|
||||||
|
GIVEN:
|
||||||
|
- Audit log is enabled
|
||||||
|
WHEN:
|
||||||
|
- API to bulk edit documents is called
|
||||||
|
THEN:
|
||||||
|
- Audit log is created
|
||||||
|
"""
|
||||||
|
LogEntry.objects.all().delete()
|
||||||
|
response = self.client.post(
|
||||||
|
"/api/documents/bulk_edit/",
|
||||||
|
json.dumps(
|
||||||
|
{
|
||||||
|
"documents": [self.doc1.id],
|
||||||
|
"method": "set_correspondent",
|
||||||
|
"parameters": {"correspondent": self.c2.id},
|
||||||
|
},
|
||||||
|
),
|
||||||
|
content_type="application/json",
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||||
|
self.assertEqual(LogEntry.objects.filter(object_pk=self.doc1.id).count(), 1)
|
||||||
|
|
||||||
|
@override_settings(AUDIT_LOG_ENABLED=True)
|
||||||
|
def test_bulk_edit_audit_log_enabled_tags(self):
|
||||||
|
"""
|
||||||
|
GIVEN:
|
||||||
|
- Audit log is enabled
|
||||||
|
WHEN:
|
||||||
|
- API to bulk edit tags is called
|
||||||
|
THEN:
|
||||||
|
- Audit log is created
|
||||||
|
"""
|
||||||
|
LogEntry.objects.all().delete()
|
||||||
|
response = self.client.post(
|
||||||
|
"/api/documents/bulk_edit/",
|
||||||
|
json.dumps(
|
||||||
|
{
|
||||||
|
"documents": [self.doc1.id],
|
||||||
|
"method": "modify_tags",
|
||||||
|
"parameters": {
|
||||||
|
"add_tags": [self.t1.id],
|
||||||
|
"remove_tags": [self.t2.id],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
),
|
||||||
|
content_type="application/json",
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||||
|
self.assertEqual(LogEntry.objects.filter(object_pk=self.doc1.id).count(), 1)
|
||||||
|
|
||||||
|
@override_settings(AUDIT_LOG_ENABLED=True)
|
||||||
|
def test_bulk_edit_audit_log_enabled_custom_fields(self):
|
||||||
|
"""
|
||||||
|
GIVEN:
|
||||||
|
- Audit log is enabled
|
||||||
|
WHEN:
|
||||||
|
- API to bulk edit custom fields is called
|
||||||
|
THEN:
|
||||||
|
- Audit log is created
|
||||||
|
"""
|
||||||
|
LogEntry.objects.all().delete()
|
||||||
|
response = self.client.post(
|
||||||
|
"/api/documents/bulk_edit/",
|
||||||
|
json.dumps(
|
||||||
|
{
|
||||||
|
"documents": [self.doc1.id],
|
||||||
|
"method": "modify_custom_fields",
|
||||||
|
"parameters": {
|
||||||
|
"add_custom_fields": [self.cf1.id],
|
||||||
|
"remove_custom_fields": [],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
),
|
||||||
|
content_type="application/json",
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||||
|
self.assertEqual(LogEntry.objects.filter(object_pk=self.doc1.id).count(), 2)
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
import json
|
import json
|
||||||
from datetime import date
|
from datetime import date
|
||||||
|
from unittest.mock import ANY
|
||||||
|
|
||||||
from django.contrib.auth.models import Permission
|
from django.contrib.auth.models import Permission
|
||||||
from django.contrib.auth.models import User
|
from django.contrib.auth.models import User
|
||||||
@ -61,7 +62,10 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
|
|||||||
"data_type": "select",
|
"data_type": "select",
|
||||||
"name": "Select Field",
|
"name": "Select Field",
|
||||||
"extra_data": {
|
"extra_data": {
|
||||||
"select_options": ["Option 1", "Option 2"],
|
"select_options": [
|
||||||
|
{"label": "Option 1", "id": "abc-123"},
|
||||||
|
{"label": "Option 2", "id": "def-456"},
|
||||||
|
],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
),
|
),
|
||||||
@ -73,7 +77,10 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
|
|||||||
|
|
||||||
self.assertCountEqual(
|
self.assertCountEqual(
|
||||||
data["extra_data"]["select_options"],
|
data["extra_data"]["select_options"],
|
||||||
["Option 1", "Option 2"],
|
[
|
||||||
|
{"label": "Option 1", "id": "abc-123"},
|
||||||
|
{"label": "Option 2", "id": "def-456"},
|
||||||
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_create_custom_field_nonunique_name(self):
|
def test_create_custom_field_nonunique_name(self):
|
||||||
@ -138,6 +145,133 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
|
|||||||
)
|
)
|
||||||
self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
|
self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
|
def test_custom_field_select_unique_ids(self):
|
||||||
|
"""
|
||||||
|
GIVEN:
|
||||||
|
- Nothing
|
||||||
|
- Existing custom field
|
||||||
|
WHEN:
|
||||||
|
- API request to create custom field with select options without id
|
||||||
|
THEN:
|
||||||
|
- Unique ids are generated for each option
|
||||||
|
"""
|
||||||
|
resp = self.client.post(
|
||||||
|
self.ENDPOINT,
|
||||||
|
json.dumps(
|
||||||
|
{
|
||||||
|
"data_type": "select",
|
||||||
|
"name": "Select Field",
|
||||||
|
"extra_data": {
|
||||||
|
"select_options": [
|
||||||
|
{"label": "Option 1"},
|
||||||
|
{"label": "Option 2"},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
),
|
||||||
|
content_type="application/json",
|
||||||
|
)
|
||||||
|
self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
|
||||||
|
|
||||||
|
data = resp.json()
|
||||||
|
|
||||||
|
self.assertCountEqual(
|
||||||
|
data["extra_data"]["select_options"],
|
||||||
|
[
|
||||||
|
{"label": "Option 1", "id": ANY},
|
||||||
|
{"label": "Option 2", "id": ANY},
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
# Add a new option
|
||||||
|
resp = self.client.patch(
|
||||||
|
f"{self.ENDPOINT}{data['id']}/",
|
||||||
|
json.dumps(
|
||||||
|
{
|
||||||
|
"extra_data": {
|
||||||
|
"select_options": data["extra_data"]["select_options"]
|
||||||
|
+ [{"label": "Option 3"}],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
),
|
||||||
|
content_type="application/json",
|
||||||
|
)
|
||||||
|
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||||
|
|
||||||
|
data = resp.json()
|
||||||
|
|
||||||
|
self.assertCountEqual(
|
||||||
|
data["extra_data"]["select_options"],
|
||||||
|
[
|
||||||
|
{"label": "Option 1", "id": ANY},
|
||||||
|
{"label": "Option 2", "id": ANY},
|
||||||
|
{"label": "Option 3", "id": ANY},
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_custom_field_select_options_pruned(self):
|
||||||
|
"""
|
||||||
|
GIVEN:
|
||||||
|
- Select custom field exists and document instance with one of the options
|
||||||
|
WHEN:
|
||||||
|
- API request to remove an option from the select field
|
||||||
|
THEN:
|
||||||
|
- The option is removed from the field
|
||||||
|
- The option is removed from the document instance
|
||||||
|
"""
|
||||||
|
custom_field_select = CustomField.objects.create(
|
||||||
|
name="Select Field",
|
||||||
|
data_type=CustomField.FieldDataType.SELECT,
|
||||||
|
extra_data={
|
||||||
|
"select_options": [
|
||||||
|
{"label": "Option 1", "id": "abc-123"},
|
||||||
|
{"label": "Option 2", "id": "def-456"},
|
||||||
|
{"label": "Option 3", "id": "ghi-789"},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
doc = Document.objects.create(
|
||||||
|
title="WOW",
|
||||||
|
content="the content",
|
||||||
|
checksum="123",
|
||||||
|
mime_type="application/pdf",
|
||||||
|
)
|
||||||
|
CustomFieldInstance.objects.create(
|
||||||
|
document=doc,
|
||||||
|
field=custom_field_select,
|
||||||
|
value_text="abc-123",
|
||||||
|
)
|
||||||
|
|
||||||
|
resp = self.client.patch(
|
||||||
|
f"{self.ENDPOINT}{custom_field_select.id}/",
|
||||||
|
json.dumps(
|
||||||
|
{
|
||||||
|
"extra_data": {
|
||||||
|
"select_options": [
|
||||||
|
{"label": "Option 1", "id": "abc-123"},
|
||||||
|
{"label": "Option 3", "id": "ghi-789"},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
),
|
||||||
|
content_type="application/json",
|
||||||
|
)
|
||||||
|
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||||
|
|
||||||
|
data = resp.json()
|
||||||
|
|
||||||
|
self.assertCountEqual(
|
||||||
|
data["extra_data"]["select_options"],
|
||||||
|
[
|
||||||
|
{"label": "Option 1", "id": "abc-123"},
|
||||||
|
{"label": "Option 3", "id": "ghi-789"},
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
doc.refresh_from_db()
|
||||||
|
self.assertEqual(doc.custom_fields.first().value, None)
|
||||||
|
|
||||||
def test_create_custom_field_monetary_validation(self):
|
def test_create_custom_field_monetary_validation(self):
|
||||||
"""
|
"""
|
||||||
GIVEN:
|
GIVEN:
|
||||||
@ -261,7 +395,10 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
|
|||||||
name="Test Custom Field Select",
|
name="Test Custom Field Select",
|
||||||
data_type=CustomField.FieldDataType.SELECT,
|
data_type=CustomField.FieldDataType.SELECT,
|
||||||
extra_data={
|
extra_data={
|
||||||
"select_options": ["Option 1", "Option 2"],
|
"select_options": [
|
||||||
|
{"label": "Option 1", "id": "abc-123"},
|
||||||
|
{"label": "Option 2", "id": "def-456"},
|
||||||
|
],
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -309,7 +446,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
"field": custom_field_select.id,
|
"field": custom_field_select.id,
|
||||||
"value": 0,
|
"value": "abc-123",
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
@ -332,7 +469,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
|
|||||||
{"field": custom_field_monetary.id, "value": "EUR11.10"},
|
{"field": custom_field_monetary.id, "value": "EUR11.10"},
|
||||||
{"field": custom_field_monetary2.id, "value": "11.1"},
|
{"field": custom_field_monetary2.id, "value": "11.1"},
|
||||||
{"field": custom_field_documentlink.id, "value": [doc2.id]},
|
{"field": custom_field_documentlink.id, "value": [doc2.id]},
|
||||||
{"field": custom_field_select.id, "value": 0},
|
{"field": custom_field_select.id, "value": "abc-123"},
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -722,7 +859,10 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
|
|||||||
name="Test Custom Field SELECT",
|
name="Test Custom Field SELECT",
|
||||||
data_type=CustomField.FieldDataType.SELECT,
|
data_type=CustomField.FieldDataType.SELECT,
|
||||||
extra_data={
|
extra_data={
|
||||||
"select_options": ["Option 1", "Option 2"],
|
"select_options": [
|
||||||
|
{"label": "Option 1", "id": "abc-123"},
|
||||||
|
{"label": "Option 2", "id": "def-456"},
|
||||||
|
],
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -730,7 +870,7 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
|
|||||||
f"/api/documents/{doc.id}/",
|
f"/api/documents/{doc.id}/",
|
||||||
data={
|
data={
|
||||||
"custom_fields": [
|
"custom_fields": [
|
||||||
{"field": custom_field_select.id, "value": 3},
|
{"field": custom_field_select.id, "value": "not an option"},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
format="json",
|
format="json",
|
||||||
|
@ -657,13 +657,16 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
|
|||||||
name="Test Custom Field Select",
|
name="Test Custom Field Select",
|
||||||
data_type=CustomField.FieldDataType.SELECT,
|
data_type=CustomField.FieldDataType.SELECT,
|
||||||
extra_data={
|
extra_data={
|
||||||
"select_options": ["Option 1", "Choice 2"],
|
"select_options": [
|
||||||
|
{"label": "Option 1", "id": "abc123"},
|
||||||
|
{"label": "Choice 2", "id": "def456"},
|
||||||
|
],
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
CustomFieldInstance.objects.create(
|
CustomFieldInstance.objects.create(
|
||||||
document=doc1,
|
document=doc1,
|
||||||
field=custom_field_select,
|
field=custom_field_select,
|
||||||
value_select=1,
|
value_select="def456",
|
||||||
)
|
)
|
||||||
|
|
||||||
r = self.client.get("/api/documents/?custom_fields__icontains=choice")
|
r = self.client.get("/api/documents/?custom_fields__icontains=choice")
|
||||||
|
@ -46,7 +46,13 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
|
|||||||
|
|
||||||
# Add some options to the select_field
|
# Add some options to the select_field
|
||||||
select = self.custom_fields["select_field"]
|
select = self.custom_fields["select_field"]
|
||||||
select.extra_data = {"select_options": ["A", "B", "C"]}
|
select.extra_data = {
|
||||||
|
"select_options": [
|
||||||
|
{"label": "A", "id": "abc-123"},
|
||||||
|
{"label": "B", "id": "def-456"},
|
||||||
|
{"label": "C", "id": "ghi-789"},
|
||||||
|
],
|
||||||
|
}
|
||||||
select.save()
|
select.save()
|
||||||
|
|
||||||
# Now we will create some test documents
|
# Now we will create some test documents
|
||||||
@ -122,9 +128,9 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
|
|||||||
|
|
||||||
# CustomField.FieldDataType.SELECT
|
# CustomField.FieldDataType.SELECT
|
||||||
self._create_document(select_field=None)
|
self._create_document(select_field=None)
|
||||||
self._create_document(select_field=0)
|
self._create_document(select_field="abc-123")
|
||||||
self._create_document(select_field=1)
|
self._create_document(select_field="def-456")
|
||||||
self._create_document(select_field=2)
|
self._create_document(select_field="ghi-789")
|
||||||
|
|
||||||
def _create_document(self, **kwargs):
|
def _create_document(self, **kwargs):
|
||||||
title = str(kwargs)
|
title = str(kwargs)
|
||||||
@ -296,18 +302,18 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def test_select(self):
|
def test_select(self):
|
||||||
# For select fields, you can either specify the index
|
# For select fields, you can either specify the id of the option
|
||||||
# or the name of the option. They function exactly the same.
|
# or the name of the option. They function exactly the same.
|
||||||
self._assert_query_match_predicate(
|
self._assert_query_match_predicate(
|
||||||
["select_field", "exact", 1],
|
["select_field", "exact", "def-456"],
|
||||||
lambda document: "select_field" in document
|
lambda document: "select_field" in document
|
||||||
and document["select_field"] == 1,
|
and document["select_field"] == "def-456",
|
||||||
)
|
)
|
||||||
# This is the same as:
|
# This is the same as:
|
||||||
self._assert_query_match_predicate(
|
self._assert_query_match_predicate(
|
||||||
["select_field", "exact", "B"],
|
["select_field", "exact", "B"],
|
||||||
lambda document: "select_field" in document
|
lambda document: "select_field" in document
|
||||||
and document["select_field"] == 1,
|
and document["select_field"] == "def-456",
|
||||||
)
|
)
|
||||||
|
|
||||||
# ==========================================================#
|
# ==========================================================#
|
||||||
@ -522,9 +528,9 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
|
|||||||
|
|
||||||
def test_invalid_value(self):
|
def test_invalid_value(self):
|
||||||
self._assert_validation_error(
|
self._assert_validation_error(
|
||||||
json.dumps(["select_field", "exact", "not an option"]),
|
json.dumps(["select_field", "exact", []]),
|
||||||
["custom_field_query", "2"],
|
["custom_field_query", "2"],
|
||||||
"integer",
|
"string",
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_invalid_logical_operator(self):
|
def test_invalid_logical_operator(self):
|
||||||
|
@ -544,7 +544,11 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
|
|||||||
name="test",
|
name="test",
|
||||||
data_type=CustomField.FieldDataType.SELECT,
|
data_type=CustomField.FieldDataType.SELECT,
|
||||||
extra_data={
|
extra_data={
|
||||||
"select_options": ["apple", "banana", "cherry"],
|
"select_options": [
|
||||||
|
{"label": "apple", "id": "abc123"},
|
||||||
|
{"label": "banana", "id": "def456"},
|
||||||
|
{"label": "cherry", "id": "ghi789"},
|
||||||
|
],
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
doc = Document.objects.create(
|
doc = Document.objects.create(
|
||||||
@ -555,14 +559,22 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
|
|||||||
archive_checksum="B",
|
archive_checksum="B",
|
||||||
mime_type="application/pdf",
|
mime_type="application/pdf",
|
||||||
)
|
)
|
||||||
CustomFieldInstance.objects.create(field=cf, document=doc, value_select=0)
|
CustomFieldInstance.objects.create(
|
||||||
|
field=cf,
|
||||||
|
document=doc,
|
||||||
|
value_select="abc123",
|
||||||
|
)
|
||||||
|
|
||||||
self.assertEqual(generate_filename(doc), "document_apple.pdf")
|
self.assertEqual(generate_filename(doc), "document_apple.pdf")
|
||||||
|
|
||||||
# handler should not have been called
|
# handler should not have been called
|
||||||
self.assertEqual(m.call_count, 0)
|
self.assertEqual(m.call_count, 0)
|
||||||
cf.extra_data = {
|
cf.extra_data = {
|
||||||
"select_options": ["aubergine", "banana", "cherry"],
|
"select_options": [
|
||||||
|
{"label": "aubergine", "id": "abc123"},
|
||||||
|
{"label": "banana", "id": "def456"},
|
||||||
|
{"label": "cherry", "id": "ghi789"},
|
||||||
|
],
|
||||||
}
|
}
|
||||||
cf.save()
|
cf.save()
|
||||||
self.assertEqual(generate_filename(doc), "document_aubergine.pdf")
|
self.assertEqual(generate_filename(doc), "document_aubergine.pdf")
|
||||||
@ -1373,13 +1385,18 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
|
|||||||
cf2 = CustomField.objects.create(
|
cf2 = CustomField.objects.create(
|
||||||
name="Select Field",
|
name="Select Field",
|
||||||
data_type=CustomField.FieldDataType.SELECT,
|
data_type=CustomField.FieldDataType.SELECT,
|
||||||
extra_data={"select_options": ["ChoiceOne", "ChoiceTwo"]},
|
extra_data={
|
||||||
|
"select_options": [
|
||||||
|
{"label": "ChoiceOne", "id": "abc=123"},
|
||||||
|
{"label": "ChoiceTwo", "id": "def-456"},
|
||||||
|
],
|
||||||
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
cfi1 = CustomFieldInstance.objects.create(
|
cfi1 = CustomFieldInstance.objects.create(
|
||||||
document=doc_a,
|
document=doc_a,
|
||||||
field=cf2,
|
field=cf2,
|
||||||
value_select=0,
|
value_select="abc=123",
|
||||||
)
|
)
|
||||||
|
|
||||||
cfi = CustomFieldInstance.objects.create(
|
cfi = CustomFieldInstance.objects.create(
|
||||||
|
87
src/documents/tests/test_migration_custom_field_selects.py
Normal file
@ -0,0 +1,87 @@
|
|||||||
|
from unittest.mock import ANY
|
||||||
|
|
||||||
|
from documents.tests.utils import TestMigrations
|
||||||
|
|
||||||
|
|
||||||
|
class TestMigrateCustomFieldSelects(TestMigrations):
|
||||||
|
migrate_from = "1058_workflowtrigger_schedule_date_custom_field_and_more"
|
||||||
|
migrate_to = "1059_alter_customfieldinstance_value_select"
|
||||||
|
|
||||||
|
def setUpBeforeMigration(self, apps):
|
||||||
|
CustomField = apps.get_model("documents.CustomField")
|
||||||
|
self.old_format = CustomField.objects.create(
|
||||||
|
name="cf1",
|
||||||
|
data_type="select",
|
||||||
|
extra_data={"select_options": ["Option 1", "Option 2", "Option 3"]},
|
||||||
|
)
|
||||||
|
Document = apps.get_model("documents.Document")
|
||||||
|
doc = Document.objects.create(title="doc1")
|
||||||
|
CustomFieldInstance = apps.get_model("documents.CustomFieldInstance")
|
||||||
|
self.old_instance = CustomFieldInstance.objects.create(
|
||||||
|
field=self.old_format,
|
||||||
|
value_select=0,
|
||||||
|
document=doc,
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_migrate_old_to_new_select_fields(self):
|
||||||
|
self.old_format.refresh_from_db()
|
||||||
|
self.old_instance.refresh_from_db()
|
||||||
|
|
||||||
|
self.assertEqual(
|
||||||
|
self.old_format.extra_data["select_options"],
|
||||||
|
[
|
||||||
|
{"label": "Option 1", "id": ANY},
|
||||||
|
{"label": "Option 2", "id": ANY},
|
||||||
|
{"label": "Option 3", "id": ANY},
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(
|
||||||
|
self.old_instance.value_select,
|
||||||
|
self.old_format.extra_data["select_options"][0]["id"],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TestMigrationCustomFieldSelectsReverse(TestMigrations):
|
||||||
|
migrate_from = "1059_alter_customfieldinstance_value_select"
|
||||||
|
migrate_to = "1058_workflowtrigger_schedule_date_custom_field_and_more"
|
||||||
|
|
||||||
|
def setUpBeforeMigration(self, apps):
|
||||||
|
CustomField = apps.get_model("documents.CustomField")
|
||||||
|
self.new_format = CustomField.objects.create(
|
||||||
|
name="cf1",
|
||||||
|
data_type="select",
|
||||||
|
extra_data={
|
||||||
|
"select_options": [
|
||||||
|
{"label": "Option 1", "id": "id1"},
|
||||||
|
{"label": "Option 2", "id": "id2"},
|
||||||
|
{"label": "Option 3", "id": "id3"},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
Document = apps.get_model("documents.Document")
|
||||||
|
doc = Document.objects.create(title="doc1")
|
||||||
|
CustomFieldInstance = apps.get_model("documents.CustomFieldInstance")
|
||||||
|
self.new_instance = CustomFieldInstance.objects.create(
|
||||||
|
field=self.new_format,
|
||||||
|
value_select="id1",
|
||||||
|
document=doc,
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_migrate_new_to_old_select_fields(self):
|
||||||
|
self.new_format.refresh_from_db()
|
||||||
|
self.new_instance.refresh_from_db()
|
||||||
|
|
||||||
|
self.assertEqual(
|
||||||
|
self.new_format.extra_data["select_options"],
|
||||||
|
[
|
||||||
|
"Option 1",
|
||||||
|
"Option 2",
|
||||||
|
"Option 3",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(
|
||||||
|
self.new_instance.value_select,
|
||||||
|
0,
|
||||||
|
)
|
@ -6,12 +6,14 @@ from django.conf import settings
|
|||||||
from django.contrib.auth.models import Permission
|
from django.contrib.auth.models import Permission
|
||||||
from django.contrib.auth.models import User
|
from django.contrib.auth.models import User
|
||||||
from django.test import TestCase
|
from django.test import TestCase
|
||||||
|
from django.test import override_settings
|
||||||
from django.utils import timezone
|
from django.utils import timezone
|
||||||
from rest_framework import status
|
from rest_framework import status
|
||||||
|
|
||||||
from documents.models import Document
|
from documents.models import Document
|
||||||
from documents.models import ShareLink
|
from documents.models import ShareLink
|
||||||
from documents.tests.utils import DirectoriesMixin
|
from documents.tests.utils import DirectoriesMixin
|
||||||
|
from paperless.models import ApplicationConfiguration
|
||||||
|
|
||||||
|
|
||||||
class TestViews(DirectoriesMixin, TestCase):
|
class TestViews(DirectoriesMixin, TestCase):
|
||||||
@ -67,6 +69,26 @@ class TestViews(DirectoriesMixin, TestCase):
|
|||||||
f"frontend/{language_actual}/main.js",
|
f"frontend/{language_actual}/main.js",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@override_settings(BASE_URL="/paperless/")
|
||||||
|
def test_index_app_logo_with_base_url(self):
|
||||||
|
"""
|
||||||
|
GIVEN:
|
||||||
|
- Existing config with app_logo specified
|
||||||
|
WHEN:
|
||||||
|
- Index page is loaded
|
||||||
|
THEN:
|
||||||
|
- app_logo is prefixed with BASE_URL
|
||||||
|
"""
|
||||||
|
config = ApplicationConfiguration.objects.first()
|
||||||
|
config.app_logo = "/logo/example.jpg"
|
||||||
|
config.save()
|
||||||
|
self.client.force_login(self.user)
|
||||||
|
response = self.client.get("/")
|
||||||
|
self.assertEqual(
|
||||||
|
response.context["APP_LOGO"],
|
||||||
|
f"/paperless{config.app_logo}",
|
||||||
|
)
|
||||||
|
|
||||||
def test_share_link_views(self):
|
def test_share_link_views(self):
|
||||||
"""
|
"""
|
||||||
GIVEN:
|
GIVEN:
|
||||||
|
@ -29,6 +29,7 @@ from documents.models import StoragePath
|
|||||||
from documents.models import Tag
|
from documents.models import Tag
|
||||||
from documents.models import Workflow
|
from documents.models import Workflow
|
||||||
from documents.models import WorkflowAction
|
from documents.models import WorkflowAction
|
||||||
|
from documents.models import WorkflowRun
|
||||||
from documents.models import WorkflowTrigger
|
from documents.models import WorkflowTrigger
|
||||||
from documents.signals import document_consumption_finished
|
from documents.signals import document_consumption_finished
|
||||||
from documents.tests.utils import DirectoriesMixin
|
from documents.tests.utils import DirectoriesMixin
|
||||||
@ -1306,6 +1307,275 @@ class TestWorkflows(DirectoriesMixin, FileSystemAssertsMixin, APITestCase):
|
|||||||
# group2 should have been added
|
# group2 should have been added
|
||||||
self.assertIn(self.group2, group_perms)
|
self.assertIn(self.group2, group_perms)
|
||||||
|
|
||||||
|
def test_workflow_scheduled_trigger_created(self):
|
||||||
|
"""
|
||||||
|
GIVEN:
|
||||||
|
- Existing workflow with SCHEDULED trigger against the created field and action that assigns owner
|
||||||
|
- Existing doc that matches the trigger
|
||||||
|
WHEN:
|
||||||
|
- Scheduled workflows are checked
|
||||||
|
THEN:
|
||||||
|
- Workflow runs, document owner is updated
|
||||||
|
"""
|
||||||
|
trigger = WorkflowTrigger.objects.create(
|
||||||
|
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
|
||||||
|
schedule_offset_days=1,
|
||||||
|
schedule_date_field="created",
|
||||||
|
)
|
||||||
|
action = WorkflowAction.objects.create(
|
||||||
|
assign_title="Doc assign owner",
|
||||||
|
assign_owner=self.user2,
|
||||||
|
)
|
||||||
|
w = Workflow.objects.create(
|
||||||
|
name="Workflow 1",
|
||||||
|
order=0,
|
||||||
|
)
|
||||||
|
w.triggers.add(trigger)
|
||||||
|
w.actions.add(action)
|
||||||
|
w.save()
|
||||||
|
|
||||||
|
now = timezone.localtime(timezone.now())
|
||||||
|
created = now - timedelta(weeks=520)
|
||||||
|
doc = Document.objects.create(
|
||||||
|
title="sample test",
|
||||||
|
correspondent=self.c,
|
||||||
|
original_filename="sample.pdf",
|
||||||
|
created=created,
|
||||||
|
)
|
||||||
|
|
||||||
|
tasks.check_scheduled_workflows()
|
||||||
|
|
||||||
|
doc.refresh_from_db()
|
||||||
|
self.assertEqual(doc.owner, self.user2)
|
||||||
|
|
||||||
|
def test_workflow_scheduled_trigger_added(self):
|
||||||
|
"""
|
||||||
|
GIVEN:
|
||||||
|
- Existing workflow with SCHEDULED trigger against the added field and action that assigns owner
|
||||||
|
- Existing doc that matches the trigger
|
||||||
|
WHEN:
|
||||||
|
- Scheduled workflows are checked
|
||||||
|
THEN:
|
||||||
|
- Workflow runs, document owner is updated
|
||||||
|
"""
|
||||||
|
trigger = WorkflowTrigger.objects.create(
|
||||||
|
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
|
||||||
|
schedule_offset_days=1,
|
||||||
|
schedule_date_field=WorkflowTrigger.ScheduleDateField.ADDED,
|
||||||
|
)
|
||||||
|
action = WorkflowAction.objects.create(
|
||||||
|
assign_title="Doc assign owner",
|
||||||
|
assign_owner=self.user2,
|
||||||
|
)
|
||||||
|
w = Workflow.objects.create(
|
||||||
|
name="Workflow 1",
|
||||||
|
order=0,
|
||||||
|
)
|
||||||
|
w.triggers.add(trigger)
|
||||||
|
w.actions.add(action)
|
||||||
|
w.save()
|
||||||
|
|
||||||
|
added = timezone.now() - timedelta(days=365)
|
||||||
|
doc = Document.objects.create(
|
||||||
|
title="sample test",
|
||||||
|
correspondent=self.c,
|
||||||
|
original_filename="sample.pdf",
|
||||||
|
added=added,
|
||||||
|
)
|
||||||
|
|
||||||
|
tasks.check_scheduled_workflows()
|
||||||
|
|
||||||
|
doc.refresh_from_db()
|
||||||
|
self.assertEqual(doc.owner, self.user2)
|
||||||
|
|
||||||
|
@mock.patch("documents.models.Document.objects.filter", autospec=True)
|
||||||
|
def test_workflow_scheduled_trigger_modified(self, mock_filter):
|
||||||
|
"""
|
||||||
|
GIVEN:
|
||||||
|
- Existing workflow with SCHEDULED trigger against the modified field and action that assigns owner
|
||||||
|
- Existing doc that matches the trigger
|
||||||
|
WHEN:
|
||||||
|
- Scheduled workflows are checked
|
||||||
|
THEN:
|
||||||
|
- Workflow runs, document owner is updated
|
||||||
|
"""
|
||||||
|
# we have to mock because modified field is auto_now
|
||||||
|
mock_filter.return_value = Document.objects.all()
|
||||||
|
trigger = WorkflowTrigger.objects.create(
|
||||||
|
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
|
||||||
|
schedule_offset_days=1,
|
||||||
|
schedule_date_field=WorkflowTrigger.ScheduleDateField.MODIFIED,
|
||||||
|
)
|
||||||
|
action = WorkflowAction.objects.create(
|
||||||
|
assign_title="Doc assign owner",
|
||||||
|
assign_owner=self.user2,
|
||||||
|
)
|
||||||
|
w = Workflow.objects.create(
|
||||||
|
name="Workflow 1",
|
||||||
|
order=0,
|
||||||
|
)
|
||||||
|
w.triggers.add(trigger)
|
||||||
|
w.actions.add(action)
|
||||||
|
w.save()
|
||||||
|
|
||||||
|
doc = Document.objects.create(
|
||||||
|
title="sample test",
|
||||||
|
correspondent=self.c,
|
||||||
|
original_filename="sample.pdf",
|
||||||
|
)
|
||||||
|
|
||||||
|
tasks.check_scheduled_workflows()
|
||||||
|
|
||||||
|
doc.refresh_from_db()
|
||||||
|
self.assertEqual(doc.owner, self.user2)
|
||||||
|
|
||||||
|
    def test_workflow_scheduled_trigger_custom_field(self):
        """
        GIVEN:
            - Existing workflow with SCHEDULED trigger against a custom field and action that assigns owner
            - Existing doc that matches the trigger
        WHEN:
            - Scheduled workflows are checked
        THEN:
            - Workflow runs, document owner is updated
        """
        trigger = WorkflowTrigger.objects.create(
            type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
            schedule_offset_days=1,
            schedule_date_field=WorkflowTrigger.ScheduleDateField.CUSTOM_FIELD,
            schedule_date_custom_field=self.cf1,
        )
        action = WorkflowAction.objects.create(
            assign_title="Doc assign owner",
            assign_owner=self.user2,
        )
        w = Workflow.objects.create(
            name="Workflow 1",
            order=0,
        )
        w.triggers.add(trigger)
        w.actions.add(action)
        w.save()

        doc = Document.objects.create(
            title="sample test",
            correspondent=self.c,
            original_filename="sample.pdf",
        )
        CustomFieldInstance.objects.create(
            document=doc,
            field=self.cf1,
            value_date=timezone.now() - timedelta(days=2),
        )

        tasks.check_scheduled_workflows()

        doc.refresh_from_db()
        self.assertEqual(doc.owner, self.user2)

    def test_workflow_scheduled_already_run(self):
        """
        GIVEN:
            - Existing workflow with SCHEDULED trigger
            - Existing doc that has already had the workflow run
        WHEN:
            - Scheduled workflows are checked
        THEN:
            - Workflow does not run again
        """
        trigger = WorkflowTrigger.objects.create(
            type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
            schedule_offset_days=1,
            schedule_date_field=WorkflowTrigger.ScheduleDateField.CREATED,
        )
        action = WorkflowAction.objects.create(
            assign_title="Doc assign owner",
            assign_owner=self.user2,
        )
        w = Workflow.objects.create(
            name="Workflow 1",
            order=0,
        )
        w.triggers.add(trigger)
        w.actions.add(action)
        w.save()

        doc = Document.objects.create(
            title="sample test",
            correspondent=self.c,
            original_filename="sample.pdf",
            created=timezone.now() - timedelta(days=2),
        )

        wr = WorkflowRun.objects.create(
            workflow=w,
            document=doc,
            type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
            run_at=timezone.now(),
        )
        self.assertEqual(
            str(wr),
            f"WorkflowRun of {w} at {wr.run_at} on {doc}",
        )  # coverage

        tasks.check_scheduled_workflows()

        doc.refresh_from_db()
        self.assertIsNone(doc.owner)

    def test_workflow_scheduled_trigger_too_early(self):
        """
        GIVEN:
            - Existing workflow with SCHEDULED trigger and recurring interval of 7 days
            - Workflow run date is 6 days ago
        WHEN:
            - Scheduled workflows are checked
        THEN:
            - Workflow does not run, as the recurring interval has not yet elapsed
        """
        trigger = WorkflowTrigger.objects.create(
            type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
            schedule_offset_days=30,
            schedule_date_field=WorkflowTrigger.ScheduleDateField.CREATED,
            schedule_is_recurring=True,
            schedule_recurring_interval_days=7,
        )
        action = WorkflowAction.objects.create(
            assign_title="Doc assign owner",
            assign_owner=self.user2,
        )
        w = Workflow.objects.create(
            name="Workflow 1",
            order=0,
        )
        w.triggers.add(trigger)
        w.actions.add(action)
        w.save()

        doc = Document.objects.create(
            title="sample test",
            correspondent=self.c,
            original_filename="sample.pdf",
            created=timezone.now() - timedelta(days=40),
        )

        WorkflowRun.objects.create(
            workflow=w,
            document=doc,
            type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
            run_at=timezone.now() - timedelta(days=6),
        )

        with self.assertLogs(level="DEBUG") as cm:
            tasks.check_scheduled_workflows()
            self.assertIn(
                "last run was within the recurring interval",
                " ".join(cm.output),
            )

        doc.refresh_from_db()
        self.assertIsNone(doc.owner)

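Note: the tests above invoke the check synchronously; outside the test suite the same task is driven by the new beat schedule entry added further below in this commit, and it can also be run by hand when debugging a workflow. A minimal sketch, assuming a configured Paperless-ngx environment (for example a Django shell):

# Run the scheduled-workflow check once, synchronously (the same call the tests use).
from documents import tasks

tasks.check_scheduled_workflows()
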
    def test_workflow_enabled_disabled(self):
        trigger = WorkflowTrigger.objects.create(
            type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
@@ -1354,7 +1624,7 @@ class TestWorkflows(DirectoriesMixin, FileSystemAssertsMixin, APITestCase):

    def test_new_trigger_type_raises_exception(self):
        trigger = WorkflowTrigger.objects.create(
-            type=4,
+            type=99,
        )
        action = WorkflowAction.objects.create(
            assign_title="Doc assign owner",
@@ -1370,7 +1640,7 @@ class TestWorkflows(DirectoriesMixin, FileSystemAssertsMixin, APITestCase):
        doc = Document.objects.create(
            title="test",
        )
-        self.assertRaises(Exception, document_matches_workflow, doc, w, 4)
+        self.assertRaises(Exception, document_matches_workflow, doc, w, 99)

    def test_removal_action_document_updated_workflow(self):
        """
@@ -26,11 +26,13 @@ from django.db.models import Case
from django.db.models import Count
from django.db.models import IntegerField
from django.db.models import Max
+from django.db.models import Model
from django.db.models import Q
from django.db.models import Sum
from django.db.models import When
from django.db.models.functions import Length
from django.db.models.functions import Lower
+from django.db.models.manager import Manager
from django.http import Http404
from django.http import HttpResponse
from django.http import HttpResponseBadRequest
@@ -426,7 +428,7 @@ class DocumentViewSet(
        )

    def file_response(self, pk, request, disposition):
-        doc = Document.objects.select_related("owner").get(id=pk)
+        doc = Document.global_objects.select_related("owner").get(id=pk)
        if request.user is not None and not has_perms_owner_aware(
            request.user,
            "view_document",
@@ -961,6 +963,22 @@ class SavedViewViewSet(ModelViewSet, PassUserMixin):


class BulkEditView(PassUserMixin):
+    MODIFIED_FIELD_BY_METHOD = {
+        "set_correspondent": "correspondent",
+        "set_document_type": "document_type",
+        "set_storage_path": "storage_path",
+        "add_tag": "tags",
+        "remove_tag": "tags",
+        "modify_tags": "tags",
+        "modify_custom_fields": "custom_fields",
+        "set_permissions": None,
+        "delete": "deleted_at",
+        "rotate": "checksum",
+        "delete_pages": "checksum",
+        "split": None,
+        "merge": None,
+    }
+
    permission_classes = (IsAuthenticated,)
    serializer_class = BulkEditSerializer
    parser_classes = (parsers.JSONParser,)
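Note: this mapping is consumed by the audit-log handling added in the next hunk. Each bulk-edit method name points at the Document field whose before and after values should be recorded; a value of None means there is no single field to diff, so no audit entry is written for that method. A small illustrative sketch (entries copied from the mapping above):

# Illustration only: resolve which field to audit for a given bulk-edit method.
MODIFIED_FIELD_BY_METHOD = {
    "set_correspondent": "correspondent",
    "set_permissions": None,
}

def field_to_audit(method_name):
    return MODIFIED_FIELD_BY_METHOD[method_name]

assert field_to_audit("set_correspondent") == "correspondent"
assert field_to_audit("set_permissions") is None  # nothing to record
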
@@ -1013,8 +1031,53 @@ class BulkEditView(PassUserMixin):
            return HttpResponseForbidden("Insufficient permissions")

        try:
+            modified_field = self.MODIFIED_FIELD_BY_METHOD[method.__name__]
+            if settings.AUDIT_LOG_ENABLED and modified_field:
+                old_documents = {
+                    obj["pk"]: obj
+                    for obj in Document.objects.filter(pk__in=documents).values(
+                        "pk",
+                        "correspondent",
+                        "document_type",
+                        "storage_path",
+                        "tags",
+                        "custom_fields",
+                        "deleted_at",
+                        "checksum",
+                    )
+                }
+
            # TODO: parameter validation
            result = method(documents, **parameters)

+            if settings.AUDIT_LOG_ENABLED and modified_field:
+                new_documents = Document.objects.filter(pk__in=documents)
+                for doc in new_documents:
+                    old_value = old_documents[doc.pk][modified_field]
+                    new_value = getattr(doc, modified_field)
+
+                    if isinstance(new_value, Model):
+                        # correspondent, document type, etc.
+                        new_value = new_value.pk
+                    elif isinstance(new_value, Manager):
+                        # tags, custom fields
+                        new_value = list(new_value.values_list("pk", flat=True))
+
+                    LogEntry.objects.log_create(
+                        instance=doc,
+                        changes={
+                            modified_field: [
+                                old_value,
+                                new_value,
+                            ],
+                        },
+                        action=LogEntry.Action.UPDATE,
+                        actor=user,
+                        additional_data={
+                            "reason": f"Bulk edit: {method.__name__}",
+                        },
+                    )
+
            return Response({"result": result})
        except Exception as e:
            logger.warning(f"An error occurred performing bulk edit: {e!s}")
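Note: a rough sketch of the audit record the block above produces for a bulk "set_correspondent" call; the primary keys are made up for illustration, and the field name comes from MODIFIED_FIELD_BY_METHOD:

# Illustration only: the changes payload pairs the old and new value of the
# modified field, keyed by that field's name.
old_value, new_value = 2, 5  # hypothetical correspondent primary keys
changes = {"correspondent": [old_value, new_value]}
additional_data = {"reason": "Bulk edit: set_correspondent"}
print(changes, additional_data)
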
@@ -1546,6 +1609,12 @@ class StoragePathViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):
    filterset_class = StoragePathFilterSet
    ordering_fields = ("name", "path", "matching_algorithm", "match", "document_count")
+
+    def get_permissions(self):
+        if self.action == "test":
+            # Test action does not require object level permissions
+            self.permission_classes = (IsAuthenticated,)
+        return super().get_permissions()

    def destroy(self, request, *args, **kwargs):
        """
        When a storage path is deleted, see if documents
@@ -1562,17 +1631,12 @@ class StoragePathViewSet(ModelViewSet, PermissionsAwareDocumentCountMixin):

        return response

-class StoragePathTestView(GenericAPIView):
-    """
-    Test storage path against a document
-    """
-    permission_classes = [IsAuthenticated]
-    serializer_class = StoragePathTestSerializer
-
-    def post(self, request, *args, **kwargs):
-        serializer = self.get_serializer(data=request.data)
+    @action(methods=["post"], detail=False)
+    def test(self, request):
+        """
+        Test storage path against a document
+        """
+        serializer = StoragePathTestSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        document = serializer.validated_data.get("document")
@@ -5,9 +5,9 @@ from paperless.checks import paths_check
from paperless.checks import settings_values_check

__all__ = [
-    "celery_app",
+    "audit_log_check",
    "binaries_check",
+    "celery_app",
    "paths_check",
    "settings_values_check",
-    "audit_log_check",
]
@@ -216,6 +216,17 @@ def _parse_beat_schedule() -> dict:
                "expires": 23.0 * 60.0 * 60.0,
            },
        },
+        {
+            "name": "Check and run scheduled workflows",
+            "env_key": "PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON",
+            # Default hourly at 5 minutes past the hour
+            "env_default": "5 */1 * * *",
+            "task": "documents.tasks.check_scheduled_workflows",
+            "options": {
+                # 1 minute before default schedule sends again
+                "expires": 59.0 * 60.0,
+            },
+        },
    ]
    for task in tasks:
        # Either get the environment setting or use the default
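Note: like the other *_TASK_CRON entries in this list, the new task reads its cron expression from the environment, and the tests further down show that the value "disable" turns it off. A minimal sketch of overriding the default, assuming the same five-field cron syntax used by the surrounding entries:

import os

# Hypothetical override: check scheduled workflows every 30 minutes instead of
# the default "5 */1 * * *" (minute 5 of every hour); "disable" would skip the task.
os.environ["PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON"] = "*/30 * * * *"
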
@@ -157,6 +157,7 @@ class TestCeleryScheduleParsing(TestCase):
    INDEX_EXPIRE_TIME = 23.0 * 60.0 * 60.0
    SANITY_EXPIRE_TIME = ((7.0 * 24.0) - 1.0) * 60.0 * 60.0
    EMPTY_TRASH_EXPIRE_TIME = 23.0 * 60.0 * 60.0
+    RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME = 59.0 * 60.0

    def test_schedule_configuration_default(self):
        """
@@ -196,6 +197,11 @@ class TestCeleryScheduleParsing(TestCase):
                    "schedule": crontab(minute=0, hour="1"),
                    "options": {"expires": self.EMPTY_TRASH_EXPIRE_TIME},
                },
+                "Check and run scheduled workflows": {
+                    "task": "documents.tasks.check_scheduled_workflows",
+                    "schedule": crontab(minute="5", hour="*/1"),
+                    "options": {"expires": self.RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME},
+                },
            },
            schedule,
        )
@@ -243,6 +249,11 @@ class TestCeleryScheduleParsing(TestCase):
                    "schedule": crontab(minute=0, hour="1"),
                    "options": {"expires": self.EMPTY_TRASH_EXPIRE_TIME},
                },
+                "Check and run scheduled workflows": {
+                    "task": "documents.tasks.check_scheduled_workflows",
+                    "schedule": crontab(minute="5", hour="*/1"),
+                    "options": {"expires": self.RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME},
+                },
            },
            schedule,
        )
@@ -282,6 +293,11 @@ class TestCeleryScheduleParsing(TestCase):
                    "schedule": crontab(minute=0, hour="1"),
                    "options": {"expires": self.EMPTY_TRASH_EXPIRE_TIME},
                },
+                "Check and run scheduled workflows": {
+                    "task": "documents.tasks.check_scheduled_workflows",
+                    "schedule": crontab(minute="5", hour="*/1"),
+                    "options": {"expires": self.RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME},
+                },
            },
            schedule,
        )
@@ -303,6 +319,7 @@ class TestCeleryScheduleParsing(TestCase):
                "PAPERLESS_SANITY_TASK_CRON": "disable",
                "PAPERLESS_INDEX_TASK_CRON": "disable",
                "PAPERLESS_EMPTY_TRASH_TASK_CRON": "disable",
+                "PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON": "disable",
            },
        ):
            schedule = _parse_beat_schedule()
@@ -11,7 +11,6 @@ from django.contrib.auth.decorators import login_required
from django.urls import path
from django.urls import re_path
from django.utils.translation import gettext_lazy as _
-from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.csrf import ensure_csrf_cookie
from django.views.generic import RedirectView
from django.views.static import serve
@@ -35,7 +34,6 @@ from documents.views import SelectionDataView
from documents.views import SharedLinkView
from documents.views import ShareLinkViewSet
from documents.views import StatisticsView
-from documents.views import StoragePathTestView
from documents.views import StoragePathViewSet
from documents.views import SystemStatusView
from documents.views import TagViewSet
@@ -56,7 +54,6 @@ from paperless.views import ProfileView
from paperless.views import SocialAccountProvidersView
from paperless.views import TOTPView
from paperless.views import UserViewSet
-from paperless_mail.views import MailAccountTestView
from paperless_mail.views import MailAccountViewSet
from paperless_mail.views import MailRuleViewSet
from paperless_mail.views import OauthCallbackView
@@ -94,58 +91,83 @@ urlpatterns = [
                        namespace="rest_framework",
                    ),
                ),
-                re_path(
-                    "^search/autocomplete/",
-                    SearchAutoCompleteView.as_view(),
-                    name="autocomplete",
-                ),
                re_path(
                    "^search/",
-                    GlobalSearchView.as_view(),
-                    name="global_search",
-                ),
-                re_path("^statistics/", StatisticsView.as_view(), name="statistics"),
-                re_path(
-                    "^documents/post_document/",
-                    PostDocumentView.as_view(),
-                    name="post_document",
+                    include(
+                        [
+                            re_path(
+                                "^$",
+                                GlobalSearchView.as_view(),
+                                name="global_search",
+                            ),
+                            re_path(
+                                "^autocomplete/",
+                                SearchAutoCompleteView.as_view(),
+                                name="autocomplete",
+                            ),
+                        ],
+                    ),
                ),
                re_path(
-                    "^documents/bulk_edit/",
-                    BulkEditView.as_view(),
-                    name="bulk_edit",
+                    "^statistics/",
+                    StatisticsView.as_view(),
+                    name="statistics",
                ),
                re_path(
-                    "^documents/selection_data/",
-                    SelectionDataView.as_view(),
-                    name="selection_data",
+                    "^documents/",
+                    include(
+                        [
+                            re_path(
+                                "^post_document/",
+                                PostDocumentView.as_view(),
+                                name="post_document",
+                            ),
+                            re_path(
+                                "^bulk_edit/",
+                                BulkEditView.as_view(),
+                                name="bulk_edit",
+                            ),
+                            re_path(
+                                "^bulk_download/",
+                                BulkDownloadView.as_view(),
+                                name="bulk_download",
+                            ),
+                            re_path(
+                                "^selection_data/",
+                                SelectionDataView.as_view(),
+                                name="selection_data",
+                            ),
+                        ],
+                    ),
                ),
-                re_path(
-                    "^documents/bulk_download/",
-                    BulkDownloadView.as_view(),
-                    name="bulk_download",
-                ),
-                re_path(
-                    "^remote_version/",
-                    RemoteVersionView.as_view(),
-                    name="remoteversion",
-                ),
-                re_path("^ui_settings/", UiSettingsView.as_view(), name="ui_settings"),
-                re_path(
-                    "^mail_accounts/test/",
-                    MailAccountTestView.as_view(),
-                    name="mail_accounts_test",
-                ),
-                path("token/", views.obtain_auth_token),
                re_path(
                    "^bulk_edit_objects/",
                    BulkEditObjectsView.as_view(),
                    name="bulk_edit_objects",
                ),
+                re_path(
+                    "^remote_version/",
+                    RemoteVersionView.as_view(),
+                    name="remoteversion",
+                ),
+                re_path(
+                    "^ui_settings/",
+                    UiSettingsView.as_view(),
+                    name="ui_settings",
+                ),
+                path(
+                    "token/",
+                    views.obtain_auth_token,
+                ),
                re_path(
                    "^profile/",
                    include(
                        [
+                            re_path(
+                                "^$",
+                                ProfileView.as_view(),
+                                name="profile_view",
+                            ),
                            path(
                                "generate_auth_token/",
                                GenerateAuthTokenView.as_view(),
@@ -158,11 +180,6 @@ urlpatterns = [
                                "social_account_providers/",
                                SocialAccountProvidersView.as_view(),
                            ),
-                            re_path(
-                                "^$",
-                                ProfileView.as_view(),
-                                name="profile_view",
-                            ),
                            path(
                                "totp/",
                                TOTPView.as_view(),
@@ -181,11 +198,6 @@ urlpatterns = [
                    TrashView.as_view(),
                    name="trash",
                ),
-                re_path(
-                    "^storage_paths/test/",
-                    StoragePathTestView.as_view(),
-                    name="storage_paths_test",
-                ),
                re_path(
                    r"^oauth/callback/",
                    OauthCallbackView.as_view(),
@@ -223,14 +235,6 @@ urlpatterns = [
            ],
        ),
    ),
-    re_path(
-        r"^push$",
-        csrf_exempt(
-            RedirectView.as_view(
-                url=settings.BASE_URL + "api/documents/post_document/",
-            ),
-        ),
-    ),
    # Frontend assets TODO: this is pretty bad, but it works.
    path(
        "assets/<path:path>",
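Note: the regrouped search and document routes keep their name= values, so name-based URL lookups continue to resolve after the move. A minimal sketch, assuming a configured Django settings module (the printed paths are illustrative):

from django.urls import reverse

# Route names are carried over unchanged by the regrouping above.
print(reverse("post_document"))  # e.g. /api/documents/post_document/
print(reverse("global_search"))  # e.g. /api/search/
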
@@ -6,6 +6,7 @@ from django.http import HttpResponseBadRequest
from django.http import HttpResponseRedirect
from django.utils import timezone
from httpx_oauth.oauth2 import GetAccessTokenError
+from rest_framework.decorators import action
from rest_framework.generics import GenericAPIView
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
@@ -34,22 +35,14 @@ class MailAccountViewSet(ModelViewSet, PassUserMixin):
    permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
    filter_backends = (ObjectOwnedOrGrantedPermissionsFilter,)

-
-class MailRuleViewSet(ModelViewSet, PassUserMixin):
-    model = MailRule
-
-    queryset = MailRule.objects.all().order_by("order")
-    serializer_class = MailRuleSerializer
-    pagination_class = StandardPagination
-    permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
-    filter_backends = (ObjectOwnedOrGrantedPermissionsFilter,)
-
-
-class MailAccountTestView(GenericAPIView):
-    permission_classes = (IsAuthenticated,)
-    serializer_class = MailAccountSerializer
-
-    def post(self, request, *args, **kwargs):
+    def get_permissions(self):
+        if self.action == "test":
+            # Test action does not require object level permissions
+            self.permission_classes = (IsAuthenticated,)
+        return super().get_permissions()
+
+    @action(methods=["post"], detail=False)
+    def test(self, request):
        logger = logging.getLogger("paperless_mail")
        request.data["name"] = datetime.datetime.now().isoformat()
        serializer = self.get_serializer(data=request.data)
@@ -95,6 +88,16 @@ class MailAccountTestView(GenericAPIView):
        return HttpResponseBadRequest("Unable to connect to server")


+class MailRuleViewSet(ModelViewSet, PassUserMixin):
+    model = MailRule
+
+    queryset = MailRule.objects.all().order_by("order")
+    serializer_class = MailRuleSerializer
+    pagination_class = StandardPagination
+    permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
+    filter_backends = (ObjectOwnedOrGrantedPermissionsFilter,)
+
+
class OauthCallbackView(GenericAPIView):
    permission_classes = (IsAuthenticated,)

@@ -2,4 +2,4 @@
from paperless_tesseract.checks import check_default_language_available
from paperless_tesseract.checks import get_tesseract_langs

-__all__ = ["get_tesseract_langs", "check_default_language_available"]
+__all__ = ["check_default_language_available", "get_tesseract_langs"]