Mirror of https://github.com/paperless-ngx/paperless-ngx.git, synced 2025-12-31 13:58:04 -06:00
Compare commits: v2.16.2...b06c0a0eba (10 commits)
| SHA1 |
|---|
| b06c0a0eba |
| e9746aa0e3 |
| bfaab21589 |
| 3849569bd1 |
| c40a7751b9 |
| f39463ff4e |
| 2ada8ec681 |
| 4c6fdbb21f |
| 889c4378a9 |
| 06dd039083 |
.github/workflows/ci.yml (vendored)
@@ -6,9 +6,13 @@ on:
       - 'v[0-9]+.[0-9]+.[0-9]+'
       # https://semver.org/#spec-item-9
       - 'v[0-9]+.[0-9]+.[0-9]+-beta.rc[0-9]+'
     branches-ignore:
       - 'translations**'
   pull_request:
     branches-ignore:
       - 'translations**'
 env:
-  DEFAULT_UV_VERSION: "0.6.x"
+  DEFAULT_UV_VERSION: "0.7.x"
   # This is the default version of Python to use in most steps which aren't specific
   DEFAULT_PYTHON_VERSION: "3.11"
 jobs:
@@ -32,7 +32,7 @@ RUN set -eux \
 # Purpose: Installs s6-overlay and rootfs
 # Comments:
 # - Don't leave anything extra in here either
-FROM ghcr.io/astral-sh/uv:0.6.16-python3.12-bookworm-slim AS s6-overlay-base
+FROM ghcr.io/astral-sh/uv:0.7.8-python3.12-bookworm-slim AS s6-overlay-base

 WORKDIR /usr/src/s6
@@ -179,6 +179,7 @@ variables:
 | ---------------------------- | ---------------------------------------------- |
 | `DOCUMENT_ID`                | Database primary key of the document           |
 | `DOCUMENT_FILE_NAME`         | Formatted filename, not including paths        |
+| `DOCUMENT_TYPE`              | The document type (if any)                     |
 | `DOCUMENT_CREATED`           | Date & time when document created              |
 | `DOCUMENT_MODIFIED`          | Date & time when document was last modified    |
 | `DOCUMENT_ADDED`             | Date & time when document was added            |
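The table above documents the environment variables handed to pre/post-consume scripts. As a minimal sketch (Python chosen only for illustration; the log path below is an assumption, not part of the documented setup), a post-consume script could read them like this:

```python
#!/usr/bin/env python3
# Minimal sketch of a post-consume script reading the documented variables.
# The log path is an assumption for illustration only.
import os
from datetime import datetime


def main() -> None:
    doc_id = os.environ.get("DOCUMENT_ID", "?")
    file_name = os.environ.get("DOCUMENT_FILE_NAME", "?")
    doc_type = os.environ.get("DOCUMENT_TYPE", "")  # empty if the document has no type
    created = os.environ.get("DOCUMENT_CREATED", "?")

    line = (
        f"{datetime.now().isoformat()} consumed id={doc_id} "
        f"file={file_name} type={doc_type or 'none'} created={created}\n"
    )
    with open("/tmp/post-consume.log", "a") as log:  # assumed log location
        log.write(line)


if __name__ == "__main__":
    main()
```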
@@ -1,5 +1,30 @@
 # Changelog

+## paperless-ngx 2.16.2
+
+### Bug Fixes
+
+- Fix: accept datetime for created [@shamoon](https://github.com/shamoon) ([#10021](https://github.com/paperless-ngx/paperless-ngx/pull/10021))
+- Fix: created date fixes in v2.16 [@shamoon](https://github.com/shamoon) ([#10026](https://github.com/paperless-ngx/paperless-ngx/pull/10026))
+- Fix: mark fields for created objects as dirty [@shamoon](https://github.com/shamoon) ([#10022](https://github.com/paperless-ngx/paperless-ngx/pull/10022))
+- Fix: add fallback to copyfile on PermissionError @samuel-kosmann ([#10023](https://github.com/paperless-ngx/paperless-ngx/pull/10023))
+
+### Dependencies
+
+- Chore: warn users about removal of postgres v13 support [@shamoon](https://github.com/shamoon) ([#9980](https://github.com/paperless-ngx/paperless-ngx/pull/9980))
+
+### All App Changes
+
+<details>
+<summary>5 changes</summary>
+
+- Fix: accept datetime for created [@shamoon](https://github.com/shamoon) ([#10021](https://github.com/paperless-ngx/paperless-ngx/pull/10021))
+- Fix: add fallback to copyfile on PermissionError @samuel-kosmann ([#10023](https://github.com/paperless-ngx/paperless-ngx/pull/10023))
+- Fix: created date fixes in v2.16 [@shamoon](https://github.com/shamoon) ([#10026](https://github.com/paperless-ngx/paperless-ngx/pull/10026))
+- Fix: mark fields for created objects as dirty [@shamoon](https://github.com/shamoon) ([#10022](https://github.com/paperless-ngx/paperless-ngx/pull/10022))
+- Chore: warn users about removal of postgres v13 support [@shamoon](https://github.com/shamoon) ([#9980](https://github.com/paperless-ngx/paperless-ngx/pull/9980))
+</details>
+
 ## paperless-ngx 2.16.1

 ### Bug Fixes
@@ -6,6 +6,7 @@ A document with an id of ${DOCUMENT_ID} was just consumed. I know the
 following additional information about it:

 * Generated File Name: ${DOCUMENT_FILE_NAME}
+* Document type: ${DOCUMENT_TYPE}
 * Archive Path: ${DOCUMENT_ARCHIVE_PATH}
 * Source Path: ${DOCUMENT_SOURCE_PATH}
 * Created: ${DOCUMENT_CREATED}
@@ -1084,7 +1084,7 @@
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/app-frame/global-search/global-search.component.ts</context>
-          <context context-type="linenumber">120</context>
+          <context context-type="linenumber">121</context>
         </context-group>
       </trans-unit>
       <trans-unit id="2818183879511244335" datatype="html">
@@ -3092,22 +3092,22 @@
         <source>Successfully updated object.</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/app-frame/global-search/global-search.component.ts</context>
-          <context context-type="linenumber">209</context>
+          <context context-type="linenumber">210</context>
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/app-frame/global-search/global-search.component.ts</context>
-          <context context-type="linenumber">247</context>
+          <context context-type="linenumber">248</context>
         </context-group>
       </trans-unit>
       <trans-unit id="1801333259018423190" datatype="html">
         <source>Error occurred saving object.</source>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/app-frame/global-search/global-search.component.ts</context>
-          <context context-type="linenumber">212</context>
+          <context context-type="linenumber">213</context>
         </context-group>
         <context-group purpose="location">
           <context context-type="sourcefile">src/app/components/app-frame/global-search/global-search.component.ts</context>
-          <context context-type="linenumber">250</context>
+          <context context-type="linenumber">251</context>
         </context-group>
       </trans-unit>
       <trans-unit id="8193912662253833654" datatype="html">
@@ -529,6 +529,17 @@ describe('GlobalSearchComponent', () => {
     expect(dispatchSpy).toHaveBeenCalledTimes(2) // once for keydown, second for click
   })

+  it('should support using base href in navigateOrOpenInNewWindow', () => {
+    jest
+      .spyOn(component['locationStrategy'], 'getBaseHref')
+      .mockReturnValue('/base/')
+    const openSpy = jest.spyOn(window, 'open')
+    const event = new Event('click')
+    event['ctrlKey'] = true
+    component.primaryAction(DataType.Document, { id: 1 }, event as any)
+    expect(openSpy).toHaveBeenCalledWith('/base/documents/1', '_blank')
+  })
+
   it('should support title content search and advanced search', () => {
     const qfSpy = jest.spyOn(documentListViewService, 'quickFilter')
     component.query = 'test'
@@ -1,4 +1,4 @@
-import { NgTemplateOutlet } from '@angular/common'
+import { LocationStrategy, NgTemplateOutlet } from '@angular/common'
 import {
   Component,
   ElementRef,
@@ -99,7 +99,8 @@ export class GlobalSearchComponent implements OnInit {
     private permissionsService: PermissionsService,
     private toastService: ToastService,
     private hotkeyService: HotKeyService,
-    private settingsService: SettingsService
+    private settingsService: SettingsService,
+    private locationStrategy: LocationStrategy
   ) {
     this.queryDebounce = new Subject<string>()
@@ -421,10 +422,13 @@ export class GlobalSearchComponent implements OnInit {
     extras: Object = {}
   ) {
     if (newWindow) {
-      const url = this.router.serializeUrl(
+      const serializedUrl = this.router.serializeUrl(
         this.router.createUrlTree(commands, extras)
       )
-      window.open(url, '_blank')
+      const baseHref = this.locationStrategy.getBaseHref()
+      const fullUrl =
+        baseHref.replace(/\/+$/, '') + '/' + serializedUrl.replace(/^\/+/, '')
+      window.open(fullUrl, '_blank')
     } else {
       this.router.navigate(commands, extras)
     }
@@ -303,6 +303,7 @@ class ConsumerPlugin(
         script_env = os.environ.copy()

         script_env["DOCUMENT_ID"] = str(document.pk)
+        script_env["DOCUMENT_TYPE"] = str(document.document_type)
         script_env["DOCUMENT_CREATED"] = str(document.created)
         script_env["DOCUMENT_MODIFIED"] = str(document.modified)
         script_env["DOCUMENT_ADDED"] = str(document.added)
@@ -39,7 +39,8 @@ def migrate_date(apps, schema_editor):
                 f"[1067_alter_document_created] {total_updated} of {total_checked} processed...",
             )

-    print(f"[1067_alter_document_created] completed for {total_checked} documents.")
+    if total_checked > 0:
+        print(f"[1067_alter_document_created] completed for {total_checked} documents.")


 class Migration(migrations.Migration):
@@ -387,6 +387,18 @@ def empty_trash(doc_ids=None):

 @shared_task
 def check_scheduled_workflows():
+    """
+    Check and run all enabled scheduled workflows.
+
+    Scheduled triggers are evaluated based on a target date field (e.g. added, created, modified, or a custom date field),
+    combined with a day offset.
+
+    The offset is mathematically negated resulting in the following behavior:
+    - Positive offsets mean the workflow should trigger BEFORE the specified date (e.g., offset = +7 → trigger 7 days before)
+    - Negative offsets mean the workflow should trigger AFTER the specified date (e.g., offset = -7 → trigger 7 days after)
+
+    Once a document satisfies this condition, and recurring/non-recurring constraints are met, the workflow is run.
+    """
     scheduled_workflows: list[Workflow] = (
         Workflow.objects.filter(
             triggers__type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
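The docstring above explains that the schedule offset is negated before use. A minimal sketch of that arithmetic (the helper below is illustrative only and not the actual implementation in the task):

```python
# Sketch of the offset negation described in the docstring above.
from datetime import datetime, timedelta


def trigger_time(target_date: datetime, schedule_offset_days: int) -> datetime:
    # Negating the offset: +7 fires 7 days BEFORE the date, -7 fires 7 days AFTER.
    return target_date - timedelta(days=schedule_offset_days)


target = datetime(2025, 6, 1)
print(trigger_time(target, 7))   # 2025-05-25 00:00:00 -> a week before the target
print(trigger_time(target, -7))  # 2025-06-08 00:00:00 -> a week after the target
```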
@@ -1174,12 +1174,16 @@ class PostConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):
         m.assert_called_once()

     @mock.patch("documents.consumer.run_subprocess")
-    def test_post_consume_script_with_correspondent(self, m):
+    def test_post_consume_script_with_correspondent_and_type(self, m):
         with tempfile.NamedTemporaryFile() as script:
             with override_settings(POST_CONSUME_SCRIPT=script.name):
                 c = Correspondent.objects.create(name="my_bank")
+                t = DocumentType.objects.create(
+                    name="Test type",
+                )
                 doc = Document.objects.create(
                     title="Test",
+                    document_type=t,
                     mime_type="application/pdf",
                     correspondent=c,
                 )
@@ -1207,6 +1211,7 @@ class PostConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):

                 subset = {
                     "DOCUMENT_ID": str(doc.pk),
+                    "DOCUMENT_TYPE": "Test type",
                     "DOCUMENT_DOWNLOAD_URL": f"/api/documents/{doc.pk}/download/",
                     "DOCUMENT_THUMBNAIL_URL": f"/api/documents/{doc.pk}/thumb/",
                     "DOCUMENT_CORRESPONDENT": "my_bank",
@@ -1610,7 +1610,7 @@ class TestWorkflows(
         doc.refresh_from_db()
         self.assertIsNone(doc.owner)

-    def test_workflow_scheduled_trigger_negative_offset(self):
+    def test_workflow_scheduled_trigger_negative_offset_customfield(self):
         """
         GIVEN:
             - Existing workflow with SCHEDULED trigger and negative offset of -7 days (so 7 days after date)
@@ -1662,6 +1662,55 @@ class TestWorkflows(
         doc.refresh_from_db()
         self.assertEqual(doc.owner, self.user2)

+    def test_workflow_scheduled_trigger_negative_offset_created(self):
+        """
+        GIVEN:
+            - Existing workflow with SCHEDULED trigger and negative offset of -7 days (so 7 days after date)
+            - Created date set to 8 days ago → trigger time = 1 day ago and 5 days ago
+        WHEN:
+            - Scheduled workflows are checked for document
+        THEN:
+            - Workflow runs and document owner is updated for the first document, not the second
+        """
+        trigger = WorkflowTrigger.objects.create(
+            type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
+            schedule_offset_days=-7,
+            schedule_date_field=WorkflowTrigger.ScheduleDateField.CREATED,
+        )
+        action = WorkflowAction.objects.create(
+            assign_title="Doc assign owner",
+            assign_owner=self.user2,
+        )
+        w = Workflow.objects.create(
+            name="Workflow 1",
+            order=0,
+        )
+        w.triggers.add(trigger)
+        w.actions.add(action)
+        w.save()
+
+        doc = Document.objects.create(
+            title="sample test",
+            correspondent=self.c,
+            original_filename="sample.pdf",
+            checksum="1",
+            created=timezone.now().date() - timedelta(days=8),
+        )
+
+        doc2 = Document.objects.create(
+            title="sample test 2",
+            correspondent=self.c,
+            original_filename="sample2.pdf",
+            checksum="2",
+            created=timezone.now().date() - timedelta(days=5),
+        )
+
+        tasks.check_scheduled_workflows()
+        doc.refresh_from_db()
+        self.assertEqual(doc.owner, self.user2)
+        doc2.refresh_from_db()
+        self.assertIsNone(doc2.owner)  # has not triggered yet
+
     def test_workflow_enabled_disabled(self):
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
@@ -23,11 +23,17 @@ def copy_basic_file_stats(source: Path | str, dest: Path | str) -> None:

     The extended attribute copy does weird things with SELinux and files
     copied from temporary directories and copystat doesn't allow disabling
-    these copies
+    these copies.
+
+    If there is a PermissionError, skip copying file stats.
     """
     source, dest = _coerce_to_path(source, dest)
     src_stat = source.stat()
-    utime(dest, ns=(src_stat.st_atime_ns, src_stat.st_mtime_ns))
+
+    try:
+        utime(dest, ns=(src_stat.st_atime_ns, src_stat.st_mtime_ns))
+    except PermissionError:
+        pass


 def copy_file_with_basic_stats(
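The hunk above makes the timestamp copy tolerant of PermissionError. As a standalone usage sketch of the same pattern (the function name and example paths are assumptions, not paperless-ngx helpers):

```python
# Sketch of the PermissionError-tolerant stats copy shown above.
from os import utime
from pathlib import Path
from shutil import copyfile


def copy_with_best_effort_stats(source: Path, dest: Path) -> None:
    copyfile(source, dest)  # copy contents only, no metadata or xattrs
    src_stat = source.stat()
    try:
        # Mirror access/modification times; some mounts refuse this.
        utime(dest, ns=(src_stat.st_atime_ns, src_stat.st_mtime_ns))
    except PermissionError:
        pass  # the content copy already succeeded, so skip the timestamps


# Example: copy_with_best_effort_stats(Path("in.pdf"), Path("out.pdf"))
```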