Compare commits

...

6 Commits

Author SHA1 Message Date
dependabot[bot]
4cd770f25e Chore(deps): Bump django-cors-headers from 4.8.0 to 4.9.0
Bumps [django-cors-headers](https://github.com/adamchainz/django-cors-headers) from 4.8.0 to 4.9.0.
- [Changelog](https://github.com/adamchainz/django-cors-headers/blob/main/CHANGELOG.rst)
- [Commits](https://github.com/adamchainz/django-cors-headers/compare/4.8.0...4.9.0)

---
updated-dependencies:
- dependency-name: django-cors-headers
  dependency-version: 4.9.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-25 07:47:28 +00:00
shamoon
764ad059d1 Revert "Chore: Enable SonarQube scanning (#10904)" (#10934)
This reverts commit 8d1f23e9d6.
2025-09-25 00:45:36 -07:00
shamoon
5e47069934 Fix select option removal and pagination update (#10933) 2025-09-25 00:42:43 -07:00
DerRockWolf
4ff09c4cf4 Enhancement: support workflow path matching of barcode-split documents (#10723) 2025-09-24 21:03:03 +00:00
shamoon
53b393dab5 Chore: remove conditional from pre-commit job in CI (#10916) 2025-09-24 13:43:09 -07:00
shamoon
6119c215e7 Fix: skip fuzzy matching for empty document content (#10914) 2025-09-22 23:30:24 -07:00
11 changed files with 105 additions and 131 deletions

View File

@@ -17,11 +17,52 @@ env:
   DEFAULT_PYTHON_VERSION: "3.11"
   NLTK_DATA: "/usr/share/nltk_data"
 jobs:
+  detect-duplicate:
+    name: Detect Duplicate Run
+    runs-on: ubuntu-24.04
+    outputs:
+      should_run: ${{ steps.check.outputs.should_run }}
+    steps:
+      - name: Check if workflow should run
+        id: check
+        uses: actions/github-script@v7
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          script: |
+            if (context.eventName !== 'push') {
+              core.info('Not a push event; running workflow.');
+              core.setOutput('should_run', 'true');
+              return;
+            }
+            const ref = context.ref || '';
+            if (!ref.startsWith('refs/heads/')) {
+              core.info('Push is not to a branch; running workflow.');
+              core.setOutput('should_run', 'true');
+              return;
+            }
+            const branch = ref.substring('refs/heads/'.length);
+            const { owner, repo } = context.repo;
+            const prs = await github.paginate(github.rest.pulls.list, {
+              owner,
+              repo,
+              state: 'open',
+              head: `${owner}:${branch}`,
+              per_page: 100,
+            });
+            if (prs.length === 0) {
+              core.info(`No open PR found for ${branch}; running workflow.`);
+              core.setOutput('should_run', 'true');
+            } else {
+              core.info(`Found ${prs.length} open PR(s) for ${branch}; skipping duplicate push run.`);
+              core.setOutput('should_run', 'false');
+            }
   pre-commit:
-    # We want to run on external PRs, but not on our own internal PRs as they'll be run
-    # by the push to the branch. Without this if check, checks are duplicated since
-    # internal PRs match both the push and pull_request events.
-    if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
+    needs:
+      - detect-duplicate
+    if: needs.detect-duplicate.outputs.should_run == 'true'
     name: Linting Checks
     runs-on: ubuntu-24.04
     steps:
@@ -151,18 +192,6 @@ jobs:
           token: ${{ secrets.CODECOV_TOKEN }}
           flags: backend-python-${{ matrix.python-version }}
           files: coverage.xml
-      - name: Upload coverage artifacts
-        uses: actions/upload-artifact@v4
-        if: always()
-        with:
-          name: backend-coverage-${{ matrix.python-version }}
-          path: |
-            .coverage
-            coverage.xml
-            junit.xml
-          retention-days: 1
-          include-hidden-files: true
-          if-no-files-found: error
       - name: Stop containers
         if: always()
         run: |
@@ -245,17 +274,6 @@ jobs:
           token: ${{ secrets.CODECOV_TOKEN }}
           flags: frontend-node-${{ matrix.node-version }}
           directory: src-ui/coverage/
-      - name: Upload coverage artifacts
-        uses: actions/upload-artifact@v4
-        if: always()
-        with:
-          name: frontend-coverage-${{ matrix.shard-index }}
-          path: |
-            src-ui/coverage/lcov.info
-            src-ui/coverage/coverage-final.json
-            src-ui/junit.xml
-          retention-days: 1
-          if-no-files-found: error
   tests-frontend-e2e:
     name: "Frontend E2E Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
     runs-on: ubuntu-24.04
@@ -336,74 +354,6 @@ jobs:
         env:
           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         run: cd src-ui && pnpm run build --configuration=production
-  sonarqube-analysis:
-    name: "SonarQube Analysis"
-    runs-on: ubuntu-24.04
-    needs:
-      - tests-backend
-      - tests-frontend
-    if: github.repository_owner == 'paperless-ngx'
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v5
-        with:
-          fetch-depth: 0
-      - name: Download all backend coverage
-        uses: actions/download-artifact@v5.0.0
-        with:
-          pattern: backend-coverage-*
-          path: ./coverage/
-      - name: Download all frontend coverage
-        uses: actions/download-artifact@v5.0.0
-        with:
-          pattern: frontend-coverage-*
-          path: ./coverage/
-      - name: Set up Python
-        uses: actions/setup-python@v5
-        with:
-          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
-      - name: Install coverage tools
-        run: |
-          pip install coverage
-          npm install -g nyc
-      # Merge backend coverage from all Python versions
-      - name: Merge backend coverage
-        run: |
-          coverage combine coverage/backend-coverage-*/.coverage
-          coverage xml -o merged-backend-coverage.xml
-      # Merge frontend coverage from all shards
-      - name: Merge frontend coverage
-        run: |
-          # Find all coverage-final.json files from the shards, exit with error if none found
-          shopt -s nullglob
-          files=(coverage/frontend-coverage-*/coverage/coverage-final.json)
-          if [ ${#files[@]} -eq 0 ]; then
-            echo "No frontend coverage JSON found under coverage/" >&2
-            exit 1
-          fi
-          # Create .nyc_output directory and copy each shard's coverage JSON into it with a unique name
-          mkdir -p .nyc_output
-          for coverage_json in "${files[@]}"; do
-            shard=$(basename "$(dirname "$(dirname "$coverage_json")")")
-            cp "$coverage_json" ".nyc_output/${shard}.json"
-          done
-          npx nyc merge .nyc_output .nyc_output/out.json
-          npx nyc report --reporter=lcovonly --report-dir coverage
-      - name: Upload coverage artifacts
-        uses: actions/upload-artifact@v4.6.2
-        with:
-          name: merged-coverage
-          path: |
-            merged-backend-coverage.xml
-            .nyc_output/*
-            coverage/lcov.info
-          retention-days: 7
-          if-no-files-found: error
-          include-hidden-files: true
-      - name: SonarQube Analysis
-        uses: SonarSource/sonarqube-scan-action@v5
-        env:
-          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
   build-docker-image:
     name: Build Docker image for ${{ github.ref_name }}
     runs-on: ubuntu-24.04
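
The detect-duplicate job added above decides whether a push-triggered run is redundant with the run an open PR will start anyway. A rough Python sketch of the same decision logic, with the event name, ref, and set of open PR head branches as illustrative inputs (they are not part of this diff):

def should_run(event_name: str, ref: str, open_pr_head_branches: set[str]) -> bool:
    # Non-push events (e.g. pull_request) always run.
    if event_name != "push":
        return True
    # Tags and other non-branch refs always run.
    if not ref.startswith("refs/heads/"):
        return True
    branch = ref[len("refs/heads/"):]
    # Skip the push run only when an open PR already covers this branch.
    return branch not in open_pr_head_branches

assert should_run("push", "refs/heads/feature-x", {"feature-x"}) is False
assert should_run("push", "refs/tags/v2.0.0", set()) is True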

View File

@@ -30,7 +30,7 @@ dependencies = [
"django-cachalot~=2.8.0",
"django-celery-results~=2.6.0",
"django-compression-middleware~=0.5.0",
"django-cors-headers~=4.8.0",
"django-cors-headers~=4.9.0",
"django-extensions~=4.1",
"django-filter~=25.1",
"django-guardian~=3.1.2",
@@ -255,7 +255,6 @@ PAPERLESS_DISABLE_DBHANDLER = "true"
 PAPERLESS_CACHE_BACKEND = "django.core.cache.backends.locmem.LocMemCache"

 [tool.coverage.run]
-relative_files = true
 source = [
   "src/",
 ]
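
For reference, the `~=` specifier pins the release series: `~=4.9.0` accepts patch releases of 4.9 but not 4.10. A quick check with the packaging library (assuming it is installed):

from packaging.specifiers import SpecifierSet

spec = SpecifierSet("~=4.9.0")  # equivalent to >=4.9.0,<4.10.0
print("4.9.5" in spec)   # True: patch updates stay in range
print("4.10.0" in spec)  # False: a minor bump needs a new specifier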

View File

@@ -1,24 +0,0 @@
-sonar.projectKey=paperless-ngx_paperless-ngx
-sonar.organization=paperless-ngx
-sonar.projectName=Paperless-ngx
-sonar.projectVersion=1.0
-
-# Source and test directories
-sonar.sources=src/,src-ui/
-sonar.test.inclusions=**/test_*.py,**/tests.py,**/*.spec.ts,**/*.test.ts
-
-# Language specific settings
-sonar.python.version=3.10,3.11,3.12,3.13
-
-# Coverage reports
-sonar.python.coverage.reportPaths=merged-backend-coverage.xml
-sonar.javascript.lcov.reportPaths=coverage/lcov.info
-
-# Test execution reports
-sonar.junit.reportPaths=**/junit.xml,**/test-results.xml
-
-# Encoding
-sonar.sourceEncoding=UTF-8
-
-# Exclusions
-sonar.exclusions=**/migrations/**,**/node_modules/**,**/static/**,**/venv/**,**/.venv/**,**/dist/**

View File

@@ -177,10 +177,16 @@ export class CustomFieldEditDialogComponent
   }

   public removeSelectOption(index: number) {
     this.selectOptions.removeAt(index)
-    this._allSelectOptions.splice(
-      index + (this.selectOptionsPage - 1) * SELECT_OPTION_PAGE_SIZE,
-      1
+    const globalIndex =
+      index + (this.selectOptionsPage - 1) * SELECT_OPTION_PAGE_SIZE
+    this._allSelectOptions.splice(globalIndex, 1)
+    const totalPages = Math.max(
+      1,
+      Math.ceil(this._allSelectOptions.length / SELECT_OPTION_PAGE_SIZE)
     )
+    const targetPage = Math.min(this.selectOptionsPage, totalPages)
+    this.selectOptionsPage = targetPage
   }
 }
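
The fix maps the page-local index back to a global index before splicing, then clamps the current page so it cannot point past the last page once an option is gone. The same arithmetic in a small Python sketch (the PAGE_SIZE value and option list are illustrative):

import math

PAGE_SIZE = 6  # illustrative stand-in for SELECT_OPTION_PAGE_SIZE

def remove_option(options: list[str], page: int, index_on_page: int) -> int:
    """Remove an option and return the (possibly clamped) current page."""
    global_index = index_on_page + (page - 1) * PAGE_SIZE
    del options[global_index]
    total_pages = max(1, math.ceil(len(options) / PAGE_SIZE))
    # Without the clamp, deleting the sole option on the last page would
    # leave the dialog on a page that no longer exists.
    return min(page, total_pages)

options = [f"opt{i}" for i in range(7)]  # fills pages 1 and 2
print(remove_option(options, page=2, index_on_page=0))  # -> 1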

View File

@@ -164,6 +164,9 @@ class BarcodePlugin(ConsumeTaskPlugin):
                 mailrule_id=self.input_doc.mailrule_id,
                 # Can't use same folder or the consume might grab it again
                 original_file=(tmp_dir / new_document.name).resolve(),
+                # Add the optional original_path for later use in
+                # workflow matching
+                original_path=self.input_doc.original_file,
             ),
             # All the same metadata
             self.metadata,

View File

@@ -156,6 +156,7 @@ class ConsumableDocument:
     source: DocumentSource
     original_file: Path
+    original_path: Path | None = None
     mailrule_id: int | None = None
     mime_type: str = dataclasses.field(init=False, default=None)
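
Together with the barcodes.py change above, the new field lets a split document carry two paths: the temporary file the consumer will ingest and the location the parent document was originally dropped into. A simplified sketch of the idea (this is a hypothetical stand-in, not the real ConsumableDocument, which has more fields and post-init logic):

import dataclasses
from pathlib import Path

@dataclasses.dataclass
class SplitDocument:
    original_file: Path                 # temp file the consumer ingests
    original_path: Path | None = None   # where the parent was dropped

doc = SplitDocument(
    original_file=Path("/tmp/split/page-1.pdf"),
    original_path=Path("/consume/invoices/scan.pdf"),
)
# Workflow path filters should see the consume-folder location:
print(doc.original_path or doc.original_file)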

View File

@@ -92,6 +92,9 @@ class Command(MultiProcessMixin, ProgressBarMixin, BaseCommand):
             # doc to doc is obviously not useful
             if first_doc.pk == second_doc.pk:
                 continue
+            # Skip empty documents (e.g. password-protected)
+            if first_doc.content.strip() == "" or second_doc.content.strip() == "":
+                continue
             # Skip pairs which have already been matched together
             # doc 1 to doc 2 is the same as doc 2 to doc 1
             doc_1_to_doc_2 = (first_doc.pk, second_doc.pk)
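
The new guard skips documents whose extracted content is blank (password-protected PDFs, for instance). Without it, two empty documents would report a spurious perfect match, since common string-similarity ratios treat two empty sequences as identical. A minimal illustration using difflib from the standard library rather than the command's actual matcher:

from difflib import SequenceMatcher

def ratio(a: str, b: str) -> float:
    return SequenceMatcher(None, a, b).ratio() * 100

print(ratio("", ""))  # 100.0 -- two empty documents look "identical"

def should_compare(content_a: str, content_b: str) -> bool:
    # Mirror of the new guard: never fuzzy-match blank content.
    return content_a.strip() != "" and content_b.strip() != ""

print(should_compare("", "invoice text"))  # False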

View File

@@ -314,11 +314,19 @@ def consumable_document_matches_workflow(
     trigger_matched = False

     # Document path vs trigger path
+    # Use the original_path if set, else use the original_file
+    match_against = (
+        document.original_path
+        if document.original_path is not None
+        else document.original_file
+    )
     if (
         trigger.filter_path is not None
         and len(trigger.filter_path) > 0
         and not fnmatch(
-            document.original_file,
+            match_against,
             trigger.filter_path,
         )
     ):
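
With the fallback in place, a barcode-split document is matched against the folder its parent was dropped into rather than the scratch directory it is consumed from. A small sketch of the behaviour with fnmatch (the paths and filter pattern are illustrative):

from fnmatch import fnmatch

filter_path = "*/consume/invoices/*"
tmp_file = "/tmp/paperless/split/page-1.pdf"    # original_file of a split doc
dropped_at = "/data/consume/invoices/scan.pdf"  # original_path of a split doc

match_against = dropped_at if dropped_at is not None else tmp_file
print(fnmatch(tmp_file, filter_path))       # False: the temp dir never matches
print(fnmatch(match_against, filter_path))  # True: the original location does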

View File

@@ -614,14 +614,16 @@ class TestBarcodeNewConsume(
         self.assertIsNotFile(temp_copy)

         # Check the split files exist
+        # Check the original_path is set
         # Check the source is unchanged
         # Check the overrides are unchanged
         for (
             new_input_doc,
             new_doc_overrides,
         ) in self.get_all_consume_delay_call_args():
-            self.assertEqual(new_input_doc.source, DocumentSource.ConsumeFolder)
             self.assertIsFile(new_input_doc.original_file)
+            self.assertEqual(new_input_doc.original_path, temp_copy)
+            self.assertEqual(new_input_doc.source, DocumentSource.ConsumeFolder)
             self.assertEqual(overrides, new_doc_overrides)

View File

@@ -206,3 +206,29 @@ class TestFuzzyMatchCommand(TestCase):
         self.assertEqual(Document.objects.count(), 2)
         self.assertIsNotNone(Document.objects.get(pk=1))
         self.assertIsNotNone(Document.objects.get(pk=2))
+
+    def test_empty_content(self):
+        """
+        GIVEN:
+            - 2 documents exist, content is empty (pw-protected)
+        WHEN:
+            - Command is called
+        THEN:
+            - No matches are found
+        """
+        Document.objects.create(
+            checksum="BEEFCAFE",
+            title="A",
+            content="",
+            mime_type="application/pdf",
+            filename="test.pdf",
+        )
+        Document.objects.create(
+            checksum="DEADBEAF",
+            title="A",
+            content="",
+            mime_type="application/pdf",
+            filename="other_test.pdf",
+        )
+        stdout, _ = self.call_command()
+        self.assertIn("No matches found", stdout)

uv.lock (generated)
View File

@@ -730,15 +730,15 @@ wheels = [
 [[package]]
 name = "django-cors-headers"
-version = "4.8.0"
+version = "4.9.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "asgiref", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
     { name = "django", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/89/8e/6225441edcfe179bf4861e9e67489e33375e0b66316c8d7b9edaae863d37/django_cors_headers-4.8.0.tar.gz", hash = "sha256:0a12a2efcd59a3cea741e44db8ab589e929949de5bc4cdf35a29c6ae77297686", size = 21425, upload-time = "2025-09-08T15:58:05.34Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/21/39/55822b15b7ec87410f34cd16ce04065ff390e50f9e29f31d6d116fc80456/django_cors_headers-4.9.0.tar.gz", hash = "sha256:fe5d7cb59fdc2c8c646ce84b727ac2bca8912a247e6e68e1fb507372178e59e8", size = 21458, upload-time = "2025-09-18T10:40:52.326Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/ac/b3/29ef49d6ff7800f323f3d98cde7777b3cfdda133de8feea84cffafea4578/django_cors_headers-4.8.0-py3-none-any.whl", hash = "sha256:3b883f4c6d07848673218456a5e070d8ab51f97341c1f27d0242ca167e7272ab", size = 12804, upload-time = "2025-09-08T15:58:03.882Z" },
+    { url = "https://files.pythonhosted.org/packages/30/d8/19ed1e47badf477d17fb177c1c19b5a21da0fd2d9f093f23be3fb86c5fab/django_cors_headers-4.9.0-py3-none-any.whl", hash = "sha256:15c7f20727f90044dcee2216a9fd7303741a864865f0c3657e28b7056f61b449", size = 12809, upload-time = "2025-09-18T10:40:50.843Z" },
 ]

 [[package]]
@@ -2182,7 +2182,7 @@ requires-dist = [
{ name = "django-cachalot", specifier = "~=2.8.0" },
{ name = "django-celery-results", specifier = "~=2.6.0" },
{ name = "django-compression-middleware", specifier = "~=0.5.0" },
{ name = "django-cors-headers", specifier = "~=4.8.0" },
{ name = "django-cors-headers", specifier = "~=4.9.0" },
{ name = "django-extensions", specifier = "~=4.1" },
{ name = "django-filter", specifier = "~=25.1" },
{ name = "django-guardian", specifier = "~=3.1.2" },