Compare commits

...

6 Commits

Author SHA1 Message Date
dependabot[bot]
86399a2552 Chore(deps): Bump whitenoise from 6.10.0 to 6.11.0
Bumps [whitenoise](https://github.com/evansd/whitenoise) from 6.10.0 to 6.11.0.
- [Changelog](https://github.com/evansd/whitenoise/blob/main/docs/changelog.rst)
- [Commits](https://github.com/evansd/whitenoise/compare/6.10.0...6.11.0)

---
updated-dependencies:
- dependency-name: whitenoise
  dependency-version: 6.11.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-25 07:47:48 +00:00
shamoon
764ad059d1 Revert "Chore: Enable SonarQube scanning (#10904)" (#10934)
This reverts commit 8d1f23e9d6.
2025-09-25 00:45:36 -07:00
shamoon
5e47069934 Fix select option removal and pagination update (#10933) 2025-09-25 00:42:43 -07:00
DerRockWolf
4ff09c4cf4 Enhancement: support workflow path matching of barcode-split documents (#10723) 2025-09-24 21:03:03 +00:00
shamoon
53b393dab5 Chore: remove conditional from pre-commit job in CI (#10916) 2025-09-24 13:43:09 -07:00
shamoon
6119c215e7 Fix: skip fuzzy matching for empty document content (#10914) 2025-09-22 23:30:24 -07:00
11 changed files with 103 additions and 129 deletions

View File

@@ -17,11 +17,52 @@ env:
   DEFAULT_PYTHON_VERSION: "3.11"
   NLTK_DATA: "/usr/share/nltk_data"
 jobs:
+  detect-duplicate:
+    name: Detect Duplicate Run
+    runs-on: ubuntu-24.04
+    outputs:
+      should_run: ${{ steps.check.outputs.should_run }}
+    steps:
+      - name: Check if workflow should run
+        id: check
+        uses: actions/github-script@v7
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          script: |
+            if (context.eventName !== 'push') {
+              core.info('Not a push event; running workflow.');
+              core.setOutput('should_run', 'true');
+              return;
+            }
+            const ref = context.ref || '';
+            if (!ref.startsWith('refs/heads/')) {
+              core.info('Push is not to a branch; running workflow.');
+              core.setOutput('should_run', 'true');
+              return;
+            }
+            const branch = ref.substring('refs/heads/'.length);
+            const { owner, repo } = context.repo;
+            const prs = await github.paginate(github.rest.pulls.list, {
+              owner,
+              repo,
+              state: 'open',
+              head: `${owner}:${branch}`,
+              per_page: 100,
+            });
+            if (prs.length === 0) {
+              core.info(`No open PR found for ${branch}; running workflow.`);
+              core.setOutput('should_run', 'true');
+            } else {
+              core.info(`Found ${prs.length} open PR(s) for ${branch}; skipping duplicate push run.`);
+              core.setOutput('should_run', 'false');
+            }
   pre-commit:
-    # We want to run on external PRs, but not on our own internal PRs as they'll be run
-    # by the push to the branch. Without this if check, checks are duplicated since
-    # internal PRs match both the push and pull_request events.
-    if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
+    needs:
+      - detect-duplicate
+    if: needs.detect-duplicate.outputs.should_run == 'true'
     name: Linting Checks
     runs-on: ubuntu-24.04
     steps:
@@ -151,18 +192,6 @@
           token: ${{ secrets.CODECOV_TOKEN }}
           flags: backend-python-${{ matrix.python-version }}
           files: coverage.xml
-      - name: Upload coverage artifacts
-        uses: actions/upload-artifact@v4
-        if: always()
-        with:
-          name: backend-coverage-${{ matrix.python-version }}
-          path: |
-            .coverage
-            coverage.xml
-            junit.xml
-          retention-days: 1
-          include-hidden-files: true
-          if-no-files-found: error
       - name: Stop containers
        if: always()
        run: |
@@ -245,17 +274,6 @@
           token: ${{ secrets.CODECOV_TOKEN }}
           flags: frontend-node-${{ matrix.node-version }}
           directory: src-ui/coverage/
-      - name: Upload coverage artifacts
-        uses: actions/upload-artifact@v4
-        if: always()
-        with:
-          name: frontend-coverage-${{ matrix.shard-index }}
-          path: |
-            src-ui/coverage/lcov.info
-            src-ui/coverage/coverage-final.json
-            src-ui/junit.xml
-          retention-days: 1
-          if-no-files-found: error
   tests-frontend-e2e:
     name: "Frontend E2E Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
     runs-on: ubuntu-24.04
@@ -336,74 +354,6 @@
         env:
           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         run: cd src-ui && pnpm run build --configuration=production
-  sonarqube-analysis:
-    name: "SonarQube Analysis"
-    runs-on: ubuntu-24.04
-    needs:
-      - tests-backend
-      - tests-frontend
-    if: github.repository_owner == 'paperless-ngx'
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v5
-        with:
-          fetch-depth: 0
-      - name: Download all backend coverage
-        uses: actions/download-artifact@v5.0.0
-        with:
-          pattern: backend-coverage-*
-          path: ./coverage/
-      - name: Download all frontend coverage
-        uses: actions/download-artifact@v5.0.0
-        with:
-          pattern: frontend-coverage-*
-          path: ./coverage/
-      - name: Set up Python
-        uses: actions/setup-python@v5
-        with:
-          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
-      - name: Install coverage tools
-        run: |
-          pip install coverage
-          npm install -g nyc
-      # Merge backend coverage from all Python versions
-      - name: Merge backend coverage
-        run: |
-          coverage combine coverage/backend-coverage-*/.coverage
-          coverage xml -o merged-backend-coverage.xml
-      # Merge frontend coverage from all shards
-      - name: Merge frontend coverage
-        run: |
-          # Find all coverage-final.json files from the shards, exit with error if none found
-          shopt -s nullglob
-          files=(coverage/frontend-coverage-*/coverage/coverage-final.json)
-          if [ ${#files[@]} -eq 0 ]; then
-            echo "No frontend coverage JSON found under coverage/" >&2
-            exit 1
-          fi
-          # Create .nyc_output directory and copy each shard's coverage JSON into it with a unique name
-          mkdir -p .nyc_output
-          for coverage_json in "${files[@]}"; do
-            shard=$(basename "$(dirname "$(dirname "$coverage_json")")")
-            cp "$coverage_json" ".nyc_output/${shard}.json"
-          done
-          npx nyc merge .nyc_output .nyc_output/out.json
-          npx nyc report --reporter=lcovonly --report-dir coverage
-      - name: Upload coverage artifacts
-        uses: actions/upload-artifact@v4.6.2
-        with:
-          name: merged-coverage
-          path: |
-            merged-backend-coverage.xml
-            .nyc_output/*
-            coverage/lcov.info
-          retention-days: 7
-          if-no-files-found: error
-          include-hidden-files: true
-      - name: SonarQube Analysis
-        uses: SonarSource/sonarqube-scan-action@v5
-        env:
-          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
   build-docker-image:
     name: Build Docker image for ${{ github.ref_name }}
     runs-on: ubuntu-24.04
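
The detect-duplicate job added above skips a push-triggered run whenever an open PR already covers the same branch, replacing the old inline `if:` guard on pre-commit. For readers outside GitHub Actions, here is a minimal Python sketch of the same check, assuming a requests-based call to the public GitHub REST pulls endpoint (the workflow itself uses JavaScript via actions/github-script, and `should_run_push_workflow` is an illustrative name, not anything in the repo):

```python
import requests

def should_run_push_workflow(owner: str, repo: str, ref: str, token: str) -> bool:
    """Return True when a push-triggered CI run should proceed."""
    # Non-branch refs (tags, etc.) always run; non-push events are handled upstream.
    if not ref.startswith("refs/heads/"):
        return True
    branch = ref[len("refs/heads/"):]
    # Any open PR whose head is this branch means the pull_request event
    # already covers the same commit, so the push run would be a duplicate.
    resp = requests.get(
        f"https://api.github.com/repos/{owner}/{repo}/pulls",
        params={"state": "open", "head": f"{owner}:{branch}", "per_page": 100},
        headers={"Authorization": f"Bearer {token}"},
        timeout=30,
    )
    resp.raise_for_status()
    return len(resp.json()) == 0
```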

View File

@@ -255,7 +255,6 @@ PAPERLESS_DISABLE_DBHANDLER = "true"
PAPERLESS_CACHE_BACKEND = "django.core.cache.backends.locmem.LocMemCache"
[tool.coverage.run]
relative_files = true
source = [
"src/",
]

View File

@@ -1,24 +0,0 @@
sonar.projectKey=paperless-ngx_paperless-ngx
sonar.organization=paperless-ngx
sonar.projectName=Paperless-ngx
sonar.projectVersion=1.0
# Source and test directories
sonar.sources=src/,src-ui/
sonar.test.inclusions=**/test_*.py,**/tests.py,**/*.spec.ts,**/*.test.ts
# Language specific settings
sonar.python.version=3.10,3.11,3.12,3.13
# Coverage reports
sonar.python.coverage.reportPaths=merged-backend-coverage.xml
sonar.javascript.lcov.reportPaths=coverage/lcov.info
# Test execution reports
sonar.junit.reportPaths=**/junit.xml,**/test-results.xml
# Encoding
sonar.sourceEncoding=UTF-8
# Exclusions
sonar.exclusions=**/migrations/**,**/node_modules/**,**/static/**,**/venv/**,**/.venv/**,**/dist/**

View File

@@ -177,10 +177,16 @@ export class CustomFieldEditDialogComponent
   }

   public removeSelectOption(index: number) {
     this.selectOptions.removeAt(index)
-    this._allSelectOptions.splice(
-      index + (this.selectOptionsPage - 1) * SELECT_OPTION_PAGE_SIZE,
-      1
-    )
+    const globalIndex =
+      index + (this.selectOptionsPage - 1) * SELECT_OPTION_PAGE_SIZE
+    this._allSelectOptions.splice(globalIndex, 1)
+
+    const totalPages = Math.max(
+      1,
+      Math.ceil(this._allSelectOptions.length / SELECT_OPTION_PAGE_SIZE)
+    )
+    const targetPage = Math.min(this.selectOptionsPage, totalPages)
+    this.selectOptionsPage = targetPage
   }
 }
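
The fix above first converts the page-local option index into an index into the full option list, then clamps the current page so that removing the last option on the final page cannot strand the dialog on an empty page. The same arithmetic as a small Python sketch (the page-size value is an assumed stand-in for the component's SELECT_OPTION_PAGE_SIZE constant):

```python
import math

SELECT_OPTION_PAGE_SIZE = 16  # assumed value; the component defines the real constant

def remove_option(all_options: list[str], page: int, index_on_page: int) -> int:
    """Remove a page-local option and return the clamped current page."""
    # Translate the index on the visible page into an index into the full list.
    global_index = index_on_page + (page - 1) * SELECT_OPTION_PAGE_SIZE
    del all_options[global_index]
    # Deleting the last option on the final page can leave `page` pointing
    # past the end, so clamp to the new page count (never below 1).
    total_pages = max(1, math.ceil(len(all_options) / SELECT_OPTION_PAGE_SIZE))
    return min(page, total_pages)

# With 17 options, page 2 holds a single option; removing it clamps back to page 1.
options = [f"opt-{i}" for i in range(17)]
assert remove_option(options, page=2, index_on_page=0) == 1
```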

View File

@@ -164,6 +164,9 @@ class BarcodePlugin(ConsumeTaskPlugin):
                     mailrule_id=self.input_doc.mailrule_id,
                     # Can't use same folder or the consume might grab it again
                     original_file=(tmp_dir / new_document.name).resolve(),
+                    # Adding optional original_path for later use in
+                    # workflow matching
+                    original_path=self.input_doc.original_file,
                 ),
                 # All the same metadata
                 self.metadata,

View File

@@ -156,6 +156,7 @@ class ConsumableDocument:

     source: DocumentSource
     original_file: Path
+    original_path: Path | None = None
     mailrule_id: int | None = None
     mime_type: str = dataclasses.field(init=False, default=None)
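
original_path is a new optional field on ConsumableDocument: barcode splitting rewrites original_file to point at the split fragment in a temporary directory, while original_path preserves where the source file actually came from. A trimmed sketch of the dataclass and how a split fragment might populate it (field names follow the diff; the example paths are invented):

```python
import dataclasses
from pathlib import Path

@dataclasses.dataclass
class ConsumableDocument:
    # Trimmed to the fields relevant here; the real class carries more.
    original_file: Path
    # Pre-split location of the source file; None when the document was
    # consumed directly and never relocated.
    original_path: Path | None = None

# A split fragment lives in a temp dir but remembers the consume-folder file:
fragment = ConsumableDocument(
    original_file=Path("/tmp/split-abc123/page-1.pdf"),
    original_path=Path("/consume/invoices/scan.pdf"),
)
```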

View File

@@ -92,6 +92,9 @@ class Command(MultiProcessMixin, ProgressBarMixin, BaseCommand):
             # doc to doc is obviously not useful
             if first_doc.pk == second_doc.pk:
                 continue
+            # Skip empty documents (e.g. password-protected)
+            if first_doc.content.strip() == "" or second_doc.content.strip() == "":
+                continue
             # Skip matching which have already been matched together
             # doc 1 to doc 2 is the same as doc 2 to doc 1
             doc_1_to_doc_2 = (first_doc.pk, second_doc.pk)
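
The new guard drops documents whose content is empty (password-protected PDFs, for example) before fuzzy matching, since two empty strings would otherwise be reported as a meaningless near-perfect match. A condensed sketch of the loop's filtering, with the pair deduplication simplified (`candidate_pairs` is an illustrative helper, not the command's actual structure):

```python
import itertools

def candidate_pairs(docs):
    """Yield unique document pairs worth fuzzy-matching, mirroring the guards above."""
    seen = set()
    for first, second in itertools.product(docs, repeat=2):
        # Comparing a doc to itself is obviously not useful.
        if first.pk == second.pk:
            continue
        # Password-protected or OCR-less files have empty content; a pair of
        # empty strings would compare as trivially similar.
        if first.content.strip() == "" or second.content.strip() == "":
            continue
        # (doc 1, doc 2) covers (doc 2, doc 1) as well.
        pair = tuple(sorted((first.pk, second.pk)))
        if pair in seen:
            continue
        seen.add(pair)
        yield first, second
```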

View File

@@ -314,11 +314,19 @@ def consumable_document_matches_workflow(
     trigger_matched = False

     # Document path vs trigger path
+    # Use the original_path if set, else use the original_file
+    match_against = (
+        document.original_path
+        if document.original_path is not None
+        else document.original_file
+    )
     if (
         trigger.filter_path is not None
         and len(trigger.filter_path) > 0
         and not fnmatch(
-            document.original_file,
+            match_against,
             trigger.filter_path,
         )
     ):
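
Because split documents are consumed from a temporary directory, matching a workflow's filter_path against original_file would never hit the user's consume-folder globs; the fix matches against the pre-split path when one is available. A standalone sketch of the decision (the function name and signature are illustrative, not the module's API):

```python
from fnmatch import fnmatch
from pathlib import Path

def path_filter_matches(
    original_file: Path,
    original_path: Path | None,
    filter_path: str | None,
) -> bool:
    """Mirror the trigger's path check: prefer the pre-split path when present."""
    if filter_path is None or len(filter_path) == 0:
        return True  # no path filter configured on the trigger
    # Barcode-split fragments live in a temp dir; original_path preserves the
    # consume-folder location so user-facing globs keep matching.
    match_against = original_path if original_path is not None else original_file
    return fnmatch(str(match_against), filter_path)

# A fragment in /tmp still matches a consume-folder glob via original_path:
assert path_filter_matches(
    Path("/tmp/split-abc/page-1.pdf"),
    Path("/consume/invoices/scan.pdf"),
    "*/invoices/*",
)
```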

View File

@@ -614,14 +614,16 @@ class TestBarcodeNewConsume(
         self.assertIsNotFile(temp_copy)

         # Check the split files exist
+        # Check the original_path is set
         # Check the source is unchanged
         # Check the overrides are unchanged
         for (
             new_input_doc,
             new_doc_overrides,
         ) in self.get_all_consume_delay_call_args():
-            self.assertEqual(new_input_doc.source, DocumentSource.ConsumeFolder)
             self.assertIsFile(new_input_doc.original_file)
+            self.assertEqual(new_input_doc.original_path, temp_copy)
+            self.assertEqual(new_input_doc.source, DocumentSource.ConsumeFolder)
             self.assertEqual(overrides, new_doc_overrides)

View File

@@ -206,3 +206,29 @@ class TestFuzzyMatchCommand(TestCase):
         self.assertEqual(Document.objects.count(), 2)
         self.assertIsNotNone(Document.objects.get(pk=1))
         self.assertIsNotNone(Document.objects.get(pk=2))
+
+    def test_empty_content(self):
+        """
+        GIVEN:
+            - 2 documents exist, content is empty (pw-protected)
+        WHEN:
+            - Command is called
+        THEN:
+            - No matches are found
+        """
+        Document.objects.create(
+            checksum="BEEFCAFE",
+            title="A",
+            content="",
+            mime_type="application/pdf",
+            filename="test.pdf",
+        )
+        Document.objects.create(
+            checksum="DEADBEAF",
+            title="A",
+            content="",
+            mime_type="application/pdf",
+            filename="other_test.pdf",
+        )
+        stdout, _ = self.call_command()
+        self.assertIn("No matches found", stdout)

uv.lock generated
View File

@@ -4095,11 +4095,11 @@ wheels = [

 [[package]]
 name = "whitenoise"
-version = "6.10.0"
+version = "6.11.0"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/27/9a/4f4b84ff1f3a5c3cbc8070b6ecbbab6cd121c385244c9d24d80bb284190f/whitenoise-6.10.0.tar.gz", hash = "sha256:7b7e53de65d749cb1ce4a7100e751d9742e323b52746f9f93944c0d348ea2d02", size = 26412, upload-time = "2025-09-09T11:07:24.694Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/15/95/8c81ec6b6ebcbf8aca2de7603070ccf37dbb873b03f20708e0f7c1664bc6/whitenoise-6.11.0.tar.gz", hash = "sha256:0f5bfce6061ae6611cd9396a8231e088722e4fc67bc13a111be74c738d99375f", size = 26432, upload-time = "2025-09-18T09:16:10.995Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/cb/3b/4fa26e02935334fa0eb1422c938b1db796c55de7a432cc86b9d8cf97260c/whitenoise-6.10.0-py3-none-any.whl", hash = "sha256:bad74a40b33b055ba59731b6048dd08d5647f273b72bef922aa43ddd287b02da", size = 20194, upload-time = "2025-09-09T11:07:23.544Z" },
+    { url = "https://files.pythonhosted.org/packages/6c/e9/4366332f9295fe0647d7d3251ce18f5615fbcb12d02c79a26f8dba9221b3/whitenoise-6.11.0-py3-none-any.whl", hash = "sha256:b2aeb45950597236f53b5342b3121c5de69c8da0109362aee506ce88e022d258", size = 20197, upload-time = "2025-09-18T09:16:09.754Z" },
 ]

 [[package]]