Compare commits


4 Commits

Author       SHA1        Date                        Message

shamoon      764ad059d1  2025-09-25 00:45:36 -07:00  Revert "Chore: Enable SonarQube scanning (#10904)" (#10934)
                                                     This reverts commit 8d1f23e9d6.
shamoon      5e47069934  2025-09-25 00:42:43 -07:00  Fix select option removal and pagination update (#10933)
DerRockWolf  4ff09c4cf4  2025-09-24 21:03:03 +00:00  Enhancement: support workflow path matching of barcode-split documents (#10723)
shamoon      53b393dab5  2025-09-24 13:43:09 -07:00  Chore: remove conditional from pre-commit job in CI (#10916)
8 changed files with 71 additions and 126 deletions

View File

@@ -17,11 +17,52 @@ env:
   DEFAULT_PYTHON_VERSION: "3.11"
   NLTK_DATA: "/usr/share/nltk_data"
 jobs:
+  detect-duplicate:
+    name: Detect Duplicate Run
+    runs-on: ubuntu-24.04
+    outputs:
+      should_run: ${{ steps.check.outputs.should_run }}
+    steps:
+      - name: Check if workflow should run
+        id: check
+        uses: actions/github-script@v7
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          script: |
+            if (context.eventName !== 'push') {
+              core.info('Not a push event; running workflow.');
+              core.setOutput('should_run', 'true');
+              return;
+            }
+            const ref = context.ref || '';
+            if (!ref.startsWith('refs/heads/')) {
+              core.info('Push is not to a branch; running workflow.');
+              core.setOutput('should_run', 'true');
+              return;
+            }
+            const branch = ref.substring('refs/heads/'.length);
+            const { owner, repo } = context.repo;
+            const prs = await github.paginate(github.rest.pulls.list, {
+              owner,
+              repo,
+              state: 'open',
+              head: `${owner}:${branch}`,
+              per_page: 100,
+            });
+            if (prs.length === 0) {
+              core.info(`No open PR found for ${branch}; running workflow.`);
+              core.setOutput('should_run', 'true');
+            } else {
+              core.info(`Found ${prs.length} open PR(s) for ${branch}; skipping duplicate push run.`);
+              core.setOutput('should_run', 'false');
+            }
   pre-commit:
-    # We want to run on external PRs, but not on our own internal PRs as they'll be run
-    # by the push to the branch. Without this if check, checks are duplicated since
-    # internal PRs match both the push and pull_request events.
-    if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
+    needs:
+      - detect-duplicate
+    if: needs.detect-duplicate.outputs.should_run == 'true'
     name: Linting Checks
     runs-on: ubuntu-24.04
     steps:
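The script above skips duplicate push runs by asking the GitHub API whether an open PR already has the pushed branch as its head. A rough Python equivalent of that lookup, assuming placeholder owner/repo/branch values and a token in GITHUB_TOKEN (the workflow paginates all results; this sketch reads only the first page):

import json
import os
import urllib.request

def branch_has_open_pr(owner: str, repo: str, branch: str, token: str) -> bool:
    # GET /repos/{owner}/{repo}/pulls with head=owner:branch returns only
    # open PRs whose source branch matches.
    url = (
        f"https://api.github.com/repos/{owner}/{repo}/pulls"
        f"?state=open&head={owner}:{branch}&per_page=100"
    )
    req = urllib.request.Request(url, headers={
        "Authorization": f"Bearer {token}",
        "Accept": "application/vnd.github+json",
    })
    with urllib.request.urlopen(req) as resp:
        return len(json.load(resp)) > 0

if __name__ == "__main__":
    # Mirrors the job output: a push run proceeds only if no open PR exists.
    should_run = not branch_has_open_pr(
        "paperless-ngx", "paperless-ngx", "feature-x", os.environ["GITHUB_TOKEN"]
    )
    print(f"should_run={str(should_run).lower()}")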
@@ -151,18 +192,6 @@ jobs:
           token: ${{ secrets.CODECOV_TOKEN }}
           flags: backend-python-${{ matrix.python-version }}
           files: coverage.xml
-      - name: Upload coverage artifacts
-        uses: actions/upload-artifact@v4
-        if: always()
-        with:
-          name: backend-coverage-${{ matrix.python-version }}
-          path: |
-            .coverage
-            coverage.xml
-            junit.xml
-          retention-days: 1
-          include-hidden-files: true
-          if-no-files-found: error
       - name: Stop containers
         if: always()
         run: |
@@ -245,17 +274,6 @@
           token: ${{ secrets.CODECOV_TOKEN }}
           flags: frontend-node-${{ matrix.node-version }}
           directory: src-ui/coverage/
-      - name: Upload coverage artifacts
-        uses: actions/upload-artifact@v4
-        if: always()
-        with:
-          name: frontend-coverage-${{ matrix.shard-index }}
-          path: |
-            src-ui/coverage/lcov.info
-            src-ui/coverage/coverage-final.json
-            src-ui/junit.xml
-          retention-days: 1
-          if-no-files-found: error
   tests-frontend-e2e:
     name: "Frontend E2E Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
     runs-on: ubuntu-24.04
@@ -336,74 +354,6 @@ jobs:
         env:
           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         run: cd src-ui && pnpm run build --configuration=production
-  sonarqube-analysis:
-    name: "SonarQube Analysis"
-    runs-on: ubuntu-24.04
-    needs:
-      - tests-backend
-      - tests-frontend
-    if: github.repository_owner == 'paperless-ngx'
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v5
-        with:
-          fetch-depth: 0
-      - name: Download all backend coverage
-        uses: actions/download-artifact@v5.0.0
-        with:
-          pattern: backend-coverage-*
-          path: ./coverage/
-      - name: Download all frontend coverage
-        uses: actions/download-artifact@v5.0.0
-        with:
-          pattern: frontend-coverage-*
-          path: ./coverage/
-      - name: Set up Python
-        uses: actions/setup-python@v5
-        with:
-          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
-      - name: Install coverage tools
-        run: |
-          pip install coverage
-          npm install -g nyc
-      # Merge backend coverage from all Python versions
-      - name: Merge backend coverage
-        run: |
-          coverage combine coverage/backend-coverage-*/.coverage
-          coverage xml -o merged-backend-coverage.xml
-      # Merge frontend coverage from all shards
-      - name: Merge frontend coverage
-        run: |
-          # Find all coverage-final.json files from the shards, exit with error if none found
-          shopt -s nullglob
-          files=(coverage/frontend-coverage-*/coverage/coverage-final.json)
-          if [ ${#files[@]} -eq 0 ]; then
-            echo "No frontend coverage JSON found under coverage/" >&2
-            exit 1
-          fi
-          # Create .nyc_output directory and copy each shard's coverage JSON into it with a unique name
-          mkdir -p .nyc_output
-          for coverage_json in "${files[@]}"; do
-            shard=$(basename "$(dirname "$(dirname "$coverage_json")")")
-            cp "$coverage_json" ".nyc_output/${shard}.json"
-          done
-          npx nyc merge .nyc_output .nyc_output/out.json
-          npx nyc report --reporter=lcovonly --report-dir coverage
-      - name: Upload coverage artifacts
-        uses: actions/upload-artifact@v4.6.2
-        with:
-          name: merged-coverage
-          path: |
-            merged-backend-coverage.xml
-            .nyc_output/*
-            coverage/lcov.info
-          retention-days: 7
-          if-no-files-found: error
-          include-hidden-files: true
-      - name: SonarQube Analysis
-        uses: SonarSource/sonarqube-scan-action@v5
-        env:
-          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
   build-docker-image:
     name: Build Docker image for ${{ github.ref_name }}
     runs-on: ubuntu-24.04
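Among the reverted steps, the frontend merge copies each shard's coverage-final.json into .nyc_output under a unique name before nyc merges them, since identically named files would otherwise overwrite one another. A rough Python rendering of that bash step, assuming the same artifact layout and that nyc is reachable through npx:

import shutil
import subprocess
import sys
from pathlib import Path

# Gather each shard's coverage JSON; fail loudly if none were downloaded.
shards = sorted(Path("coverage").glob("frontend-coverage-*/coverage/coverage-final.json"))
if not shards:
    sys.exit("No frontend coverage JSON found under coverage/")

out = Path(".nyc_output")
out.mkdir(exist_ok=True)
for coverage_json in shards:
    # Name each copy after its shard directory so the files do not collide.
    shard = coverage_json.parent.parent.name
    shutil.copy(coverage_json, out / f"{shard}.json")

# Merge the per-shard files, then emit a single lcov report for upload.
subprocess.run(["npx", "nyc", "merge", ".nyc_output", ".nyc_output/out.json"], check=True)
subprocess.run(["npx", "nyc", "report", "--reporter=lcovonly", "--report-dir", "coverage"], check=True)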

View File

@@ -255,7 +255,6 @@ PAPERLESS_DISABLE_DBHANDLER = "true"
 PAPERLESS_CACHE_BACKEND = "django.core.cache.backends.locmem.LocMemCache"

 [tool.coverage.run]
-relative_files = true
 source = [
   "src/",
 ]

View File

@@ -1,24 +0,0 @@
sonar.projectKey=paperless-ngx_paperless-ngx
sonar.organization=paperless-ngx
sonar.projectName=Paperless-ngx
sonar.projectVersion=1.0
# Source and test directories
sonar.sources=src/,src-ui/
sonar.test.inclusions=**/test_*.py,**/tests.py,**/*.spec.ts,**/*.test.ts
# Language specific settings
sonar.python.version=3.10,3.11,3.12,3.13
# Coverage reports
sonar.python.coverage.reportPaths=merged-backend-coverage.xml
sonar.javascript.lcov.reportPaths=coverage/lcov.info
# Test execution reports
sonar.junit.reportPaths=**/junit.xml,**/test-results.xml
# Encoding
sonar.sourceEncoding=UTF-8
# Exclusions
sonar.exclusions=**/migrations/**,**/node_modules/**,**/static/**,**/venv/**,**/.venv/**,**/dist/**

View File

@@ -177,10 +177,16 @@ export class CustomFieldEditDialogComponent
   }

   public removeSelectOption(index: number) {
     this.selectOptions.removeAt(index)
-    this._allSelectOptions.splice(
-      index + (this.selectOptionsPage - 1) * SELECT_OPTION_PAGE_SIZE,
-      1
+    const globalIndex =
+      index + (this.selectOptionsPage - 1) * SELECT_OPTION_PAGE_SIZE
+    this._allSelectOptions.splice(globalIndex, 1)
+    const totalPages = Math.max(
+      1,
+      Math.ceil(this._allSelectOptions.length / SELECT_OPTION_PAGE_SIZE)
     )
+    const targetPage = Math.min(this.selectOptionsPage, totalPages)
+    this.selectOptionsPage = targetPage
   }
 }
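The fix translates the page-local index into a global index before splicing, then clamps the current page so that deleting the last option on the final page cannot leave the dialog on a page past the end. The same arithmetic in a small Python sketch, with PAGE_SIZE and a plain list standing in for SELECT_OPTION_PAGE_SIZE and _allSelectOptions:

import math

PAGE_SIZE = 3

def remove_option(options: list[str], page: int, index_on_page: int) -> int:
    """Remove the option and return the (possibly clamped) current page."""
    global_index = index_on_page + (page - 1) * PAGE_SIZE
    del options[global_index]
    total_pages = max(1, math.ceil(len(options) / PAGE_SIZE))
    return min(page, total_pages)

opts = ["a", "b", "c", "d"]                          # two pages: [a, b, c] and [d]
page = remove_option(opts, page=2, index_on_page=0)  # delete "d" on page 2
assert page == 1                                     # falls back to the last valid page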

View File

@@ -164,6 +164,9 @@ class BarcodePlugin(ConsumeTaskPlugin):
                     mailrule_id=self.input_doc.mailrule_id,
                     # Can't use same folder or the consume might grab it again
                     original_file=(tmp_dir / new_document.name).resolve(),
+                    # Adding optional original_path for later use in
+                    # workflow matching
+                    original_path=self.input_doc.original_file,
                 ),
                 # All the same metadata
                 self.metadata,

View File

@@ -156,6 +156,7 @@ class ConsumableDocument:

     source: DocumentSource
     original_file: Path
+    original_path: Path | None = None
     mailrule_id: int | None = None
     mime_type: str = dataclasses.field(init=False, default=None)
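Together with the barcodes.py change above, the new field gives a split document two locations: original_file is the temp-dir copy that gets consumed, while original_path records where the source document actually arrived. A trimmed stand-in for ConsumableDocument with illustrative paths:

import dataclasses
from pathlib import Path

@dataclasses.dataclass
class ConsumableDocument:
    original_file: Path
    original_path: Path | None = None

split_doc = ConsumableDocument(
    # The split page is written to a temp dir so the consume folder
    # does not pick it up a second time ...
    original_file=Path("/tmp/paperless-split/page-1.pdf"),
    # ... while original_path preserves the consume-folder location
    # that path-based workflow triggers should match against.
    original_path=Path("/consume/invoices/scan.pdf"),
)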

View File

@@ -314,11 +314,19 @@ def consumable_document_matches_workflow(
     trigger_matched = False

     # Document path vs trigger path
+    # Use the original_path if set, else use the original_file
+    match_against = (
+        document.original_path
+        if document.original_path is not None
+        else document.original_file
+    )
+
     if (
         trigger.filter_path is not None
         and len(trigger.filter_path) > 0
         and not fnmatch(
-            document.original_file,
+            match_against,
             trigger.filter_path,
         )
     ):
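The net effect: a barcode-split document whose original_file sits in a temp directory can still satisfy a path-based trigger through its original_path. A minimal sketch of the fallback, with SimpleNamespace standing in for ConsumableDocument and an illustrative glob:

from fnmatch import fnmatch
from types import SimpleNamespace

def path_trigger_matches(document, filter_path: str) -> bool:
    # Prefer original_path when set, else fall back to original_file.
    match_against = (
        document.original_path
        if document.original_path is not None
        else document.original_file
    )
    return fnmatch(str(match_against), filter_path)

split_doc = SimpleNamespace(
    original_file="/tmp/paperless-split/page-1.pdf",
    original_path="/consume/invoices/scan.pdf",
)
# The temp-dir copy alone would not match the trigger's glob ...
assert not fnmatch(split_doc.original_file, "/consume/invoices/*")
# ... but matching against original_path does.
assert path_trigger_matches(split_doc, "/consume/invoices/*")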

View File

@@ -614,14 +614,16 @@ class TestBarcodeNewConsume(
         self.assertIsNotFile(temp_copy)

         # Check the split files exist
+        # Check the original_path is set
         # Check the source is unchanged
         # Check the overrides are unchanged
         for (
             new_input_doc,
             new_doc_overrides,
         ) in self.get_all_consume_delay_call_args():
-            self.assertEqual(new_input_doc.source, DocumentSource.ConsumeFolder)
             self.assertIsFile(new_input_doc.original_file)
+            self.assertEqual(new_input_doc.original_path, temp_copy)
+            self.assertEqual(new_input_doc.source, DocumentSource.ConsumeFolder)
             self.assertEqual(overrides, new_doc_overrides)