Compare commits

..

1 Commits

Author SHA1 Message Date
dependabot[bot]
3a6c245f54 docker(deps): bump astral-sh/uv
Bumps [astral-sh/uv](https://github.com/astral-sh/uv) from 0.8.17-python3.12-bookworm-slim to 0.8.19-python3.12-bookworm-slim.
- [Release notes](https://github.com/astral-sh/uv/releases)
- [Changelog](https://github.com/astral-sh/uv/blob/main/CHANGELOG.md)
- [Commits](https://github.com/astral-sh/uv/compare/0.8.17...0.8.19)

---
updated-dependencies:
- dependency-name: astral-sh/uv
  dependency-version: 0.8.19-python3.12-bookworm-slim
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-09-22 21:02:08 +00:00
11 changed files with 127 additions and 550 deletions

View File

@@ -17,52 +17,11 @@ env:
DEFAULT_PYTHON_VERSION: "3.11"
NLTK_DATA: "/usr/share/nltk_data"
jobs:
detect-duplicate:
name: Detect Duplicate Run
runs-on: ubuntu-24.04
outputs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- name: Check if workflow should run
id: check
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
if (context.eventName !== 'push') {
core.info('Not a push event; running workflow.');
core.setOutput('should_run', 'true');
return;
}
const ref = context.ref || '';
if (!ref.startsWith('refs/heads/')) {
core.info('Push is not to a branch; running workflow.');
core.setOutput('should_run', 'true');
return;
}
const branch = ref.substring('refs/heads/'.length);
const { owner, repo } = context.repo;
const prs = await github.paginate(github.rest.pulls.list, {
owner,
repo,
state: 'open',
head: `${owner}:${branch}`,
per_page: 100,
});
if (prs.length === 0) {
core.info(`No open PR found for ${branch}; running workflow.`);
core.setOutput('should_run', 'true');
} else {
core.info(`Found ${prs.length} open PR(s) for ${branch}; skipping duplicate push run.`);
core.setOutput('should_run', 'false');
}
pre-commit:
needs:
- detect-duplicate
if: needs.detect-duplicate.outputs.should_run == 'true'
# We want to run on external PRs, but not on our own internal PRs as they'll be run
# by the push to the branch. Without this if check, checks are duplicated since
# internal PRs match both the push and pull_request events.
if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
name: Linting Checks
runs-on: ubuntu-24.04
steps:
@@ -192,6 +151,18 @@ jobs:
token: ${{ secrets.CODECOV_TOKEN }}
flags: backend-python-${{ matrix.python-version }}
files: coverage.xml
- name: Upload coverage artifacts
uses: actions/upload-artifact@v4
if: always()
with:
name: backend-coverage-${{ matrix.python-version }}
path: |
.coverage
coverage.xml
junit.xml
retention-days: 1
include-hidden-files: true
if-no-files-found: error
- name: Stop containers
if: always()
run: |
@@ -274,6 +245,17 @@ jobs:
token: ${{ secrets.CODECOV_TOKEN }}
flags: frontend-node-${{ matrix.node-version }}
directory: src-ui/coverage/
- name: Upload coverage artifacts
uses: actions/upload-artifact@v4
if: always()
with:
name: frontend-coverage-${{ matrix.shard-index }}
path: |
src-ui/coverage/lcov.info
src-ui/coverage/coverage-final.json
src-ui/junit.xml
retention-days: 1
if-no-files-found: error
tests-frontend-e2e:
name: "Frontend E2E Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
runs-on: ubuntu-24.04
@@ -322,455 +304,6 @@ jobs:
run: cd src-ui && pnpm exec playwright install
- name: Run Playwright e2e tests
run: cd src-ui && pnpm exec playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}
codecov-comment:
name: "Codecov PR Comment"
runs-on: ubuntu-24.04
needs:
- tests-backend
- tests-frontend
- tests-frontend-e2e
if: github.event_name == 'pull_request'
permissions:
contents: read
pull-requests: write
steps:
- name: Gather pull request context
id: pr
uses: actions/github-script@v7
with:
script: |
const pr = context.payload.pull_request;
if (!pr) {
core.info('No associated pull request. Skipping.');
core.setOutput('shouldRun', 'false');
return;
}
core.setOutput('shouldRun', 'true');
core.setOutput('prNumber', pr.number.toString());
core.setOutput('headSha', pr.head.sha);
- name: Fetch Codecov coverage
id: coverage
if: steps.pr.outputs.shouldRun == 'true'
uses: actions/github-script@v7
env:
COMMIT_SHA: ${{ steps.pr.outputs.headSha }}
PR_NUMBER: ${{ steps.pr.outputs.prNumber }}
with:
script: |
const commitSha = process.env.COMMIT_SHA;
const prNumber = process.env.PR_NUMBER;
const owner = context.repo.owner;
const repo = context.repo.repo;
const service = 'gh';
const baseUrl = `https://api.codecov.io/api/v2/${service}/${owner}/repos/${repo}`;
const commitUrl = `${baseUrl}/commits/${commitSha}`;
const maxAttempts = 20;
const waitMs = 15000;
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
let data;
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
core.info(`Fetching Codecov report (attempt ${attempt}/${maxAttempts})`);
let response;
try {
response = await fetch(commitUrl, {
headers: {
'Content-Type': 'application/json',
Accept: 'application/json',
},
});
} catch (error) {
core.warning(`Codecov fetch failed: ${error}. Waiting before retrying.`);
await sleep(waitMs);
continue;
}
if (response.status === 404) {
core.info('Report not ready yet (404). Waiting before retrying.');
await sleep(waitMs);
continue;
}
if ([429, 500, 502, 503, 504].includes(response.status)) {
const text = await response.text().catch(() => '');
core.info(`Codecov API transient error ${response.status}: ${text}. Waiting before retrying.`);
await sleep(waitMs);
continue;
}
if (!response.ok) {
const text = await response.text().catch(() => '');
core.warning(`Codecov API returned ${response.status}: ${text}. Skipping comment.`);
core.setOutput('shouldComment', 'false');
return;
}
data = await response.json().catch((error) => {
core.warning(`Failed to parse Codecov response: ${error}.`);
return undefined;
});
if (data && Object.keys(data).length > 0) {
break;
}
core.info('Report payload empty. Waiting before retrying.');
await sleep(waitMs);
}
if (!data && prNumber) {
core.info('Attempting to retrieve coverage from PR endpoint.');
const prUrl = `${baseUrl}/pulls/${prNumber}`;
let prResponse;
try {
prResponse = await fetch(prUrl, {
headers: {
'Content-Type': 'application/json',
Accept: 'application/json',
},
});
} catch (error) {
core.warning(`Codecov PR fetch failed: ${error}.`);
}
if (prResponse) {
if ([429, 500, 502, 503, 504].includes(prResponse.status)) {
const text = await prResponse.text().catch(() => '');
core.info(`Codecov PR endpoint transient error ${prResponse.status}: ${text}.`);
} else if (!prResponse.ok) {
const text = await prResponse.text().catch(() => '');
core.warning(`Codecov PR endpoint returned ${prResponse.status}: ${text}.`);
} else {
const prData = await prResponse.json().catch((error) => {
core.warning(`Failed to parse Codecov PR response: ${error}.`);
return undefined;
});
if (prData?.latest_report) {
data = { report: prData.latest_report };
} else if (prData?.head_totals) {
const headTotals = prData.head_totals;
const baseTotals = prData.base_totals;
let compareTotals;
if (baseTotals && headTotals) {
const headCoverage = Number(headTotals.coverage);
const baseCoverage = Number(baseTotals.coverage);
if (Number.isFinite(headCoverage) && Number.isFinite(baseCoverage)) {
compareTotals = {
base_coverage: baseCoverage,
coverage_change: headCoverage - baseCoverage,
};
}
}
data = {
report: {
totals: headTotals,
compare: compareTotals ? { totals: compareTotals } : undefined,
totals_by_flag: [],
},
head_totals: headTotals,
base_totals: baseTotals,
};
} else {
data = prData;
}
}
}
}
if (!data) {
core.warning('Unable to retrieve Codecov report after multiple attempts.');
core.setOutput('shouldComment', 'false');
return;
}
const toNumber = (value) => {
if (value === null || value === undefined || value === '') {
return undefined;
}
const num = Number(value);
return Number.isFinite(num) ? num : undefined;
};
const reportData = data.report || data;
const totals = reportData.totals ?? data.head_totals ?? data.totals;
if (!totals) {
core.warning('Codecov response does not contain coverage totals.');
core.setOutput('shouldComment', 'false');
return;
}
let compareTotals = reportData.compare?.totals ?? data.compare?.totals;
if (!compareTotals && data.base_totals) {
const baseCoverageValue = toNumber(data.base_totals.coverage);
if (baseCoverageValue !== undefined) {
const headCoverageValue = toNumber((data.head_totals ?? {}).coverage);
compareTotals = {
base_coverage: baseCoverageValue,
coverage_change:
headCoverageValue !== undefined ? headCoverageValue - baseCoverageValue : undefined,
};
}
}
const coverage = toNumber(totals.coverage);
const baseCoverage = toNumber(compareTotals?.base_coverage ?? compareTotals?.base);
let delta = toNumber(
compareTotals?.coverage_change ??
compareTotals?.coverage_diff ??
totals.delta ??
totals.diff ??
totals.change,
);
if (delta === undefined && coverage !== undefined && baseCoverage !== undefined) {
delta = coverage - baseCoverage;
}
const formatPercent = (value) => {
if (value === undefined) return '—';
return `${value.toFixed(2)}%`;
};
const formatDelta = (value) => {
if (value === undefined) return '—';
const sign = value >= 0 ? '+' : '';
return `${sign}${value.toFixed(2)}%`;
};
const shortSha = commitSha.slice(0, 7);
const reportBaseUrl = `https://app.codecov.io/gh/${owner}/${repo}`;
const commitReportUrl = `${reportBaseUrl}/commit/${commitSha}?src=pr&el=comment`;
const prReportUrl = prNumber
? `${reportBaseUrl}/pull/${prNumber}?src=pr&el=comment`
: commitReportUrl;
const findBaseCommitSha = () =>
data?.report?.compare?.base_commitid ??
data?.report?.compare?.base?.commitid ??
data?.report?.base_commitid ??
data?.compare?.base_commitid ??
data?.compare?.base?.commitid ??
data?.base_commitid ??
data?.base?.commitid;
const baseCommitSha = findBaseCommitSha();
const baseCommitUrl = baseCommitSha
? `${reportBaseUrl}/commit/${baseCommitSha}?src=pr&el=comment`
: undefined;
const baseShortSha = baseCommitSha ? baseCommitSha.slice(0, 7) : undefined;
const lines = ['<!-- codecov-coverage-comment -->'];
lines.push(`## [Codecov](${prReportUrl}) Report`);
lines.push('');
if (coverage !== undefined) {
lines.push(`:white_check_mark: Project coverage for \`${shortSha}\` is ${formatPercent(coverage)}.`);
} else {
lines.push(':warning: Coverage for the head commit is unavailable.');
}
if (baseCoverage !== undefined) {
const changeEmoji = delta === undefined ? ':grey_question:' : delta >= 0 ? ':white_check_mark:' : ':small_red_triangle_down:';
const baseCoverageText = `Base${baseShortSha ? ` \`${baseShortSha}\`` : ''} ${formatPercent(baseCoverage)}`;
const baseLink = baseCommitUrl ? `[${baseCoverageText}](${baseCommitUrl})` : baseCoverageText;
const changeText =
delta !== undefined
? `${baseLink} (${formatDelta(delta)})`
: `${baseLink} (change unknown)`;
lines.push(`${changeEmoji} ${changeText}.`);
}
lines.push(`:clipboard: [View full report on Codecov](${commitReportUrl}).`);
const normalizeTotals = (value) => {
if (!value) return undefined;
if (value.totals && typeof value.totals === 'object') return value.totals;
return value;
};
const headTotals = normalizeTotals(totals) ?? {};
const baseTotals =
normalizeTotals(data.base_totals) ??
normalizeTotals(reportData.base_totals) ??
normalizeTotals(reportData.compare?.base_totals) ??
normalizeTotals(reportData.compare?.base);
const formatInteger = (value) => {
if (value === undefined) return '—';
return value.toLocaleString('en-US');
};
const formatIntegerDelta = (value) => {
if (value === undefined) return '—';
const sign = value >= 0 ? '+' : '';
return `${sign}${value.toLocaleString('en-US')}`;
};
const getInteger = (value) => {
const num = toNumber(value);
return Number.isFinite(num) ? Math.round(num) : undefined;
};
const metrics = [];
metrics.push({
label: 'Coverage',
base: baseCoverage,
head: coverage,
diff: delta,
format: formatPercent,
formatDiff: formatDelta,
});
const pushIntegerMetric = (label, headValueRaw, baseValueRaw) => {
const headValue = getInteger(headValueRaw);
const baseValue = getInteger(baseValueRaw);
if (headValue === undefined && baseValue === undefined) {
return;
}
const diff = headValue !== undefined && baseValue !== undefined ? headValue - baseValue : undefined;
metrics.push({
label,
base: baseValue,
head: headValue,
diff,
format: formatInteger,
formatDiff: formatIntegerDelta,
});
};
pushIntegerMetric('Files', headTotals.files, baseTotals?.files);
pushIntegerMetric('Lines', headTotals.lines, baseTotals?.lines);
pushIntegerMetric('Branches', headTotals.branches, baseTotals?.branches);
pushIntegerMetric('Hits', headTotals.hits, baseTotals?.hits);
pushIntegerMetric('Misses', headTotals.misses, baseTotals?.misses);
const hasMetricData = metrics.some((metric) => metric.base !== undefined || metric.head !== undefined);
if (hasMetricData) {
lines.push('');
lines.push('<details><summary>Coverage summary</summary>');
lines.push('');
lines.push('| Metric | Base | Head | Δ |');
lines.push('| --- | --- | --- | --- |');
for (const metric of metrics) {
const baseValue = metric.base !== undefined ? metric.format(metric.base) : '—';
const headValue = metric.head !== undefined ? metric.format(metric.head) : '—';
const diffValue = metric.diff !== undefined ? metric.formatDiff(metric.diff) : '—';
lines.push(`| ${metric.label} | ${baseValue} | ${headValue} | ${diffValue} |`);
}
lines.push('');
lines.push('</details>');
}
const normalizeEntries = (raw) => {
if (!raw) return [];
if (Array.isArray(raw)) return raw;
if (typeof raw === 'object') {
return Object.entries(raw).map(([name, totals]) => ({ name, ...(typeof totals === 'object' ? totals : { coverage: totals }) }));
}
return [];
};
const buildTableRows = (entries) => {
const rows = [];
for (const entry of entries) {
const label = entry.flag ?? entry.name ?? entry.component ?? entry.id;
const entryTotals = entry.totals ?? entry;
const entryCoverage = toNumber(entryTotals?.coverage);
if (!label || entryCoverage === undefined) {
continue;
}
const entryDelta = toNumber(
entryTotals?.coverage_change ??
entryTotals?.coverage_diff ??
entryTotals?.delta ??
entryTotals?.diff ??
entryTotals?.change,
);
const coverageText = entryCoverage !== undefined ? `\`${formatPercent(entryCoverage)}\`` : '—';
const deltaText = entryDelta !== undefined ? `\`${formatDelta(entryDelta)}\`` : '—';
rows.push(`| ${label} | ${coverageText} | ${deltaText} |`);
}
return rows;
};
const componentEntries = normalizeEntries(reportData.components ?? data.components);
const flagEntries = normalizeEntries(reportData.totals_by_flag ?? data.totals_by_flag);
if (componentEntries.length) {
const componentsLink = prNumber
? `${reportBaseUrl}/pull/${prNumber}/components?src=pr&el=components`
: `${commitReportUrl}`;
const componentRows = buildTableRows(componentEntries);
if (componentRows.length) {
lines.push('');
lines.push(`[Components report](${componentsLink})`);
lines.push('');
lines.push('| Component | Coverage | Δ |');
lines.push('| --- | --- | --- |');
lines.push(...componentRows);
}
}
if (flagEntries.length) {
const flagsLink = prNumber
? `${reportBaseUrl}/pull/${prNumber}/flags?src=pr&el=flags`
: `${commitReportUrl}`;
const flagRows = buildTableRows(flagEntries);
if (flagRows.length) {
lines.push('');
lines.push(`[Flags report](${flagsLink})`);
lines.push('');
lines.push('| Flag | Coverage | Δ |');
lines.push('| --- | --- | --- |');
lines.push(...flagRows);
}
}
const commentBody = lines.join('\n');
const shouldComment = coverage !== undefined;
core.setOutput('shouldComment', shouldComment ? 'true' : 'false');
if (shouldComment) {
core.setOutput('commentBody', commentBody);
}
- name: Upsert coverage comment
if: steps.pr.outputs.shouldRun == 'true' && steps.coverage.outputs.shouldComment == 'true'
uses: actions/github-script@v7
env:
PR_NUMBER: ${{ steps.pr.outputs.prNumber }}
COMMENT_BODY: ${{ steps.coverage.outputs.commentBody }}
with:
script: |
const prNumber = Number(process.env.PR_NUMBER);
const body = process.env.COMMENT_BODY;
const marker = '<!-- codecov-coverage-comment -->';
const { data: comments } = await github.rest.issues.listComments({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: prNumber,
per_page: 100,
});
const existing = comments.find((comment) => comment.body?.includes(marker));
if (existing) {
core.info(`Updating existing coverage comment (id: ${existing.id}).`);
await github.rest.issues.updateComment({
owner: context.repo.owner,
repo: context.repo.repo,
comment_id: existing.id,
body,
});
} else {
core.info('Creating new coverage comment.');
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: prNumber,
body,
});
}
frontend-bundle-analysis:
name: "Frontend Bundle Analysis"
runs-on: ubuntu-24.04
@@ -803,6 +336,74 @@ jobs:
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
run: cd src-ui && pnpm run build --configuration=production
sonarqube-analysis:
name: "SonarQube Analysis"
runs-on: ubuntu-24.04
needs:
- tests-backend
- tests-frontend
if: github.repository_owner == 'paperless-ngx'
steps:
- name: Checkout
uses: actions/checkout@v5
with:
fetch-depth: 0
- name: Download all backend coverage
uses: actions/download-artifact@v5.0.0
with:
pattern: backend-coverage-*
path: ./coverage/
- name: Download all frontend coverage
uses: actions/download-artifact@v5.0.0
with:
pattern: frontend-coverage-*
path: ./coverage/
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install coverage tools
run: |
pip install coverage
npm install -g nyc
# Merge backend coverage from all Python versions
- name: Merge backend coverage
run: |
coverage combine coverage/backend-coverage-*/.coverage
coverage xml -o merged-backend-coverage.xml
# Merge frontend coverage from all shards
- name: Merge frontend coverage
run: |
# Find all coverage-final.json files from the shards, exit with error if none found
shopt -s nullglob
files=(coverage/frontend-coverage-*/coverage/coverage-final.json)
if [ ${#files[@]} -eq 0 ]; then
echo "No frontend coverage JSON found under coverage/" >&2
exit 1
fi
# Create .nyc_output directory and copy each shard's coverage JSON into it with a unique name
mkdir -p .nyc_output
for coverage_json in "${files[@]}"; do
shard=$(basename "$(dirname "$(dirname "$coverage_json")")")
cp "$coverage_json" ".nyc_output/${shard}.json"
done
npx nyc merge .nyc_output .nyc_output/out.json
npx nyc report --reporter=lcovonly --report-dir coverage
- name: Upload coverage artifacts
uses: actions/upload-artifact@v4.6.2
with:
name: merged-coverage
path: |
merged-backend-coverage.xml
.nyc_output/*
coverage/lcov.info
retention-days: 7
if-no-files-found: error
include-hidden-files: true
- name: SonarQube Analysis
uses: SonarSource/sonarqube-scan-action@v5
env:
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
build-docker-image:
name: Build Docker image for ${{ github.ref_name }}
runs-on: ubuntu-24.04

View File

@@ -32,7 +32,7 @@ RUN set -eux \
# Purpose: Installs s6-overlay and rootfs
# Comments:
# - Don't leave anything extra in here either
FROM ghcr.io/astral-sh/uv:0.8.17-python3.12-bookworm-slim AS s6-overlay-base
FROM ghcr.io/astral-sh/uv:0.8.19-python3.12-bookworm-slim AS s6-overlay-base
WORKDIR /usr/src/s6

View File

@@ -255,6 +255,7 @@ PAPERLESS_DISABLE_DBHANDLER = "true"
PAPERLESS_CACHE_BACKEND = "django.core.cache.backends.locmem.LocMemCache"
[tool.coverage.run]
relative_files = true
source = [
"src/",
]

24
sonar-project.properties Normal file
View File

@@ -0,0 +1,24 @@
sonar.projectKey=paperless-ngx_paperless-ngx
sonar.organization=paperless-ngx
sonar.projectName=Paperless-ngx
sonar.projectVersion=1.0
# Source and test directories
sonar.sources=src/,src-ui/
sonar.test.inclusions=**/test_*.py,**/tests.py,**/*.spec.ts,**/*.test.ts
# Language specific settings
sonar.python.version=3.10,3.11,3.12,3.13
# Coverage reports
sonar.python.coverage.reportPaths=merged-backend-coverage.xml
sonar.javascript.lcov.reportPaths=coverage/lcov.info
# Test execution reports
sonar.junit.reportPaths=**/junit.xml,**/test-results.xml
# Encoding
sonar.sourceEncoding=UTF-8
# Exclusions
sonar.exclusions=**/migrations/**,**/node_modules/**,**/static/**,**/venv/**,**/.venv/**,**/dist/**

View File

@@ -177,16 +177,10 @@ export class CustomFieldEditDialogComponent
}
public removeSelectOption(index: number) {
const globalIndex =
index + (this.selectOptionsPage - 1) * SELECT_OPTION_PAGE_SIZE
this._allSelectOptions.splice(globalIndex, 1)
const totalPages = Math.max(
1,
Math.ceil(this._allSelectOptions.length / SELECT_OPTION_PAGE_SIZE)
this.selectOptions.removeAt(index)
this._allSelectOptions.splice(
index + (this.selectOptionsPage - 1) * SELECT_OPTION_PAGE_SIZE,
1
)
const targetPage = Math.min(this.selectOptionsPage, totalPages)
this.selectOptionsPage = targetPage
}
}

View File

@@ -164,9 +164,6 @@ class BarcodePlugin(ConsumeTaskPlugin):
mailrule_id=self.input_doc.mailrule_id,
# Can't use same folder or the consume might grab it again
original_file=(tmp_dir / new_document.name).resolve(),
# Adding optional original_path for later uses in
# workflow matching
original_path=self.input_doc.original_file,
),
# All the same metadata
self.metadata,

View File

@@ -156,7 +156,6 @@ class ConsumableDocument:
source: DocumentSource
original_file: Path
original_path: Path | None = None
mailrule_id: int | None = None
mime_type: str = dataclasses.field(init=False, default=None)

View File

@@ -92,9 +92,6 @@ class Command(MultiProcessMixin, ProgressBarMixin, BaseCommand):
# doc to doc is obviously not useful
if first_doc.pk == second_doc.pk:
continue
# Skip empty documents (e.g. password-protected)
if first_doc.content.strip() == "" or second_doc.content.strip() == "":
continue
# Skip matching which have already been matched together
# doc 1 to doc 2 is the same as doc 2 to doc 1
doc_1_to_doc_2 = (first_doc.pk, second_doc.pk)

View File

@@ -314,19 +314,11 @@ def consumable_document_matches_workflow(
trigger_matched = False
# Document path vs trigger path
# Use the original_path if set, else use the original_file
match_against = (
document.original_path
if document.original_path is not None
else document.original_file
)
if (
trigger.filter_path is not None
and len(trigger.filter_path) > 0
and not fnmatch(
match_against,
document.original_file,
trigger.filter_path,
)
):

View File

@@ -614,16 +614,14 @@ class TestBarcodeNewConsume(
self.assertIsNotFile(temp_copy)
# Check the split files exist
# Check the original_path is set
# Check the source is unchanged
# Check the overrides are unchanged
for (
new_input_doc,
new_doc_overrides,
) in self.get_all_consume_delay_call_args():
self.assertIsFile(new_input_doc.original_file)
self.assertEqual(new_input_doc.original_path, temp_copy)
self.assertEqual(new_input_doc.source, DocumentSource.ConsumeFolder)
self.assertIsFile(new_input_doc.original_file)
self.assertEqual(overrides, new_doc_overrides)

View File

@@ -206,29 +206,3 @@ class TestFuzzyMatchCommand(TestCase):
self.assertEqual(Document.objects.count(), 2)
self.assertIsNotNone(Document.objects.get(pk=1))
self.assertIsNotNone(Document.objects.get(pk=2))
def test_empty_content(self):
"""
GIVEN:
- 2 documents exist, content is empty (pw-protected)
WHEN:
- Command is called
THEN:
- No matches are found
"""
Document.objects.create(
checksum="BEEFCAFE",
title="A",
content="",
mime_type="application/pdf",
filename="test.pdf",
)
Document.objects.create(
checksum="DEADBEAF",
title="A",
content="",
mime_type="application/pdf",
filename="other_test.pdf",
)
stdout, _ = self.call_command()
self.assertIn("No matches found", stdout)