Compare commits


10 Commits

Author  SHA1  Message  Date
shamoon  770fb2d60e  Update build-and-release.yml  2025-09-24 15:22:15 -07:00
shamoon  c8ef9e663a  Yikes, try split ci workflow  2025-09-24 15:03:41 -07:00
shamoon  2195e4af45  Ok, lets try manual Codecov comments  2025-09-24 14:48:06 -07:00
shamoon  c6716905a4  Revert "Chore: Enable SonarQube scanning (#10904)" (reverts commit 8d1f23e9d6)  2025-09-24 14:38:22 -07:00
shamoon  850ee5a415  Revert "Chore: remove conditional from pre-commit job in CI (#10916)" (reverts commit 53b393dab5)  2025-09-24 14:38:19 -07:00
shamoon  b25b5abdb0  Revert "Development: try separating sonar scan" (reverts commit 68e0559053)  2025-09-24 14:38:13 -07:00
shamoon  68e0559053  Development: try separating sonar scan  2025-09-24 14:26:05 -07:00
DerRockWolf  4ff09c4cf4  Enhancement: support workflow path matching of barcode-split documents (#10723)  2025-09-24 21:03:03 +00:00
shamoon  53b393dab5  Chore: remove conditional from pre-commit job in CI (#10916)  2025-09-24 13:43:09 -07:00
shamoon  6119c215e7  Fix: skip fuzzy matching for empty document content (#10914)  2025-09-22 23:30:24 -07:00
12 changed files with 696 additions and 440 deletions

.github/workflows/build-and-release.yml (new file, 430 lines)

@@ -0,0 +1,430 @@
name: 'Build and Release'
on:
workflow_run:
workflows:
- ci
types:
- completed
permissions:
contents: write
packages: write
pull-requests: write
env:
DEFAULT_UV_VERSION: "0.8.x"
DEFAULT_PYTHON_VERSION: "3.11"
NLTK_DATA: "/usr/share/nltk_data"
jobs:
prepare:
if: >-
github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.event == 'push'
name: Prepare build context
runs-on: ubuntu-24.04
outputs:
should-build: ${{ steps.determine.outputs.should-build }}
ref: ${{ steps.determine.outputs.ref }}
ref-name: ${{ steps.determine.outputs.ref-name }}
sha: ${{ steps.determine.outputs.sha }}
is-tag: ${{ steps.determine.outputs.is-tag }}
is-release-target: ${{ steps.determine.outputs.is-release-target }}
is-beta-rc: ${{ steps.determine.outputs.is-beta-rc }}
steps:
- name: Determine ref information
id: determine
uses: actions/github-script@v7
with:
script: |
const run = context.payload.workflow_run;
const owner = context.repo.owner;
const repo = context.repo.repo;
const sha = run.head_sha;
const branch = run.head_branch;
let ref = undefined;
let refName = undefined;
if (branch) {
ref = `refs/heads/${branch}`;
refName = branch;
} else {
const iterator = github.paginate.iterator(
github.rest.repos.listTags,
{
owner,
repo,
per_page: 100,
},
);
for await (const { data } of iterator) {
const match = data.find((tag) => tag.commit?.sha === sha);
if (match) {
ref = `refs/tags/${match.name}`;
refName = match.name;
break;
}
}
}
const outputs = {
shouldBuild: false,
ref: ref ?? '',
refName: refName ?? '',
sha,
isTag: ref?.startsWith('refs/tags/') ?? false,
isReleaseTarget: false,
isBetaRc: false,
};
if (!ref || !refName) {
core.info('No matching ref found for workflow run; skipping post-CI workflow.');
} else {
const allowed =
ref.startsWith('refs/heads/feature-') ||
ref.startsWith('refs/heads/fix-') ||
ref.startsWith('refs/heads/l10n_') ||
ref === 'refs/heads/dev' ||
ref === 'refs/heads/beta' ||
ref.includes('beta.rc') ||
ref.startsWith('refs/tags/v');
const isBetaRc = refName.includes('beta.rc');
const isReleaseTarget = outputs.isTag && (refName.startsWith('v') || isBetaRc);
outputs.shouldBuild = allowed;
outputs.isReleaseTarget = isReleaseTarget;
outputs.isBetaRc = isBetaRc;
}
core.setOutput('should-build', outputs.shouldBuild ? 'true' : 'false');
core.setOutput('ref', outputs.ref);
core.setOutput('ref-name', outputs.refName);
core.setOutput('sha', outputs.sha);
core.setOutput('is-tag', outputs.isTag ? 'true' : 'false');
core.setOutput('is-release-target', outputs.isReleaseTarget ? 'true' : 'false');
core.setOutput('is-beta-rc', outputs.isBetaRc ? 'true' : 'false');
build-docker-image:
needs: prepare
if: needs.prepare.outputs.should-build == 'true'
name: Build Docker image for ${{ needs.prepare.outputs.ref-name }}
runs-on: ubuntu-24.04
concurrency:
group: ${{ github.workflow }}-build-docker-image-${{ needs.prepare.outputs.ref-name || needs.prepare.outputs.sha }}
cancel-in-progress: true
env:
REF: ${{ needs.prepare.outputs.ref }}
REF_NAME: ${{ needs.prepare.outputs.ref-name }}
SHA: ${{ needs.prepare.outputs.sha }}
steps:
- name: Checkout
uses: actions/checkout@v5
with:
ref: ${{ env.SHA }}
- name: Check pushing to Docker Hub
id: push-other-places
env:
REPOSITORY_OWNER: ${{ github.repository_owner }}
REF_NAME: ${{ env.REF_NAME }}
REF: ${{ env.REF }}
run: |
if [[ "$REPOSITORY_OWNER" == "paperless-ngx" ]] && \
([[ "$REF_NAME" == "dev" ]] || [[ "$REF_NAME" == "beta" ]] || [[ "$REF" == refs/tags/v* ]]); then
echo "Enabling DockerHub image push"
echo "enable=true" >> "$GITHUB_OUTPUT"
else
echo "Not pushing to DockerHub"
echo "enable=false" >> "$GITHUB_OUTPUT"
fi
- name: Set ghcr repository name
id: set-ghcr-repository
run: |
ghcr_name=$(echo "${{ github.repository }}" | awk '{ print tolower($0) }')
echo "Name is ${ghcr_name}"
echo "ghcr-repository=${ghcr_name}" >> "$GITHUB_OUTPUT"
- name: Gather Docker metadata
id: docker-meta
uses: docker/metadata-action@v5
env:
GITHUB_REF: ${{ env.REF }}
GITHUB_REF_NAME: ${{ env.REF_NAME }}
GITHUB_SHA: ${{ env.SHA }}
with:
images: |
ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}
name=paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
name=quay.io/paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
tags: |
type=ref,event=branch
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
platforms: arm64
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Docker Hub
if: steps.push-other-places.outputs.enable == 'true'
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to Quay.io
if: steps.push-other-places.outputs.enable == 'true'
uses: docker/login-action@v3
with:
registry: quay.io
username: ${{ secrets.QUAY_USERNAME }}
password: ${{ secrets.QUAY_ROBOT_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v6
with:
context: .
file: ./Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.docker-meta.outputs.tags }}
labels: ${{ steps.docker-meta.outputs.labels }}
build-args: |
PNGX_TAG_VERSION=${{ steps.docker-meta.outputs.version }}
cache-from: |
type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ env.REF_NAME }}
type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:dev
cache-to: |
type=registry,mode=max,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ env.REF_NAME }}
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
- name: Export frontend artifact from docker
run: |
docker create --name frontend-extract ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
docker cp frontend-extract:/usr/src/paperless/src/documents/static/frontend src/documents/static/frontend/
- name: Upload frontend artifact
uses: actions/upload-artifact@v4
with:
name: frontend-compiled
path: src/documents/static/frontend/
retention-days: 7
build-release:
needs:
- prepare
- build-docker-image
if: needs.prepare.outputs.should-build == 'true'
name: Build release bundle
runs-on: ubuntu-24.04
env:
REF_NAME: ${{ needs.prepare.outputs.ref-name }}
SHA: ${{ needs.prepare.outputs.sha }}
CI_RUN_ID: ${{ github.event.workflow_run.id }}
steps:
- name: Checkout
uses: actions/checkout@v5
with:
ref: ${{ env.SHA }}
- name: Set up Python
id: setup-python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v6
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install Python dependencies
run: |
uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
- name: Install system dependencies
run: |
sudo apt-get update -qq
sudo apt-get install -qq --no-install-recommends gettext liblept5
- name: Download frontend artifact
uses: actions/download-artifact@v5
with:
name: frontend-compiled
path: src/documents/static/frontend/
- name: Download documentation artifact
uses: actions/download-artifact@v5
with:
name: documentation
path: docs/_build/html/
run-id: ${{ env.CI_RUN_ID }}
- name: Generate requirements file
run: |
uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt
- name: Compile messages
run: |
cd src/
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
manage.py compilemessages
- name: Collect static files
run: |
cd src/
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
manage.py collectstatic --no-input
- name: Move files
run: |
echo "Making dist folders"
for directory in dist \
dist/paperless-ngx \
dist/paperless-ngx/scripts;
do
mkdir --verbose --parents ${directory}
done
echo "Copying basic files"
for file_name in .dockerignore \
.env \
Dockerfile \
pyproject.toml \
uv.lock \
requirements.txt \
LICENSE \
README.md \
paperless.conf.example
do
cp --verbose ${file_name} dist/paperless-ngx/
done
mv --verbose dist/paperless-ngx/paperless.conf.example dist/paperless-ngx/paperless.conf
echo "Copying Docker related files"
cp --recursive docker/ dist/paperless-ngx/docker
echo "Copying startup scripts"
cp --verbose scripts/*.service scripts/*.sh scripts/*.socket dist/paperless-ngx/scripts/
echo "Copying source files"
cp --recursive src/ dist/paperless-ngx/src
echo "Copying documentation"
cp --recursive docs/_build/html/ dist/paperless-ngx/docs
mv --verbose static dist/paperless-ngx
- name: Make release package
run: |
echo "Creating release archive"
cd dist
sudo chown -R 1000:1000 paperless-ngx/
tar -cJf paperless-ngx.tar.xz paperless-ngx/
- name: Upload release artifact
uses: actions/upload-artifact@v4
with:
name: release
path: dist/paperless-ngx.tar.xz
retention-days: 7
publish-release:
needs:
- prepare
- build-release
if: needs.prepare.outputs.is-release-target == 'true'
name: Publish release
runs-on: ubuntu-24.04
outputs:
prerelease: ${{ steps.get_version.outputs.prerelease }}
changelog: ${{ steps.create-release.outputs.body }}
version: ${{ steps.get_version.outputs.version }}
steps:
- name: Download release artifact
uses: actions/download-artifact@v5
with:
name: release
path: ./
- name: Get version
id: get_version
run: |
echo "version=${{ needs.prepare.outputs.ref-name }}" >> "$GITHUB_OUTPUT"
if [[ ${{ needs.prepare.outputs.is-beta-rc }} == 'true' ]]; then
echo "prerelease=true" >> "$GITHUB_OUTPUT"
else
echo "prerelease=false" >> "$GITHUB_OUTPUT"
fi
- name: Create Release and Changelog
id: create-release
uses: release-drafter/release-drafter@v6
with:
name: Paperless-ngx ${{ steps.get_version.outputs.version }}
tag: ${{ steps.get_version.outputs.version }}
version: ${{ steps.get_version.outputs.version }}
prerelease: ${{ steps.get_version.outputs.prerelease }}
publish: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload release archive
id: upload-release-asset
uses: shogo82148/actions-upload-release-asset@v1
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
upload_url: ${{ steps.create-release.outputs.upload_url }}
asset_path: ./paperless-ngx.tar.xz
asset_name: paperless-ngx-${{ steps.get_version.outputs.version }}.tar.xz
asset_content_type: application/x-xz
append-changelog:
needs:
- publish-release
if: needs.publish-release.outputs.prerelease == 'false'
name: Append changelog to docs
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v5
with:
ref: main
- name: Set up Python
id: setup-python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v6
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Append Changelog to docs
id: append-Changelog
working-directory: docs
run: |
git branch ${{ needs.publish-release.outputs.version }}-changelog
git checkout ${{ needs.publish-release.outputs.version }}-changelog
echo -e "# Changelog\n\n${{ needs.publish-release.outputs.changelog }}\n" > changelog-new.md
echo "Manually linking usernames"
sed -i -r 's|@([a-zA-Z0-9_]+) \(\[#|[@\1](https://github.com/\1) ([#|g' changelog-new.md
echo "Removing unneeded comment tags"
sed -i -r 's|@<!---->|@|g' changelog-new.md
CURRENT_CHANGELOG=`tail --lines +2 changelog.md`
echo -e "$CURRENT_CHANGELOG" >> changelog-new.md
mv changelog-new.md changelog.md
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
--dev \
pre-commit run --files changelog.md || true
git config --global user.name "github-actions"
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git commit -am "Changelog ${{ needs.publish-release.outputs.version }} - GHA"
git push origin ${{ needs.publish-release.outputs.version }}-changelog
- name: Create Pull Request
uses: actions/github-script@v7
with:
script: |
const { repo, owner } = context.repo;
const result = await github.rest.pulls.create({
title: 'Documentation: Add ${{ needs.publish-release.outputs.version }} changelog',
owner,
repo,
head: '${{ needs.publish-release.outputs.version }}-changelog',
base: 'main',
body: 'This PR is auto-generated by CI.'
});
github.rest.issues.addLabels({
owner,
repo,
issue_number: result.data.number,
labels: ['documentation', 'skip-changelog']
});

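In the append-changelog job above, the "Manually linking usernames" sed call rewrites release-drafter author mentions into Markdown profile links. A minimal Python sketch of the same substitution, applied to a hypothetical changelog line (the real body is generated by release-drafter at release time):

import re

# Python equivalent of the workflow's sed expression:
#   s|@([a-zA-Z0-9_]+) \(\[#|[@\1](https://github.com/\1) ([#|g
mention = re.compile(r"@([a-zA-Z0-9_]+) \(\[#")

# Hypothetical changelog line; the real body comes from release-drafter.
line = (
    "- Fix: skip fuzzy matching for empty document content @shamoon "
    "([#10914](https://github.com/paperless-ngx/paperless-ngx/pull/10914))"
)

print(mention.sub(r"[@\1](https://github.com/\1) ([#", line))
# -> "@shamoon" becomes "[@shamoon](https://github.com/shamoon)"; the PR link is untouched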

@@ -151,18 +151,6 @@ jobs:
token: ${{ secrets.CODECOV_TOKEN }}
flags: backend-python-${{ matrix.python-version }}
files: coverage.xml
- name: Upload coverage artifacts
uses: actions/upload-artifact@v4
if: always()
with:
name: backend-coverage-${{ matrix.python-version }}
path: |
.coverage
coverage.xml
junit.xml
retention-days: 1
include-hidden-files: true
if-no-files-found: error
- name: Stop containers
if: always()
run: |
@@ -245,17 +233,6 @@ jobs:
token: ${{ secrets.CODECOV_TOKEN }}
flags: frontend-node-${{ matrix.node-version }}
directory: src-ui/coverage/
- name: Upload coverage artifacts
uses: actions/upload-artifact@v4
if: always()
with:
name: frontend-coverage-${{ matrix.shard-index }}
path: |
src-ui/coverage/lcov.info
src-ui/coverage/coverage-final.json
src-ui/junit.xml
retention-days: 1
if-no-files-found: error
tests-frontend-e2e:
name: "Frontend E2E Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
runs-on: ubuntu-24.04
@@ -336,392 +313,3 @@ jobs:
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
run: cd src-ui && pnpm run build --configuration=production
sonarqube-analysis:
name: "SonarQube Analysis"
runs-on: ubuntu-24.04
needs:
- tests-backend
- tests-frontend
if: github.repository_owner == 'paperless-ngx'
steps:
- name: Checkout
uses: actions/checkout@v5
with:
fetch-depth: 0
- name: Download all backend coverage
uses: actions/download-artifact@v5.0.0
with:
pattern: backend-coverage-*
path: ./coverage/
- name: Download all frontend coverage
uses: actions/download-artifact@v5.0.0
with:
pattern: frontend-coverage-*
path: ./coverage/
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install coverage tools
run: |
pip install coverage
npm install -g nyc
# Merge backend coverage from all Python versions
- name: Merge backend coverage
run: |
coverage combine coverage/backend-coverage-*/.coverage
coverage xml -o merged-backend-coverage.xml
# Merge frontend coverage from all shards
- name: Merge frontend coverage
run: |
# Find all coverage-final.json files from the shards, exit with error if none found
shopt -s nullglob
files=(coverage/frontend-coverage-*/coverage/coverage-final.json)
if [ ${#files[@]} -eq 0 ]; then
echo "No frontend coverage JSON found under coverage/" >&2
exit 1
fi
# Create .nyc_output directory and copy each shard's coverage JSON into it with a unique name
mkdir -p .nyc_output
for coverage_json in "${files[@]}"; do
shard=$(basename "$(dirname "$(dirname "$coverage_json")")")
cp "$coverage_json" ".nyc_output/${shard}.json"
done
npx nyc merge .nyc_output .nyc_output/out.json
npx nyc report --reporter=lcovonly --report-dir coverage
- name: Upload coverage artifacts
uses: actions/upload-artifact@v4.6.2
with:
name: merged-coverage
path: |
merged-backend-coverage.xml
.nyc_output/*
coverage/lcov.info
retention-days: 7
if-no-files-found: error
include-hidden-files: true
- name: SonarQube Analysis
uses: SonarSource/sonarqube-scan-action@v5
env:
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
build-docker-image:
name: Build Docker image for ${{ github.ref_name }}
runs-on: ubuntu-24.04
if: github.event_name == 'push' && (startsWith(github.ref, 'refs/heads/feature-') || startsWith(github.ref, 'refs/heads/fix-') || github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/beta' || contains(github.ref, 'beta.rc') || startsWith(github.ref, 'refs/tags/v') || startsWith(github.ref, 'refs/heads/l10n_'))
concurrency:
group: ${{ github.workflow }}-build-docker-image-${{ github.ref_name }}
cancel-in-progress: true
needs:
- tests-backend
- tests-frontend
- tests-frontend-e2e
steps:
- name: Check pushing to Docker Hub
id: push-other-places
# Only push to Dockerhub from the main repo AND the ref is either:
# main
# dev
# beta
# a tag
# Otherwise forks would require a Docker Hub account and secrets setup
run: |
if [[ ${{ github.repository_owner }} == "paperless-ngx" && ( ${{ github.ref_name }} == "dev" || ${{ github.ref_name }} == "beta" || ${{ startsWith(github.ref, 'refs/tags/v') }} == "true" ) ]] ; then
echo "Enabling DockerHub image push"
echo "enable=true" >> $GITHUB_OUTPUT
else
echo "Not pushing to DockerHub"
echo "enable=false" >> $GITHUB_OUTPUT
fi
- name: Set ghcr repository name
id: set-ghcr-repository
run: |
ghcr_name=$(echo "${{ github.repository }}" | awk '{ print tolower($0) }')
echo "Name is ${ghcr_name}"
echo "ghcr-repository=${ghcr_name}" >> $GITHUB_OUTPUT
- name: Gather Docker metadata
id: docker-meta
uses: docker/metadata-action@v5
with:
images: |
ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}
name=paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
name=quay.io/paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
tags: |
# Tag branches with branch name
type=ref,event=branch
# Process semver tags
# For a tag x.y.z or vX.Y.Z, output an x.y.z and x.y image tag
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
- name: Checkout
uses: actions/checkout@v5
# If https://github.com/docker/buildx/issues/1044 is resolved,
# the append input with a native arm64 arch could be used to
# significantly speed up building
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
platforms: arm64
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Docker Hub
uses: docker/login-action@v3
# Don't attempt to login if not pushing to Docker Hub
if: steps.push-other-places.outputs.enable == 'true'
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to Quay.io
uses: docker/login-action@v3
# Don't attempt to login if not pushing to Quay.io
if: steps.push-other-places.outputs.enable == 'true'
with:
registry: quay.io
username: ${{ secrets.QUAY_USERNAME }}
password: ${{ secrets.QUAY_ROBOT_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v6
with:
context: .
file: ./Dockerfile
platforms: linux/amd64,linux/arm64
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.docker-meta.outputs.tags }}
labels: ${{ steps.docker-meta.outputs.labels }}
build-args: |
PNGX_TAG_VERSION=${{ steps.docker-meta.outputs.version }}
# Get cache layers from this branch, then dev
# This allows new branches to get at least some cache benefits, generally from dev
cache-from: |
type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:dev
cache-to: |
type=registry,mode=max,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
- name: Export frontend artifact from docker
run: |
docker create --name frontend-extract ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
docker cp frontend-extract:/usr/src/paperless/src/documents/static/frontend src/documents/static/frontend/
- name: Upload frontend artifact
uses: actions/upload-artifact@v4
with:
name: frontend-compiled
path: src/documents/static/frontend/
retention-days: 7
build-release:
name: "Build Release"
needs:
- build-docker-image
- documentation
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v5
- name: Set up Python
id: setup-python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v6
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
python-version: ${{ steps.setup-python.outputs.python-version }}
- name: Install Python dependencies
run: |
uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
- name: Install system dependencies
run: |
sudo apt-get update -qq
sudo apt-get install -qq --no-install-recommends gettext liblept5
- name: Download frontend artifact
uses: actions/download-artifact@v5
with:
name: frontend-compiled
path: src/documents/static/frontend/
- name: Download documentation artifact
uses: actions/download-artifact@v5
with:
name: documentation
path: docs/_build/html/
- name: Generate requirements file
run: |
uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt
- name: Compile messages
run: |
cd src/
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
manage.py compilemessages
- name: Collect static files
run: |
cd src/
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
manage.py collectstatic --no-input
- name: Move files
run: |
echo "Making dist folders"
for directory in dist \
dist/paperless-ngx \
dist/paperless-ngx/scripts;
do
mkdir --verbose --parents ${directory}
done
echo "Copying basic files"
for file_name in .dockerignore \
.env \
Dockerfile \
pyproject.toml \
uv.lock \
requirements.txt \
LICENSE \
README.md \
paperless.conf.example
do
cp --verbose ${file_name} dist/paperless-ngx/
done
mv --verbose dist/paperless-ngx/paperless.conf.example dist/paperless-ngx/paperless.conf
echo "Copying Docker related files"
cp --recursive docker/ dist/paperless-ngx/docker
echo "Copying startup scripts"
cp --verbose scripts/*.service scripts/*.sh scripts/*.socket dist/paperless-ngx/scripts/
echo "Copying source files"
cp --recursive src/ dist/paperless-ngx/src
echo "Copying documentation"
cp --recursive docs/_build/html/ dist/paperless-ngx/docs
mv --verbose static dist/paperless-ngx
- name: Make release package
run: |
echo "Creating release archive"
cd dist
sudo chown -R 1000:1000 paperless-ngx/
tar -cJf paperless-ngx.tar.xz paperless-ngx/
- name: Upload release artifact
uses: actions/upload-artifact@v4
with:
name: release
path: dist/paperless-ngx.tar.xz
retention-days: 7
publish-release:
name: "Publish Release"
runs-on: ubuntu-24.04
outputs:
prerelease: ${{ steps.get_version.outputs.prerelease }}
changelog: ${{ steps.create-release.outputs.body }}
version: ${{ steps.get_version.outputs.version }}
needs:
- build-release
if: github.ref_type == 'tag' && (startsWith(github.ref_name, 'v') || contains(github.ref_name, '-beta.rc'))
steps:
- name: Download release artifact
uses: actions/download-artifact@v5
with:
name: release
path: ./
- name: Get version
id: get_version
run: |
echo "version=${{ github.ref_name }}" >> $GITHUB_OUTPUT
if [[ ${{ contains(github.ref_name, '-beta.rc') }} == 'true' ]]; then
echo "prerelease=true" >> $GITHUB_OUTPUT
else
echo "prerelease=false" >> $GITHUB_OUTPUT
fi
- name: Create Release and Changelog
id: create-release
uses: release-drafter/release-drafter@v6
with:
name: Paperless-ngx ${{ steps.get_version.outputs.version }}
tag: ${{ steps.get_version.outputs.version }}
version: ${{ steps.get_version.outputs.version }}
prerelease: ${{ steps.get_version.outputs.prerelease }}
publish: true # ensures release is not marked as draft
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload release archive
id: upload-release-asset
uses: shogo82148/actions-upload-release-asset@v1
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
upload_url: ${{ steps.create-release.outputs.upload_url }}
asset_path: ./paperless-ngx.tar.xz
asset_name: paperless-ngx-${{ steps.get_version.outputs.version }}.tar.xz
asset_content_type: application/x-xz
append-changelog:
name: "Append Changelog"
runs-on: ubuntu-24.04
needs:
- publish-release
if: needs.publish-release.outputs.prerelease == 'false'
steps:
- name: Checkout
uses: actions/checkout@v5
with:
ref: main
- name: Set up Python
id: setup-python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v6
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Append Changelog to docs
id: append-Changelog
working-directory: docs
run: |
git branch ${{ needs.publish-release.outputs.version }}-changelog
git checkout ${{ needs.publish-release.outputs.version }}-changelog
echo -e "# Changelog\n\n${{ needs.publish-release.outputs.changelog }}\n" > changelog-new.md
echo "Manually linking usernames"
sed -i -r 's|@([a-zA-Z0-9_]+) \(\[#|[@\1](https://github.com/\1) ([#|g' changelog-new.md
echo "Removing unneeded comment tags"
sed -i -r 's|@<!---->|@|g' changelog-new.md
CURRENT_CHANGELOG=`tail --lines +2 changelog.md`
echo -e "$CURRENT_CHANGELOG" >> changelog-new.md
mv changelog-new.md changelog.md
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
--dev \
pre-commit run --files changelog.md || true
git config --global user.name "github-actions"
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git commit -am "Changelog ${{ needs.publish-release.outputs.version }} - GHA"
git push origin ${{ needs.publish-release.outputs.version }}-changelog
- name: Create Pull Request
uses: actions/github-script@v7
with:
script: |
const { repo, owner } = context.repo;
const result = await github.rest.pulls.create({
title: 'Documentation: Add ${{ needs.publish-release.outputs.version }} changelog',
owner,
repo,
head: '${{ needs.publish-release.outputs.version }}-changelog',
base: 'main',
body: 'This PR is auto-generated by CI.'
});
github.rest.issues.addLabels({
owner,
repo,
issue_number: result.data.number,
labels: ['documentation', 'skip-changelog']
});

.github/workflows/codecov-comment.yml (new file, 220 lines)

@@ -0,0 +1,220 @@
name: Codecov PR Comment
on:
workflow_run:
workflows:
- ci
types:
- completed
permissions:
contents: read
pull-requests: write
jobs:
comment:
if: >-
github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success'
runs-on: ubuntu-24.04
steps:
- name: Gather pull request context
id: pr
uses: actions/github-script@v7
with:
script: |
const run = context.payload.workflow_run;
if (!run.pull_requests || run.pull_requests.length === 0) {
core.info('No associated pull request. Skipping.');
return { shouldRun: false };
}
const pr = run.pull_requests[0];
return {
shouldRun: true,
prNumber: pr.number,
headSha: run.head_sha,
};
- name: Fetch Codecov coverage
id: coverage
if: steps.pr.outputs.shouldRun == 'true'
uses: actions/github-script@v7
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
COMMIT_SHA: ${{ steps.pr.outputs.headSha }}
with:
script: |
const token = process.env.CODECOV_TOKEN;
if (!token) {
core.warning('CODECOV_TOKEN secret is not available; skipping comment.');
core.setOutput('shouldComment', 'false');
return;
}
const commitSha = process.env.COMMIT_SHA;
const owner = context.repo.owner;
const repo = context.repo.repo;
const url = `https://codecov.io/api/v2/github/${owner}/repos/${repo}/commits/${commitSha}/report`;
const maxAttempts = 10;
const waitMs = 15000;
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
let data;
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
core.info(`Fetching Codecov report (attempt ${attempt}/${maxAttempts})`);
const response = await fetch(url, {
headers: {
Authorization: `Bearer ${token}`,
'Content-Type': 'application/json',
Accept: 'application/json',
},
});
if (response.status === 404) {
core.info('Report not ready yet (404). Waiting before retrying.');
await sleep(waitMs);
continue;
}
if (!response.ok) {
const text = await response.text();
throw new Error(`Codecov API returned ${response.status}: ${text}`);
}
data = await response.json();
if (data && Object.keys(data).length > 0) {
break;
}
core.info('Report payload empty. Waiting before retrying.');
await sleep(waitMs);
}
if (!data) {
core.warning('Unable to retrieve Codecov report after multiple attempts.');
core.setOutput('shouldComment', 'false');
return;
}
const totals = data.report?.totals ?? data.commit?.totals ?? data.totals;
if (!totals) {
core.warning('Codecov response does not contain coverage totals.');
core.setOutput('shouldComment', 'false');
return;
}
const compareTotals = data.report?.compare?.totals ?? data.compare?.totals;
const flagsRaw = data.report?.totals_by_flag ?? data.report?.components ?? [];
const toNumber = (value) => {
if (value === null || value === undefined || value === '') {
return undefined;
}
const num = Number(value);
return Number.isFinite(num) ? num : undefined;
};
const coverage = toNumber(totals.coverage);
const baseCoverage = toNumber(compareTotals?.base_coverage ?? compareTotals?.base);
const delta = toNumber(
compareTotals?.coverage_change ??
compareTotals?.coverage_diff ??
totals.delta ??
totals.diff ??
totals.change,
);
const formatPercent = (value) => {
if (value === undefined) return '—';
return `${value.toFixed(2)}%`;
};
const formatDelta = (value) => {
if (value === undefined) return '—';
const sign = value >= 0 ? '+' : '';
return `${sign}${value.toFixed(2)}%`;
};
const shortSha = commitSha.slice(0, 7);
const lines = [
'<!-- codecov-coverage-comment -->',
'**Codecov Coverage**',
'',
`- Head \`${shortSha}\`: ${formatPercent(coverage)}`,
];
if (baseCoverage !== undefined) {
lines.push(`- Base: ${formatPercent(baseCoverage)}`);
}
if (delta !== undefined) {
lines.push(`- Change: ${formatDelta(delta)}`);
}
const flagEntries = Array.isArray(flagsRaw)
? flagsRaw
: Object.entries(flagsRaw).map(([name, totals]) => ({ name, totals }));
const flagRows = [];
for (const entry of flagEntries) {
const label = entry.flag ?? entry.name ?? entry.component ?? entry.id;
const entryTotals = entry.totals ?? entry;
const entryCoverage = toNumber(entryTotals?.coverage);
const entryDelta = toNumber(
entryTotals?.coverage_change ??
entryTotals?.coverage_diff ??
entryTotals?.delta ??
entryTotals?.diff,
);
if (!label || entryCoverage === undefined) {
continue;
}
flagRows.push(`| ${label} | ${formatPercent(entryCoverage)} | ${formatDelta(entryDelta)} |`);
}
if (flagRows.length) {
lines.push('');
lines.push('| Flag | Coverage | Change |');
lines.push('| --- | --- | --- |');
lines.push(...flagRows);
}
const commentBody = lines.join('\n');
const shouldComment = coverage !== undefined;
core.setOutput('shouldComment', shouldComment ? 'true' : 'false');
if (shouldComment) {
core.setOutput('commentBody', commentBody);
}
- name: Upsert coverage comment
if: steps.pr.outputs.shouldRun == 'true' && steps.coverage.outputs.shouldComment == 'true'
uses: actions/github-script@v7
env:
PR_NUMBER: ${{ steps.pr.outputs.prNumber }}
COMMENT_BODY: ${{ steps.coverage.outputs.commentBody }}
with:
script: |
const prNumber = Number(process.env.PR_NUMBER);
const body = process.env.COMMENT_BODY;
const marker = '<!-- codecov-coverage-comment -->';
const { data: comments } = await github.rest.issues.listComments({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: prNumber,
per_page: 100,
});
const existing = comments.find((comment) => comment.body?.includes(marker));
if (existing) {
core.info(`Updating existing coverage comment (id: ${existing.id}).`);
await github.rest.issues.updateComment({
owner: context.repo.owner,
repo: context.repo.repo,
comment_id: existing.id,
body,
});
} else {
core.info('Creating new coverage comment.');
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: prNumber,
body,
});
}

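For reference, the report endpoint polled above can also be queried directly when debugging locally. A minimal sketch that mirrors the retry loop in the github-script step rather than defining any new API; it assumes the requests package is installed and that CODECOV_TOKEN and COMMIT_SHA are exported:

import os
import time

import requests

# Sketch only: poll the same Codecov v2 report endpoint the step above queries.
owner = repo = "paperless-ngx"
sha = os.environ["COMMIT_SHA"]
url = f"https://codecov.io/api/v2/github/{owner}/repos/{repo}/commits/{sha}/report"
headers = {
    "Authorization": f"Bearer {os.environ['CODECOV_TOKEN']}",
    "Accept": "application/json",
}

report = None
for _ in range(10):
    resp = requests.get(url, headers=headers, timeout=30)
    if resp.status_code == 404:  # report not processed yet; retry like the workflow does
        time.sleep(15)
        continue
    resp.raise_for_status()
    report = resp.json()
    if report:
        break

data = report or {}
totals = (data.get("report") or {}).get("totals") or data.get("totals") or {}
print("head coverage:", totals.get("coverage"))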

@@ -32,7 +32,7 @@ RUN set -eux \
# Purpose: Installs s6-overlay and rootfs
# Comments:
# - Don't leave anything extra in here either
FROM ghcr.io/astral-sh/uv:0.8.19-python3.12-bookworm-slim AS s6-overlay-base
FROM ghcr.io/astral-sh/uv:0.8.17-python3.12-bookworm-slim AS s6-overlay-base
WORKDIR /usr/src/s6


@@ -255,7 +255,6 @@ PAPERLESS_DISABLE_DBHANDLER = "true"
PAPERLESS_CACHE_BACKEND = "django.core.cache.backends.locmem.LocMemCache"
[tool.coverage.run]
relative_files = true
source = [
"src/",
]


@@ -1,24 +0,0 @@
sonar.projectKey=paperless-ngx_paperless-ngx
sonar.organization=paperless-ngx
sonar.projectName=Paperless-ngx
sonar.projectVersion=1.0
# Source and test directories
sonar.sources=src/,src-ui/
sonar.test.inclusions=**/test_*.py,**/tests.py,**/*.spec.ts,**/*.test.ts
# Language specific settings
sonar.python.version=3.10,3.11,3.12,3.13
# Coverage reports
sonar.python.coverage.reportPaths=merged-backend-coverage.xml
sonar.javascript.lcov.reportPaths=coverage/lcov.info
# Test execution reports
sonar.junit.reportPaths=**/junit.xml,**/test-results.xml
# Encoding
sonar.sourceEncoding=UTF-8
# Exclusions
sonar.exclusions=**/migrations/**,**/node_modules/**,**/static/**,**/venv/**,**/.venv/**,**/dist/**


@@ -164,6 +164,9 @@ class BarcodePlugin(ConsumeTaskPlugin):
mailrule_id=self.input_doc.mailrule_id,
# Can't use same folder or the consume might grab it again
original_file=(tmp_dir / new_document.name).resolve(),
# Adding optional original_path for later uses in
# workflow matching
original_path=self.input_doc.original_file,
),
# All the same metadata
self.metadata,


@@ -156,6 +156,7 @@ class ConsumableDocument:
source: DocumentSource
original_file: Path
original_path: Path | None = None
mailrule_id: int | None = None
mime_type: str = dataclasses.field(init=False, default=None)


@@ -92,6 +92,9 @@ class Command(MultiProcessMixin, ProgressBarMixin, BaseCommand):
# doc to doc is obviously not useful
if first_doc.pk == second_doc.pk:
continue
# Skip empty documents (e.g. password-protected)
if first_doc.content.strip() == "" or second_doc.content.strip() == "":
continue
# Skip matching which have already been matched together
# doc 1 to doc 2 is the same as doc 2 to doc 1
doc_1_to_doc_2 = (first_doc.pk, second_doc.pk)

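The new guard skips documents whose content is empty (for example password-protected PDFs with no extracted text). A small stdlib illustration of why empty content is degenerate for similarity scoring; difflib is used here only as a stand-in for the command's actual fuzzy matcher:

from difflib import SequenceMatcher

# Two empty strings are a "perfect" match, so without the guard every document
# lacking extracted text would pair with every other empty document.
print(SequenceMatcher(None, "", "").ratio())  # 1.0

a = "Invoice 2025-09 for services rendered"
b = "Completely unrelated shipping notice"
print(SequenceMatcher(None, a, b).ratio())  # well below 1.0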

@@ -314,11 +314,19 @@ def consumable_document_matches_workflow(
trigger_matched = False
# Document path vs trigger path
# Use the original_path if set, else use the original_file
match_against = (
document.original_path
if document.original_path is not None
else document.original_file
)
if (
trigger.filter_path is not None
and len(trigger.filter_path) > 0
and not fnmatch(
document.original_file,
match_against,
trigger.filter_path,
)
):

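Because the barcode splitter writes each part to a temporary directory and now records the upload's original location in original_path (see the BarcodePlugin hunk above), matching against that recorded path keeps consume-folder filters working. A minimal stdlib sketch with hypothetical paths:

from fnmatch import fnmatch

# Hypothetical paths: a trigger's filter_path, the upload's original location,
# and the temporary file produced by the barcode split.
filter_path = "*/consume/invoices/*"
original_path = "/usr/src/paperless/consume/invoices/scan.pdf"
split_file = "/tmp/paperless-split-abc123/scan_page_1.pdf"

print(fnmatch(split_file, filter_path))     # False: the temp file alone would not match
print(fnmatch(original_path, filter_path))  # True: the recorded original location still does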

@@ -614,14 +614,16 @@ class TestBarcodeNewConsume(
self.assertIsNotFile(temp_copy)
# Check the split files exist
# Check the original_path is set
# Check the source is unchanged
# Check the overrides are unchanged
for (
new_input_doc,
new_doc_overrides,
) in self.get_all_consume_delay_call_args():
self.assertEqual(new_input_doc.source, DocumentSource.ConsumeFolder)
self.assertIsFile(new_input_doc.original_file)
self.assertEqual(new_input_doc.original_path, temp_copy)
self.assertEqual(new_input_doc.source, DocumentSource.ConsumeFolder)
self.assertEqual(overrides, new_doc_overrides)


@@ -206,3 +206,29 @@ class TestFuzzyMatchCommand(TestCase):
self.assertEqual(Document.objects.count(), 2)
self.assertIsNotNone(Document.objects.get(pk=1))
self.assertIsNotNone(Document.objects.get(pk=2))
def test_empty_content(self):
"""
GIVEN:
- 2 documents exist, content is empty (pw-protected)
WHEN:
- Command is called
THEN:
- No matches are found
"""
Document.objects.create(
checksum="BEEFCAFE",
title="A",
content="",
mime_type="application/pdf",
filename="test.pdf",
)
Document.objects.create(
checksum="DEADBEAF",
title="A",
content="",
mime_type="application/pdf",
filename="other_test.pdf",
)
stdout, _ = self.call_command()
self.assertIn("No matches found", stdout)