Compare commits

..

7 Commits

Author SHA1 Message Date
shamoon 770fb2d60e Update build-and-release.yml 2025-09-24 15:22:15 -07:00
shamoon c8ef9e663a Yikes, try split ci workflow 2025-09-24 15:03:41 -07:00
shamoon 2195e4af45 Ok, lets try manual Codecov comments 2025-09-24 14:48:06 -07:00
shamoon c6716905a4 Revert "Chore: Enable SonarQube scanning (#10904)" (reverts 8d1f23e9d6) 2025-09-24 14:38:22 -07:00
shamoon 850ee5a415 Revert "Chore: remove conditional from pre-commit job in CI (#10916)" (reverts 53b393dab5) 2025-09-24 14:38:19 -07:00
shamoon b25b5abdb0 Revert "Development: try separating sonar scan" (reverts 68e0559053) 2025-09-24 14:38:13 -07:00
shamoon 68e0559053 Development: try separating sonar scan 2025-09-24 14:26:05 -07:00
5 changed files with 675 additions and 393 deletions

.github/workflows/build-and-release.yml (new file, 430 lines added)

@@ -0,0 +1,430 @@
name: 'Build and Release'
on:
workflow_run:
workflows:
- ci
types:
- completed
permissions:
contents: write
packages: write
pull-requests: write
env:
DEFAULT_UV_VERSION: "0.8.x"
DEFAULT_PYTHON_VERSION: "3.11"
NLTK_DATA: "/usr/share/nltk_data"
jobs:
prepare:
if: >-
github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.event == 'push'
name: Prepare build context
runs-on: ubuntu-24.04
outputs:
should-build: ${{ steps.determine.outputs.should-build }}
ref: ${{ steps.determine.outputs.ref }}
ref-name: ${{ steps.determine.outputs.ref-name }}
sha: ${{ steps.determine.outputs.sha }}
is-tag: ${{ steps.determine.outputs.is-tag }}
is-release-target: ${{ steps.determine.outputs.is-release-target }}
is-beta-rc: ${{ steps.determine.outputs.is-beta-rc }}
steps:
- name: Determine ref information
id: determine
uses: actions/github-script@v7
with:
script: |
const run = context.payload.workflow_run;
const owner = context.repo.owner;
const repo = context.repo.repo;
const sha = run.head_sha;
const branch = run.head_branch;
let ref = undefined;
let refName = undefined;
if (branch) {
ref = `refs/heads/${branch}`;
refName = branch;
} else {
const iterator = github.paginate.iterator(
github.rest.repos.listTags,
{
owner,
repo,
per_page: 100,
},
);
for await (const { data } of iterator) {
const match = data.find((tag) => tag.commit?.sha === sha);
if (match) {
ref = `refs/tags/${match.name}`;
refName = match.name;
break;
}
}
}
const outputs = {
shouldBuild: false,
ref: ref ?? '',
refName: refName ?? '',
sha,
isTag: ref?.startsWith('refs/tags/') ?? false,
isReleaseTarget: false,
isBetaRc: false,
};
if (!ref || !refName) {
core.info('No matching ref found for workflow run; skipping post-CI workflow.');
} else {
const allowed =
ref.startsWith('refs/heads/feature-') ||
ref.startsWith('refs/heads/fix-') ||
ref.startsWith('refs/heads/l10n_') ||
ref === 'refs/heads/dev' ||
ref === 'refs/heads/beta' ||
ref.includes('beta.rc') ||
ref.startsWith('refs/tags/v');
const isBetaRc = refName.includes('beta.rc');
const isReleaseTarget = outputs.isTag && (refName.startsWith('v') || isBetaRc);
outputs.shouldBuild = allowed;
outputs.isReleaseTarget = isReleaseTarget;
outputs.isBetaRc = isBetaRc;
}
core.setOutput('should-build', outputs.shouldBuild ? 'true' : 'false');
core.setOutput('ref', outputs.ref);
core.setOutput('ref-name', outputs.refName);
core.setOutput('sha', outputs.sha);
core.setOutput('is-tag', outputs.isTag ? 'true' : 'false');
core.setOutput('is-release-target', outputs.isReleaseTarget ? 'true' : 'false');
core.setOutput('is-beta-rc', outputs.isBetaRc ? 'true' : 'false');
build-docker-image:
needs: prepare
if: needs.prepare.outputs.should-build == 'true'
name: Build Docker image for ${{ needs.prepare.outputs.ref-name }}
runs-on: ubuntu-24.04
concurrency:
group: ${{ github.workflow }}-build-docker-image-${{ needs.prepare.outputs.ref-name || needs.prepare.outputs.sha }}
cancel-in-progress: true
env:
REF: ${{ needs.prepare.outputs.ref }}
REF_NAME: ${{ needs.prepare.outputs.ref-name }}
SHA: ${{ needs.prepare.outputs.sha }}
steps:
- name: Checkout
uses: actions/checkout@v5
with:
ref: ${{ env.SHA }}
- name: Check pushing to Docker Hub
id: push-other-places
env:
REPOSITORY_OWNER: ${{ github.repository_owner }}
REF_NAME: ${{ env.REF_NAME }}
REF: ${{ env.REF }}
run: |
if [[ "$REPOSITORY_OWNER" == "paperless-ngx" ]] && \
([[ "$REF_NAME" == "dev" ]] || [[ "$REF_NAME" == "beta" ]] || [[ "$REF" == refs/tags/v* ]]); then
echo "Enabling DockerHub image push"
echo "enable=true" >> "$GITHUB_OUTPUT"
else
echo "Not pushing to DockerHub"
echo "enable=false" >> "$GITHUB_OUTPUT"
fi
- name: Set ghcr repository name
id: set-ghcr-repository
run: |
ghcr_name=$(echo "${{ github.repository }}" | awk '{ print tolower($0) }')
echo "Name is ${ghcr_name}"
echo "ghcr-repository=${ghcr_name}" >> "$GITHUB_OUTPUT"
- name: Gather Docker metadata
id: docker-meta
uses: docker/metadata-action@v5
env:
GITHUB_REF: ${{ env.REF }}
GITHUB_REF_NAME: ${{ env.REF_NAME }}
GITHUB_SHA: ${{ env.SHA }}
with:
images: |
ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}
name=paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
name=quay.io/paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
tags: |
type=ref,event=branch
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
platforms: arm64
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Docker Hub
if: steps.push-other-places.outputs.enable == 'true'
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to Quay.io
if: steps.push-other-places.outputs.enable == 'true'
uses: docker/login-action@v3
with:
registry: quay.io
username: ${{ secrets.QUAY_USERNAME }}
password: ${{ secrets.QUAY_ROBOT_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v6
with:
context: .
file: ./Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.docker-meta.outputs.tags }}
labels: ${{ steps.docker-meta.outputs.labels }}
build-args: |
PNGX_TAG_VERSION=${{ steps.docker-meta.outputs.version }}
cache-from: |
type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ env.REF_NAME }}
type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:dev
cache-to: |
type=registry,mode=max,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ env.REF_NAME }}
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
- name: Export frontend artifact from docker
run: |
docker create --name frontend-extract ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
docker cp frontend-extract:/usr/src/paperless/src/documents/static/frontend src/documents/static/frontend/
- name: Upload frontend artifact
uses: actions/upload-artifact@v4
with:
name: frontend-compiled
path: src/documents/static/frontend/
retention-days: 7
build-release:
needs:
- prepare
- build-docker-image
if: needs.prepare.outputs.should-build == 'true'
name: Build release bundle
runs-on: ubuntu-24.04
env:
REF_NAME: ${{ needs.prepare.outputs.ref-name }}
SHA: ${{ needs.prepare.outputs.sha }}
CI_RUN_ID: ${{ github.event.workflow_run.id }}
steps:
- name: Checkout
uses: actions/checkout@v5
with:
ref: ${{ env.SHA }}
- name: Set up Python
id: setup-python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v6
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install Python dependencies
run: |
uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
- name: Install system dependencies
run: |
sudo apt-get update -qq
sudo apt-get install -qq --no-install-recommends gettext liblept5
- name: Download frontend artifact
uses: actions/download-artifact@v5
with:
name: frontend-compiled
path: src/documents/static/frontend/
- name: Download documentation artifact
uses: actions/download-artifact@v5
with:
name: documentation
path: docs/_build/html/
run-id: ${{ env.CI_RUN_ID }}
- name: Generate requirements file
run: |
uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt
- name: Compile messages
run: |
cd src/
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
manage.py compilemessages
- name: Collect static files
run: |
cd src/
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
manage.py collectstatic --no-input
- name: Move files
run: |
echo "Making dist folders"
for directory in dist \
dist/paperless-ngx \
dist/paperless-ngx/scripts;
do
mkdir --verbose --parents ${directory}
done
echo "Copying basic files"
for file_name in .dockerignore \
.env \
Dockerfile \
pyproject.toml \
uv.lock \
requirements.txt \
LICENSE \
README.md \
paperless.conf.example
do
cp --verbose ${file_name} dist/paperless-ngx/
done
mv --verbose dist/paperless-ngx/paperless.conf.example dist/paperless-ngx/paperless.conf
echo "Copying Docker related files"
cp --recursive docker/ dist/paperless-ngx/docker
echo "Copying startup scripts"
cp --verbose scripts/*.service scripts/*.sh scripts/*.socket dist/paperless-ngx/scripts/
echo "Copying source files"
cp --recursive src/ dist/paperless-ngx/src
echo "Copying documentation"
cp --recursive docs/_build/html/ dist/paperless-ngx/docs
mv --verbose static dist/paperless-ngx
- name: Make release package
run: |
echo "Creating release archive"
cd dist
sudo chown -R 1000:1000 paperless-ngx/
tar -cJf paperless-ngx.tar.xz paperless-ngx/
- name: Upload release artifact
uses: actions/upload-artifact@v4
with:
name: release
path: dist/paperless-ngx.tar.xz
retention-days: 7
publish-release:
needs:
- prepare
- build-release
if: needs.prepare.outputs.is-release-target == 'true'
name: Publish release
runs-on: ubuntu-24.04
outputs:
prerelease: ${{ steps.get_version.outputs.prerelease }}
changelog: ${{ steps.create-release.outputs.body }}
version: ${{ steps.get_version.outputs.version }}
steps:
- name: Download release artifact
uses: actions/download-artifact@v5
with:
name: release
path: ./
- name: Get version
id: get_version
run: |
echo "version=${{ needs.prepare.outputs.ref-name }}" >> "$GITHUB_OUTPUT"
if [[ ${{ needs.prepare.outputs.is-beta-rc }} == 'true' ]]; then
echo "prerelease=true" >> "$GITHUB_OUTPUT"
else
echo "prerelease=false" >> "$GITHUB_OUTPUT"
fi
- name: Create Release and Changelog
id: create-release
uses: release-drafter/release-drafter@v6
with:
name: Paperless-ngx ${{ steps.get_version.outputs.version }}
tag: ${{ steps.get_version.outputs.version }}
version: ${{ steps.get_version.outputs.version }}
prerelease: ${{ steps.get_version.outputs.prerelease }}
publish: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload release archive
id: upload-release-asset
uses: shogo82148/actions-upload-release-asset@v1
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
upload_url: ${{ steps.create-release.outputs.upload_url }}
asset_path: ./paperless-ngx.tar.xz
asset_name: paperless-ngx-${{ steps.get_version.outputs.version }}.tar.xz
asset_content_type: application/x-xz
append-changelog:
needs:
- publish-release
if: needs.publish-release.outputs.prerelease == 'false'
name: Append changelog to docs
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v5
with:
ref: main
- name: Set up Python
id: setup-python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v6
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Append Changelog to docs
id: append-Changelog
working-directory: docs
run: |
git branch ${{ needs.publish-release.outputs.version }}-changelog
git checkout ${{ needs.publish-release.outputs.version }}-changelog
echo -e "# Changelog\n\n${{ needs.publish-release.outputs.changelog }}\n" > changelog-new.md
echo "Manually linking usernames"
sed -i -r 's|@([a-zA-Z0-9_]+) \(\[#|[@\1](https://github.com/\1) ([#|g' changelog-new.md
echo "Removing unneeded comment tags"
sed -i -r 's|@<!---->|@|g' changelog-new.md
CURRENT_CHANGELOG=`tail --lines +2 changelog.md`
echo -e "$CURRENT_CHANGELOG" >> changelog-new.md
mv changelog-new.md changelog.md
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
--dev \
pre-commit run --files changelog.md || true
git config --global user.name "github-actions"
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git commit -am "Changelog ${{ needs.publish-release.outputs.version }} - GHA"
git push origin ${{ needs.publish-release.outputs.version }}-changelog
- name: Create Pull Request
uses: actions/github-script@v7
with:
script: |
const { repo, owner } = context.repo;
const result = await github.rest.pulls.create({
title: 'Documentation: Add ${{ needs.publish-release.outputs.version }} changelog',
owner,
repo,
head: '${{ needs.publish-release.outputs.version }}-changelog',
base: 'main',
body: 'This PR is auto-generated by CI.'
});
github.rest.issues.addLabels({
owner,
repo,
issue_number: result.data.number,
labels: ['documentation', 'skip-changelog']
});
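
For reference, the branch/tag gating in the prepare job above reduces to the predicate below. This is a standalone TypeScript sketch with hypothetical inputs, not part of the workflow; the real values come from the workflow_run payload.

// Standalone sketch of the ref gating used by the prepare job above (hypothetical inputs).
const shouldBuild = (ref: string): boolean =>
  ref.startsWith('refs/heads/feature-') ||
  ref.startsWith('refs/heads/fix-') ||
  ref.startsWith('refs/heads/l10n_') ||
  ref === 'refs/heads/dev' ||
  ref === 'refs/heads/beta' ||
  ref.includes('beta.rc') ||
  ref.startsWith('refs/tags/v');

const isReleaseTarget = (ref: string, refName: string): boolean =>
  ref.startsWith('refs/tags/') && (refName.startsWith('v') || refName.includes('beta.rc'));

console.log(shouldBuild('refs/heads/dev'));                   // true  -> build-docker-image runs
console.log(shouldBuild('refs/heads/some-branch'));           // false -> downstream jobs are skipped
console.log(isReleaseTarget('refs/tags/v2.18.0', 'v2.18.0')); // true  -> publish-release runs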


@@ -17,52 +17,11 @@ env:
DEFAULT_PYTHON_VERSION: "3.11"
NLTK_DATA: "/usr/share/nltk_data"
jobs:
detect-duplicate:
name: Detect Duplicate Run
runs-on: ubuntu-24.04
outputs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- name: Check if workflow should run
id: check
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
if (context.eventName !== 'push') {
core.info('Not a push event; running workflow.');
core.setOutput('should_run', 'true');
return;
}
const ref = context.ref || '';
if (!ref.startsWith('refs/heads/')) {
core.info('Push is not to a branch; running workflow.');
core.setOutput('should_run', 'true');
return;
}
const branch = ref.substring('refs/heads/'.length);
const { owner, repo } = context.repo;
const prs = await github.paginate(github.rest.pulls.list, {
owner,
repo,
state: 'open',
head: `${owner}:${branch}`,
per_page: 100,
});
if (prs.length === 0) {
core.info(`No open PR found for ${branch}; running workflow.`);
core.setOutput('should_run', 'true');
} else {
core.info(`Found ${prs.length} open PR(s) for ${branch}; skipping duplicate push run.`);
core.setOutput('should_run', 'false');
}
pre-commit:
needs:
- detect-duplicate
if: needs.detect-duplicate.outputs.should_run == 'true'
# We want to run on external PRs, but not on our own internal PRs as they'll be run
# by the push to the branch. Without this if check, checks are duplicated since
# internal PRs match both the push and pull_request events.
if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
name: Linting Checks
runs-on: ubuntu-24.04
steps:
@@ -354,324 +313,3 @@ jobs:
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
run: cd src-ui && pnpm run build --configuration=production
build-docker-image:
name: Build Docker image for ${{ github.ref_name }}
runs-on: ubuntu-24.04
if: github.event_name == 'push' && (startsWith(github.ref, 'refs/heads/feature-') || startsWith(github.ref, 'refs/heads/fix-') || github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/beta' || contains(github.ref, 'beta.rc') || startsWith(github.ref, 'refs/tags/v') || startsWith(github.ref, 'refs/heads/l10n_'))
concurrency:
group: ${{ github.workflow }}-build-docker-image-${{ github.ref_name }}
cancel-in-progress: true
needs:
- tests-backend
- tests-frontend
- tests-frontend-e2e
steps:
- name: Check pushing to Docker Hub
id: push-other-places
# Only push to Dockerhub from the main repo AND the ref is either:
# main
# dev
# beta
# a tag
# Otherwise forks would require a Docker Hub account and secrets setup
run: |
if [[ ${{ github.repository_owner }} == "paperless-ngx" && ( ${{ github.ref_name }} == "dev" || ${{ github.ref_name }} == "beta" || ${{ startsWith(github.ref, 'refs/tags/v') }} == "true" ) ]] ; then
echo "Enabling DockerHub image push"
echo "enable=true" >> $GITHUB_OUTPUT
else
echo "Not pushing to DockerHub"
echo "enable=false" >> $GITHUB_OUTPUT
fi
- name: Set ghcr repository name
id: set-ghcr-repository
run: |
ghcr_name=$(echo "${{ github.repository }}" | awk '{ print tolower($0) }')
echo "Name is ${ghcr_name}"
echo "ghcr-repository=${ghcr_name}" >> $GITHUB_OUTPUT
- name: Gather Docker metadata
id: docker-meta
uses: docker/metadata-action@v5
with:
images: |
ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}
name=paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
name=quay.io/paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
tags: |
# Tag branches with branch name
type=ref,event=branch
# Process semver tags
# For a tag x.y.z or vX.Y.Z, output an x.y.z and x.y image tag
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
- name: Checkout
uses: actions/checkout@v5
# If https://github.com/docker/buildx/issues/1044 is resolved,
# the append input with a native arm64 arch could be used to
# significantly speed up building
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
platforms: arm64
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Docker Hub
uses: docker/login-action@v3
# Don't attempt to login if not pushing to Docker Hub
if: steps.push-other-places.outputs.enable == 'true'
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to Quay.io
uses: docker/login-action@v3
# Don't attempt to login if not pushing to Quay.io
if: steps.push-other-places.outputs.enable == 'true'
with:
registry: quay.io
username: ${{ secrets.QUAY_USERNAME }}
password: ${{ secrets.QUAY_ROBOT_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v6
with:
context: .
file: ./Dockerfile
platforms: linux/amd64,linux/arm64
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.docker-meta.outputs.tags }}
labels: ${{ steps.docker-meta.outputs.labels }}
build-args: |
PNGX_TAG_VERSION=${{ steps.docker-meta.outputs.version }}
# Get cache layers from this branch, then dev
# This allows new branches to get at least some cache benefits, generally from dev
cache-from: |
type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:dev
cache-to: |
type=registry,mode=max,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
- name: Export frontend artifact from docker
run: |
docker create --name frontend-extract ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
docker cp frontend-extract:/usr/src/paperless/src/documents/static/frontend src/documents/static/frontend/
- name: Upload frontend artifact
uses: actions/upload-artifact@v4
with:
name: frontend-compiled
path: src/documents/static/frontend/
retention-days: 7
build-release:
name: "Build Release"
needs:
- build-docker-image
- documentation
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v5
- name: Set up Python
id: setup-python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v6
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
python-version: ${{ steps.setup-python.outputs.python-version }}
- name: Install Python dependencies
run: |
uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
- name: Install system dependencies
run: |
sudo apt-get update -qq
sudo apt-get install -qq --no-install-recommends gettext liblept5
- name: Download frontend artifact
uses: actions/download-artifact@v5
with:
name: frontend-compiled
path: src/documents/static/frontend/
- name: Download documentation artifact
uses: actions/download-artifact@v5
with:
name: documentation
path: docs/_build/html/
- name: Generate requirements file
run: |
uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt
- name: Compile messages
run: |
cd src/
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
manage.py compilemessages
- name: Collect static files
run: |
cd src/
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
manage.py collectstatic --no-input
- name: Move files
run: |
echo "Making dist folders"
for directory in dist \
dist/paperless-ngx \
dist/paperless-ngx/scripts;
do
mkdir --verbose --parents ${directory}
done
echo "Copying basic files"
for file_name in .dockerignore \
.env \
Dockerfile \
pyproject.toml \
uv.lock \
requirements.txt \
LICENSE \
README.md \
paperless.conf.example
do
cp --verbose ${file_name} dist/paperless-ngx/
done
mv --verbose dist/paperless-ngx/paperless.conf.example dist/paperless-ngx/paperless.conf
echo "Copying Docker related files"
cp --recursive docker/ dist/paperless-ngx/docker
echo "Copying startup scripts"
cp --verbose scripts/*.service scripts/*.sh scripts/*.socket dist/paperless-ngx/scripts/
echo "Copying source files"
cp --recursive src/ dist/paperless-ngx/src
echo "Copying documentation"
cp --recursive docs/_build/html/ dist/paperless-ngx/docs
mv --verbose static dist/paperless-ngx
- name: Make release package
run: |
echo "Creating release archive"
cd dist
sudo chown -R 1000:1000 paperless-ngx/
tar -cJf paperless-ngx.tar.xz paperless-ngx/
- name: Upload release artifact
uses: actions/upload-artifact@v4
with:
name: release
path: dist/paperless-ngx.tar.xz
retention-days: 7
publish-release:
name: "Publish Release"
runs-on: ubuntu-24.04
outputs:
prerelease: ${{ steps.get_version.outputs.prerelease }}
changelog: ${{ steps.create-release.outputs.body }}
version: ${{ steps.get_version.outputs.version }}
needs:
- build-release
if: github.ref_type == 'tag' && (startsWith(github.ref_name, 'v') || contains(github.ref_name, '-beta.rc'))
steps:
- name: Download release artifact
uses: actions/download-artifact@v5
with:
name: release
path: ./
- name: Get version
id: get_version
run: |
echo "version=${{ github.ref_name }}" >> $GITHUB_OUTPUT
if [[ ${{ contains(github.ref_name, '-beta.rc') }} == 'true' ]]; then
echo "prerelease=true" >> $GITHUB_OUTPUT
else
echo "prerelease=false" >> $GITHUB_OUTPUT
fi
- name: Create Release and Changelog
id: create-release
uses: release-drafter/release-drafter@v6
with:
name: Paperless-ngx ${{ steps.get_version.outputs.version }}
tag: ${{ steps.get_version.outputs.version }}
version: ${{ steps.get_version.outputs.version }}
prerelease: ${{ steps.get_version.outputs.prerelease }}
publish: true # ensures release is not marked as draft
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload release archive
id: upload-release-asset
uses: shogo82148/actions-upload-release-asset@v1
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
upload_url: ${{ steps.create-release.outputs.upload_url }}
asset_path: ./paperless-ngx.tar.xz
asset_name: paperless-ngx-${{ steps.get_version.outputs.version }}.tar.xz
asset_content_type: application/x-xz
append-changelog:
name: "Append Changelog"
runs-on: ubuntu-24.04
needs:
- publish-release
if: needs.publish-release.outputs.prerelease == 'false'
steps:
- name: Checkout
uses: actions/checkout@v5
with:
ref: main
- name: Set up Python
id: setup-python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v6
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Append Changelog to docs
id: append-Changelog
working-directory: docs
run: |
git branch ${{ needs.publish-release.outputs.version }}-changelog
git checkout ${{ needs.publish-release.outputs.version }}-changelog
echo -e "# Changelog\n\n${{ needs.publish-release.outputs.changelog }}\n" > changelog-new.md
echo "Manually linking usernames"
sed -i -r 's|@([a-zA-Z0-9_]+) \(\[#|[@\1](https://github.com/\1) ([#|g' changelog-new.md
echo "Removing unneeded comment tags"
sed -i -r 's|@<!---->|@|g' changelog-new.md
CURRENT_CHANGELOG=`tail --lines +2 changelog.md`
echo -e "$CURRENT_CHANGELOG" >> changelog-new.md
mv changelog-new.md changelog.md
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
--dev \
pre-commit run --files changelog.md || true
git config --global user.name "github-actions"
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git commit -am "Changelog ${{ needs.publish-release.outputs.version }} - GHA"
git push origin ${{ needs.publish-release.outputs.version }}-changelog
- name: Create Pull Request
uses: actions/github-script@v7
with:
script: |
const { repo, owner } = context.repo;
const result = await github.rest.pulls.create({
title: 'Documentation: Add ${{ needs.publish-release.outputs.version }} changelog',
owner,
repo,
head: '${{ needs.publish-release.outputs.version }}-changelog',
base: 'main',
body: 'This PR is auto-generated by CI.'
});
github.rest.issues.addLabels({
owner,
repo,
issue_number: result.data.number,
labels: ['documentation', 'skip-changelog']
});
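
The username-linking sed call in the Append Changelog step above rewrites plain @username mentions (when followed by a PR link) into Markdown profile links. A standalone TypeScript sketch of the same replacement, run on a made-up changelog line:

// Mirrors: sed -i -r 's|@([a-zA-Z0-9_]+) \(\[#|[@\1](https://github.com/\1) ([#|g'
// The input line below is hypothetical, purely for illustration.
const linkUsernames = (line: string): string =>
  line.replace(/@([a-zA-Z0-9_]+) \(\[#/g, '[@$1](https://github.com/$1) ([#');

console.log(linkUsernames('- Fix: something @shamoon ([#10904](https://github.com/paperless-ngx/paperless-ngx/pull/10904))'));
// -> '- Fix: something [@shamoon](https://github.com/shamoon) ([#10904](https://github.com/paperless-ngx/paperless-ngx/pull/10904))'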

.github/workflows/codecov-comment.yml (new file, 220 lines added)

@@ -0,0 +1,220 @@
name: Codecov PR Comment
on:
workflow_run:
workflows:
- ci
types:
- completed
permissions:
contents: read
pull-requests: write
jobs:
comment:
if: >-
github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success'
runs-on: ubuntu-24.04
steps:
- name: Gather pull request context
id: pr
uses: actions/github-script@v7
with:
script: |
const run = context.payload.workflow_run;
if (!run.pull_requests || run.pull_requests.length === 0) {
core.info('No associated pull request. Skipping.');
return { shouldRun: false };
}
const pr = run.pull_requests[0];
return {
shouldRun: true,
prNumber: pr.number,
headSha: run.head_sha,
};
- name: Fetch Codecov coverage
id: coverage
if: steps.pr.outputs.shouldRun == 'true'
uses: actions/github-script@v7
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
COMMIT_SHA: ${{ steps.pr.outputs.headSha }}
with:
script: |
const token = process.env.CODECOV_TOKEN;
if (!token) {
core.warning('CODECOV_TOKEN secret is not available; skipping comment.');
core.setOutput('shouldComment', 'false');
return;
}
const commitSha = process.env.COMMIT_SHA;
const owner = context.repo.owner;
const repo = context.repo.repo;
const url = `https://codecov.io/api/v2/github/${owner}/repos/${repo}/commits/${commitSha}/report`;
const maxAttempts = 10;
const waitMs = 15000;
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
let data;
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
core.info(`Fetching Codecov report (attempt ${attempt}/${maxAttempts})`);
const response = await fetch(url, {
headers: {
Authorization: `Bearer ${token}`,
'Content-Type': 'application/json',
Accept: 'application/json',
},
});
if (response.status === 404) {
core.info('Report not ready yet (404). Waiting before retrying.');
await sleep(waitMs);
continue;
}
if (!response.ok) {
const text = await response.text();
throw new Error(`Codecov API returned ${response.status}: ${text}`);
}
data = await response.json();
if (data && Object.keys(data).length > 0) {
break;
}
core.info('Report payload empty. Waiting before retrying.');
await sleep(waitMs);
}
if (!data) {
core.warning('Unable to retrieve Codecov report after multiple attempts.');
core.setOutput('shouldComment', 'false');
return;
}
const totals = data.report?.totals ?? data.commit?.totals ?? data.totals;
if (!totals) {
core.warning('Codecov response does not contain coverage totals.');
core.setOutput('shouldComment', 'false');
return;
}
const compareTotals = data.report?.compare?.totals ?? data.compare?.totals;
const flagsRaw = data.report?.totals_by_flag ?? data.report?.components ?? [];
const toNumber = (value) => {
if (value === null || value === undefined || value === '') {
return undefined;
}
const num = Number(value);
return Number.isFinite(num) ? num : undefined;
};
const coverage = toNumber(totals.coverage);
const baseCoverage = toNumber(compareTotals?.base_coverage ?? compareTotals?.base);
const delta = toNumber(
compareTotals?.coverage_change ??
compareTotals?.coverage_diff ??
totals.delta ??
totals.diff ??
totals.change,
);
const formatPercent = (value) => {
if (value === undefined) return '—';
return `${value.toFixed(2)}%`;
};
const formatDelta = (value) => {
if (value === undefined) return '—';
const sign = value >= 0 ? '+' : '';
return `${sign}${value.toFixed(2)}%`;
};
const shortSha = commitSha.slice(0, 7);
const lines = [
'<!-- codecov-coverage-comment -->',
'**Codecov Coverage**',
'',
`- Head \`${shortSha}\`: ${formatPercent(coverage)}`,
];
if (baseCoverage !== undefined) {
lines.push(`- Base: ${formatPercent(baseCoverage)}`);
}
if (delta !== undefined) {
lines.push(`- Change: ${formatDelta(delta)}`);
}
const flagEntries = Array.isArray(flagsRaw)
? flagsRaw
: Object.entries(flagsRaw).map(([name, totals]) => ({ name, totals }));
const flagRows = [];
for (const entry of flagEntries) {
const label = entry.flag ?? entry.name ?? entry.component ?? entry.id;
const entryTotals = entry.totals ?? entry;
const entryCoverage = toNumber(entryTotals?.coverage);
const entryDelta = toNumber(
entryTotals?.coverage_change ??
entryTotals?.coverage_diff ??
entryTotals?.delta ??
entryTotals?.diff,
);
if (!label || entryCoverage === undefined) {
continue;
}
flagRows.push(`| ${label} | ${formatPercent(entryCoverage)} | ${formatDelta(entryDelta)} |`);
}
if (flagRows.length) {
lines.push('');
lines.push('| Flag | Coverage | Change |');
lines.push('| --- | --- | --- |');
lines.push(...flagRows);
}
const commentBody = lines.join('\n');
const shouldComment = coverage !== undefined;
core.setOutput('shouldComment', shouldComment ? 'true' : 'false');
if (shouldComment) {
core.setOutput('commentBody', commentBody);
}
- name: Upsert coverage comment
if: steps.pr.outputs.shouldRun == 'true' && steps.coverage.outputs.shouldComment == 'true'
uses: actions/github-script@v7
env:
PR_NUMBER: ${{ steps.pr.outputs.prNumber }}
COMMENT_BODY: ${{ steps.coverage.outputs.commentBody }}
with:
script: |
const prNumber = Number(process.env.PR_NUMBER);
const body = process.env.COMMENT_BODY;
const marker = '<!-- codecov-coverage-comment -->';
const { data: comments } = await github.rest.issues.listComments({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: prNumber,
per_page: 100,
});
const existing = comments.find((comment) => comment.body?.includes(marker));
if (existing) {
core.info(`Updating existing coverage comment (id: ${existing.id}).`);
await github.rest.issues.updateComment({
owner: context.repo.owner,
repo: context.repo.repo,
comment_id: existing.id,
body,
});
} else {
core.info('Creating new coverage comment.');
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: prNumber,
body,
});
}
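
With hypothetical coverage numbers, the comment assembled by the steps above renders roughly as follows. This TypeScript sketch only prints an example body; every figure and flag name is made up for illustration.

// Hypothetical example of the Codecov comment body built by the workflow above.
const exampleBody = [
  '<!-- codecov-coverage-comment -->',
  '**Codecov Coverage**',
  '',
  '- Head `770fb2d`: 87.45%',
  '- Base: 87.12%',
  '- Change: +0.33%',
  '',
  '| Flag | Coverage | Change |',
  '| --- | --- | --- |',
  '| backend | 89.10% | +0.41% |',
  '| frontend | 84.02% | -0.05% |',
].join('\n');

console.log(exampleBody);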


@@ -177,16 +177,10 @@ export class CustomFieldEditDialogComponent
}
public removeSelectOption(index: number) {
const globalIndex =
index + (this.selectOptionsPage - 1) * SELECT_OPTION_PAGE_SIZE
this._allSelectOptions.splice(globalIndex, 1)
const totalPages = Math.max(
1,
Math.ceil(this._allSelectOptions.length / SELECT_OPTION_PAGE_SIZE)
this.selectOptions.removeAt(index)
this._allSelectOptions.splice(
index + (this.selectOptionsPage - 1) * SELECT_OPTION_PAGE_SIZE,
1
)
const targetPage = Math.min(this.selectOptionsPage, totalPages)
this.selectOptionsPage = targetPage
}
}
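
The arithmetic in removeSelectOption above maps a row index on the currently displayed page back to its position in the full options array. A standalone TypeScript sketch; the page size of 100 is assumed purely for illustration:

// Paged-index math as used by removeSelectOption above (assumed page size).
const SELECT_OPTION_PAGE_SIZE = 100;

const toGlobalIndex = (indexOnPage: number, page: number): number =>
  indexOnPage + (page - 1) * SELECT_OPTION_PAGE_SIZE;

console.log(toGlobalIndex(4, 1)); // 4   -> fifth option overall
console.log(toGlobalIndex(4, 3)); // 204 -> fifth option counted from the start of page 3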

uv.lock (generated file, 34 lines changed)

@@ -3291,25 +3291,25 @@ wheels = [
[[package]]
name = "ruff"
version = "0.13.1"
version = "0.13.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ab/33/c8e89216845615d14d2d42ba2bee404e7206a8db782f33400754f3799f05/ruff-0.13.1.tar.gz", hash = "sha256:88074c3849087f153d4bb22e92243ad4c1b366d7055f98726bc19aa08dc12d51", size = 5397987, upload-time = "2025-09-18T19:52:44.33Z" }
sdist = { url = "https://files.pythonhosted.org/packages/6e/1a/1f4b722862840295bcaba8c9e5261572347509548faaa99b2d57ee7bfe6a/ruff-0.13.0.tar.gz", hash = "sha256:5b4b1ee7eb35afae128ab94459b13b2baaed282b1fb0f472a73c82c996c8ae60", size = 5372863, upload-time = "2025-09-10T16:25:37.917Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f3/41/ca37e340938f45cfb8557a97a5c347e718ef34702546b174e5300dbb1f28/ruff-0.13.1-py3-none-linux_armv6l.whl", hash = "sha256:b2abff595cc3cbfa55e509d89439b5a09a6ee3c252d92020bd2de240836cf45b", size = 12304308, upload-time = "2025-09-18T19:51:56.253Z" },
{ url = "https://files.pythonhosted.org/packages/ff/84/ba378ef4129415066c3e1c80d84e539a0d52feb250685091f874804f28af/ruff-0.13.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:4ee9f4249bf7f8bb3984c41bfaf6a658162cdb1b22e3103eabc7dd1dc5579334", size = 12937258, upload-time = "2025-09-18T19:52:00.184Z" },
{ url = "https://files.pythonhosted.org/packages/8d/b6/ec5e4559ae0ad955515c176910d6d7c93edcbc0ed1a3195a41179c58431d/ruff-0.13.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5c5da4af5f6418c07d75e6f3224e08147441f5d1eac2e6ce10dcce5e616a3bae", size = 12214554, upload-time = "2025-09-18T19:52:02.753Z" },
{ url = "https://files.pythonhosted.org/packages/70/d6/cb3e3b4f03b9b0c4d4d8f06126d34b3394f6b4d764912fe80a1300696ef6/ruff-0.13.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80524f84a01355a59a93cef98d804e2137639823bcee2931f5028e71134a954e", size = 12448181, upload-time = "2025-09-18T19:52:05.279Z" },
{ url = "https://files.pythonhosted.org/packages/d2/ea/bf60cb46d7ade706a246cd3fb99e4cfe854efa3dfbe530d049c684da24ff/ruff-0.13.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff7f5ce8d7988767dd46a148192a14d0f48d1baea733f055d9064875c7d50389", size = 12104599, upload-time = "2025-09-18T19:52:07.497Z" },
{ url = "https://files.pythonhosted.org/packages/2d/3e/05f72f4c3d3a69e65d55a13e1dd1ade76c106d8546e7e54501d31f1dc54a/ruff-0.13.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c55d84715061f8b05469cdc9a446aa6c7294cd4bd55e86a89e572dba14374f8c", size = 13791178, upload-time = "2025-09-18T19:52:10.189Z" },
{ url = "https://files.pythonhosted.org/packages/81/e7/01b1fc403dd45d6cfe600725270ecc6a8f8a48a55bc6521ad820ed3ceaf8/ruff-0.13.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ac57fed932d90fa1624c946dc67a0a3388d65a7edc7d2d8e4ca7bddaa789b3b0", size = 14814474, upload-time = "2025-09-18T19:52:12.866Z" },
{ url = "https://files.pythonhosted.org/packages/fa/92/d9e183d4ed6185a8df2ce9faa3f22e80e95b5f88d9cc3d86a6d94331da3f/ruff-0.13.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c366a71d5b4f41f86a008694f7a0d75fe409ec298685ff72dc882f882d532e36", size = 14217531, upload-time = "2025-09-18T19:52:15.245Z" },
{ url = "https://files.pythonhosted.org/packages/3b/4a/6ddb1b11d60888be224d721e01bdd2d81faaf1720592858ab8bac3600466/ruff-0.13.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4ea9d1b5ad3e7a83ee8ebb1229c33e5fe771e833d6d3dcfca7b77d95b060d38", size = 13265267, upload-time = "2025-09-18T19:52:17.649Z" },
{ url = "https://files.pythonhosted.org/packages/81/98/3f1d18a8d9ea33ef2ad508f0417fcb182c99b23258ec5e53d15db8289809/ruff-0.13.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0f70202996055b555d3d74b626406476cc692f37b13bac8828acff058c9966a", size = 13243120, upload-time = "2025-09-18T19:52:20.332Z" },
{ url = "https://files.pythonhosted.org/packages/8d/86/b6ce62ce9c12765fa6c65078d1938d2490b2b1d9273d0de384952b43c490/ruff-0.13.1-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:f8cff7a105dad631085d9505b491db33848007d6b487c3c1979dd8d9b2963783", size = 13443084, upload-time = "2025-09-18T19:52:23.032Z" },
{ url = "https://files.pythonhosted.org/packages/a1/6e/af7943466a41338d04503fb5a81b2fd07251bd272f546622e5b1599a7976/ruff-0.13.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:9761e84255443316a258dd7dfbd9bfb59c756e52237ed42494917b2577697c6a", size = 12295105, upload-time = "2025-09-18T19:52:25.263Z" },
{ url = "https://files.pythonhosted.org/packages/3f/97/0249b9a24f0f3ebd12f007e81c87cec6d311de566885e9309fcbac5b24cc/ruff-0.13.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:3d376a88c3102ef228b102211ef4a6d13df330cb0f5ca56fdac04ccec2a99700", size = 12072284, upload-time = "2025-09-18T19:52:27.478Z" },
{ url = "https://files.pythonhosted.org/packages/f6/85/0b64693b2c99d62ae65236ef74508ba39c3febd01466ef7f354885e5050c/ruff-0.13.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:cbefd60082b517a82c6ec8836989775ac05f8991715d228b3c1d86ccc7df7dae", size = 12970314, upload-time = "2025-09-18T19:52:30.212Z" },
{ url = "https://files.pythonhosted.org/packages/96/fc/342e9f28179915d28b3747b7654f932ca472afbf7090fc0c4011e802f494/ruff-0.13.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:dd16b9a5a499fe73f3c2ef09a7885cb1d97058614d601809d37c422ed1525317", size = 13422360, upload-time = "2025-09-18T19:52:32.676Z" },
{ url = "https://files.pythonhosted.org/packages/ac/fe/6f87b419dbe166fd30a991390221f14c5b68946f389ea07913e1719741e0/ruff-0.13.0-py3-none-linux_armv6l.whl", hash = "sha256:137f3d65d58ee828ae136a12d1dc33d992773d8f7644bc6b82714570f31b2004", size = 12187826, upload-time = "2025-09-10T16:24:39.5Z" },
{ url = "https://files.pythonhosted.org/packages/e4/25/c92296b1fc36d2499e12b74a3fdb230f77af7bdf048fad7b0a62e94ed56a/ruff-0.13.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:21ae48151b66e71fd111b7d79f9ad358814ed58c339631450c66a4be33cc28b9", size = 12933428, upload-time = "2025-09-10T16:24:43.866Z" },
{ url = "https://files.pythonhosted.org/packages/44/cf/40bc7221a949470307d9c35b4ef5810c294e6cfa3caafb57d882731a9f42/ruff-0.13.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:64de45f4ca5441209e41742d527944635a05a6e7c05798904f39c85bafa819e3", size = 12095543, upload-time = "2025-09-10T16:24:46.638Z" },
{ url = "https://files.pythonhosted.org/packages/f1/03/8b5ff2a211efb68c63a1d03d157e924997ada87d01bebffbd13a0f3fcdeb/ruff-0.13.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b2c653ae9b9d46e0ef62fc6fbf5b979bda20a0b1d2b22f8f7eb0cde9f4963b8", size = 12312489, upload-time = "2025-09-10T16:24:49.556Z" },
{ url = "https://files.pythonhosted.org/packages/37/fc/2336ef6d5e9c8d8ea8305c5f91e767d795cd4fc171a6d97ef38a5302dadc/ruff-0.13.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cec632534332062bc9eb5884a267b689085a1afea9801bf94e3ba7498a2d207", size = 11991631, upload-time = "2025-09-10T16:24:53.439Z" },
{ url = "https://files.pythonhosted.org/packages/39/7f/f6d574d100fca83d32637d7f5541bea2f5e473c40020bbc7fc4a4d5b7294/ruff-0.13.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcd628101d9f7d122e120ac7c17e0a0f468b19bc925501dbe03c1cb7f5415b24", size = 13720602, upload-time = "2025-09-10T16:24:56.392Z" },
{ url = "https://files.pythonhosted.org/packages/fd/c8/a8a5b81d8729b5d1f663348d11e2a9d65a7a9bd3c399763b1a51c72be1ce/ruff-0.13.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:afe37db8e1466acb173bb2a39ca92df00570e0fd7c94c72d87b51b21bb63efea", size = 14697751, upload-time = "2025-09-10T16:24:59.89Z" },
{ url = "https://files.pythonhosted.org/packages/57/f5/183ec292272ce7ec5e882aea74937f7288e88ecb500198b832c24debc6d3/ruff-0.13.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f96a8d90bb258d7d3358b372905fe7333aaacf6c39e2408b9f8ba181f4b6ef2", size = 14095317, upload-time = "2025-09-10T16:25:03.025Z" },
{ url = "https://files.pythonhosted.org/packages/9f/8d/7f9771c971724701af7926c14dab31754e7b303d127b0d3f01116faef456/ruff-0.13.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b5e3d883e4f924c5298e3f2ee0f3085819c14f68d1e5b6715597681433f153", size = 13144418, upload-time = "2025-09-10T16:25:06.272Z" },
{ url = "https://files.pythonhosted.org/packages/a8/a6/7985ad1778e60922d4bef546688cd8a25822c58873e9ff30189cfe5dc4ab/ruff-0.13.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03447f3d18479df3d24917a92d768a89f873a7181a064858ea90a804a7538991", size = 13370843, upload-time = "2025-09-10T16:25:09.965Z" },
{ url = "https://files.pythonhosted.org/packages/64/1c/bafdd5a7a05a50cc51d9f5711da704942d8dd62df3d8c70c311e98ce9f8a/ruff-0.13.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:fbc6b1934eb1c0033da427c805e27d164bb713f8e273a024a7e86176d7f462cf", size = 13321891, upload-time = "2025-09-10T16:25:12.969Z" },
{ url = "https://files.pythonhosted.org/packages/bc/3e/7817f989cb9725ef7e8d2cee74186bf90555279e119de50c750c4b7a72fe/ruff-0.13.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a8ab6a3e03665d39d4a25ee199d207a488724f022db0e1fe4002968abdb8001b", size = 12119119, upload-time = "2025-09-10T16:25:16.621Z" },
{ url = "https://files.pythonhosted.org/packages/58/07/9df080742e8d1080e60c426dce6e96a8faf9a371e2ce22eef662e3839c95/ruff-0.13.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d2a5c62f8ccc6dd2fe259917482de7275cecc86141ee10432727c4816235bc41", size = 11961594, upload-time = "2025-09-10T16:25:19.49Z" },
{ url = "https://files.pythonhosted.org/packages/6a/f4/ae1185349197d26a2316840cb4d6c3fba61d4ac36ed728bf0228b222d71f/ruff-0.13.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b7b85ca27aeeb1ab421bc787009831cffe6048faae08ad80867edab9f2760945", size = 12933377, upload-time = "2025-09-10T16:25:22.371Z" },
{ url = "https://files.pythonhosted.org/packages/b6/39/e776c10a3b349fc8209a905bfb327831d7516f6058339a613a8d2aaecacd/ruff-0.13.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:79ea0c44a3032af768cabfd9616e44c24303af49d633b43e3a5096e009ebe823", size = 13418555, upload-time = "2025-09-10T16:25:25.681Z" },
]
[[package]]