Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2025-10-26 03:36:08 -05:00)

Compare commits: 9b05f16a0f...fix-chore-

7 Commits:
- 770fb2d60e
- c8ef9e663a
- 2195e4af45
- c6716905a4
- 850ee5a415
- b25b5abdb0
- 68e0559053
.github/workflows/build-and-release.yml (vendored, new file, 430 lines)
@@ -0,0 +1,430 @@
name: 'Build and Release'
on:
  workflow_run:
    workflows:
      - ci
    types:
      - completed
permissions:
  contents: write
  packages: write
  pull-requests: write
env:
  DEFAULT_UV_VERSION: "0.8.x"
  DEFAULT_PYTHON_VERSION: "3.11"
  NLTK_DATA: "/usr/share/nltk_data"
jobs:
  prepare:
    if: >-
      github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.event == 'push'
    name: Prepare build context
    runs-on: ubuntu-24.04
    outputs:
      should-build: ${{ steps.determine.outputs.should-build }}
      ref: ${{ steps.determine.outputs.ref }}
      ref-name: ${{ steps.determine.outputs.ref-name }}
      sha: ${{ steps.determine.outputs.sha }}
      is-tag: ${{ steps.determine.outputs.is-tag }}
      is-release-target: ${{ steps.determine.outputs.is-release-target }}
      is-beta-rc: ${{ steps.determine.outputs.is-beta-rc }}
    steps:
      - name: Determine ref information
        id: determine
        uses: actions/github-script@v7
        with:
          script: |
            const run = context.payload.workflow_run;
            const owner = context.repo.owner;
            const repo = context.repo.repo;
            const sha = run.head_sha;
            const branch = run.head_branch;

            let ref = undefined;
            let refName = undefined;

            if (branch) {
              ref = `refs/heads/${branch}`;
              refName = branch;
            } else {
              const iterator = github.paginate.iterator(
                github.rest.repos.listTags,
                {
                  owner,
                  repo,
                  per_page: 100,
                },
              );

              for await (const { data } of iterator) {
                const match = data.find((tag) => tag.commit?.sha === sha);
                if (match) {
                  ref = `refs/tags/${match.name}`;
                  refName = match.name;
                  break;
                }
              }
            }

            const outputs = {
              shouldBuild: false,
              ref: ref ?? '',
              refName: refName ?? '',
              sha,
              isTag: ref?.startsWith('refs/tags/') ?? false,
              isReleaseTarget: false,
              isBetaRc: false,
            };

            if (!ref || !refName) {
              core.info('No matching ref found for workflow run; skipping post-CI workflow.');
            } else {
              const allowed =
                ref.startsWith('refs/heads/feature-') ||
                ref.startsWith('refs/heads/fix-') ||
                ref.startsWith('refs/heads/l10n_') ||
                ref === 'refs/heads/dev' ||
                ref === 'refs/heads/beta' ||
                ref.includes('beta.rc') ||
                ref.startsWith('refs/tags/v');

              const isBetaRc = refName.includes('beta.rc');
              const isReleaseTarget = outputs.isTag && (refName.startsWith('v') || isBetaRc);

              outputs.shouldBuild = allowed;
              outputs.isReleaseTarget = isReleaseTarget;
              outputs.isBetaRc = isBetaRc;
            }

            core.setOutput('should-build', outputs.shouldBuild ? 'true' : 'false');
            core.setOutput('ref', outputs.ref);
            core.setOutput('ref-name', outputs.refName);
            core.setOutput('sha', outputs.sha);
            core.setOutput('is-tag', outputs.isTag ? 'true' : 'false');
            core.setOutput('is-release-target', outputs.isReleaseTarget ? 'true' : 'false');
            core.setOutput('is-beta-rc', outputs.isBetaRc ? 'true' : 'false');
  build-docker-image:
    needs: prepare
    if: needs.prepare.outputs.should-build == 'true'
    name: Build Docker image for ${{ needs.prepare.outputs.ref-name }}
    runs-on: ubuntu-24.04
    concurrency:
      group: ${{ github.workflow }}-build-docker-image-${{ needs.prepare.outputs.ref-name || needs.prepare.outputs.sha }}
      cancel-in-progress: true
    env:
      REF: ${{ needs.prepare.outputs.ref }}
      REF_NAME: ${{ needs.prepare.outputs.ref-name }}
      SHA: ${{ needs.prepare.outputs.sha }}
    steps:
      - name: Checkout
        uses: actions/checkout@v5
        with:
          ref: ${{ env.SHA }}
      - name: Check pushing to Docker Hub
        id: push-other-places
        env:
          REPOSITORY_OWNER: ${{ github.repository_owner }}
          REF_NAME: ${{ env.REF_NAME }}
          REF: ${{ env.REF }}
        run: |
          if [[ "$REPOSITORY_OWNER" == "paperless-ngx" ]] && \
             ([[ "$REF_NAME" == "dev" ]] || [[ "$REF_NAME" == "beta" ]] || [[ "$REF" == refs/tags/v* ]]); then
            echo "Enabling DockerHub image push"
            echo "enable=true" >> "$GITHUB_OUTPUT"
          else
            echo "Not pushing to DockerHub"
            echo "enable=false" >> "$GITHUB_OUTPUT"
          fi
      - name: Set ghcr repository name
        id: set-ghcr-repository
        run: |
          ghcr_name=$(echo "${{ github.repository }}" | awk '{ print tolower($0) }')
          echo "Name is ${ghcr_name}"
          echo "ghcr-repository=${ghcr_name}" >> "$GITHUB_OUTPUT"
      - name: Gather Docker metadata
        id: docker-meta
        uses: docker/metadata-action@v5
        env:
          GITHUB_REF: ${{ env.REF }}
          GITHUB_REF_NAME: ${{ env.REF_NAME }}
          GITHUB_SHA: ${{ env.SHA }}
        with:
          images: |
            ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}
            name=paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
            name=quay.io/paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
          tags: |
            type=ref,event=branch
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
        with:
          platforms: arm64
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Login to Docker Hub
        if: steps.push-other-places.outputs.enable == 'true'
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Login to Quay.io
        if: steps.push-other-places.outputs.enable == 'true'
        uses: docker/login-action@v3
        with:
          registry: quay.io
          username: ${{ secrets.QUAY_USERNAME }}
          password: ${{ secrets.QUAY_ROBOT_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v6
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ steps.docker-meta.outputs.tags }}
          labels: ${{ steps.docker-meta.outputs.labels }}
          build-args: |
            PNGX_TAG_VERSION=${{ steps.docker-meta.outputs.version }}
          cache-from: |
            type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ env.REF_NAME }}
            type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:dev
          cache-to: |
            type=registry,mode=max,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ env.REF_NAME }}
      - name: Inspect image
        run: |
          docker buildx imagetools inspect ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
      - name: Export frontend artifact from docker
        run: |
          docker create --name frontend-extract ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
          docker cp frontend-extract:/usr/src/paperless/src/documents/static/frontend src/documents/static/frontend/
      - name: Upload frontend artifact
        uses: actions/upload-artifact@v4
        with:
          name: frontend-compiled
          path: src/documents/static/frontend/
          retention-days: 7
  build-release:
    needs:
      - prepare
      - build-docker-image
    if: needs.prepare.outputs.should-build == 'true'
    name: Build release bundle
    runs-on: ubuntu-24.04
    env:
      REF_NAME: ${{ needs.prepare.outputs.ref-name }}
      SHA: ${{ needs.prepare.outputs.sha }}
      CI_RUN_ID: ${{ github.event.workflow_run.id }}
    steps:
      - name: Checkout
        uses: actions/checkout@v5
        with:
          ref: ${{ env.SHA }}
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install uv
        uses: astral-sh/setup-uv@v6
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install Python dependencies
        run: |
          uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
      - name: Install system dependencies
        run: |
          sudo apt-get update -qq
          sudo apt-get install -qq --no-install-recommends gettext liblept5
      - name: Download frontend artifact
        uses: actions/download-artifact@v5
        with:
          name: frontend-compiled
          path: src/documents/static/frontend/
      - name: Download documentation artifact
        uses: actions/download-artifact@v5
        with:
          name: documentation
          path: docs/_build/html/
          run-id: ${{ env.CI_RUN_ID }}
      - name: Generate requirements file
        run: |
          uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt
      - name: Compile messages
        run: |
          cd src/
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            manage.py compilemessages
      - name: Collect static files
        run: |
          cd src/
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            manage.py collectstatic --no-input
      - name: Move files
        run: |
          echo "Making dist folders"
          for directory in dist \
            dist/paperless-ngx \
            dist/paperless-ngx/scripts;
          do
            mkdir --verbose --parents ${directory}
          done

          echo "Copying basic files"
          for file_name in .dockerignore \
            .env \
            Dockerfile \
            pyproject.toml \
            uv.lock \
            requirements.txt \
            LICENSE \
            README.md \
            paperless.conf.example
          do
            cp --verbose ${file_name} dist/paperless-ngx/
          done
          mv --verbose dist/paperless-ngx/paperless.conf.example dist/paperless-ngx/paperless.conf

          echo "Copying Docker related files"
          cp --recursive docker/ dist/paperless-ngx/docker

          echo "Copying startup scripts"
          cp --verbose scripts/*.service scripts/*.sh scripts/*.socket dist/paperless-ngx/scripts/

          echo "Copying source files"
          cp --recursive src/ dist/paperless-ngx/src
          echo "Copying documentation"
          cp --recursive docs/_build/html/ dist/paperless-ngx/docs

          mv --verbose static dist/paperless-ngx
      - name: Make release package
        run: |
          echo "Creating release archive"
          cd dist
          sudo chown -R 1000:1000 paperless-ngx/
          tar -cJf paperless-ngx.tar.xz paperless-ngx/
      - name: Upload release artifact
        uses: actions/upload-artifact@v4
        with:
          name: release
          path: dist/paperless-ngx.tar.xz
          retention-days: 7
  publish-release:
    needs:
      - prepare
      - build-release
    if: needs.prepare.outputs.is-release-target == 'true'
    name: Publish release
    runs-on: ubuntu-24.04
    outputs:
      prerelease: ${{ steps.get_version.outputs.prerelease }}
      changelog: ${{ steps.create-release.outputs.body }}
      version: ${{ steps.get_version.outputs.version }}
    steps:
      - name: Download release artifact
        uses: actions/download-artifact@v5
        with:
          name: release
          path: ./
      - name: Get version
        id: get_version
        run: |
          echo "version=${{ needs.prepare.outputs.ref-name }}" >> "$GITHUB_OUTPUT"
          if [[ ${{ needs.prepare.outputs.is-beta-rc }} == 'true' ]]; then
            echo "prerelease=true" >> "$GITHUB_OUTPUT"
          else
            echo "prerelease=false" >> "$GITHUB_OUTPUT"
          fi
      - name: Create Release and Changelog
        id: create-release
        uses: release-drafter/release-drafter@v6
        with:
          name: Paperless-ngx ${{ steps.get_version.outputs.version }}
          tag: ${{ steps.get_version.outputs.version }}
          version: ${{ steps.get_version.outputs.version }}
          prerelease: ${{ steps.get_version.outputs.prerelease }}
          publish: true
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Upload release archive
        id: upload-release-asset
        uses: shogo82148/actions-upload-release-asset@v1
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          upload_url: ${{ steps.create-release.outputs.upload_url }}
          asset_path: ./paperless-ngx.tar.xz
          asset_name: paperless-ngx-${{ steps.get_version.outputs.version }}.tar.xz
          asset_content_type: application/x-xz
  append-changelog:
    needs:
      - publish-release
    if: needs.publish-release.outputs.prerelease == 'false'
    name: Append changelog to docs
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v5
        with:
          ref: main
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install uv
        uses: astral-sh/setup-uv@v6
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Append Changelog to docs
        id: append-Changelog
        working-directory: docs
        run: |
          git branch ${{ needs.publish-release.outputs.version }}-changelog
          git checkout ${{ needs.publish-release.outputs.version }}-changelog
          echo -e "# Changelog\n\n${{ needs.publish-release.outputs.changelog }}\n" > changelog-new.md
          echo "Manually linking usernames"
          sed -i -r 's|@([a-zA-Z0-9_]+) \(\[#|[@\1](https://github.com/\1) ([#|g' changelog-new.md
          echo "Removing unneeded comment tags"
          sed -i -r 's|@<!---->|@|g' changelog-new.md
          CURRENT_CHANGELOG=`tail --lines +2 changelog.md`
          echo -e "$CURRENT_CHANGELOG" >> changelog-new.md
          mv changelog-new.md changelog.md
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --dev \
            pre-commit run --files changelog.md || true
          git config --global user.name "github-actions"
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git commit -am "Changelog ${{ needs.publish-release.outputs.version }} - GHA"
          git push origin ${{ needs.publish-release.outputs.version }}-changelog
      - name: Create Pull Request
        uses: actions/github-script@v7
        with:
          script: |
            const { repo, owner } = context.repo;
            const result = await github.rest.pulls.create({
              title: 'Documentation: Add ${{ needs.publish-release.outputs.version }} changelog',
              owner,
              repo,
              head: '${{ needs.publish-release.outputs.version }}-changelog',
              base: 'main',
              body: 'This PR is auto-generated by CI.'
            });
            github.rest.issues.addLabels({
              owner,
              repo,
              issue_number: result.data.number,
              labels: ['documentation', 'skip-changelog']
            });
.github/workflows/ci.yml (vendored, 374 lines changed)
@@ -17,52 +17,11 @@ env:
  DEFAULT_PYTHON_VERSION: "3.11"
  NLTK_DATA: "/usr/share/nltk_data"
jobs:
  detect-duplicate:
    name: Detect Duplicate Run
    runs-on: ubuntu-24.04
    outputs:
      should_run: ${{ steps.check.outputs.should_run }}
    steps:
      - name: Check if workflow should run
        id: check
        uses: actions/github-script@v7
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            if (context.eventName !== 'push') {
              core.info('Not a push event; running workflow.');
              core.setOutput('should_run', 'true');
              return;
            }

            const ref = context.ref || '';
            if (!ref.startsWith('refs/heads/')) {
              core.info('Push is not to a branch; running workflow.');
              core.setOutput('should_run', 'true');
              return;
            }

            const branch = ref.substring('refs/heads/'.length);
            const { owner, repo } = context.repo;
            const prs = await github.paginate(github.rest.pulls.list, {
              owner,
              repo,
              state: 'open',
              head: `${owner}:${branch}`,
              per_page: 100,
            });

            if (prs.length === 0) {
              core.info(`No open PR found for ${branch}; running workflow.`);
              core.setOutput('should_run', 'true');
            } else {
              core.info(`Found ${prs.length} open PR(s) for ${branch}; skipping duplicate push run.`);
              core.setOutput('should_run', 'false');
            }
  pre-commit:
    needs:
      - detect-duplicate
    if: needs.detect-duplicate.outputs.should_run == 'true'
    # We want to run on external PRs, but not on our own internal PRs as they'll be run
    # by the push to the branch. Without this if check, checks are duplicated since
    # internal PRs match both the push and pull_request events.
    if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
    name: Linting Checks
    runs-on: ubuntu-24.04
    steps:
@@ -183,11 +142,13 @@ jobs:
        if: always()
        uses: codecov/test-results-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: backend-python-${{ matrix.python-version }}
          files: junit.xml
      - name: Upload backend coverage to Codecov
        uses: codecov/codecov-action@v5
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: backend-python-${{ matrix.python-version }}
          files: coverage.xml
      - name: Stop containers
@@ -263,11 +224,13 @@ jobs:
        uses: codecov/test-results-action@v1
        if: always()
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: frontend-node-${{ matrix.node-version }}
          directory: src-ui/
      - name: Upload frontend coverage to Codecov
        uses: codecov/codecov-action@v5
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: frontend-node-${{ matrix.node-version }}
          directory: src-ui/coverage/
  tests-frontend-e2e:
@@ -350,324 +313,3 @@ jobs:
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
        run: cd src-ui && pnpm run build --configuration=production
  build-docker-image:
    name: Build Docker image for ${{ github.ref_name }}
    runs-on: ubuntu-24.04
    if: github.event_name == 'push' && (startsWith(github.ref, 'refs/heads/feature-') || startsWith(github.ref, 'refs/heads/fix-') || github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/beta' || contains(github.ref, 'beta.rc') || startsWith(github.ref, 'refs/tags/v') || startsWith(github.ref, 'refs/heads/l10n_'))
    concurrency:
      group: ${{ github.workflow }}-build-docker-image-${{ github.ref_name }}
      cancel-in-progress: true
    needs:
      - tests-backend
      - tests-frontend
      - tests-frontend-e2e
    steps:
      - name: Check pushing to Docker Hub
        id: push-other-places
        # Only push to Dockerhub from the main repo AND the ref is either:
        #  main
        #  dev
        #  beta
        #  a tag
        # Otherwise forks would require a Docker Hub account and secrets setup
        run: |
          if [[ ${{ github.repository_owner }} == "paperless-ngx" && ( ${{ github.ref_name }} == "dev" || ${{ github.ref_name }} == "beta" || ${{ startsWith(github.ref, 'refs/tags/v') }} == "true" ) ]] ; then
            echo "Enabling DockerHub image push"
            echo "enable=true" >> $GITHUB_OUTPUT
          else
            echo "Not pushing to DockerHub"
            echo "enable=false" >> $GITHUB_OUTPUT
          fi
      - name: Set ghcr repository name
        id: set-ghcr-repository
        run: |
          ghcr_name=$(echo "${{ github.repository }}" | awk '{ print tolower($0) }')
          echo "Name is ${ghcr_name}"
          echo "ghcr-repository=${ghcr_name}" >> $GITHUB_OUTPUT
      - name: Gather Docker metadata
        id: docker-meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}
            name=paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
            name=quay.io/paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
          tags: |
            # Tag branches with branch name
            type=ref,event=branch
            # Process semver tags
            # For a tag x.y.z or vX.Y.Z, output an x.y.z and x.y image tag
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
      - name: Checkout
        uses: actions/checkout@v5
      # If https://github.com/docker/buildx/issues/1044 is resolved,
      # the append input with a native arm64 arch could be used to
      # significantly speed up building
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
        with:
          platforms: arm64
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        # Don't attempt to login if not pushing to Docker Hub
        if: steps.push-other-places.outputs.enable == 'true'
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Login to Quay.io
        uses: docker/login-action@v3
        # Don't attempt to login if not pushing to Quay.io
        if: steps.push-other-places.outputs.enable == 'true'
        with:
          registry: quay.io
          username: ${{ secrets.QUAY_USERNAME }}
          password: ${{ secrets.QUAY_ROBOT_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v6
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64,linux/arm64
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.docker-meta.outputs.tags }}
          labels: ${{ steps.docker-meta.outputs.labels }}
          build-args: |
            PNGX_TAG_VERSION=${{ steps.docker-meta.outputs.version }}
          # Get cache layers from this branch, then dev
          # This allows new branches to get at least some cache benefits, generally from dev
          cache-from: |
            type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
            type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:dev
          cache-to: |
            type=registry,mode=max,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
      - name: Inspect image
        run: |
          docker buildx imagetools inspect ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
      - name: Export frontend artifact from docker
        run: |
          docker create --name frontend-extract ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
          docker cp frontend-extract:/usr/src/paperless/src/documents/static/frontend src/documents/static/frontend/
      - name: Upload frontend artifact
        uses: actions/upload-artifact@v4
        with:
          name: frontend-compiled
          path: src/documents/static/frontend/
          retention-days: 7
  build-release:
    name: "Build Release"
    needs:
      - build-docker-image
      - documentation
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v5
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install uv
        uses: astral-sh/setup-uv@v6
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ steps.setup-python.outputs.python-version }}
      - name: Install Python dependencies
        run: |
          uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
      - name: Install system dependencies
        run: |
          sudo apt-get update -qq
          sudo apt-get install -qq --no-install-recommends gettext liblept5
      - name: Download frontend artifact
        uses: actions/download-artifact@v5
        with:
          name: frontend-compiled
          path: src/documents/static/frontend/
      - name: Download documentation artifact
        uses: actions/download-artifact@v5
        with:
          name: documentation
          path: docs/_build/html/
      - name: Generate requirements file
        run: |
          uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt
      - name: Compile messages
        run: |
          cd src/
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            manage.py compilemessages
      - name: Collect static files
        run: |
          cd src/
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            manage.py collectstatic --no-input
      - name: Move files
        run: |
          echo "Making dist folders"
          for directory in dist \
            dist/paperless-ngx \
            dist/paperless-ngx/scripts;
          do
            mkdir --verbose --parents ${directory}
          done

          echo "Copying basic files"
          for file_name in .dockerignore \
            .env \
            Dockerfile \
            pyproject.toml \
            uv.lock \
            requirements.txt \
            LICENSE \
            README.md \
            paperless.conf.example
          do
            cp --verbose ${file_name} dist/paperless-ngx/
          done
          mv --verbose dist/paperless-ngx/paperless.conf.example dist/paperless-ngx/paperless.conf

          echo "Copying Docker related files"
          cp --recursive docker/ dist/paperless-ngx/docker

          echo "Copying startup scripts"
          cp --verbose scripts/*.service scripts/*.sh scripts/*.socket dist/paperless-ngx/scripts/

          echo "Copying source files"
          cp --recursive src/ dist/paperless-ngx/src
          echo "Copying documentation"
          cp --recursive docs/_build/html/ dist/paperless-ngx/docs

          mv --verbose static dist/paperless-ngx
      - name: Make release package
        run: |
          echo "Creating release archive"
          cd dist
          sudo chown -R 1000:1000 paperless-ngx/
          tar -cJf paperless-ngx.tar.xz paperless-ngx/
      - name: Upload release artifact
        uses: actions/upload-artifact@v4
        with:
          name: release
          path: dist/paperless-ngx.tar.xz
          retention-days: 7
  publish-release:
    name: "Publish Release"
    runs-on: ubuntu-24.04
    outputs:
      prerelease: ${{ steps.get_version.outputs.prerelease }}
      changelog: ${{ steps.create-release.outputs.body }}
      version: ${{ steps.get_version.outputs.version }}
    needs:
      - build-release
    if: github.ref_type == 'tag' && (startsWith(github.ref_name, 'v') || contains(github.ref_name, '-beta.rc'))
    steps:
      - name: Download release artifact
        uses: actions/download-artifact@v5
        with:
          name: release
          path: ./
      - name: Get version
        id: get_version
        run: |
          echo "version=${{ github.ref_name }}" >> $GITHUB_OUTPUT
          if [[ ${{ contains(github.ref_name, '-beta.rc') }} == 'true' ]]; then
            echo "prerelease=true" >> $GITHUB_OUTPUT
          else
            echo "prerelease=false" >> $GITHUB_OUTPUT
          fi
      - name: Create Release and Changelog
        id: create-release
        uses: release-drafter/release-drafter@v6
        with:
          name: Paperless-ngx ${{ steps.get_version.outputs.version }}
          tag: ${{ steps.get_version.outputs.version }}
          version: ${{ steps.get_version.outputs.version }}
          prerelease: ${{ steps.get_version.outputs.prerelease }}
          publish: true # ensures release is not marked as draft
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Upload release archive
        id: upload-release-asset
        uses: shogo82148/actions-upload-release-asset@v1
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          upload_url: ${{ steps.create-release.outputs.upload_url }}
          asset_path: ./paperless-ngx.tar.xz
          asset_name: paperless-ngx-${{ steps.get_version.outputs.version }}.tar.xz
          asset_content_type: application/x-xz
  append-changelog:
    name: "Append Changelog"
    runs-on: ubuntu-24.04
    needs:
      - publish-release
    if: needs.publish-release.outputs.prerelease == 'false'
    steps:
      - name: Checkout
        uses: actions/checkout@v5
        with:
          ref: main
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install uv
        uses: astral-sh/setup-uv@v6
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Append Changelog to docs
        id: append-Changelog
        working-directory: docs
        run: |
          git branch ${{ needs.publish-release.outputs.version }}-changelog
          git checkout ${{ needs.publish-release.outputs.version }}-changelog
          echo -e "# Changelog\n\n${{ needs.publish-release.outputs.changelog }}\n" > changelog-new.md
          echo "Manually linking usernames"
          sed -i -r 's|@([a-zA-Z0-9_]+) \(\[#|[@\1](https://github.com/\1) ([#|g' changelog-new.md
          echo "Removing unneeded comment tags"
          sed -i -r 's|@<!---->|@|g' changelog-new.md
          CURRENT_CHANGELOG=`tail --lines +2 changelog.md`
          echo -e "$CURRENT_CHANGELOG" >> changelog-new.md
          mv changelog-new.md changelog.md
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --dev \
            pre-commit run --files changelog.md || true
          git config --global user.name "github-actions"
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git commit -am "Changelog ${{ needs.publish-release.outputs.version }} - GHA"
          git push origin ${{ needs.publish-release.outputs.version }}-changelog
      - name: Create Pull Request
        uses: actions/github-script@v7
        with:
          script: |
            const { repo, owner } = context.repo;
            const result = await github.rest.pulls.create({
              title: 'Documentation: Add ${{ needs.publish-release.outputs.version }} changelog',
              owner,
              repo,
              head: '${{ needs.publish-release.outputs.version }}-changelog',
              base: 'main',
              body: 'This PR is auto-generated by CI.'
            });
            github.rest.issues.addLabels({
              owner,
              repo,
              issue_number: result.data.number,
              labels: ['documentation', 'skip-changelog']
            });
.github/workflows/codecov-comment.yml (vendored, new file, 220 lines)
@@ -0,0 +1,220 @@
name: Codecov PR Comment
on:
  workflow_run:
    workflows:
      - ci
    types:
      - completed
permissions:
  contents: read
  pull-requests: write
jobs:
  comment:
    if: >-
      github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success'
    runs-on: ubuntu-24.04
    steps:
      - name: Gather pull request context
        id: pr
        uses: actions/github-script@v7
        with:
          script: |
            const run = context.payload.workflow_run;
            if (!run.pull_requests || run.pull_requests.length === 0) {
              core.info('No associated pull request. Skipping.');
              return { shouldRun: false };
            }

            const pr = run.pull_requests[0];
            return {
              shouldRun: true,
              prNumber: pr.number,
              headSha: run.head_sha,
            };
      - name: Fetch Codecov coverage
        id: coverage
        if: steps.pr.outputs.shouldRun == 'true'
        uses: actions/github-script@v7
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
          COMMIT_SHA: ${{ steps.pr.outputs.headSha }}
        with:
          script: |
            const token = process.env.CODECOV_TOKEN;
            if (!token) {
              core.warning('CODECOV_TOKEN secret is not available; skipping comment.');
              core.setOutput('shouldComment', 'false');
              return;
            }

            const commitSha = process.env.COMMIT_SHA;
            const owner = context.repo.owner;
            const repo = context.repo.repo;
            const url = `https://codecov.io/api/v2/github/${owner}/repos/${repo}/commits/${commitSha}/report`;
            const maxAttempts = 10;
            const waitMs = 15000;
            const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

            let data;
            for (let attempt = 1; attempt <= maxAttempts; attempt++) {
              core.info(`Fetching Codecov report (attempt ${attempt}/${maxAttempts})`);
              const response = await fetch(url, {
                headers: {
                  Authorization: `Bearer ${token}`,
                  'Content-Type': 'application/json',
                  Accept: 'application/json',
                },
              });

              if (response.status === 404) {
                core.info('Report not ready yet (404). Waiting before retrying.');
                await sleep(waitMs);
                continue;
              }

              if (!response.ok) {
                const text = await response.text();
                throw new Error(`Codecov API returned ${response.status}: ${text}`);
              }

              data = await response.json();
              if (data && Object.keys(data).length > 0) {
                break;
              }

              core.info('Report payload empty. Waiting before retrying.');
              await sleep(waitMs);
            }

            if (!data) {
              core.warning('Unable to retrieve Codecov report after multiple attempts.');
              core.setOutput('shouldComment', 'false');
              return;
            }

            const totals = data.report?.totals ?? data.commit?.totals ?? data.totals;
            if (!totals) {
              core.warning('Codecov response does not contain coverage totals.');
              core.setOutput('shouldComment', 'false');
              return;
            }

            const compareTotals = data.report?.compare?.totals ?? data.compare?.totals;
            const flagsRaw = data.report?.totals_by_flag ?? data.report?.components ?? [];

            const toNumber = (value) => {
              if (value === null || value === undefined || value === '') {
                return undefined;
              }
              const num = Number(value);
              return Number.isFinite(num) ? num : undefined;
            };

            const coverage = toNumber(totals.coverage);
            const baseCoverage = toNumber(compareTotals?.base_coverage ?? compareTotals?.base);
            const delta = toNumber(
              compareTotals?.coverage_change ??
              compareTotals?.coverage_diff ??
              totals.delta ??
              totals.diff ??
              totals.change,
            );

            const formatPercent = (value) => {
              if (value === undefined) return '—';
              return `${value.toFixed(2)}%`;
            };

            const formatDelta = (value) => {
              if (value === undefined) return '—';
              const sign = value >= 0 ? '+' : '';
              return `${sign}${value.toFixed(2)}%`;
            };

            const shortSha = commitSha.slice(0, 7);
            const lines = [
              '<!-- codecov-coverage-comment -->',
              '**Codecov Coverage**',
              '',
              `- Head \`${shortSha}\`: ${formatPercent(coverage)}`,
            ];

            if (baseCoverage !== undefined) {
              lines.push(`- Base: ${formatPercent(baseCoverage)}`);
            }

            if (delta !== undefined) {
              lines.push(`- Change: ${formatDelta(delta)}`);
            }

            const flagEntries = Array.isArray(flagsRaw)
              ? flagsRaw
              : Object.entries(flagsRaw).map(([name, totals]) => ({ name, totals }));

            const flagRows = [];
            for (const entry of flagEntries) {
              const label = entry.flag ?? entry.name ?? entry.component ?? entry.id;
              const entryTotals = entry.totals ?? entry;
              const entryCoverage = toNumber(entryTotals?.coverage);
              const entryDelta = toNumber(
                entryTotals?.coverage_change ??
                entryTotals?.coverage_diff ??
                entryTotals?.delta ??
                entryTotals?.diff,
              );
              if (!label || entryCoverage === undefined) {
                continue;
              }
              flagRows.push(`| ${label} | ${formatPercent(entryCoverage)} | ${formatDelta(entryDelta)} |`);
            }

            if (flagRows.length) {
              lines.push('');
              lines.push('| Flag | Coverage | Change |');
              lines.push('| --- | --- | --- |');
              lines.push(...flagRows);
            }

            const commentBody = lines.join('\n');
            const shouldComment = coverage !== undefined;
            core.setOutput('shouldComment', shouldComment ? 'true' : 'false');
            if (shouldComment) {
              core.setOutput('commentBody', commentBody);
            }
      - name: Upsert coverage comment
        if: steps.pr.outputs.shouldRun == 'true' && steps.coverage.outputs.shouldComment == 'true'
        uses: actions/github-script@v7
        env:
          PR_NUMBER: ${{ steps.pr.outputs.prNumber }}
          COMMENT_BODY: ${{ steps.coverage.outputs.commentBody }}
        with:
          script: |
            const prNumber = Number(process.env.PR_NUMBER);
            const body = process.env.COMMENT_BODY;
            const marker = '<!-- codecov-coverage-comment -->';

            const { data: comments } = await github.rest.issues.listComments({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: prNumber,
              per_page: 100,
            });

            const existing = comments.find((comment) => comment.body?.includes(marker));
            if (existing) {
              core.info(`Updating existing coverage comment (id: ${existing.id}).`);
              await github.rest.issues.updateComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                comment_id: existing.id,
                body,
              });
            } else {
              core.info('Creating new coverage comment.');
              await github.rest.issues.createComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: prNumber,
                body,
              });
            }

Dockerfile
@@ -32,7 +32,7 @@ RUN set -eux \
# Purpose: Installs s6-overlay and rootfs
# Comments:
#  - Don't leave anything extra in here either
FROM ghcr.io/astral-sh/uv:0.8.22-python3.12-bookworm-slim AS s6-overlay-base
FROM ghcr.io/astral-sh/uv:0.8.17-python3.12-bookworm-slim AS s6-overlay-base

WORKDIR /usr/src/s6

pyproject.toml
@@ -30,7 +30,7 @@ dependencies = [
  "django-cachalot~=2.8.0",
  "django-celery-results~=2.6.0",
  "django-compression-middleware~=0.5.0",
  "django-cors-headers~=4.9.0",
  "django-cors-headers~=4.8.0",
  "django-extensions~=4.1",
  "django-filter~=25.1",
  "django-guardian~=3.1.2",
@@ -177,16 +177,10 @@ export class CustomFieldEditDialogComponent
  }

  public removeSelectOption(index: number) {
    const globalIndex =
      index + (this.selectOptionsPage - 1) * SELECT_OPTION_PAGE_SIZE
    this._allSelectOptions.splice(globalIndex, 1)

    const totalPages = Math.max(
      1,
      Math.ceil(this._allSelectOptions.length / SELECT_OPTION_PAGE_SIZE)
    this.selectOptions.removeAt(index)
    this._allSelectOptions.splice(
      index + (this.selectOptionsPage - 1) * SELECT_OPTION_PAGE_SIZE,
      1
    )
    const targetPage = Math.min(this.selectOptionsPage, totalPages)

    this.selectOptionsPage = targetPage
  }
}
uv.lock (generated, 42 lines changed)
@@ -730,15 +730,15 @@ wheels = [

[[package]]
name = "django-cors-headers"
version = "4.9.0"
version = "4.8.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "asgiref", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
    { name = "django", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/21/39/55822b15b7ec87410f34cd16ce04065ff390e50f9e29f31d6d116fc80456/django_cors_headers-4.9.0.tar.gz", hash = "sha256:fe5d7cb59fdc2c8c646ce84b727ac2bca8912a247e6e68e1fb507372178e59e8", size = 21458, upload-time = "2025-09-18T10:40:52.326Z" }
sdist = { url = "https://files.pythonhosted.org/packages/89/8e/6225441edcfe179bf4861e9e67489e33375e0b66316c8d7b9edaae863d37/django_cors_headers-4.8.0.tar.gz", hash = "sha256:0a12a2efcd59a3cea741e44db8ab589e929949de5bc4cdf35a29c6ae77297686", size = 21425, upload-time = "2025-09-08T15:58:05.34Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/30/d8/19ed1e47badf477d17fb177c1c19b5a21da0fd2d9f093f23be3fb86c5fab/django_cors_headers-4.9.0-py3-none-any.whl", hash = "sha256:15c7f20727f90044dcee2216a9fd7303741a864865f0c3657e28b7056f61b449", size = 12809, upload-time = "2025-09-18T10:40:50.843Z" },
    { url = "https://files.pythonhosted.org/packages/ac/b3/29ef49d6ff7800f323f3d98cde7777b3cfdda133de8feea84cffafea4578/django_cors_headers-4.8.0-py3-none-any.whl", hash = "sha256:3b883f4c6d07848673218456a5e070d8ab51f97341c1f27d0242ca167e7272ab", size = 12804, upload-time = "2025-09-08T15:58:03.882Z" },
]

[[package]]
@@ -2182,7 +2182,7 @@ requires-dist = [
    { name = "django-cachalot", specifier = "~=2.8.0" },
    { name = "django-celery-results", specifier = "~=2.6.0" },
    { name = "django-compression-middleware", specifier = "~=0.5.0" },
    { name = "django-cors-headers", specifier = "~=4.9.0" },
    { name = "django-cors-headers", specifier = "~=4.8.0" },
    { name = "django-extensions", specifier = "~=4.1" },
    { name = "django-filter", specifier = "~=25.1" },
    { name = "django-guardian", specifier = "~=3.1.2" },
@@ -3291,25 +3291,25 @@ wheels = [

[[package]]
name = "ruff"
version = "0.13.2"
version = "0.13.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/02/df/8d7d8c515d33adfc540e2edf6c6021ea1c5a58a678d8cfce9fae59aabcab/ruff-0.13.2.tar.gz", hash = "sha256:cb12fffd32fb16d32cef4ed16d8c7cdc27ed7c944eaa98d99d01ab7ab0b710ff", size = 5416417, upload-time = "2025-09-25T14:54:09.936Z" }
sdist = { url = "https://files.pythonhosted.org/packages/6e/1a/1f4b722862840295bcaba8c9e5261572347509548faaa99b2d57ee7bfe6a/ruff-0.13.0.tar.gz", hash = "sha256:5b4b1ee7eb35afae128ab94459b13b2baaed282b1fb0f472a73c82c996c8ae60", size = 5372863, upload-time = "2025-09-10T16:25:37.917Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/6e/84/5716a7fa4758e41bf70e603e13637c42cfb9dbf7ceb07180211b9bbf75ef/ruff-0.13.2-py3-none-linux_armv6l.whl", hash = "sha256:3796345842b55f033a78285e4f1641078f902020d8450cade03aad01bffd81c3", size = 12343254, upload-time = "2025-09-25T14:53:27.784Z" },
    { url = "https://files.pythonhosted.org/packages/9b/77/c7042582401bb9ac8eff25360e9335e901d7a1c0749a2b28ba4ecb239991/ruff-0.13.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ff7e4dda12e683e9709ac89e2dd436abf31a4d8a8fc3d89656231ed808e231d2", size = 13040891, upload-time = "2025-09-25T14:53:31.38Z" },
    { url = "https://files.pythonhosted.org/packages/c6/15/125a7f76eb295cb34d19c6778e3a82ace33730ad4e6f28d3427e134a02e0/ruff-0.13.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c75e9d2a2fafd1fdd895d0e7e24b44355984affdde1c412a6f6d3f6e16b22d46", size = 12243588, upload-time = "2025-09-25T14:53:33.543Z" },
    { url = "https://files.pythonhosted.org/packages/9e/eb/0093ae04a70f81f8be7fd7ed6456e926b65d238fc122311293d033fdf91e/ruff-0.13.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cceac74e7bbc53ed7d15d1042ffe7b6577bf294611ad90393bf9b2a0f0ec7cb6", size = 12491359, upload-time = "2025-09-25T14:53:35.892Z" },
    { url = "https://files.pythonhosted.org/packages/43/fe/72b525948a6956f07dad4a6f122336b6a05f2e3fd27471cea612349fedb9/ruff-0.13.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6ae3f469b5465ba6d9721383ae9d49310c19b452a161b57507764d7ef15f4b07", size = 12162486, upload-time = "2025-09-25T14:53:38.171Z" },
    { url = "https://files.pythonhosted.org/packages/6a/e3/0fac422bbbfb2ea838023e0d9fcf1f30183d83ab2482800e2cb892d02dfe/ruff-0.13.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f8f9e3cd6714358238cd6626b9d43026ed19c0c018376ac1ef3c3a04ffb42d8", size = 13871203, upload-time = "2025-09-25T14:53:41.943Z" },
    { url = "https://files.pythonhosted.org/packages/6b/82/b721c8e3ec5df6d83ba0e45dcf00892c4f98b325256c42c38ef136496cbf/ruff-0.13.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c6ed79584a8f6cbe2e5d7dbacf7cc1ee29cbdb5df1172e77fbdadc8bb85a1f89", size = 14929635, upload-time = "2025-09-25T14:53:43.953Z" },
    { url = "https://files.pythonhosted.org/packages/c4/a0/ad56faf6daa507b83079a1ad7a11694b87d61e6bf01c66bd82b466f21821/ruff-0.13.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aed130b2fde049cea2019f55deb939103123cdd191105f97a0599a3e753d61b0", size = 14338783, upload-time = "2025-09-25T14:53:46.205Z" },
    { url = "https://files.pythonhosted.org/packages/47/77/ad1d9156db8f99cd01ee7e29d74b34050e8075a8438e589121fcd25c4b08/ruff-0.13.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1887c230c2c9d65ed1b4e4cfe4d255577ea28b718ae226c348ae68df958191aa", size = 13355322, upload-time = "2025-09-25T14:53:48.164Z" },
    { url = "https://files.pythonhosted.org/packages/64/8b/e87cfca2be6f8b9f41f0bb12dc48c6455e2d66df46fe61bb441a226f1089/ruff-0.13.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bcb10276b69b3cfea3a102ca119ffe5c6ba3901e20e60cf9efb53fa417633c3", size = 13354427, upload-time = "2025-09-25T14:53:50.486Z" },
    { url = "https://files.pythonhosted.org/packages/7f/df/bf382f3fbead082a575edb860897287f42b1b3c694bafa16bc9904c11ed3/ruff-0.13.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:afa721017aa55a555b2ff7944816587f1cb813c2c0a882d158f59b832da1660d", size = 13537637, upload-time = "2025-09-25T14:53:52.887Z" },
    { url = "https://files.pythonhosted.org/packages/51/70/1fb7a7c8a6fc8bd15636288a46e209e81913b87988f26e1913d0851e54f4/ruff-0.13.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1dbc875cf3720c64b3990fef8939334e74cb0ca65b8dbc61d1f439201a38101b", size = 12340025, upload-time = "2025-09-25T14:53:54.88Z" },
    { url = "https://files.pythonhosted.org/packages/4c/27/1e5b3f1c23ca5dd4106d9d580e5c13d9acb70288bff614b3d7b638378cc9/ruff-0.13.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5b939a1b2a960e9742e9a347e5bbc9b3c3d2c716f86c6ae273d9cbd64f193f22", size = 12133449, upload-time = "2025-09-25T14:53:57.089Z" },
    { url = "https://files.pythonhosted.org/packages/2d/09/b92a5ccee289f11ab128df57d5911224197d8d55ef3bd2043534ff72ca54/ruff-0.13.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:50e2d52acb8de3804fc5f6e2fa3ae9bdc6812410a9e46837e673ad1f90a18736", size = 13051369, upload-time = "2025-09-25T14:53:59.124Z" },
    { url = "https://files.pythonhosted.org/packages/89/99/26c9d1c7d8150f45e346dc045cc49f23e961efceb4a70c47dea0960dea9a/ruff-0.13.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3196bc13ab2110c176b9a4ae5ff7ab676faaa1964b330a1383ba20e1e19645f2", size = 13523644, upload-time = "2025-09-25T14:54:01.622Z" },
    { url = "https://files.pythonhosted.org/packages/ac/fe/6f87b419dbe166fd30a991390221f14c5b68946f389ea07913e1719741e0/ruff-0.13.0-py3-none-linux_armv6l.whl", hash = "sha256:137f3d65d58ee828ae136a12d1dc33d992773d8f7644bc6b82714570f31b2004", size = 12187826, upload-time = "2025-09-10T16:24:39.5Z" },
    { url = "https://files.pythonhosted.org/packages/e4/25/c92296b1fc36d2499e12b74a3fdb230f77af7bdf048fad7b0a62e94ed56a/ruff-0.13.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:21ae48151b66e71fd111b7d79f9ad358814ed58c339631450c66a4be33cc28b9", size = 12933428, upload-time = "2025-09-10T16:24:43.866Z" },
    { url = "https://files.pythonhosted.org/packages/44/cf/40bc7221a949470307d9c35b4ef5810c294e6cfa3caafb57d882731a9f42/ruff-0.13.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:64de45f4ca5441209e41742d527944635a05a6e7c05798904f39c85bafa819e3", size = 12095543, upload-time = "2025-09-10T16:24:46.638Z" },
    { url = "https://files.pythonhosted.org/packages/f1/03/8b5ff2a211efb68c63a1d03d157e924997ada87d01bebffbd13a0f3fcdeb/ruff-0.13.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b2c653ae9b9d46e0ef62fc6fbf5b979bda20a0b1d2b22f8f7eb0cde9f4963b8", size = 12312489, upload-time = "2025-09-10T16:24:49.556Z" },
    { url = "https://files.pythonhosted.org/packages/37/fc/2336ef6d5e9c8d8ea8305c5f91e767d795cd4fc171a6d97ef38a5302dadc/ruff-0.13.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cec632534332062bc9eb5884a267b689085a1afea9801bf94e3ba7498a2d207", size = 11991631, upload-time = "2025-09-10T16:24:53.439Z" },
    { url = "https://files.pythonhosted.org/packages/39/7f/f6d574d100fca83d32637d7f5541bea2f5e473c40020bbc7fc4a4d5b7294/ruff-0.13.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcd628101d9f7d122e120ac7c17e0a0f468b19bc925501dbe03c1cb7f5415b24", size = 13720602, upload-time = "2025-09-10T16:24:56.392Z" },
    { url = "https://files.pythonhosted.org/packages/fd/c8/a8a5b81d8729b5d1f663348d11e2a9d65a7a9bd3c399763b1a51c72be1ce/ruff-0.13.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:afe37db8e1466acb173bb2a39ca92df00570e0fd7c94c72d87b51b21bb63efea", size = 14697751, upload-time = "2025-09-10T16:24:59.89Z" },
    { url = "https://files.pythonhosted.org/packages/57/f5/183ec292272ce7ec5e882aea74937f7288e88ecb500198b832c24debc6d3/ruff-0.13.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f96a8d90bb258d7d3358b372905fe7333aaacf6c39e2408b9f8ba181f4b6ef2", size = 14095317, upload-time = "2025-09-10T16:25:03.025Z" },
    { url = "https://files.pythonhosted.org/packages/9f/8d/7f9771c971724701af7926c14dab31754e7b303d127b0d3f01116faef456/ruff-0.13.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b5e3d883e4f924c5298e3f2ee0f3085819c14f68d1e5b6715597681433f153", size = 13144418, upload-time = "2025-09-10T16:25:06.272Z" },
    { url = "https://files.pythonhosted.org/packages/a8/a6/7985ad1778e60922d4bef546688cd8a25822c58873e9ff30189cfe5dc4ab/ruff-0.13.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03447f3d18479df3d24917a92d768a89f873a7181a064858ea90a804a7538991", size = 13370843, upload-time = "2025-09-10T16:25:09.965Z" },
    { url = "https://files.pythonhosted.org/packages/64/1c/bafdd5a7a05a50cc51d9f5711da704942d8dd62df3d8c70c311e98ce9f8a/ruff-0.13.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:fbc6b1934eb1c0033da427c805e27d164bb713f8e273a024a7e86176d7f462cf", size = 13321891, upload-time = "2025-09-10T16:25:12.969Z" },
    { url = "https://files.pythonhosted.org/packages/bc/3e/7817f989cb9725ef7e8d2cee74186bf90555279e119de50c750c4b7a72fe/ruff-0.13.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a8ab6a3e03665d39d4a25ee199d207a488724f022db0e1fe4002968abdb8001b", size = 12119119, upload-time = "2025-09-10T16:25:16.621Z" },
    { url = "https://files.pythonhosted.org/packages/58/07/9df080742e8d1080e60c426dce6e96a8faf9a371e2ce22eef662e3839c95/ruff-0.13.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d2a5c62f8ccc6dd2fe259917482de7275cecc86141ee10432727c4816235bc41", size = 11961594, upload-time = "2025-09-10T16:25:19.49Z" },
    { url = "https://files.pythonhosted.org/packages/6a/f4/ae1185349197d26a2316840cb4d6c3fba61d4ac36ed728bf0228b222d71f/ruff-0.13.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b7b85ca27aeeb1ab421bc787009831cffe6048faae08ad80867edab9f2760945", size = 12933377, upload-time = "2025-09-10T16:25:22.371Z" },
    { url = "https://files.pythonhosted.org/packages/b6/39/e776c10a3b349fc8209a905bfb327831d7516f6058339a613a8d2aaecacd/ruff-0.13.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:79ea0c44a3032af768cabfd9616e44c24303af49d633b43e3a5096e009ebe823", size = 13418555, upload-time = "2025-09-10T16:25:25.681Z" },
]

[[package]]