Merge pull request #1240 from paperless-ngx/beta

[Beta] Paperless-ngx v1.8.0 Release Candidate 1
shamoon
2022-07-28 15:17:30 -07:00
committed by GitHub
277 changed files with 56739 additions and 28967 deletions

.github/scripts/cleanup-tags.py (new file, 254 additions)

@@ -0,0 +1,254 @@
import logging
import os
from argparse import ArgumentParser
from typing import Final
from typing import List
from urllib.parse import quote

import requests
from common import get_log_level

logger = logging.getLogger("cleanup-tags")


# Thin wrapper around the GitHub REST endpoints needed for branch and package queries
class GithubContainerRegistry:
    def __init__(
        self,
        session: requests.Session,
        token: str,
        owner_or_org: str,
    ):
        self._session: requests.Session = session
        self._token = token
        self._owner_or_org = owner_or_org
        # https://docs.github.com/en/rest/branches/branches
        self._BRANCHES_ENDPOINT = "https://api.github.com/repos/{OWNER}/{REPO}/branches"
        if self._owner_or_org == "paperless-ngx":
            # https://docs.github.com/en/rest/packages#get-all-package-versions-for-a-package-owned-by-an-organization
            self._PACKAGES_VERSIONS_ENDPOINT = "https://api.github.com/orgs/{ORG}/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions"
            # https://docs.github.com/en/rest/packages#delete-package-version-for-an-organization
            self._PACKAGE_VERSION_DELETE_ENDPOINT = "https://api.github.com/orgs/{ORG}/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions/{PACKAGE_VERSION_ID}"
        else:
            # https://docs.github.com/en/rest/packages#get-all-package-versions-for-a-package-owned-by-the-authenticated-user
            self._PACKAGES_VERSIONS_ENDPOINT = "https://api.github.com/user/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions"
            # https://docs.github.com/en/rest/packages#delete-a-package-version-for-the-authenticated-user
            self._PACKAGE_VERSION_DELETE_ENDPOINT = "https://api.github.com/user/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions/{PACKAGE_VERSION_ID}"

    def __enter__(self):
        self._session.headers.update(
            {
                "Accept": "application/vnd.github.v3+json",
                "Authorization": f"token {self._token}",
            },
        )
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if "Accept" in self._session.headers:
            del self._session.headers["Accept"]
        if "Authorization" in self._session.headers:
            del self._session.headers["Authorization"]

    def _read_all_pages(self, endpoint):
        # Follow the "next" links in the response headers until every page has been read
        internal_data = []
        while True:
            resp = self._session.get(endpoint)
            if resp.status_code == 200:
                internal_data += resp.json()
                if "next" in resp.links:
                    endpoint = resp.links["next"]["url"]
                else:
                    logger.debug("Exiting pagination loop")
                    break
            else:
                logger.warning(f"Request to {endpoint} returned HTTP {resp.status_code}")
                break
        return internal_data

    def get_branches(self, repo: str):
        endpoint = self._BRANCHES_ENDPOINT.format(OWNER=self._owner_or_org, REPO=repo)
        internal_data = self._read_all_pages(endpoint)
        return internal_data

    def filter_branches_by_name_pattern(self, branch_data, pattern: str):
        matches = {}
        for branch in branch_data:
            if branch["name"].startswith(pattern):
                matches[branch["name"]] = branch
        return matches

    def get_package_versions(
        self,
        package_name: str,
        package_type: str = "container",
    ) -> List:
        package_name = quote(package_name, safe="")
        endpoint = self._PACKAGES_VERSIONS_ENDPOINT.format(
            ORG=self._owner_or_org,
            PACKAGE_TYPE=package_type,
            PACKAGE_NAME=package_name,
        )
        internal_data = self._read_all_pages(endpoint)
        return internal_data

    def filter_packages_by_tag_pattern(self, package_data, pattern: str):
        matches = {}
        for package in package_data:
            if "metadata" in package and "container" in package["metadata"]:
                container_metadata = package["metadata"]["container"]
                if "tags" in container_metadata:
                    container_tags = container_metadata["tags"]
                    for tag in container_tags:
                        if tag.startswith(pattern):
                            matches[tag] = package
                            break
        return matches

    def filter_packages_untagged(self, package_data):
        matches = {}
        for package in package_data:
            if "metadata" in package and "container" in package["metadata"]:
                container_metadata = package["metadata"]["container"]
                if "tags" in container_metadata:
                    container_tags = container_metadata["tags"]
                    if not len(container_tags):
                        matches[package["name"]] = package
        return matches

    def delete_package_version(self, package_name, package_data):
        package_name = quote(package_name, safe="")
        endpoint = self._PACKAGE_VERSION_DELETE_ENDPOINT.format(
            ORG=self._owner_or_org,
            PACKAGE_TYPE=package_data["metadata"]["package_type"],
            PACKAGE_NAME=package_name,
            PACKAGE_VERSION_ID=package_data["id"],
        )
        resp = self._session.delete(endpoint)
        if resp.status_code != 204:
            logger.warning(
                f"Request to delete {endpoint} returned HTTP {resp.status_code}",
            )


def _main():
    parser = ArgumentParser(
        description="Using the GitHub API locate and optionally delete container"
        " tags which no longer have an associated feature branch",
    )
    parser.add_argument(
        "--delete",
        action="store_true",
        default=False,
        help="If provided, actually delete the container tags",
    )
    # TODO There's a lot of untagged images, do those need to stay for anything?
    parser.add_argument(
        "--untagged",
        action="store_true",
        default=False,
        help="If provided, delete untagged containers as well",
    )
    parser.add_argument(
        "--loglevel",
        default="info",
        help="Configures the logging level",
    )
    args = parser.parse_args()

    logging.basicConfig(
        level=get_log_level(args),
        datefmt="%Y-%m-%d %H:%M:%S",
        format="%(asctime)s %(levelname)-8s %(message)s",
    )

    repo_owner: Final[str] = os.environ["GITHUB_REPOSITORY_OWNER"]
    repo: Final[str] = os.environ["GITHUB_REPOSITORY"]
    gh_token: Final[str] = os.environ["GITHUB_TOKEN"]

    with requests.session() as sess:
        with GithubContainerRegistry(sess, gh_token, repo_owner) as gh_api:
            all_branches = gh_api.get_branches("paperless-ngx")
            logger.info(f"Located {len(all_branches)} branches of {repo_owner}/{repo} ")

            feature_branches = gh_api.filter_branches_by_name_pattern(
                all_branches,
                "feature-",
            )
            logger.info(f"Located {len(feature_branches)} feature branches")

            for package_name in ["paperless-ngx", "paperless-ngx/builder/cache/app"]:
                all_package_versions = gh_api.get_package_versions(package_name)
                logger.info(
                    f"Located {len(all_package_versions)} versions of package {package_name}",
                )

                packages_tagged_feature = gh_api.filter_packages_by_tag_pattern(
                    all_package_versions,
                    "feature-",
                )
                logger.info(
                    f'Located {len(packages_tagged_feature)} versions of package {package_name} tagged "feature-"',
                )

                untagged_packages = gh_api.filter_packages_untagged(
                    all_package_versions,
                )
                logger.info(
                    f"Located {len(untagged_packages)} untagged versions of package {package_name}",
                )

                # A feature- tag is stale once no feature branch of the same name exists
                to_delete = list(
                    set(packages_tagged_feature.keys()) - set(feature_branches.keys()),
                )
                logger.info(
                    f"Located {len(to_delete)} versions of package {package_name} to delete",
                )

                for tag_to_delete in to_delete:
                    package_version_info = packages_tagged_feature[tag_to_delete]
                    if args.delete:
                        logger.info(
                            f"Deleting {tag_to_delete} (id {package_version_info['id']})",
                        )
                        gh_api.delete_package_version(
                            package_name,
                            package_version_info,
                        )
                    else:
                        logger.info(
                            f"Would delete {tag_to_delete} (id {package_version_info['id']})",
                        )

                if args.untagged:
                    logger.info(f"Deleting untagged packages of {package_name}")
                    for to_delete_name in untagged_packages:
                        to_delete_version = untagged_packages[to_delete_name]
                        logger.info(f"Deleting id {to_delete_version['id']}")
                        if args.delete:
                            gh_api.delete_package_version(
                                package_name,
                                to_delete_version,
                            )
                else:
                    logger.info("Leaving untagged images untouched")


if __name__ == "__main__":
    _main()
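The pruning rule above boils down to a set difference between container tags prefixed with feature- and the feature branches that still exist. A minimal sketch of that step, using hypothetical branch and tag names that are not taken from this commit:

# Hypothetical data illustrating the stale-tag selection done in _main() above.
feature_branches = {"feature-ocr-tweaks": {}, "feature-new-ui": {}}
packages_tagged_feature = {
    "feature-ocr-tweaks": {"id": 101},      # branch still exists, tag is kept
    "feature-old-experiment": {"id": 102},  # branch deleted, tag becomes stale
}

to_delete = set(packages_tagged_feature.keys()) - set(feature_branches.keys())
print(sorted(to_delete))  # ['feature-old-experiment']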

View File

@@ -1,4 +1,6 @@
#!/usr/bin/env python3
import logging
from argparse import ArgumentError


def get_image_tag(
@@ -9,7 +11,7 @@ def get_image_tag(
    """
    Returns a string representing the normal image for a given package
    """
    return f"ghcr.io/{repo_name}/builder/{pkg_name}:{pkg_version}"
    return f"ghcr.io/{repo_name.lower()}/builder/{pkg_name}:{pkg_version}"


def get_cache_image_tag(
@@ -24,4 +26,19 @@ def get_cache_image_tag(
    Registry type caching is utilized for the builder images, to allow fast
    rebuilds, generally almost instant for the same version
    """
    return f"ghcr.io/{repo_name}/builder/cache/{pkg_name}:{pkg_version}"
    return f"ghcr.io/{repo_name.lower()}/builder/cache/{pkg_name}:{pkg_version}"


def get_log_level(args) -> int:
    levels = {
        "critical": logging.CRITICAL,
        "error": logging.ERROR,
        "warn": logging.WARNING,
        "warning": logging.WARNING,
        "info": logging.INFO,
        "debug": logging.DEBUG,
    }
    level = levels.get(args.loglevel.lower())
    if level is None:
        level = logging.INFO
    return level
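For context, get_log_level only inspects args.loglevel and falls back to logging.INFO for unrecognized values. A minimal usage sketch, assuming common.py is importable from the working directory and using an illustrative Namespace in place of the scripts' parsed arguments:

# Sketch only: Namespace stands in for the argparse result the scripts build.
import logging
from argparse import Namespace

from common import get_log_level

args = Namespace(loglevel="WARNING")           # case-insensitive, maps to logging.WARNING
logging.basicConfig(level=get_log_level(args))
logging.getLogger("demo").warning("shown")
logging.getLogger("demo").info("suppressed")   # below the configured level

assert get_log_level(Namespace(loglevel="nonsense")) == logging.INFO  # unknown -> INFO fallback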

View File

@@ -50,7 +50,6 @@ def _main():
    # Default output values
    version = None
    git_tag = None
    extra_config = {}

    if args.package in pipfile_data["default"]:
@@ -59,12 +58,6 @@ def _main():
        pkg_version = pkg_data["version"].split("==")[-1]
        version = pkg_version

        # Based on the package, generate the expected Git tag name
        if args.package == "pikepdf":
            git_tag = f"v{pkg_version}"
        elif args.package == "psycopg2":
            git_tag = pkg_version.replace(".", "_")

        # Any extra/special values needed
        if args.package == "pikepdf":
            extra_config["qpdf_version"] = build_json["qpdf"]["version"]
@@ -72,8 +65,6 @@ def _main():
    elif args.package in build_json:
        version = build_json[args.package]["version"]
        if "git_tag" in build_json[args.package]:
            git_tag = build_json[args.package]["git_tag"]
    else:
        raise NotImplementedError(args.package)
@@ -81,7 +72,6 @@ def _main():
    output = {
        "name": args.package,
        "version": version,
        "git_tag": git_tag,
        "image_tag": get_image_tag(repo_name, args.package, version),
        "cache_tag": get_cache_image_tag(
            repo_name,
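With git_tag dropped above, get-build-json.py now emits roughly a name/version/image_tag/cache_tag record per package. A sketch of that shape using the helpers from common.py; the repository string and version number here are illustrative, not values from this commit:

# Standalone sketch: the two helpers mirror common.py so this runs without the repo checkout.
import json


def get_image_tag(repo_name: str, pkg_name: str, pkg_version: str) -> str:
    return f"ghcr.io/{repo_name.lower()}/builder/{pkg_name}:{pkg_version}"


def get_cache_image_tag(repo_name: str, pkg_name: str, pkg_version: str) -> str:
    return f"ghcr.io/{repo_name.lower()}/builder/cache/{pkg_name}:{pkg_version}"


repo_name = "Paperless-NGX/Paperless-NGX"  # mixed case to show why the .lower() fix matters
output = {
    "name": "qpdf",
    "version": "10.6.3",  # illustrative version
    "image_tag": get_image_tag(repo_name, "qpdf", "10.6.3"),
    "cache_tag": get_cache_image_tag(repo_name, "qpdf", "10.6.3"),
}
print(json.dumps(output))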

View File

@@ -26,7 +26,7 @@ jobs:
        run: pipx install pipenv
      -
        name: Set up Python
        uses: actions/setup-python@v3
        uses: actions/setup-python@v4
        with:
          python-version: 3.9
          cache: "pipenv"
@@ -57,17 +57,29 @@ jobs:
    name: Prepare Docker Pipeline Data
    if: github.event_name == 'push' && (startsWith(github.ref, 'refs/heads/feature-') || github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/beta' || contains(github.ref, 'beta.rc') || startsWith(github.ref, 'refs/tags/v'))
    runs-on: ubuntu-20.04
    # If the push triggered the installer library workflow, wait for it to
    # complete here. This ensures the required versions for the final
    # image have been built, while not waiting at all if the versions haven't changed
    concurrency:
      group: build-installer-library
      cancel-in-progress: false
    needs:
      - documentation
      - ci-backend
      - ci-frontend
    steps:
      -
        name: Set ghcr repository name
        id: set-ghcr-repository
        run: |
          ghcr_name=$(echo "${GITHUB_REPOSITORY}" | awk '{ print tolower($0) }')
          echo ::set-output name=repository::${ghcr_name}
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Set up Python
        uses: actions/setup-python@v3
        uses: actions/setup-python@v4
        with:
          python-version: "3.9"
      -
@@ -109,6 +121,8 @@ jobs:
    outputs:
      ghcr-repository: ${{ steps.set-ghcr-repository.outputs.repository }}
      qpdf-json: ${{ steps.qpdf-setup.outputs.qpdf-json }}
      pikepdf-json: ${{ steps.pikepdf-setup.outputs.pikepdf-json }}
@@ -117,55 +131,6 @@ jobs:
      jbig2enc-json: ${{ steps.jbig2enc-setup.outputs.jbig2enc-json}}

  build-qpdf-debs:
    name: qpdf
    needs:
      - prepare-docker-build
    uses: ./.github/workflows/reusable-workflow-builder.yml
    with:
      dockerfile: ./docker-builders/Dockerfile.qpdf
      build-json: ${{ needs.prepare-docker-build.outputs.qpdf-json }}
      build-args: |
        QPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.qpdf-json).version }}

  build-jbig2enc:
    name: jbig2enc
    needs:
      - prepare-docker-build
    uses: ./.github/workflows/reusable-workflow-builder.yml
    with:
      dockerfile: ./docker-builders/Dockerfile.jbig2enc
      build-json: ${{ needs.prepare-docker-build.outputs.jbig2enc-json }}
      build-args: |
        JBIG2ENC_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.jbig2enc-json).version }}

  build-psycopg2-wheel:
    name: psycopg2
    needs:
      - prepare-docker-build
    uses: ./.github/workflows/reusable-workflow-builder.yml
    with:
      dockerfile: ./docker-builders/Dockerfile.psycopg2
      build-json: ${{ needs.prepare-docker-build.outputs.psycopg2-json }}
      build-args: |
        PSYCOPG2_GIT_TAG=${{ fromJSON(needs.prepare-docker-build.outputs.psycopg2-json).git_tag }}
        PSYCOPG2_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.psycopg2-json).version }}

  build-pikepdf-wheel:
    name: pikepdf
    needs:
      - prepare-docker-build
      - build-qpdf-debs
    uses: ./.github/workflows/reusable-workflow-builder.yml
    with:
      dockerfile: ./docker-builders/Dockerfile.pikepdf
      build-json: ${{ needs.prepare-docker-build.outputs.pikepdf-json }}
      build-args: |
        REPO=${{ github.repository }}
        QPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.qpdf-json).version }}
        PIKEPDF_GIT_TAG=${{ fromJSON(needs.prepare-docker-build.outputs.pikepdf-json).git_tag }}
        PIKEPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.pikepdf-json).version }}

  # build and push image to docker hub.
  build-docker-image:
    runs-on: ubuntu-20.04
@@ -174,29 +139,31 @@ jobs:
      cancel-in-progress: true
    needs:
      - prepare-docker-build
      - build-psycopg2-wheel
      - build-jbig2enc
      - build-qpdf-debs
      - build-pikepdf-wheel
    steps:
      -
        name: Check pushing to Docker Hub
        id: docker-hub
        # Only push to Dockerhub from the main repo
        # Only push to Dockerhub from the main repo AND the ref is either:
        # main
        # dev
        # beta
        # a tag
        # Otherwise forks would require a Docker Hub account and secrets setup
        run: |
          if [[ ${{ github.repository }} == "paperless-ngx/paperless-ngx" ]] ; then
          if [[ ${{ needs.prepare-docker-build.outputs.ghcr-repository }} == "paperless-ngx/paperless-ngx" && ( ${{ github.ref_name }} == "main" || ${{ github.ref_name }} == "dev" || ${{ github.ref_name }} == "beta" || ${{ startsWith(github.ref, 'refs/tags/v') }} == "true" ) ]] ; then
            echo "Enabling DockerHub image push"
            echo ::set-output name=enable::"true"
          else
            echo "Not pushing to DockerHub"
            echo ::set-output name=enable::"false"
          fi
      -
        name: Gather Docker metadata
        id: docker-meta
        uses: docker/metadata-action@v3
        uses: docker/metadata-action@v4
        with:
          images: |
            ghcr.io/${{ github.repository }}
            ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}
            name=paperlessngx/paperless-ngx,enable=${{ steps.docker-hub.outputs.enable }}
          tags: |
            # Tag branches with branch name
@@ -210,20 +177,20 @@ jobs:
        uses: actions/checkout@v3
      -
        name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
        uses: docker/setup-buildx-action@v2
      -
        name: Set up QEMU
        uses: docker/setup-qemu-action@v1
        uses: docker/setup-qemu-action@v2
      -
        name: Login to Github Container Registry
        uses: docker/login-action@v1
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      -
        name: Login to Docker Hub
        uses: docker/login-action@v1
        uses: docker/login-action@v2
        # Don't attempt to log in if not pushing to Docker Hub
        if: steps.docker-hub.outputs.enable == 'true'
        with:
@@ -231,7 +198,7 @@ jobs:
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      -
        name: Build and push
        uses: docker/build-push-action@v2
        uses: docker/build-push-action@v3
        with:
          context: .
          file: ./Dockerfile
@@ -247,11 +214,11 @@ jobs:
          # Get cache layers from this branch, then dev, then main
          # This allows new branches to get at least some cache benefits, generally from dev
          cache-from: |
            type=registry,ref=ghcr.io/${{ github.repository }}/builder/cache/app:${{ github.ref_name }}
            type=registry,ref=ghcr.io/${{ github.repository }}/builder/cache/app:dev
            type=registry,ref=ghcr.io/${{ github.repository }}/builder/cache/app:main
            type=registry,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
            type=registry,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:dev
            type=registry,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:main
          cache-to: |
            type=registry,mode=max,ref=ghcr.io/${{ github.repository }}/builder/cache/app:${{ github.ref_name }}
            type=registry,mode=max,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
      -
        name: Inspect image
        run: |
@@ -278,7 +245,7 @@ jobs:
        uses: actions/checkout@v3
      -
        name: Set up Python
        uses: actions/setup-python@v3
        uses: actions/setup-python@v4
        with:
          python-version: 3.9
      -
@@ -338,6 +305,10 @@ jobs:
  publish-release:
    runs-on: ubuntu-20.04
    outputs:
      prerelease: ${{ steps.get_version.outputs.prerelease }}
      changelog: ${{ steps.create-release.outputs.body }}
      version: ${{ steps.get_version.outputs.version }}
    needs:
      - build-release
    if: github.ref_type == 'tag' && (startsWith(github.ref_name, 'v') || contains(github.ref_name, '-beta.rc'))
@@ -381,6 +352,13 @@ jobs:
          asset_path: ./paperless-ngx.tar.xz
          asset_name: paperless-ngx-${{ steps.get_version.outputs.version }}.tar.xz
          asset_content_type: application/x-xz

  append-changelog:
    runs-on: ubuntu-20.04
    needs:
      - publish-release
    if: needs.publish-release.outputs.prerelease == 'false'
    steps:
      -
        name: Checkout
        uses: actions/checkout@v3
@@ -391,11 +369,33 @@ jobs:
        id: append-Changelog
        working-directory: docs
        run: |
          echo -e "# Changelog\n\n${{ steps.create-release.outputs.body }}\n" > changelog-new.md
          git branch ${{ needs.publish-release.outputs.version }}-changelog
          git checkout ${{ needs.publish-release.outputs.version }}-changelog
          echo -e "# Changelog\n\n${{ needs.publish-release.outputs.changelog }}\n" > changelog-new.md
          CURRENT_CHANGELOG=`tail --lines +2 changelog.md`
          echo -e "$CURRENT_CHANGELOG" >> changelog-new.md
          mv changelog-new.md changelog.md
          git config --global user.name "github-actions"
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git commit -am "Changelog ${{ steps.get_version.outputs.version }} - GHA"
          git push origin HEAD:main
          git push origin ${{ needs.publish-release.outputs.version }}-changelog
      -
        name: Create Pull Request
        uses: actions/github-script@v6
        with:
          script: |
            const { repo, owner } = context.repo;
            const result = await github.rest.pulls.create({
              title: '[Documentation] Add ${{ needs.publish-release.outputs.version }} changelog',
              owner,
              repo,
              head: '${{ needs.publish-release.outputs.version }}-changelog',
              base: 'main',
              body: 'This PR is auto-generated by CI.'
            });
            github.rest.issues.addLabels({
              owner,
              repo,
              issue_number: result.data.number,
              labels: ['documentation']
            });

.github/workflows/cleanup-tags.yml (new file, 48 additions)

@@ -0,0 +1,48 @@
name: Cleanup Image Tags

on:
  schedule:
    - cron: '0 0 * * SAT'
  delete:
  pull_request:
    types:
      - closed
  push:
    paths:
      - ".github/workflows/cleanup-tags.yml"
      - ".github/scripts/cleanup-tags.py"
      - ".github/scripts/common.py"

env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

jobs:
  cleanup:
    name: Cleanup Image Tags
    runs-on: ubuntu-20.04
    permissions:
      packages: write
    steps:
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Login to Github Container Registry
        uses: docker/login-action@v1
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      -
        name: Set up Python
        uses: actions/setup-python@v3
        with:
          python-version: "3.9"
      -
        name: Install requests
        run: |
          python -m pip install requests
      -
        name: Cleanup feature tags
        run: |
          python ${GITHUB_WORKSPACE}/.github/scripts/cleanup-tags.py --loglevel info --delete

.github/workflows/installer-library.yml (new file, 147 additions)

@@ -0,0 +1,147 @@
# This workflow will run to update the installer library of
# Docker images. These are the images which provide updated wheels
# .deb installation packages or maybe just some compiled library
name: Build Image Library
on:
push:
# Must match one of these branches AND one of the paths
# to be triggered
branches:
- "main"
- "dev"
- "library-*"
- "feature-*"
paths:
# Trigger the workflow if a Dockerfile changed
- "docker-builders/**"
# Trigger if a package was updated
- ".build-config.json"
- "Pipfile.lock"
# Also trigger on workflow changes related to the library
- ".github/workflows/installer-library.yml"
- ".github/workflows/reusable-workflow-builder.yml"
- ".github/scripts/**"
# Set a workflow level concurrency group so primary workflow
# can wait for this to complete if needed
# DO NOT CHANGE without updating main workflow group
concurrency:
group: build-installer-library
cancel-in-progress: false
jobs:
prepare-docker-build:
name: Prepare Docker Image Version Data
runs-on: ubuntu-20.04
steps:
-
name: Set ghcr repository name
id: set-ghcr-repository
run: |
ghcr_name=$(echo "${GITHUB_REPOSITORY}" | awk '{ print tolower($0) }')
echo ::set-output name=repository::${ghcr_name}
-
name: Checkout
uses: actions/checkout@v3
-
name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.9"
-
name: Setup qpdf image
id: qpdf-setup
run: |
build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py qpdf)
echo ${build_json}
echo ::set-output name=qpdf-json::${build_json}
-
name: Setup psycopg2 image
id: psycopg2-setup
run: |
build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py psycopg2)
echo ${build_json}
echo ::set-output name=psycopg2-json::${build_json}
-
name: Setup pikepdf image
id: pikepdf-setup
run: |
build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py pikepdf)
echo ${build_json}
echo ::set-output name=pikepdf-json::${build_json}
-
name: Setup jbig2enc image
id: jbig2enc-setup
run: |
build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py jbig2enc)
echo ${build_json}
echo ::set-output name=jbig2enc-json::${build_json}
outputs:
ghcr-repository: ${{ steps.set-ghcr-repository.outputs.repository }}
qpdf-json: ${{ steps.qpdf-setup.outputs.qpdf-json }}
pikepdf-json: ${{ steps.pikepdf-setup.outputs.pikepdf-json }}
psycopg2-json: ${{ steps.psycopg2-setup.outputs.psycopg2-json }}
jbig2enc-json: ${{ steps.jbig2enc-setup.outputs.jbig2enc-json}}
build-qpdf-debs:
name: qpdf
needs:
- prepare-docker-build
uses: ./.github/workflows/reusable-workflow-builder.yml
with:
dockerfile: ./docker-builders/Dockerfile.qpdf
build-json: ${{ needs.prepare-docker-build.outputs.qpdf-json }}
build-args: |
QPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.qpdf-json).version }}
build-jbig2enc:
name: jbig2enc
needs:
- prepare-docker-build
uses: ./.github/workflows/reusable-workflow-builder.yml
with:
dockerfile: ./docker-builders/Dockerfile.jbig2enc
build-json: ${{ needs.prepare-docker-build.outputs.jbig2enc-json }}
build-args: |
JBIG2ENC_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.jbig2enc-json).version }}
build-psycopg2-wheel:
name: psycopg2
needs:
- prepare-docker-build
uses: ./.github/workflows/reusable-workflow-builder.yml
with:
dockerfile: ./docker-builders/Dockerfile.psycopg2
build-json: ${{ needs.prepare-docker-build.outputs.psycopg2-json }}
build-args: |
PSYCOPG2_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.psycopg2-json).version }}
build-pikepdf-wheel:
name: pikepdf
needs:
- prepare-docker-build
- build-qpdf-debs
uses: ./.github/workflows/reusable-workflow-builder.yml
with:
dockerfile: ./docker-builders/Dockerfile.pikepdf
build-json: ${{ needs.prepare-docker-build.outputs.pikepdf-json }}
build-args: |
REPO=${{ needs.prepare-docker-build.outputs.ghcr-repository }}
QPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.qpdf-json).version }}
PIKEPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.pikepdf-json).version }}

View File

@@ -65,7 +65,7 @@ jobs:
        run: pipx install pipenv
      -
        name: Set up Python
        uses: actions/setup-python@v3
        uses: actions/setup-python@v4
        with:
          python-version: "${{ matrix.python-version }}"
          cache: "pipenv"
@@ -74,7 +74,7 @@ jobs:
        name: Install system dependencies
        run: |
          sudo apt-get update -qq
          sudo apt-get install -qq --no-install-recommends unpaper tesseract-ocr imagemagick ghostscript optipng libzbar0 poppler-utils
          sudo apt-get install -qq --no-install-recommends unpaper tesseract-ocr imagemagick ghostscript libzbar0 poppler-utils
      -
        name: Install Python dependencies
        run: |
@@ -87,7 +87,7 @@ jobs:
      -
        name: Get changed files
        id: changed-files-specific
        uses: tj-actions/changed-files@v19
        uses: tj-actions/changed-files@v23.1
        with:
          files: |
            src/**
@@ -106,3 +106,24 @@ jobs:
        run: |
          cd src/
          pipenv run coveralls --service=github

  dockerfile-lint:
    name: "Lint ${{ matrix.dockerfile }}"
    runs-on: ubuntu-20.04
    strategy:
      matrix:
        dockerfile:
          - Dockerfile
          - docker-builders/Dockerfile.qpdf
          - docker-builders/Dockerfile.jbig2enc
          - docker-builders/Dockerfile.psycopg2
          - docker-builders/Dockerfile.pikepdf
      fail-fast: false
    steps:
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        uses: hadolint/hadolint-action@v2.1.0
        with:
          dockerfile: ${{ matrix.dockerfile }}

View File

@@ -28,20 +28,20 @@ jobs:
        uses: actions/checkout@v3
      -
        name: Login to Github Container Registry
        uses: docker/login-action@v1
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      -
        name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
        uses: docker/setup-buildx-action@v2
      -
        name: Set up QEMU
        uses: docker/setup-qemu-action@v1
        uses: docker/setup-qemu-action@v2
      -
        name: Build ${{ fromJSON(inputs.build-json).name }}
        uses: docker/build-push-action@v2
        uses: docker/build-push-action@v3
        with:
          context: .
          file: ${{ inputs.dockerfile }}