Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2025-11-23 23:49:08 -06:00)

Compare commits: feature-st...feature-ai (6 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 886218b123 |  |
|  | 3dcbdd7391 |  |
|  | 6f8103f237 |  |
|  | a435a577aa |  |
|  | 51cb822382 |  |
|  | 10bb9baceb |  |
.github/workflows/ci.yml (vendored, 73 changed lines)
@@ -17,7 +17,52 @@ env:
   DEFAULT_PYTHON_VERSION: "3.11"
   NLTK_DATA: "/usr/share/nltk_data"
 jobs:
+  detect-duplicate:
+    name: Detect Duplicate Run
+    runs-on: ubuntu-24.04
+    outputs:
+      should_run: ${{ steps.check.outputs.should_run }}
+    steps:
+      - name: Check if workflow should run
+        id: check
+        uses: actions/github-script@v8
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          script: |
+            if (context.eventName !== 'push') {
+              core.info('Not a push event; running workflow.');
+              core.setOutput('should_run', 'true');
+              return;
+            }
+
+            const ref = context.ref || '';
+            if (!ref.startsWith('refs/heads/')) {
+              core.info('Push is not to a branch; running workflow.');
+              core.setOutput('should_run', 'true');
+              return;
+            }
+
+            const branch = ref.substring('refs/heads/'.length);
+            const { owner, repo } = context.repo;
+            const prs = await github.paginate(github.rest.pulls.list, {
+              owner,
+              repo,
+              state: 'open',
+              head: `${owner}:${branch}`,
+              per_page: 100,
+            });
+
+            if (prs.length === 0) {
+              core.info(`No open PR found for ${branch}; running workflow.`);
+              core.setOutput('should_run', 'true');
+            } else {
+              core.info(`Found ${prs.length} open PR(s) for ${branch}; skipping duplicate push run.`);
+              core.setOutput('should_run', 'false');
+            }
   pre-commit:
+    needs:
+      - detect-duplicate
+    if: needs.detect-duplicate.outputs.should_run == 'true'
     name: Linting Checks
     runs-on: ubuntu-24.04
     steps:
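For reference, the check performed by the `github-script` step above can be reproduced outside Actions against the public `GET /repos/{owner}/{repo}/pulls` endpoint. The sketch below is a minimal, hypothetical equivalent; the repository slug, branch name, and optional `GITHUB_TOKEN` environment variable are illustrative assumptions, not part of the workflow itself.

```python
"""Minimal sketch: does a branch have an open PR? (mirrors the detect-duplicate step)."""
import os
import sys

import requests  # assumed available; any HTTP client works

OWNER, REPO = "paperless-ngx", "paperless-ngx"          # example slug
BRANCH = sys.argv[1] if len(sys.argv) > 1 else "feature-ai"  # example branch

headers = {"Accept": "application/vnd.github+json"}
# A token is optional for public repos but avoids low unauthenticated rate limits.
if "GITHUB_TOKEN" in os.environ:
    headers["Authorization"] = f"Bearer {os.environ['GITHUB_TOKEN']}"

resp = requests.get(
    f"https://api.github.com/repos/{OWNER}/{REPO}/pulls",
    params={"state": "open", "head": f"{OWNER}:{BRANCH}", "per_page": 100},
    headers=headers,
    timeout=30,
)
resp.raise_for_status()
prs = resp.json()

# Same decision the workflow step encodes in its `should_run` output:
# skip the push-triggered run when an open PR already covers the branch.
should_run = len(prs) == 0
print(f"should_run={'true' if should_run else 'false'} ({len(prs)} open PR(s) for {BRANCH})")
```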
@@ -314,6 +359,10 @@ jobs:
     concurrency:
       group: ${{ github.workflow }}-build-docker-image-${{ github.ref_name }}
       cancel-in-progress: true
+    needs:
+      - tests-backend
+      - tests-frontend
+      - tests-frontend-e2e
     steps:
       - name: Prepare build variables
         id: build-vars
@@ -406,24 +455,6 @@ jobs:
           registry: quay.io
           username: ${{ secrets.QUAY_USERNAME }}
           password: ${{ secrets.QUAY_ROBOT_TOKEN }}
-      - name: Inspect cache/disk
-        if: always()
-        run: |
-          df -h
-          docker system df -v
-          docker buildx du
-      - name: Maximize space
-        run: |
-          sudo rm -rf /usr/share/dotnet
-          sudo rm -rf /opt/ghc
-          sudo rm -rf /usr/local/share/boost
-          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
-      - name: Inspect cache/disk
-        if: always()
-        run: |
-          df -h
-          docker system df -v
-          docker buildx du
       - name: Build and push
         uses: docker/build-push-action@v6
         with:
@@ -445,12 +476,6 @@ jobs:
         if: steps.build-vars.outputs.can-push == 'true'
         run: |
           docker buildx imagetools inspect ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
-      - name: Inspect cache/disk
-        if: always()
-        run: |
-          df -h
-          docker system df -v
-          docker buildx du
       - name: Export frontend artifact from docker
         if: steps.build-vars.outputs.can-push == 'true'
         run: |
Dockerfile

@@ -193,13 +193,15 @@ ARG BUILD_PACKAGES="\
   pkg-config"

 # hadolint ignore=DL3042
-RUN set -eux \
+RUN --mount=type=cache,target=${UV_CACHE_DIR},id=python-cache \
+  set -eux \
   && echo "Installing build system packages" \
   && apt-get update \
   && apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
   && echo "Installing Python requirements" \
   && uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt \
-  && uv pip install --system --no-python-downloads --python-preference system --requirements requirements.txt \
+  && UV_INDEX="https://pypi.org/simple https://download.pytorch.org/whl/cpu" UV_INDEX_STRATEGY=unsafe-best-match \
+    uv pip install --system --no-python-downloads --python-preference system --requirements requirements.txt \
   && echo "Installing NLTK data" \
   && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" snowball_data \
   && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" stopwords \
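One way to confirm that the extra PyTorch CPU index actually resolved a `+cpu` wheel inside the built image is to interrogate torch itself. A minimal sketch, intended to be run inside the container; the image name and script path are placeholders, and it assumes torch is importable:

```python
# Sanity check, e.g. `docker run --rm <image> python3 check_torch_cpu.py`
# (<image> and the script name are placeholders, not part of the diff above).
import torch

print("torch version:", torch.__version__)         # expected to end with "+cpu"
print("built with CUDA:", torch.version.cuda)       # None for CPU-only wheels
print("cuda available:", torch.cuda.is_available())

assert torch.__version__.endswith("+cpu"), "a GPU wheel was pulled in despite the CPU index"
assert not torch.cuda.is_available()
```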
pyproject.toml

@@ -75,6 +75,7 @@ dependencies = [
   "sentence-transformers>=4.1",
   "setproctitle~=1.3.4",
   "tika-client~=0.10.0",
+  "torch==2.7.0+cpu; sys_platform=='linux'",
   "tqdm~=4.67.1",
   "watchdog~=6.0",
   "whitenoise~=6.9",
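The new dependency string combines a pinned local version (`+cpu`) with an environment marker, so the pin only applies on Linux. A small sketch with the `packaging` library (an assumption for illustration, not something the diff itself uses) shows how such a string is parsed and how its marker is evaluated:

```python
from packaging.requirements import Requirement

req = Requirement("torch==2.7.0+cpu; sys_platform=='linux'")

print(req.name)       # torch
print(req.specifier)  # ==2.7.0+cpu
print(req.marker)     # sys_platform == "linux"

# Resolvers evaluate the marker against the target environment; on macOS or
# Windows it is False, so the pin simply does not apply there.
print(req.marker.evaluate({"sys_platform": "linux"}))   # True
print(req.marker.evaluate({"sys_platform": "darwin"}))  # False
```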
@@ -172,8 +173,14 @@ psycopg-c = [
 ]
 tiktoken = { index = "pytorch-cpu" }
 tokenizers = { index = "pytorch-cpu" }
-torch = { index = "pytorch-cpu" }
-torchvision = { index = "pytorch-cpu" }
+torch = [
+  { index = "pytorch-cpu", marker = "sys_platform == 'linux'" },
+  { index = "pypi", marker = "sys_platform != 'linux'" },
+]
+torchvision = [
+  { index = "pytorch-cpu", marker = "sys_platform == 'linux'" },
+  { index = "pypi", marker = "sys_platform != 'linux'" },
+]
 zxing-cpp = [
   { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
   { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_aarch64.whl", marker = "sys_platform == 'linux' and platform_machine == 'aarch64' and python_version == '3.12'" },
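The two source entries per package are meant to be mutually exclusive: every environment matches exactly one index, so resolution stays unambiguous. A short sketch (again leaning on `packaging`, an assumption for illustration) makes the split explicit:

```python
from packaging.markers import Marker

linux_only = Marker("sys_platform == 'linux'")        # routed to the pytorch-cpu index
everything_else = Marker("sys_platform != 'linux'")   # routed to the default PyPI index

for platform in ("linux", "darwin", "win32"):
    env = {"sys_platform": platform}
    picks = [
        name
        for name, marker in (("pytorch-cpu", linux_only), ("pypi", everything_else))
        if marker.evaluate(env)
    ]
    # Exactly one source matches each platform.
    print(platform, "->", picks)
```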