Compare commits (mirror of https://github.com/paperless-ngx/paperless-ngx.git, synced 2025-04-11 10:00:48 -05:00)

No commits in common: "dev" and "v2.7.2" have entirely different histories, so this compare lists the full differences between the two refs, file by file. In the hunks below, lines prefixed with "-" come from dev and lines prefixed with "+" come from v2.7.2; files whose hunk header ends in "+0,0" exist only on the dev side and their content is shown once, without prefixes.
.codecov.yml (30 changed lines)

@@ -1,22 +1,19 @@
 codecov:
   require_ci_to_pass: true
-# https://docs.codecov.com/docs/components
-component_management:
-  individual_components:
-    - component_id: backend
+# https://docs.codecov.com/docs/flags#recommended-automatic-flag-management
+# Require each flag to have 1 upload before notification
+flag_management:
+  individual_flags:
+    - name: backend
       paths:
-        - src/**
-    - component_id: frontend
+        - src/
+    - name: frontend
       paths:
-        - src-ui/**
+        - src-ui/
 # https://docs.codecov.com/docs/pull-request-comments
 # codecov will only comment if coverage changes
 comment:
-  layout: "header, diff, components, flags, files"
   require_changes: true
-  # https://docs.codecov.com/docs/javascript-bundle-analysis
-  require_bundle_changes: true
-  bundle_change_threshold: "50Kb"
 coverage:
   status:
     project:
@@ -25,12 +22,7 @@ coverage:
       threshold: 1%
   patch:
     default:
-      # For the changed lines only, target 100% covered, but
-      # allow as low as 75%
-      target: 100%
+      # For the changed lines only, target 75% covered, but
+      # allow as low as 50%
+      target: 75%
       threshold: 25%
-# https://docs.codecov.com/docs/javascript-bundle-analysis
-bundle_analysis:
-  # Fail if the bundle size increases by more than 1MB
-  warning_threshold: "1MB"
-  status: true
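The settings above only describe how Codecov groups and reports coverage; the coverage data itself comes from uploads made in CI. The sketch below is a hedged illustration of that link, not the repository's actual workflow: the job and step layout are assumed for the example, and CODECOV_TOKEN is assumed to be configured as a repository secret. The real upload steps are in .github/workflows/ci.yml, which is diffed further down in this compare.

# Minimal sketch only: how a CI job attaches a flag to a Codecov upload.
jobs:
  tests-backend:
    runs-on: ubuntu-24.04
    steps:
      - uses: actions/checkout@v4
      # ... test steps that produce coverage.xml would go here ...
      -
        name: Upload backend coverage to Codecov
        uses: codecov/codecov-action@v5
        with:
          token: ${{ secrets.CODECOV_TOKEN }}  # assumed repository secret
          flags: backend-python-3.11           # illustrative; dev's workflow templates the Python version
          files: coverage.xml

With uploads like this, the dev-side component_management section reports src/** (backend) and src-ui/** (frontend) coverage separately, while the v2.7.2-side flag_management groups reports by flag name.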
Codespell configuration

@@ -1,3 +1,3 @@
 [codespell]
 write-changes = True
-ignore-words-list = criterias,afterall,valeu,ureue,equest,ure,assertIn
+ignore-words-list = criterias,afterall,valeu,ureue,equest,ure
.devcontainer Dockerfile (dev only; the compose file below builds it as ./.devcontainer/Dockerfile)

@@ -1,175 +0,0 @@

# syntax=docker/dockerfile:1

FROM --platform=$BUILDPLATFORM docker.io/node:20-bookworm-slim as main-app

ARG DEBIAN_FRONTEND=noninteractive

# Buildx provided, must be defined to use though
ARG TARGETARCH

# Can be workflow provided, defaults set for manual building
ARG JBIG2ENC_VERSION=0.29
ARG QPDF_VERSION=11.9.0
ARG GS_VERSION=10.03.1

# Set Python environment variables
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    # Ignore warning from Whitenoise
    PYTHONWARNINGS="ignore:::django.http.response:517" \
    PNGX_CONTAINERIZED=1

#
# Begin installation and configuration
# Order the steps below from least often changed to most
#

# Packages needed for running
ARG RUNTIME_PACKAGES="\
  # General utils
  curl \
  # Docker specific
  gosu \
  # Timezones support
  tzdata \
  # fonts for text file thumbnail generation
  fonts-liberation \
  gettext \
  ghostscript \
  gnupg \
  icc-profiles-free \
  imagemagick \
  # PostgreSQL
  postgresql-client \
  # MySQL / MariaDB
  mariadb-client \
  # OCRmyPDF dependencies
  tesseract-ocr \
  tesseract-ocr-eng \
  tesseract-ocr-deu \
  tesseract-ocr-fra \
  tesseract-ocr-ita \
  tesseract-ocr-spa \
  unpaper \
  pngquant \
  jbig2dec \
  # lxml
  libxml2 \
  libxslt1.1 \
  # itself
  qpdf \
  # Mime type detection
  file \
  libmagic1 \
  media-types \
  zlib1g \
  # Barcode splitter
  libzbar0 \
  poppler-utils \
  htop \
  sudo"

# Install basic runtime packages.
# These change very infrequently
RUN set -eux \
  && echo "Installing system packages" \
  && apt-get update \
  && apt-get install --yes --quiet --no-install-recommends ${RUNTIME_PACKAGES}

ARG PYTHON_PACKAGES="ca-certificates"

RUN set -eux \
  && echo "Installing python packages" \
  && apt-get update \
  && apt-get install --yes --quiet ${PYTHON_PACKAGES}

COPY --from=ghcr.io/astral-sh/uv:0.6 /uv /bin/uv

RUN set -eux \
  && echo "Installing pre-built updates" \
  && echo "Installing qpdf ${QPDF_VERSION}" \
  && curl --fail --silent --show-error --location \
    --output libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
    https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
  && curl --fail --silent --show-error --location \
    --output qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
    https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
  && dpkg --install ./libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
  && dpkg --install ./qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
  && echo "Installing Ghostscript ${GS_VERSION}" \
  && curl --fail --silent --show-error --location \
    --output libgs10_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
    https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
  && curl --fail --silent --show-error --location \
    --output ghostscript_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
    https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/ghostscript_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
  && curl --fail --silent --show-error --location \
    --output libgs10-common_${GS_VERSION}.dfsg-1_all.deb \
    https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10-common_${GS_VERSION}.dfsg-1_all.deb \
  && dpkg --install ./libgs10-common_${GS_VERSION}.dfsg-1_all.deb \
  && dpkg --install ./libgs10_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
  && dpkg --install ./ghostscript_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
  && echo "Installing jbig2enc" \
  && curl --fail --silent --show-error --location \
    --output jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
    https://github.com/paperless-ngx/builder/releases/download/jbig2enc-${JBIG2ENC_VERSION}/jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
  && dpkg --install ./jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb

# setup docker-specific things
# These change sometimes, but rarely
WORKDIR /usr/src/paperless/src/docker/

COPY [ \
  "docker/rootfs/etc/ImageMagick-6/paperless-policy.xml", \
  "./" \
]

RUN set -eux \
  && echo "Configuring ImageMagick" \
  && mv paperless-policy.xml /etc/ImageMagick-6/policy.xml

COPY --from=ghcr.io/astral-sh/uv:0.6 /uv /bin/uv

# Packages needed only for building a few quick Python
# dependencies
ARG BUILD_PACKAGES="\
  build-essential \
  git \
  # https://www.psycopg.org/docs/install.html#prerequisites
  libpq-dev \
  # https://github.com/PyMySQL/mysqlclient#linux
  default-libmysqlclient-dev \
  pkg-config"

# hadolint ignore=DL3042
RUN --mount=type=cache,target=/root/.cache/uv,id=pip-cache \
  set -eux \
  && echo "Installing build system packages" \
  && apt-get update \
  && apt-get install --yes --quiet ${BUILD_PACKAGES}

RUN set -eux \
  && npm update -g pnpm

# add users, setup scripts
# Mount the compiled frontend to expected location
RUN set -eux \
  && echo "Setting up user/group" \
  && groupmod --new-name paperless node \
  && usermod --login paperless --home /usr/src/paperless node \
  && usermod -s /bin/bash paperless \
  && echo "paperless ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers \
  && echo "Creating volume directories" \
  && mkdir --parents --verbose /usr/src/paperless/paperless-ngx/data \
  && mkdir --parents --verbose /usr/src/paperless/paperless-ngx/media \
  && mkdir --parents --verbose /usr/src/paperless/paperless-ngx/consume \
  && mkdir --parents --verbose /usr/src/paperless/paperless-ngx/export \
  && mkdir --parents --verbose /usr/src/paperless/paperless-ngx/.venv \
  && echo "Adjusting all permissions" \
  && chown --from root:root --changes --recursive paperless:paperless /usr/src/paperless

VOLUME ["/usr/src/paperless/paperless-ngx/data", \
        "/usr/src/paperless/paperless-ngx/media", \
        "/usr/src/paperless/paperless-ngx/consume", \
        "/usr/src/paperless/paperless-ngx/export", \
        "/usr/src/paperless/paperless-ngx/.venv"]
.devcontainer README (dev only)

@@ -1,117 +0,0 @@

# Paperless-ngx Development Environment

## Overview

Welcome to the Paperless-ngx development environment! This setup uses VSCode DevContainers to provide a consistent and seamless development experience.

### What are DevContainers?

DevContainers are a feature in VSCode that allows you to develop within a Docker container. This ensures that your development environment is consistent across different machines and setups. By defining a containerized environment, you can eliminate the "works on my machine" problem.

### Advantages of DevContainers

- **Consistency**: Same environment for all developers.
- **Isolation**: Separate development environment from your local machine.
- **Reproducibility**: Easily recreate the environment on any machine.
- **Pre-configured Tools**: Include all necessary tools and dependencies in the container.

## DevContainer Setup

The DevContainer configuration provides all the necessary services for Paperless-ngx, including:

- Redis
- Gotenberg
- Tika

Data is stored using Docker volumes to ensure persistence across container restarts.

## Configuration Files

The setup includes debugging configurations (`launch.json`) and tasks (`tasks.json`) to help you manage and debug various parts of the project:

- **Backend Debugging:**
  - `manage.py runserver`
  - `manage.py document-consumer`
  - `celery`
- **Maintenance Tasks:**
  - Create superuser
  - Run migrations
  - Recreate virtual environment (`.venv` with `uv`)
  - Compile frontend assets

## Getting Started

### Step 1: Running the DevContainer

To start the DevContainer:

1. Open VSCode.
2. Open the project folder.
3. Open the command palette:
   - **Windows/Linux**: `Ctrl+Shift+P`
   - **Mac**: `Cmd+Shift+P`
4. Type and select `Dev Containers: Rebuild and Reopen in Container`.

VSCode will build and start the DevContainer environment.

### Step 2: Initial Setup

Once the DevContainer is up and running, perform the following steps:

1. **Compile Frontend Assets**:
   - Open the command palette:
     - **Windows/Linux**: `Ctrl+Shift+P`
     - **Mac**: `Cmd+Shift+P`
   - Select `Tasks: Run Task`.
   - Choose `Frontend Compile`.

2. **Run Database Migrations**:
   - Open the command palette:
     - **Windows/Linux**: `Ctrl+Shift+P`
     - **Mac**: `Cmd+Shift+P`
   - Select `Tasks: Run Task`.
   - Choose `Migrate Database`.

3. **Create Superuser**:
   - Open the command palette:
     - **Windows/Linux**: `Ctrl+Shift+P`
     - **Mac**: `Cmd+Shift+P`
   - Select `Tasks: Run Task`.
   - Choose `Create Superuser`.

### Debugging and Running Services

You can start and debug backend services either as debugging sessions via `launch.json` or as tasks.

#### Using `launch.json`

1. Press `F5` or go to the **Run and Debug** view in VSCode.
2. Select the desired configuration:
   - `Runserver`
   - `Document Consumer`
   - `Celery`

#### Using Tasks

1. Open the command palette:
   - **Windows/Linux**: `Ctrl+Shift+P`
   - **Mac**: `Cmd+Shift+P`
2. Select `Tasks: Run Task`.
3. Choose the desired task:
   - `Runserver`
   - `Document Consumer`
   - `Celery`

### Additional Maintenance Tasks

Additional tasks are available for common maintenance operations:

- **Recreate .venv**: For setting up the virtual environment using `uv`.
- **Migrate Database**: To apply database migrations.
- **Create Superuser**: To create an admin user for the application.

## Let's Get Started!

Follow the steps above to get your development environment up and running. Happy coding!
devcontainer.json (dev only)

@@ -1,28 +0,0 @@

{
  "name": "Paperless Development",
  "dockerComposeFile": "docker-compose.devcontainer.sqlite-tika.yml",
  "service": "paperless-development",
  "workspaceFolder": "/usr/src/paperless/paperless-ngx",
  "postCreateCommand": "/bin/bash -c 'uv sync --group dev && uv run pre-commit install'",
  "customizations": {
    "vscode": {
      "extensions": [
        "mhutchie.git-graph",
        "ms-python.python",
        "ms-vscode.js-debug-nightly",
        "eamodio.gitlens",
        "yzhang.markdown-all-in-one"
      ],
      "settings": {
        "python.defaultInterpreterPath": "/usr/src/paperless/paperless-ngx/.venv/bin/python",
        "python.pythonPath": "/usr/src/paperless/paperless-ngx/.venv/bin/python",
        "python.terminal.activateEnvInCurrentTerminal": true,
        "editor.formatOnPaste": false,
        "editor.formatOnSave": true,
        "editor.formatOnType": true,
        "files.trimTrailingWhitespace": true
      }
    }
  },
  "remoteUser": "paperless"
}
docker-compose.devcontainer.sqlite-tika.yml (dev only)

@@ -1,86 +0,0 @@

# Docker Compose file for developing Paperless NGX in VSCode DevContainers.
# This file contains everything Paperless NGX needs to run.
# Paperless supports amd64, arm, and arm64 hardware.
# All compose files of Paperless configure it in the following way:
#
# - Paperless is (re)started on system boot if it was running before shutdown.
# - Docker volumes for storing data are managed by Docker.
# - Folders for importing and exporting files are created in the same directory
#   as this file and mounted to the correct folders inside the container.
# - Paperless listens on port 8000.
#
# SQLite is used as the database. The SQLite file is stored in the data volume.
#
# In addition, this Docker Compose file adds the following optional
# configurations:
#
# - Apache Tika and Gotenberg servers are started with Paperless NGX and Paperless
#   is configured to use these services. These provide support for consuming
#   Office documents (Word, Excel, PowerPoint, and their LibreOffice counterparts).
#
# This file is intended only to be used through VSCode devcontainers. See README.md
# in the folder .devcontainer.

services:
  broker:
    image: docker.io/library/redis:7
    restart: unless-stopped
    volumes:
      - ./redisdata:/data

  # No ports need to be exposed; the VSCode DevContainer plugin manages them.
  paperless-development:
    image: paperless-ngx
    build:
      context: ../ # Dockerfile cannot access files from parent directories if context is not set.
      dockerfile: ./.devcontainer/Dockerfile
    restart: unless-stopped
    depends_on:
      - broker
      - gotenberg
      - tika
    volumes:
      - ..:/usr/src/paperless/paperless-ngx:delegated
      - ../.devcontainer/vscode:/usr/src/paperless/paperless-ngx/.vscode:delegated # VSCode config files
      - virtualenv:/usr/src/paperless/paperless-ngx/.venv # Virtual environment persisted in volume
      - /usr/src/paperless/paperless-ngx/src/documents/static/frontend # Static frontend files exist only in container
      - /usr/src/paperless/paperless-ngx/src/.pytest_cache
      - /usr/src/paperless/paperless-ngx/.ruff_cache
      - /usr/src/paperless/paperless-ngx/htmlcov
      - /usr/src/paperless/paperless-ngx/.coverage
      - ./data:/usr/src/paperless/paperless-ngx/data
      - ./media:/usr/src/paperless/paperless-ngx/media
      - ./consume:/usr/src/paperless/paperless-ngx/consume
      - ~/.gitconfig:/usr/src/paperless/.gitconfig:ro
    environment:
      PAPERLESS_REDIS: redis://broker:6379
      PAPERLESS_TIKA_ENABLED: 1
      PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
      PAPERLESS_TIKA_ENDPOINT: http://tika:9998
      PAPERLESS_STATICDIR: ./src/documents/static
      PAPERLESS_DEBUG: true

    # Overrides default command so things don't shut down after the process ends.
    command: /bin/sh -c "chown -R paperless:paperless /usr/src/paperless/paperless-ngx/src/documents/static/frontend && chown -R paperless:paperless /usr/src/paperless/paperless-ngx/.ruff_cache && while sleep 1000; do :; done"

  gotenberg:
    image: docker.io/gotenberg/gotenberg:8.17
    restart: unless-stopped

    # The Gotenberg Chromium route is used to convert .eml files. We do not
    # want to allow external content like tracking pixels or even JavaScript.
    command:
      - "gotenberg"
      - "--chromium-disable-javascript=true"
      - "--chromium-allow-list=file:///tmp/.*"

  tika:
    image: docker.io/apache/tika:latest
    restart: unless-stopped

volumes:
  data:
  media:
  redisdata:
  virtualenv:
VSCode launch.json for the devcontainer (dev only)

@@ -1,58 +0,0 @@

{
  "version": "0.2.0",
  "configurations": [
    {
      "name": "Chrome: Debug Angular Frontend",
      "description": "Debug the Angular Dev Frontend in Chrome",
      "type": "chrome",
      "request": "launch",
      "url": "http://localhost:4200",
      "webRoot": "${workspaceFolder}/src-ui",
      "preLaunchTask": "Start: Frontend Angular"
    },
    {
      "name": "Debug: Backend Server (manage.py runserver)",
      "description": "Debug the Django Backend Server",
      "type": "python",
      "request": "launch",
      "program": "${workspaceFolder}/src/manage.py",
      "args": [
        "runserver"
      ],
      "django": true,
      "console": "integratedTerminal",
      "env": {
        "PYTHONPATH": "${workspaceFolder}/src"
      },
      "python": "${workspaceFolder}/.venv/bin/python"
    },
    {
      "name": "Debug: Consumer Service (manage.py document_consumer)",
      "description": "Debug the Consumer Service which processes files from a directory",
      "type": "python",
      "request": "launch",
      "program": "${workspaceFolder}/src/manage.py",
      "args": [
        "document_consumer"
      ],
      "django": true,
      "console": "integratedTerminal",
      "env": {
        "PYTHONPATH": "${workspaceFolder}/src"
      },
      "python": "${workspaceFolder}/.venv/bin/python"
    }
  ],
  "compounds": [
    {
      "name": "Debug: FullStack",
      "description": "Debug run the Angular dev frontend, Django backend, and consumer service",
      "configurations": [
        "Chrome: Debug Angular Frontend",
        "Debug: Backend Server (manage.py runserver)",
        "Debug: Consumer Service (manage.py document_consumer)"
      ],
      "preLaunchTask": "Start: Celery Worker"
    }
  ]
}
VSCode settings.json for the devcontainer (dev only)

@@ -1,11 +0,0 @@

{
  "python.testing.pytestArgs": [
    "src"
  ],
  "python.testing.unittestEnabled": false,
  "python.testing.pytestEnabled": true,
  "files.watcherExclude": {
    "**/.venv/**": true,
    "**/pytest_cache/**": true
  }
}
VSCode tasks.json for the devcontainer (dev only)

@@ -1,223 +0,0 @@

{
  "version": "2.0.0",
  "tasks": [
    {
      "label": "Start: Celery Worker",
      "description": "Start the Celery Worker which processes background and consume tasks",
      "type": "shell",
      "command": "uv run celery --app paperless worker -l DEBUG",
      "isBackground": true,
      "options": {
        "cwd": "${workspaceFolder}/src"
      },
      "problemMatcher": [
        {
          "owner": "custom",
          "pattern": [
            {
              "regexp": ".",
              "file": 1,
              "location": 2,
              "message": 3
            }
          ],
          "background": {
            "activeOnStart": true,
            "beginsPattern": "celery.*",
            "endsPattern": "ready"
          }
        }
      ]
    },
    {
      "label": "Start: Frontend Angular",
      "description": "Start the Frontend Angular Dev Server",
      "type": "shell",
      "command": "pnpm start",
      "isBackground": true,
      "options": {
        "cwd": "${workspaceFolder}/src-ui"
      },
      "problemMatcher": [
        {
          "owner": "custom",
          "pattern": [
            {
              "regexp": ".",
              "file": 1,
              "location": 2,
              "message": 3
            }
          ],
          "background": {
            "activeOnStart": true,
            "beginsPattern": ".*",
            "endsPattern": "Compiled successfully"
          }
        }
      ]
    },
    {
      "label": "Start: Consumer Service (manage.py document_consumer)",
      "description": "Start the Consumer Service which processes files from a directory",
      "type": "shell",
      "command": "uv run python manage.py document_consumer",
      "group": "build",
      "presentation": {
        "echo": true,
        "reveal": "always",
        "focus": false,
        "panel": "shared",
        "showReuseMessage": false,
        "clear": true,
        "revealProblems": "onProblem"
      },
      "options": {
        "cwd": "${workspaceFolder}/src"
      }
    },
    {
      "label": "Start: Backend Server (manage.py runserver)",
      "description": "Start the Backend Server which serves the Django API and the compiled Angular frontend",
      "type": "shell",
      "command": "uv run python manage.py runserver",
      "group": "build",
      "presentation": {
        "echo": true,
        "reveal": "always",
        "focus": false,
        "panel": "shared",
        "showReuseMessage": false,
        "clear": true,
        "revealProblems": "onProblem"
      },
      "options": {
        "cwd": "${workspaceFolder}/src"
      }
    },
    {
      "label": "Maintenance: manage.py migrate",
      "description": "Apply database migrations",
      "type": "shell",
      "command": "uv run python manage.py migrate",
      "group": "none",
      "presentation": {
        "echo": true,
        "reveal": "always",
        "focus": true,
        "panel": "shared",
        "showReuseMessage": false,
        "clear": true,
        "revealProblems": "onProblem"
      },
      "options": {
        "cwd": "${workspaceFolder}/src"
      }
    },
    {
      "label": "Maintenance: Build Documentation",
      "description": "Build the documentation with MkDocs",
      "type": "shell",
      "command": "uv run mkdocs build --config-file mkdocs.yml && uv run mkdocs serve",
      "group": "none",
      "presentation": {
        "echo": true,
        "reveal": "always",
        "focus": true,
        "panel": "shared",
        "showReuseMessage": false,
        "clear": true,
        "revealProblems": "onProblem"
      },
      "options": {
        "cwd": "${workspaceFolder}"
      }
    },
    {
      "label": "Maintenance: manage.py createsuperuser",
      "description": "Create a superuser",
      "type": "shell",
      "command": "uv run python manage.py createsuperuser",
      "group": "none",
      "presentation": {
        "echo": true,
        "reveal": "always",
        "focus": true,
        "panel": "shared",
        "showReuseMessage": false,
        "clear": true,
        "revealProblems": "onProblem"
      },
      "options": {
        "cwd": "${workspaceFolder}/src"
      }
    },
    {
      "label": "Maintenance: recreate .venv",
      "description": "Recreate the python virtual environment and install python dependencies",
      "type": "shell",
      "command": "rm -R -v .venv/* || uv install --dev",
      "group": "none",
      "presentation": {
        "echo": true,
        "reveal": "always",
        "focus": true,
        "panel": "shared",
        "showReuseMessage": false,
        "clear": true,
        "revealProblems": "onProblem"
      },
      "options": {
        "cwd": "${workspaceFolder}"
      }
    },
    {
      "label": "Maintenance: Install Frontend Dependencies",
      "description": "Install frontend (pnpm) dependencies",
      "type": "pnpm",
      "script": "install",
      "path": "src-ui",
      "group": "clean",
      "problemMatcher": [],
      "detail": "install dependencies from package"
    },
    {
      "description": "Clean install frontend dependencies and build the frontend for production",
      "label": "Maintenance: Compile frontend for production",
      "type": "shell",
      "command": "pnpm install && ./node_modules/.bin/ng build --configuration production",
      "group": "none",
      "presentation": {
        "echo": true,
        "reveal": "always",
        "focus": true,
        "panel": "shared",
        "showReuseMessage": false,
        "clear": true,
        "revealProblems": "onProblem"
      },
      "options": {
        "cwd": "${workspaceFolder}/src-ui"
      }
    },
    {
      "label": "Project Setup: Run all Init Tasks",
      "description": "Runs all init tasks to setup the project including migrate the database, create a superuser and compile the frontend for production",
      "dependsOrder": "sequence",
      "dependsOn": [
        "Maintenance: manage.py migrate",
        "Maintenance: manage.py createsuperuser",
        "Maintenance: Compile frontend for production"
      ]
    },
    {
      "label": "Project Start: Run all Services",
      "description": "Runs all services required to start the project including the Celery Worker, the Consumer Service and the Backend Server",
      "dependsOn": [
        "Start: Celery Worker",
        "Start: Consumer Service (manage.py document_consumer)",
        "Start: Backend Server (manage.py runserver)"
      ]
    }
  ]
}
Ignore file

@@ -26,5 +26,3 @@
 ./dist
 ./scripts
 ./resources
-# Other stuff
-**/*.drawio.png
.editorconfig

@@ -27,6 +27,9 @@ indent_style = space
 [*.md]
 indent_style = space
 
+[Pipfile.lock]
+indent_style = space
+
 # Tests don't get a line width restriction. It's still a good idea to follow
 # the 79 character rule, but in the interests of clarity, tests often need to
 # violate it.
.github/FUNDING.yml (vendored, 1 changed line)

@@ -1 +0,0 @@
-github: [shamoon, stumpylog]
.github/ISSUE_TEMPLATE/bug-report.yml (vendored, 17 changed lines)

@@ -9,7 +9,7 @@ body:
       ### ⚠️ Please remember: issues are for *bugs*
       That is, something you believe affects every single user of Paperless-ngx, not just you. If you're not sure, start with one of the other options below.
 
-      Also, note that **Paperless-ngx does not perform OCR or archive file creation itself**, those are handled by other tools. Problems with OCR or archive versions of specific files should likely be raised 'upstream', see https://github.com/ocrmypdf/OCRmyPDF/issues or https://github.com/tesseract-ocr/tesseract/issues
+      Also, note that **Paperless-ngx does not perform OCR itself**, that is handled by other tools. Problems with OCR of specific files should likely be raised 'upstream', see https://github.com/ocrmypdf/OCRmyPDF/issues or https://github.com/tesseract-ocr/tesseract/issues
   - type: markdown
     attributes:
       value: |
@@ -86,23 +86,22 @@ body:
       description: Note there are significant differences from the official image and linuxserver.io, please check if your issue is specific to the third-party image.
     validations:
       required: true
-  - type: textarea
-    id: system-status
-    attributes:
-      label: System status
-      description: If available, copy & paste the system status output from Settings > System Status > Copy
-      render: json
   - type: input
     id: browser
     attributes:
       label: Browser
       description: Which browser you are using, if relevant.
       placeholder: e.g. Chrome, Safari
-  - type: textarea
+  - type: input
     id: config-changes
     attributes:
       label: Configuration changes
       description: Any configuration changes you made in `docker-compose.yml`, `docker-compose.env` or `paperless.conf`.
+  - type: input
+    id: other
+    attributes:
+      label: Other
+      description: Any other relevant details.
   - type: checkboxes
     id: required-checks
     attributes:
@@ -110,8 +109,6 @@ body:
       options:
         - label: I believe this issue is a bug that affects all users of Paperless-ngx, not something specific to my installation.
          required: true
-        - label: This issue is not about the OCR or archive creation of a specific file(s). Otherwise, please see above regarding OCR tools.
-          required: true
        - label: I have already searched for relevant existing issues and discussions before opening this report.
          required: true
        - label: I have updated the title field above with a concise description.
.github/ISSUE_TEMPLATE/config.yml (vendored, 4 changed lines)

@@ -2,10 +2,10 @@ blank_issues_enabled: false
 contact_links:
   - name: 🤔 Questions and Help
     url: https://github.com/paperless-ngx/paperless-ngx/discussions
-    about: General questions or support for using Paperless-ngx.
+    about: This issue tracker is not for support questions. Please refer to our Discussions.
   - name: 💬 Chat
     url: https://matrix.to/#/#paperlessngx:matrix.org
     about: Want to discuss Paperless-ngx with others? Check out our chat.
   - name: 🚀 Feature Request
     url: https://github.com/paperless-ngx/paperless-ngx/discussions/new?category=feature-requests
-    about: Remember to search for existing feature requests and "up-vote" those that you like.
+    about: Remember to search for existing feature requests and "up-vote" any you like
.github/dependabot.yml (vendored, 67 changed lines)

@@ -1,15 +1,12 @@
-# Please see the documentation for all configuration options:
-# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
+# https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/configuration-options-for-dependency-updates#package-ecosystem
 
 version: 2
-# Required for uv support for now
-enable-beta-ecosystems: true
 updates:
 
-  # Enable version updates for pnpm
+  # Enable version updates for npm
   - package-ecosystem: "npm"
     target-branch: "dev"
-    # Look for `pnpm-lock.yaml` file in the `/src-ui` directory
+    # Look for `package.json` and `lock` files in the `/src-ui` directory
     directory: "/src-ui"
     open-pull-requests-limit: 10
     schedule:
@@ -37,8 +34,9 @@ updates:
           - "eslint"
 
   # Enable version updates for Python
-  - package-ecosystem: "uv"
+  - package-ecosystem: "pip"
     target-branch: "dev"
+    # Look for a `Pipfile` in the `root` directory
     directory: "/"
     # Check for updates once a week
     schedule:
@@ -49,13 +47,15 @@ updates:
     # Add reviewers
     reviewers:
       - "paperless-ngx/backend"
+    ignore:
+      - dependency-name: "uvicorn"
     groups:
       development:
         patterns:
           - "*pytest*"
+          - "black"
           - "ruff"
           - "mkdocs-material"
-          - "pre-commit*"
       django:
         patterns:
           - "*django*"
@@ -66,10 +66,6 @@ updates:
         update-types:
           - "minor"
          - "patch"
-      pre-built:
-        patterns:
-          - psycopg*
-          - zxing-cpp
 
   # Enable updates for GitHub Actions
   - package-ecosystem: "github-actions"
@@ -90,50 +86,3 @@ updates:
           - "major"
           - "minor"
           - "patch"
-
-  # Update Dockerfile in root directory
-  - package-ecosystem: "docker"
-    directory: "/"
-    schedule:
-      interval: "weekly"
-    open-pull-requests-limit: 5
-    reviewers:
-      - "paperless-ngx/ci-cd"
-    labels:
-      - "ci-cd"
-      - "dependencies"
-    commit-message:
-      prefix: "docker"
-      include: "scope"
-
-  # Update Docker Compose files in docker/compose directory
-  - package-ecosystem: "docker-compose"
-    directory: "/docker/compose/"
-    schedule:
-      interval: "weekly"
-    open-pull-requests-limit: 5
-    reviewers:
-      - "paperless-ngx/ci-cd"
-    labels:
-      - "ci-cd"
-      - "dependencies"
-    commit-message:
-      prefix: "docker-compose"
-      include: "scope"
-    groups:
-      # Individual groups for each image
-      gotenberg:
-        patterns:
-          - "docker.io/gotenberg/gotenberg*"
-      tika:
-        patterns:
-          - "docker.io/apache/tika*"
-      redis:
-        patterns:
-          - "docker.io/library/redis*"
-      mariadb:
-        patterns:
-          - "docker.io/library/mariadb*"
-      postgres:
-        patterns:
-          - "docker.io/library/postgres*"
.github/workflows/ci.yml (vendored, 340 changed lines)

@@ -14,9 +14,11 @@ on:
       - 'translations**'
 
 env:
-  DEFAULT_UV_VERSION: "0.6.x"
+  # This is the version of pipenv all the steps will use
+  # If changing this, change Dockerfile
+  DEFAULT_PIP_ENV_VERSION: "2023.12.1"
   # This is the default version of Python to use in most steps which aren't specific
-  DEFAULT_PYTHON_VERSION: "3.11"
+  DEFAULT_PYTHON_VERSION: "3.10"
 
 jobs:
   pre-commit:
@@ -28,7 +30,7 @@ jobs:
       github.repository
 
     name: Linting Checks
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
     steps:
       -
        name: Checkout repository
@@ -44,7 +46,7 @@ jobs:
 
   documentation:
     name: "Build & Deploy Documentation"
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
    needs:
      - pre-commit
    steps:
@@ -57,25 +59,24 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
+          cache: "pipenv"
+          cache-dependency-path: 'Pipfile.lock'
       -
-        name: Install uv
-        uses: astral-sh/setup-uv@v5
-        with:
-          version: ${{ env.DEFAULT_UV_VERSION }}
-          enable-cache: true
-          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
-      -
-        name: Install Python dependencies
+        name: Install pipenv
         run: |
-          uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
+          pip install --user pipenv==${{ env.DEFAULT_PIP_ENV_VERSION }}
+      -
+        name: Install dependencies
+        run: |
+          pipenv --python ${{ steps.setup-python.outputs.python-version }} sync --dev
+      -
+        name: List installed Python dependencies
+        run: |
+          pipenv --python ${{ steps.setup-python.outputs.python-version }} run pip list
       -
         name: Make documentation
         run: |
-          uv run \
-            --python ${{ steps.setup-python.outputs.python-version }} \
-            --dev \
-            --frozen \
-            mkdocs build --config-file ./mkdocs.yml
+          pipenv --python ${{ steps.setup-python.outputs.python-version }} run mkdocs build --config-file ./mkdocs.yml
       -
         name: Deploy documentation
         if: github.event_name == 'push' && github.ref == 'refs/heads/main'
@@ -83,11 +84,7 @@ jobs:
           echo "docs.paperless-ngx.com" > "${{ github.workspace }}/docs/CNAME"
           git config --global user.name "${{ github.actor }}"
           git config --global user.email "${{ github.actor }}@users.noreply.github.com"
-          uv run \
-            --python ${{ steps.setup-python.outputs.python-version }} \
-            --dev \
-            --frozen \
-            mkdocs gh-deploy --force --no-history
+          pipenv --python ${{ steps.setup-python.outputs.python-version }} run mkdocs gh-deploy --force --no-history
       -
         name: Upload artifact
         uses: actions/upload-artifact@v4
@@ -98,12 +95,12 @@ jobs:
 
   tests-backend:
     name: "Backend Tests (Python ${{ matrix.python-version }})"
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
     needs:
       - pre-commit
     strategy:
       matrix:
-        python-version: ['3.10', '3.11', '3.12']
+        python-version: ['3.9', '3.10', '3.11']
      fail-fast: false
    steps:
      -
@@ -120,13 +117,12 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: "${{ matrix.python-version }}"
+          cache: "pipenv"
+          cache-dependency-path: 'Pipfile.lock'
       -
-        name: Install uv
-        uses: astral-sh/setup-uv@v5
-        with:
-          version: ${{ env.DEFAULT_UV_VERSION }}
-          enable-cache: true
-          python-version: ${{ steps.setup-python.outputs.python-version }}
+        name: Install pipenv
+        run: |
+          pip install --user pipenv==${{ env.DEFAULT_PIP_ENV_VERSION }}
       -
         name: Install system dependencies
         run: |
@@ -135,18 +131,16 @@ jobs:
       -
         name: Configure ImageMagick
         run: |
-          sudo cp docker/rootfs/etc/ImageMagick-6/paperless-policy.xml /etc/ImageMagick-6/policy.xml
+          sudo cp docker/imagemagick-policy.xml /etc/ImageMagick-6/policy.xml
       -
         name: Install Python dependencies
         run: |
-          uv sync \
-            --python ${{ steps.setup-python.outputs.python-version }} \
-            --group testing \
-            --frozen
+          pipenv --python ${{ steps.setup-python.outputs.python-version }} run python --version
+          pipenv --python ${{ steps.setup-python.outputs.python-version }} sync --dev
       -
         name: List installed Python dependencies
         run: |
-          uv pip list
+          pipenv --python ${{ steps.setup-python.outputs.python-version }} run pip list
       -
         name: Tests
         env:
@@ -156,26 +150,17 @@ jobs:
           PAPERLESS_MAIL_TEST_USER: ${{ secrets.TEST_MAIL_USER }}
           PAPERLESS_MAIL_TEST_PASSWD: ${{ secrets.TEST_MAIL_PASSWD }}
         run: |
-          uv run \
-            --python ${{ steps.setup-python.outputs.python-version }} \
-            --dev \
-            --frozen \
-            pytest
+          cd src/
+          pipenv --python ${{ steps.setup-python.outputs.python-version }} run pytest -ra
       -
-        name: Upload backend test results to Codecov
-        if: always()
-        uses: codecov/test-results-action@v1
-        with:
-          token: ${{ secrets.CODECOV_TOKEN }}
-          flags: backend-python-${{ matrix.python-version }}
-          files: junit.xml
-      -
-        name: Upload backend coverage to Codecov
-        uses: codecov/codecov-action@v5
-        with:
-          token: ${{ secrets.CODECOV_TOKEN }}
-          flags: backend-python-${{ matrix.python-version }}
-          files: coverage.xml
+        name: Upload coverage
+        if: ${{ matrix.python-version == env.DEFAULT_PYTHON_VERSION }}
+        uses: actions/upload-artifact@v4
+        with:
+          name: backend-coverage-report
+          path: src/coverage.xml
+          retention-days: 7
+          if-no-files-found: warn
       -
         name: Stop containers
         if: always()
@@ -183,46 +168,42 @@ jobs:
           docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml logs
           docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml down
 
-  install-frontend-dependencies:
+  install-frontend-depedendencies:
     name: "Install Frontend Dependencies"
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
     needs:
       - pre-commit
     steps:
       - uses: actions/checkout@v4
-      - name: Install pnpm
-        uses: pnpm/action-setup@v4
-        with:
-          version: 10
       -
         name: Use Node.js 20
         uses: actions/setup-node@v4
         with:
           node-version: 20.x
-          cache: 'pnpm'
-          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
+          cache: 'npm'
+          cache-dependency-path: 'src-ui/package-lock.json'
       - name: Cache frontend dependencies
         id: cache-frontend-deps
         uses: actions/cache@v4
         with:
           path: |
-            ~/.pnpm-store
+            ~/.npm
             ~/.cache
-          key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
+          key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/package-lock.json') }}
       -
         name: Install dependencies
         if: steps.cache-frontend-deps.outputs.cache-hit != 'true'
-        run: cd src-ui && pnpm install
+        run: cd src-ui && npm ci
       -
         name: Install Playwright
         if: steps.cache-frontend-deps.outputs.cache-hit != 'true'
-        run: cd src-ui && pnpm playwright install --with-deps
+        run: cd src-ui && npx playwright install --with-deps
 
   tests-frontend:
     name: "Frontend Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
     needs:
-      - install-frontend-dependencies
+      - install-frontend-depedendencies
     strategy:
       fail-fast: false
      matrix:
@@ -231,92 +212,104 @@ jobs:
         shard-count: [4]
     steps:
       - uses: actions/checkout@v4
-      - name: Install pnpm
-        uses: pnpm/action-setup@v4
-        with:
-          version: 10
       -
         name: Use Node.js 20
         uses: actions/setup-node@v4
         with:
           node-version: 20.x
-          cache: 'pnpm'
-          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
+          cache: 'npm'
+          cache-dependency-path: 'src-ui/package-lock.json'
       - name: Cache frontend dependencies
         id: cache-frontend-deps
         uses: actions/cache@v4
         with:
           path: |
-            ~/.pnpm-store
-            ~/.cache
-          key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
-      - name: Re-link Angular cli
-        run: cd src-ui && pnpm link @angular/cli
-      -
-        name: Linting checks
-        run: cd src-ui && pnpm run lint
-      -
-        name: Run Jest unit tests
-        run: cd src-ui && pnpm run test --max-workers=2 --shard=${{ matrix.shard-index }}/${{ matrix.shard-count }}
-      -
-        name: Run Playwright e2e tests
-        run: cd src-ui && pnpm exec playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}
-      -
-        name: Upload frontend test results to Codecov
-        uses: codecov/test-results-action@v1
-        if: always()
-        with:
-          token: ${{ secrets.CODECOV_TOKEN }}
-          flags: frontend-node-${{ matrix.node-version }}
-          directory: src-ui/
-      -
-        name: Upload frontend coverage to Codecov
-        uses: codecov/codecov-action@v5
-        with:
-          token: ${{ secrets.CODECOV_TOKEN }}
-          flags: frontend-node-${{ matrix.node-version }}
-          directory: src-ui/coverage/
-
-  frontend-bundle-analysis:
-    name: "Frontend Bundle Analysis"
-    runs-on: ubuntu-24.04
-    needs:
-      - tests-frontend
-    steps:
-      - uses: actions/checkout@v4
-      -
-        name: Install pnpm
-        uses: pnpm/action-setup@v4
-        with:
-          version: 10
-      -
-        name: Use Node.js 20
-        uses: actions/setup-node@v4
-        with:
-          node-version: 20.x
-          cache: 'pnpm'
-          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
-      -
-        name: Cache frontend dependencies
-        id: cache-frontend-deps
-        uses: actions/cache@v4
-        with:
-          path: |
-            ~/.pnpm-store
+            ~/.npm
             ~/.cache
           key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/package-lock.json') }}
+      - name: Re-link Angular cli
+        run: cd src-ui && npm link @angular/cli
       -
-        name: Re-link Angular cli
-        run: cd src-ui && pnpm link @angular/cli
+        name: Linting checks
+        run: cd src-ui && npm run lint
       -
-        name: Build frontend and upload analysis
-        env:
-          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
-        run: cd src-ui && pnpm run build --configuration=production
+        name: Run Jest unit tests
+        run: cd src-ui && npm run test -- --max-workers=2 --shard=${{ matrix.shard-index }}/${{ matrix.shard-count }}
+      -
+        name: Upload Jest coverage
+        if: always()
+        uses: actions/upload-artifact@v4
+        with:
+          name: jest-coverage-report-${{ matrix.shard-index }}
+          path: |
+            src-ui/coverage/coverage-final.json
+            src-ui/coverage/lcov.info
+            src-ui/coverage/clover.xml
+          retention-days: 7
+          if-no-files-found: warn
+      -
+        name: Run Playwright e2e tests
+        run: cd src-ui && npx playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}
+      -
+        name: Upload Playwright test results
+        if: always()
+        uses: actions/upload-artifact@v4
+        with:
+          name: playwright-report-${{ matrix.shard-index }}
+          path: src-ui/playwright-report
+          retention-days: 7
+
+  tests-coverage-upload:
+    name: "Upload Coverage"
+    runs-on: ubuntu-22.04
+    needs:
+      - tests-backend
+      - tests-frontend
+    steps:
+      -
+        uses: actions/checkout@v4
+      -
+        name: Download frontend jest coverage
+        uses: actions/download-artifact@v4
+        with:
+          path: src-ui/coverage/
+          pattern: jest-coverage-report-*
+      -
+        name: Download frontend playwright coverage
+        uses: actions/download-artifact@v4
+        with:
+          path: src-ui/coverage/
+          pattern: playwright-report-*
+          merge-multiple: true
+      -
+        name: Upload frontend coverage to Codecov
+        uses: codecov/codecov-action@v4
+        with:
+          # not required for public repos, but intermittently fails otherwise
+          token: ${{ secrets.CODECOV_TOKEN }}
+          flags: frontend
+          directory: src-ui/coverage/
+          # dont include backend coverage files here
+          files: '!coverage.xml'
+      -
+        name: Download backend coverage
+        uses: actions/download-artifact@v4
+        with:
+          name: backend-coverage-report
+          path: src/
+      -
+        name: Upload coverage to Codecov
+        uses: codecov/codecov-action@v4
+        with:
+          # not required for public repos, but intermittently fails otherwise
+          token: ${{ secrets.CODECOV_TOKEN }}
+          # future expansion
+          flags: backend
+          directory: src/
 
   build-docker-image:
     name: Build Docker image for ${{ github.ref_name }}
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
     if: github.event_name == 'push' && (startsWith(github.ref, 'refs/heads/feature-') || startsWith(github.ref, 'refs/heads/fix-') || github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/beta' || contains(github.ref, 'beta.rc') || startsWith(github.ref, 'refs/tags/v'))
     concurrency:
       group: ${{ github.workflow }}-build-docker-image-${{ github.ref_name }}
@@ -389,7 +382,7 @@ jobs:
       -
         name: Login to Docker Hub
         uses: docker/login-action@v3
-        # Don't attempt to login if not pushing to Docker Hub
+        # Don't attempt to login is not pushing to Docker Hub
         if: steps.push-other-places.outputs.enable == 'true'
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -397,7 +390,7 @@ jobs:
       -
         name: Login to Quay.io
        uses: docker/login-action@v3
-        # Don't attempt to login if not pushing to Quay.io
+        # Don't attempt to login is not pushing to Quay.io
        if: steps.push-other-places.outputs.enable == 'true'
        with:
          registry: quay.io
@@ -405,7 +398,7 @@ jobs:
           password: ${{ secrets.QUAY_ROBOT_TOKEN }}
       -
         name: Build and push
-        uses: docker/build-push-action@v6
+        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./Dockerfile
@@ -413,8 +406,6 @@ jobs:
           push: ${{ github.event_name != 'pull_request' }}
           tags: ${{ steps.docker-meta.outputs.tags }}
           labels: ${{ steps.docker-meta.outputs.labels }}
-          build-args: |
-            PNGX_TAG_VERSION=${{ steps.docker-meta.outputs.version }}
           # Get cache layers from this branch, then dev
           # This allows new branches to get at least some cache benefits, generally from dev
           cache-from: |
@@ -444,7 +435,7 @@ jobs:
     needs:
       - build-docker-image
       - documentation
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-22.04
    steps:
      -
        name: Checkout
@@ -455,17 +446,22 @@ jobs:
         uses: actions/setup-python@v5
         with:
|
with:
|
||||||
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
|
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
|
||||||
|
cache: "pipenv"
|
||||||
|
cache-dependency-path: 'Pipfile.lock'
|
||||||
-
|
-
|
||||||
name: Install uv
|
name: Install pipenv + tools
|
||||||
uses: astral-sh/setup-uv@v5
|
run: |
|
||||||
with:
|
pip install --upgrade --user pipenv==${{ env.DEFAULT_PIP_ENV_VERSION }} setuptools wheel
|
||||||
version: ${{ env.DEFAULT_UV_VERSION }}
|
|
||||||
enable-cache: true
|
|
||||||
python-version: ${{ steps.setup-python.outputs.python-version }}
|
|
||||||
-
|
-
|
||||||
name: Install Python dependencies
|
name: Install Python dependencies
|
||||||
run: |
|
run: |
|
||||||
uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
|
pipenv --python ${{ steps.setup-python.outputs.python-version }} sync --dev
|
||||||
|
-
|
||||||
|
name: Patch whitenoise
|
||||||
|
run: |
|
||||||
|
curl --fail --silent --show-error --location --output 484.patch https://github.com/evansd/whitenoise/pull/484.patch
|
||||||
|
patch -d $(pipenv --venv)/lib/python3.10/site-packages --verbose -p2 < 484.patch
|
||||||
|
rm 484.patch
|
||||||
-
|
-
|
||||||
name: Install system dependencies
|
name: Install system dependencies
|
||||||
run: |
|
run: |
|
||||||
@ -486,21 +482,17 @@ jobs:
|
|||||||
-
|
-
|
||||||
name: Generate requirements file
|
name: Generate requirements file
|
||||||
run: |
|
run: |
|
||||||
uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt
|
pipenv --python ${{ steps.setup-python.outputs.python-version }} requirements > requirements.txt
|
||||||
-
|
-
|
||||||
name: Compile messages
|
name: Compile messages
|
||||||
run: |
|
run: |
|
||||||
cd src/
|
cd src/
|
||||||
uv run \
|
pipenv --python ${{ steps.setup-python.outputs.python-version }} run python3 manage.py compilemessages
|
||||||
--python ${{ steps.setup-python.outputs.python-version }} \
|
|
||||||
manage.py compilemessages
|
|
||||||
-
|
-
|
||||||
name: Collect static files
|
name: Collect static files
|
||||||
run: |
|
run: |
|
||||||
cd src/
|
cd src/
|
||||||
uv run \
|
pipenv --python ${{ steps.setup-python.outputs.python-version }} run python3 manage.py collectstatic --no-input
|
||||||
--python ${{ steps.setup-python.outputs.python-version }} \
|
|
||||||
manage.py collectstatic --no-input
|
|
||||||
-
|
-
|
||||||
name: Move files
|
name: Move files
|
||||||
run: |
|
run: |
|
||||||
@ -516,12 +508,13 @@ jobs:
|
|||||||
for file_name in .dockerignore \
|
for file_name in .dockerignore \
|
||||||
.env \
|
.env \
|
||||||
Dockerfile \
|
Dockerfile \
|
||||||
pyproject.toml \
|
Pipfile \
|
||||||
uv.lock \
|
Pipfile.lock \
|
||||||
requirements.txt \
|
requirements.txt \
|
||||||
LICENSE \
|
LICENSE \
|
||||||
README.md \
|
README.md \
|
||||||
paperless.conf.example
|
paperless.conf.example \
|
||||||
|
gunicorn.conf.py
|
||||||
do
|
do
|
||||||
cp --verbose ${file_name} dist/paperless-ngx/
|
cp --verbose ${file_name} dist/paperless-ngx/
|
||||||
done
|
done
|
||||||
@ -556,7 +549,7 @@ jobs:
|
|||||||
|
|
||||||
publish-release:
|
publish-release:
|
||||||
name: "Publish Release"
|
name: "Publish Release"
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-22.04
|
||||||
outputs:
|
outputs:
|
||||||
prerelease: ${{ steps.get_version.outputs.prerelease }}
|
prerelease: ${{ steps.get_version.outputs.prerelease }}
|
||||||
changelog: ${{ steps.create-release.outputs.body }}
|
changelog: ${{ steps.create-release.outputs.body }}
|
||||||
@ -606,7 +599,7 @@ jobs:
|
|||||||
|
|
||||||
append-changelog:
|
append-changelog:
|
||||||
name: "Append Changelog"
|
name: "Append Changelog"
|
||||||
runs-on: ubuntu-24.04
|
runs-on: ubuntu-22.04
|
||||||
needs:
|
needs:
|
||||||
- publish-release
|
- publish-release
|
||||||
if: needs.publish-release.outputs.prerelease == 'false'
|
if: needs.publish-release.outputs.prerelease == 'false'
|
||||||
@ -618,17 +611,15 @@ jobs:
|
|||||||
ref: main
|
ref: main
|
||||||
-
|
-
|
||||||
name: Set up Python
|
name: Set up Python
|
||||||
id: setup-python
|
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
|
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
|
||||||
|
cache: "pipenv"
|
||||||
|
cache-dependency-path: 'Pipfile.lock'
|
||||||
-
|
-
|
||||||
name: Install uv
|
name: Install pipenv + tools
|
||||||
uses: astral-sh/setup-uv@v5
|
run: |
|
||||||
with:
|
pip install --upgrade --user pipenv==${{ env.DEFAULT_PIP_ENV_VERSION }} setuptools wheel
|
||||||
version: ${{ env.DEFAULT_UV_VERSION }}
|
|
||||||
enable-cache: true
|
|
||||||
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
|
|
||||||
-
|
-
|
||||||
name: Append Changelog to docs
|
name: Append Changelog to docs
|
||||||
id: append-Changelog
|
id: append-Changelog
|
||||||
@ -638,16 +629,11 @@ jobs:
|
|||||||
git checkout ${{ needs.publish-release.outputs.version }}-changelog
|
git checkout ${{ needs.publish-release.outputs.version }}-changelog
|
||||||
echo -e "# Changelog\n\n${{ needs.publish-release.outputs.changelog }}\n" > changelog-new.md
|
echo -e "# Changelog\n\n${{ needs.publish-release.outputs.changelog }}\n" > changelog-new.md
|
||||||
echo "Manually linking usernames"
|
echo "Manually linking usernames"
|
||||||
sed -i -r 's|@([a-zA-Z0-9_]+) \(\[#|[@\1](https://github.com/\1) ([#|g' changelog-new.md
|
sed -i -r 's|@(.+?) \(\[#|[@\1](https://github.com/\1) ([#|ig' changelog-new.md
|
||||||
echo "Removing unneeded comment tags"
|
|
||||||
sed -i -r 's|@<!---->|@|g' changelog-new.md
|
|
||||||
CURRENT_CHANGELOG=`tail --lines +2 changelog.md`
|
CURRENT_CHANGELOG=`tail --lines +2 changelog.md`
|
||||||
echo -e "$CURRENT_CHANGELOG" >> changelog-new.md
|
echo -e "$CURRENT_CHANGELOG" >> changelog-new.md
|
||||||
mv changelog-new.md changelog.md
|
mv changelog-new.md changelog.md
|
||||||
uv run \
|
pipenv run pre-commit run --files changelog.md || true
|
||||||
--python ${{ steps.setup-python.outputs.python-version }} \
|
|
||||||
--dev \
|
|
||||||
pre-commit run --files changelog.md || true
|
|
||||||
git config --global user.name "github-actions"
|
git config --global user.name "github-actions"
|
||||||
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||||
git commit -am "Changelog ${{ needs.publish-release.outputs.version }} - GHA"
|
git commit -am "Changelog ${{ needs.publish-release.outputs.version }} - GHA"
|
||||||
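The two `sed` variants in this step differ only in how strictly they match a GitHub handle. A minimal sketch of what the dev-side expression does, run against a hypothetical changelog fragment (the handle and PR number below are made up):

```bash
# Illustrative only: link a "@user" mention that precedes a "([#" pull-request reference
echo '- Fix something by @someuser ([#1234](https://github.com/paperless-ngx/paperless-ngx/pull/1234))' \
  | sed -E 's|@([a-zA-Z0-9_]+) \(\[#|[@\1](https://github.com/\1) ([#|g'
# Expected output:
# - Fix something by [@someuser](https://github.com/someuser) ([#1234](https://github.com/paperless-ngx/paperless-ngx/pull/1234))
```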
8 .github/workflows/cleanup-tags.yml vendored
@@ -21,7 +21,7 @@ jobs:
 cleanup-images:
 name: Cleanup Image Tags for ${{ matrix.primary-name }}
 if: github.repository_owner == 'paperless-ngx'
-runs-on: ubuntu-24.04
+runs-on: ubuntu-22.04
 strategy:
 fail-fast: false
 matrix:
@@ -33,7 +33,7 @@ jobs:
 -
 name: Clean temporary images
 if: "${{ env.TOKEN != '' }}"
-uses: stumpylog/image-cleaner-action/ephemeral@v0.10.0
+uses: stumpylog/image-cleaner-action/ephemeral@v0.5.0
 with:
 token: "${{ env.TOKEN }}"
 owner: "${{ github.repository_owner }}"
@@ -47,7 +47,7 @@ jobs:
 cleanup-untagged-images:
 name: Cleanup Untagged Images Tags for ${{ matrix.primary-name }}
 if: github.repository_owner == 'paperless-ngx'
-runs-on: ubuntu-24.04
+runs-on: ubuntu-22.04
 needs:
 - cleanup-images
 strategy:
@@ -61,7 +61,7 @@ jobs:
 -
 name: Clean untagged images
 if: "${{ env.TOKEN != '' }}"
-uses: stumpylog/image-cleaner-action/untagged@v0.10.0
+uses: stumpylog/image-cleaner-action/untagged@v0.5.0
 with:
 token: "${{ env.TOKEN }}"
 owner: "${{ github.repository_owner }}"
2 .github/workflows/codeql-analysis.yml vendored
@@ -23,7 +23,7 @@ on:
 jobs:
 analyze:
 name: Analyze
-runs-on: ubuntu-24.04
+runs-on: ubuntu-22.04
 permissions:
 actions: read
 contents: read
5 .github/workflows/crowdin.yml vendored
@@ -15,14 +15,13 @@ on:
 jobs:
 synchronize-with-crowdin:
 name: Crowdin Sync
-if: github.repository_owner == 'paperless-ngx'
+runs-on: ubuntu-latest
-runs-on: ubuntu-24.04

 steps:
 - name: Checkout
 uses: actions/checkout@v4
 - name: crowdin action
-uses: crowdin/github-action@v2
+uses: crowdin/github-action@v1
 with:
 upload_translations: false
 download_translations: true
2 .github/workflows/project-actions.yml vendored
@@ -15,7 +15,7 @@ permissions:
 jobs:
 pr_opened_or_reopened:
 name: pr_opened_or_reopened
-runs-on: ubuntu-24.04
+runs-on: ubuntu-22.04
 permissions:
 # write permission is required for autolabeler
 pull-requests: write
33 .github/workflows/repo-maintenance.yml vendored
@@ -16,14 +16,13 @@ concurrency:
 jobs:
 stale:
 name: 'Stale'
-if: github.repository_owner == 'paperless-ngx'
+runs-on: ubuntu-latest
-runs-on: ubuntu-24.04
 steps:
 - uses: actions/stale@v9
 with:
 days-before-stale: 7
 days-before-close: 14
-any-of-labels: 'stale,cant-reproduce,not a bug'
+any-of-labels: 'cant-reproduce,not a bug'
 stale-issue-label: stale
 stale-pr-label: stale
 stale-issue-message: >
@@ -32,8 +31,7 @@ jobs:
 for your contributions. See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
 lock-threads:
 name: 'Lock Old Threads'
-if: github.repository_owner == 'paperless-ngx'
+runs-on: ubuntu-latest
-runs-on: ubuntu-24.04
 steps:
 - uses: dessant/lock-threads@v5
 with:
@@ -58,8 +56,7 @@ jobs:
 See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
 close-answered-discussions:
 name: 'Close Answered Discussions'
-if: github.repository_owner == 'paperless-ngx'
+runs-on: ubuntu-latest
-runs-on: ubuntu-24.04
 steps:
 - uses: actions/github-script@v7
 with:
@@ -115,8 +112,7 @@ jobs:
 }
 close-outdated-discussions:
 name: 'Close Outdated Discussions'
-if: github.repository_owner == 'paperless-ngx'
+runs-on: ubuntu-latest
-runs-on: ubuntu-24.04
 steps:
 - uses: actions/github-script@v7
 with:
@@ -207,8 +203,7 @@ jobs:
 }
 close-unsupported-feature-requests:
 name: 'Close Unsupported Feature Requests'
-if: github.repository_owner == 'paperless-ngx'
+runs-on: ubuntu-latest
-runs-on: ubuntu-24.04
 steps:
 - uses: actions/github-script@v7
 with:
@@ -217,20 +212,15 @@ jobs:
 return new Promise(resolve => setTimeout(resolve, ms));
 }

-const CUTOFF_MAX_COUNT = 80;
 const CUTOFF_1_DAYS = 180;
 const CUTOFF_1_COUNT = 5;
 const CUTOFF_2_DAYS = 365;
-const CUTOFF_2_COUNT = 20;
+const CUTOFF_2_COUNT = 10;
-const CUTOFF_3_DAYS = 730;
-const CUTOFF_3_COUNT = 40;

 const cutoff1Date = new Date();
 cutoff1Date.setDate(cutoff1Date.getDate() - CUTOFF_1_DAYS);
 const cutoff2Date = new Date();
 cutoff2Date.setDate(cutoff2Date.getDate() - CUTOFF_2_DAYS);
-const cutoff3Date = new Date();
-cutoff3Date.setDate(cutoff3Date.getDate() - CUTOFF_3_DAYS);

 const query = `query(
 $owner:String!,
@@ -260,12 +250,9 @@ jobs:
 const result = await github.graphql(query, variables);

 for (const discussion of result.repository.discussions.nodes) {
-const discussionUpdatedDate = new Date(discussion.updatedAt);
+const discussionDate = new Date(discussion.updatedAt);
-const discussionCreatedDate = new Date(discussion.createdAt);
+if ((discussionDate < cutoff1Date && discussion.upvoteCount < CUTOFF_1_COUNT) ||
-if ((discussionUpdatedDate < cutoff1Date && discussion.upvoteCount < CUTOFF_MAX_COUNT) ||
+(discussionDate < cutoff2Date && discussion.upvoteCount < CUTOFF_2_COUNT)) {
-(discussionCreatedDate < cutoff1Date && discussion.upvoteCount < CUTOFF_1_COUNT) ||
-(discussionCreatedDate < cutoff2Date && discussion.upvoteCount < CUTOFF_2_COUNT) ||
-(discussionCreatedDate < cutoff3Date && discussion.upvoteCount < CUTOFF_3_COUNT)) {
 console.log(`Closing discussion #${discussion.number} (${discussion.id}), last updated at ${discussion.updatedAt} with votes ${discussion.upvoteCount}`);
 const addCommentMutation = `mutation($discussion:ID!, $body:String!) {
 addDiscussionComment(input:{discussionId:$discussion, body:$body}) {
10 .gitignore vendored
@@ -22,7 +22,6 @@ var/
 *.egg-info/
 .installed.cfg
 *.egg
-/src/paperless_mail/templates/node_modules

 # PyInstaller
 # Usually these files are written by a python script from a template
@@ -44,7 +43,6 @@ nosetests.xml
 coverage.xml
 *,cover
 .pytest_cache
-junit.xml

 # Translations
 *.mo
@@ -67,8 +65,6 @@ target/
 .vscode
 /src-ui/.vscode
 /docs/.vscode
-.vscode-server
-*CommandMarker

 # Other stuff that doesn't belong
 .virtualenv
@@ -101,9 +97,3 @@ scripts/nuke

 # celery schedule file
 celerybeat-schedule*

-# ignore .devcontainer sub folders
-/.devcontainer/consume/
-/.devcontainer/data/
-/.devcontainer/media/
-/.devcontainer/redisdata/
@@ -5,7 +5,7 @@
 repos:
 # General hooks
 - repo: https://github.com/pre-commit/pre-commit-hooks
-rev: v5.0.0
+rev: v4.5.0
 hooks:
 - id: check-docstring-first
 - id: check-json
@@ -29,35 +29,31 @@ repos:
 - id: check-case-conflict
 - id: detect-private-key
 - repo: https://github.com/codespell-project/codespell
-rev: v2.4.0
+rev: v2.2.6
 hooks:
 - id: codespell
-exclude: "(^src-ui/src/locale/)|(^src-ui/pnpm-lock.yaml)|(^src-ui/e2e/)|(^src/paperless_mail/tests/samples/)"
+exclude: "(^src-ui/src/locale/)|(^src-ui/e2e/)|(^src/paperless_mail/tests/samples/)"
 exclude_types:
 - pofile
 - json
-# See https://github.com/prettier/prettier/issues/15742 for the fork reason
+- repo: https://github.com/pre-commit/mirrors-prettier
-- repo: https://github.com/rbubley/mirrors-prettier
+rev: 'v3.1.0'
-rev: 'v3.3.3'
 hooks:
 - id: prettier
 types_or:
 - javascript
 - ts
 - markdown
-additional_dependencies:
+exclude: "(^Pipfile\\.lock$)"
-- prettier@3.3.3
-- 'prettier-plugin-organize-imports@4.1.0'
 # Python hooks
 - repo: https://github.com/astral-sh/ruff-pre-commit
-rev: v0.9.9
+rev: 'v0.3.5'
 hooks:
 - id: ruff
-- id: ruff-format
+- repo: https://github.com/psf/black-pre-commit-mirror
-- repo: https://github.com/tox-dev/pyproject-fmt
+rev: 24.3.0
-rev: "v2.5.1"
 hooks:
-- id: pyproject-fmt
+- id: black
 # Dockerfile hooks
 - repo: https://github.com/AleksaC/hadolint-py
 rev: v2.12.0.3
@@ -68,8 +64,6 @@ repos:
 rev: v6.2.1
 hooks:
 - id: beautysh
-additional_dependencies:
-- setuptools
 args:
 - "--tab"
 - repo: https://github.com/shellcheck-py/shellcheck-py
16 .prettierrc Normal file
@@ -0,0 +1,16 @@
+{
+# https://prettier.io/docs/en/options.html#semicolons
+"semi": false,
+# https://prettier.io/docs/en/options.html#quotes
+"singleQuote": true,
+# https://prettier.io/docs/en/options.html#trailing-commas
+"trailingComma": "es5",
+"overrides": [
+{
+"files": ["index.md", "administration.md"],
+"options": {
+"tabWidth": 4
+}
+}
+]
+}
@@ -1,19 +0,0 @@
-const config = {
-// https://prettier.io/docs/en/options.html#semicolons
-semi: false,
-// https://prettier.io/docs/en/options.html#quotes
-singleQuote: true,
-// https://prettier.io/docs/en/options.html#trailing-commas
-trailingComma: 'es5',
-overrides: [
-{
-files: ['docs/*.md'],
-options: {
-tabWidth: 4,
-},
-},
-],
-plugins: [require('prettier-plugin-organize-imports')],
-}
-
-module.exports = config
1 .python-version Normal file
@@ -0,0 +1 @@
+3.9.18
47 .ruff.toml Normal file
@@ -0,0 +1,47 @@
+fix = true
+line-length = 88
+respect-gitignore = true
+src = ["src"]
+target-version = "py39"
+output-format = "grouped"
+show-fixes = true
+
+# https://docs.astral.sh/ruff/settings/
+# https://docs.astral.sh/ruff/rules/
+[lint]
+extend-select = [
+"W", # https://docs.astral.sh/ruff/rules/#pycodestyle-e-w
+"I", # https://docs.astral.sh/ruff/rules/#isort-i
+"UP", # https://docs.astral.sh/ruff/rules/#pyupgrade-up
+"COM", # https://docs.astral.sh/ruff/rules/#flake8-commas-com
+"DJ", # https://docs.astral.sh/ruff/rules/#flake8-django-dj
+"EXE", # https://docs.astral.sh/ruff/rules/#flake8-executable-exe
+"ISC", # https://docs.astral.sh/ruff/rules/#flake8-implicit-str-concat-isc
+"ICN", # https://docs.astral.sh/ruff/rules/#flake8-import-conventions-icn
+"G201", # https://docs.astral.sh/ruff/rules/#flake8-logging-format-g
+"INP", # https://docs.astral.sh/ruff/rules/#flake8-no-pep420-inp
+"PIE", # https://docs.astral.sh/ruff/rules/#flake8-pie-pie
+"Q", # https://docs.astral.sh/ruff/rules/#flake8-quotes-q
+"RSE", # https://docs.astral.sh/ruff/rules/#flake8-raise-rse
+"T20", # https://docs.astral.sh/ruff/rules/#flake8-print-t20
+"SIM", # https://docs.astral.sh/ruff/rules/#flake8-simplify-sim
+"TID", # https://docs.astral.sh/ruff/rules/#flake8-tidy-imports-tid
+"TCH", # https://docs.astral.sh/ruff/rules/#flake8-type-checking-tch
+"PLC", # https://docs.astral.sh/ruff/rules/#pylint-pl
+"PLE", # https://docs.astral.sh/ruff/rules/#pylint-pl
+"RUF", # https://docs.astral.sh/ruff/rules/#ruff-specific-rules-ruf
+"FLY", # https://docs.astral.sh/ruff/rules/#flynt-fly
+]
+# TODO PTH https://docs.astral.sh/ruff/rules/#flake8-use-pathlib-pth
+ignore = ["DJ001", "SIM105", "RUF012"]
+
+[lint.per-file-ignores]
+".github/scripts/*.py" = ["E501", "INP001", "SIM117"]
+"docker/wait-for-redis.py" = ["INP001", "T201"]
+"*/tests/*.py" = ["E501", "SIM117"]
+"*/migrations/*.py" = ["E501", "SIM", "T201"]
+"src/paperless_tesseract/tests/test_parser.py" = ["RUF001"]
+"src/documents/models.py" = ["SIM115"]
+
+[lint.isort]
+force-single-line = true
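Ruff reads a repository-root `.ruff.toml` automatically, so a config like the one above applies both to direct CLI runs and to the pre-commit hook pinned earlier in this comparison. A minimal local sketch, assuming Ruff and pre-commit are installed (paths are illustrative):

```bash
# Lint the backend sources against the root .ruff.toml
ruff check src/

# Or run the same check through the hook defined in .pre-commit-config.yaml
pre-commit run ruff --all-files
```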
@@ -5,6 +5,5 @@
 /src-ui/ @paperless-ngx/frontend

 /src/ @paperless-ngx/backend
-pyproject.toml @paperless-ngx/backend
+Pipfile* @paperless-ngx/backend
-uv.lock @paperless-ngx/backend
 *.py @paperless-ngx/backend
@@ -5,7 +5,7 @@
 We as members, contributors, and leaders pledge to make participation in our
 community a harassment-free experience for everyone, regardless of age, body
 size, visible or invisible disability, ethnicity, sex characteristics, gender
-identity and expression, level of experience, education, socioeconomic status,
+identity and expression, level of experience, education, socio-economic status,
 nationality, personal appearance, race, religion, or sexual identity
 and orientation.

@@ -11,7 +11,7 @@ If you want to implement something big:

 ## Python

-Paperless supports python 3.10 - 3.12 at this time. We format Python code with [ruff](https://docs.astral.sh/ruff/formatter/).
+Paperless supports python 3.9 - 3.11. We format Python code with [Black](https://github.com/psf/black).

 ## Branches

@@ -81,7 +81,7 @@ Some notes about translation:

 If a language has already been added, and you would like to contribute new translations or change existing translations, please read the "Translation" section in the README.md file for further details on that.

-If you would like the project to be translated to another language, first head over to https://crowdin.com/project/paperless-ngx to check if that language has already been enabled for translation.
+If you would like the project to be translated to another language, first head over to https://crwd.in/paperless-ngx to check if that language has already been enabled for translation.
 If not, please request the language to be added by creating an issue on GitHub. The issue should contain:

 - English name of the language (the localized name can be added on Crowdin).
@@ -147,7 +147,7 @@ community members. That said, in an effort to keep the repository organized and
 - Issues, pull requests and discussions that are closed will be locked after 30 days of inactivity.
 - Discussions with a marked answer will be automatically closed.
 - Discussions in the 'General' or 'Support' categories will be closed after 180 days of inactivity.
-- Feature requests that do not meet the following thresholds will be closed: 180 days of inactivity, < 5 "up-votes" after 180 days, < 20 "up-votes" after 1 year or < 80 "up-votes" at 2 years.
+- Feature requests that do not meet the following thresholds will be closed: 5 "up-votes" after 180 days of inactivity or 10 "up-votes" after 365 days.

 In all cases, threads can be re-opened by project maintainers and, of course, users can always create a new discussion for related concerns.
 Finally, remember that all information remains searchable and 'closed' feature requests can still serve as inspiration for new features.
207 Dockerfile
@@ -4,90 +4,40 @@
 # Stage: compile-frontend
 # Purpose: Compiles the frontend
 # Notes:
-# - Does PNPM stuff with Typescript and such
+# - Does NPM stuff with Typescript and such
 FROM --platform=$BUILDPLATFORM docker.io/node:20-bookworm-slim AS compile-frontend

 COPY ./src-ui /src/src-ui

 WORKDIR /src/src-ui
 RUN set -eux \
-&& npm update -g pnpm \
+&& npm update npm -g \
-&& npm install -g corepack@latest \
+&& npm ci
-&& corepack enable \
-&& pnpm install

-ARG PNGX_TAG_VERSION=
-# Add the tag to the environment file if its a tagged dev build
-RUN set -eux && \
-case "${PNGX_TAG_VERSION}" in \
-dev|beta|fix*|feature*) \
-sed -i -E "s/version: '([0-9\.]+)'/version: '\1 #${PNGX_TAG_VERSION}'/g" /src/src-ui/src/environments/environment.prod.ts \
-;; \
-esac

 RUN set -eux \
 && ./node_modules/.bin/ng build --configuration production

-# Stage: s6-overlay-base
+# Stage: pipenv-base
-# Purpose: Installs s6-overlay and rootfs
+# Purpose: Generates a requirements.txt file for building
 # Comments:
-# - Don't leave anything extra in here either
+# - pipenv dependencies are not left in the final image
-FROM ghcr.io/astral-sh/uv:0.6.13-python3.12-bookworm-slim AS s6-overlay-base
+# - pipenv can't touch the final image somehow
+FROM --platform=$BUILDPLATFORM docker.io/python:3.11-alpine as pipenv-base

-WORKDIR /usr/src/s6
+WORKDIR /usr/src/pipenv

-# https://github.com/just-containers/s6-overlay#customizing-s6-overlay-behaviour
+COPY Pipfile* ./
-ENV \
-S6_BEHAVIOUR_IF_STAGE2_FAILS=2 \
-S6_CMD_WAIT_FOR_SERVICES_MAXTIME=0 \
-S6_VERBOSITY=1 \
-PATH=/command:$PATH

-# Buildx provided, must be defined to use though
-ARG TARGETARCH
-ARG TARGETVARIANT
-# Lock this version
-ARG S6_OVERLAY_VERSION=3.2.0.2

-ARG S6_BUILD_TIME_PKGS="curl \
-xz-utils"

 RUN set -eux \
-&& echo "Installing build time packages" \
+&& echo "Installing pipenv" \
-&& apt-get update \
+&& python3 -m pip install --no-cache-dir --upgrade pipenv==2023.12.1 \
-&& apt-get install --yes --quiet --no-install-recommends ${S6_BUILD_TIME_PKGS} \
+&& echo "Generating requirement.txt" \
-&& echo "Determining arch" \
+&& pipenv requirements > requirements.txt
-&& S6_ARCH="" \
-&& if [ "${TARGETARCH}${TARGETVARIANT}" = "amd64" ]; then S6_ARCH="x86_64"; \
-elif [ "${TARGETARCH}${TARGETVARIANT}" = "arm64" ]; then S6_ARCH="aarch64"; fi\
-&& if [ -z "${S6_ARCH}" ]; then { echo "Error: Not able to determine arch"; exit 1; }; fi \
-&& echo "Installing s6-overlay for ${S6_ARCH}" \
-&& curl --fail --silent --no-progress-meter --show-error --location --remote-name-all --parallel --parallel-max 4 \
-"https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-noarch.tar.xz" \
-"https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-noarch.tar.xz.sha256" \
-"https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-${S6_ARCH}.tar.xz" \
-"https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-${S6_ARCH}.tar.xz.sha256" \
-&& echo "Validating s6-archive checksums" \
-&& sha256sum --check ./*.sha256 \
-&& echo "Unpacking archives" \
-&& tar --directory / -Jxpf s6-overlay-noarch.tar.xz \
-&& tar --directory / -Jxpf s6-overlay-${S6_ARCH}.tar.xz \
-&& echo "Removing downloaded archives" \
-&& rm ./*.tar.xz \
-&& rm ./*.sha256 \
-&& echo "Cleaning up image" \
-&& apt-get --yes purge ${S6_BUILD_TIME_PKGS} \
-&& apt-get --yes autoremove --purge \
-&& rm -rf /var/lib/apt/lists/*

-# Copy our service defs and filesystem
-COPY ./docker/rootfs /

 # Stage: main-app
 # Purpose: The final image
 # Comments:
 # - Don't leave anything extra in here
-FROM s6-overlay-base AS main-app
+FROM docker.io/python:3.11-slim-bookworm as main-app

 LABEL org.opencontainers.image.authors="paperless-ngx team <hello@paperless-ngx.com>"
 LABEL org.opencontainers.image.documentation="https://docs.paperless-ngx.com/"
@@ -101,19 +51,16 @@ ARG DEBIAN_FRONTEND=noninteractive
 ARG TARGETARCH

 # Can be workflow provided, defaults set for manual building
-ARG JBIG2ENC_VERSION=0.30
+ARG JBIG2ENC_VERSION=0.29
-ARG QPDF_VERSION=11.9.0
+ARG QPDF_VERSION=11.6.4
-ARG GS_VERSION=10.03.1
+ARG GS_VERSION=10.02.1

 # Set Python environment variables
 ENV PYTHONDONTWRITEBYTECODE=1 \
 PYTHONUNBUFFERED=1 \
-# Ignore warning from Whitenoise about async iterators
+# Ignore warning from Whitenoise
 PYTHONWARNINGS="ignore:::django.http.response:517" \
-PNGX_CONTAINERIZED=1 \
+PNGX_CONTAINERIZED=1
-# https://docs.astral.sh/uv/reference/settings/#link-mode
-UV_LINK_MODE=copy \
-UV_CACHE_DIR=/cache/uv/

 #
 # Begin installation and configuration
@@ -136,6 +83,7 @@ ARG RUNTIME_PACKAGES="\
 icc-profiles-free \
 imagemagick \
 # PostgreSQL
+libpq5 \
 postgresql-client \
 # MySQL / MariaDB
 mariadb-client \
@@ -170,60 +118,124 @@ RUN set -eux \
 && apt-get update \
 && apt-get install --yes --quiet --no-install-recommends ${RUNTIME_PACKAGES} \
 && echo "Installing pre-built updates" \
-&& curl --fail --silent --no-progress-meter --show-error --location --remote-name-all --parallel --parallel-max 4 \
-https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
-https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
-https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
-https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/ghostscript_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
-https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10-common_${GS_VERSION}.dfsg-1_all.deb \
-https://github.com/paperless-ngx/builder/releases/download/jbig2enc-${JBIG2ENC_VERSION}/jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
 && echo "Installing qpdf ${QPDF_VERSION}" \
+&& curl --fail --silent --show-error --location \
+--output libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
+https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
+&& curl --fail --silent --show-error --location \
+--output qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
+https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
 && dpkg --install ./libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
 && dpkg --install ./qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
 && echo "Installing Ghostscript ${GS_VERSION}" \
-&& dpkg --install ./libgs10-common_${GS_VERSION}.dfsg-1_all.deb \
+&& curl --fail --silent --show-error --location \
-&& dpkg --install ./libgs10_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
+--output libgs10_${GS_VERSION}.dfsg-2_${TARGETARCH}.deb \
-&& dpkg --install ./ghostscript_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
+https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
+&& curl --fail --silent --show-error --location \
+--output ghostscript_${GS_VERSION}.dfsg-2_${TARGETARCH}.deb \
+https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/ghostscript_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
+&& curl --fail --silent --show-error --location \
+--output libgs10-common_${GS_VERSION}.dfsg-2_all.deb \
+https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10-common_${GS_VERSION}.dfsg-1_all.deb \
+&& dpkg --install ./libgs10-common_${GS_VERSION}.dfsg-2_all.deb \
+&& dpkg --install ./libgs10_${GS_VERSION}.dfsg-2_${TARGETARCH}.deb \
+&& dpkg --install ./ghostscript_${GS_VERSION}.dfsg-2_${TARGETARCH}.deb \
 && echo "Installing jbig2enc" \
+&& curl --fail --silent --show-error --location \
+--output jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
+https://github.com/paperless-ngx/builder/releases/download/jbig2enc-${JBIG2ENC_VERSION}/jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
 && dpkg --install ./jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
-&& echo "Configuring imagemagick" \
-&& cp /etc/ImageMagick-6/paperless-policy.xml /etc/ImageMagick-6/policy.xml \
 && echo "Cleaning up image layer" \
 && rm --force --verbose *.deb \
-&& rm --recursive --force --verbose /var/lib/apt/lists/*
+&& rm --recursive --force --verbose /var/lib/apt/lists/* \
+&& echo "Installing supervisor" \
+&& python3 -m pip install --default-timeout=1000 --upgrade --no-cache-dir supervisor==4.2.5

+# Copy gunicorn config
+# Changes very infrequently
+WORKDIR /usr/src/paperless/

+COPY gunicorn.conf.py .

+# setup docker-specific things
+# These change sometimes, but rarely
+WORKDIR /usr/src/paperless/src/docker/

+COPY [ \
+"docker/imagemagick-policy.xml", \
+"docker/supervisord.conf", \
+"docker/docker-entrypoint.sh", \
+"docker/docker-prepare.sh", \
+"docker/paperless_cmd.sh", \
+"docker/wait-for-redis.py", \
+"docker/env-from-file.sh", \
+"docker/management_script.sh", \
+"docker/flower-conditional.sh", \
+"docker/install_management_commands.sh", \
+"/usr/src/paperless/src/docker/" \
+]

+RUN set -eux \
+&& echo "Configuring ImageMagick" \
+&& mv imagemagick-policy.xml /etc/ImageMagick-6/policy.xml \
+&& echo "Configuring supervisord" \
+&& mkdir /var/log/supervisord /var/run/supervisord \
+&& mv supervisord.conf /etc/supervisord.conf \
+&& echo "Setting up Docker scripts" \
+&& mv docker-entrypoint.sh /sbin/docker-entrypoint.sh \
+&& chmod 755 /sbin/docker-entrypoint.sh \
+&& mv docker-prepare.sh /sbin/docker-prepare.sh \
+&& chmod 755 /sbin/docker-prepare.sh \
+&& mv wait-for-redis.py /sbin/wait-for-redis.py \
+&& chmod 755 /sbin/wait-for-redis.py \
+&& mv env-from-file.sh /sbin/env-from-file.sh \
+&& chmod 755 /sbin/env-from-file.sh \
+&& mv paperless_cmd.sh /usr/local/bin/paperless_cmd.sh \
+&& chmod 755 /usr/local/bin/paperless_cmd.sh \
+&& mv flower-conditional.sh /usr/local/bin/flower-conditional.sh \
+&& chmod 755 /usr/local/bin/flower-conditional.sh \
+&& echo "Installing management commands" \
+&& chmod +x install_management_commands.sh \
+&& ./install_management_commands.sh

 WORKDIR /usr/src/paperless/src/

 # Python dependencies
 # Change pretty frequently
-COPY --chown=1000:1000 ["pyproject.toml", "uv.lock", "/usr/src/paperless/src/"]
+COPY --from=pipenv-base /usr/src/pipenv/requirements.txt ./

 # Packages needed only for building a few quick Python
 # dependencies
 ARG BUILD_PACKAGES="\
 build-essential \
+git \
+# https://www.psycopg.org/docs/install.html#prerequisites
+libpq-dev \
 # https://github.com/PyMySQL/mysqlclient#linux
 default-libmysqlclient-dev \
 pkg-config"

 # hadolint ignore=DL3042
-RUN --mount=type=cache,target=${UV_CACHE_DIR},id=python-cache \
+RUN --mount=type=cache,target=/root/.cache/pip/,id=pip-cache \
 set -eux \
 && echo "Installing build system packages" \
 && apt-get update \
 && apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
+&& python3 -m pip install --no-cache-dir --upgrade wheel \
 && echo "Installing Python requirements" \
-&& uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt \
+&& python3 -m pip install --default-timeout=1000 --requirement requirements.txt \
-&& uv pip install --system --no-python-downloads --python-preference system --requirements requirements.txt \
+&& echo "Patching whitenoise for compression speedup" \
+&& curl --fail --silent --show-error --location --output 484.patch https://github.com/evansd/whitenoise/pull/484.patch \
+&& patch -d /usr/local/lib/python3.11/site-packages --verbose -p2 < 484.patch \
+&& rm 484.patch \
 && echo "Installing NLTK data" \
 && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" snowball_data \
 && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" stopwords \
-&& python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" punkt_tab \
+&& python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" punkt \
 && echo "Cleaning up image" \
 && apt-get --yes purge ${BUILD_PACKAGES} \
 && apt-get --yes autoremove --purge \
 && apt-get clean --yes \
-&& rm --recursive --force --verbose *.whl \
 && rm --recursive --force --verbose /var/lib/apt/lists/* \
 && rm --recursive --force --verbose /tmp/* \
 && rm --recursive --force --verbose /var/tmp/* \
@@ -239,7 +251,6 @@ COPY --from=compile-frontend --chown=1000:1000 /src/src/documents/static/fronten
 # add users, setup scripts
 # Mount the compiled frontend to expected location
 RUN set -eux \
-&& sed -i '1s|^#!/usr/bin/env python3|#!/command/with-contenv python3|' manage.py \
 && echo "Setting up user/group" \
 && addgroup --gid 1000 paperless \
 && useradd --uid 1000 --gid paperless --home-dir /usr/src/paperless paperless \
@@ -248,21 +259,21 @@ RUN set -eux \
 && mkdir --parents --verbose /usr/src/paperless/media \
 && mkdir --parents --verbose /usr/src/paperless/consume \
 && mkdir --parents --verbose /usr/src/paperless/export \
-&& echo "Creating gnupg directory" \
-&& mkdir -m700 --verbose /usr/src/paperless/.gnupg \
 && echo "Adjusting all permissions" \
 && chown --from root:root --changes --recursive paperless:paperless /usr/src/paperless \
 && echo "Collecting static files" \
-&& s6-setuidgid paperless python3 manage.py collectstatic --clear --no-input --link \
+&& gosu paperless python3 manage.py collectstatic --clear --no-input --link \
-&& s6-setuidgid paperless python3 manage.py compilemessages
+&& gosu paperless python3 manage.py compilemessages

 VOLUME ["/usr/src/paperless/data", \
 "/usr/src/paperless/media", \
 "/usr/src/paperless/consume", \
 "/usr/src/paperless/export"]

-ENTRYPOINT ["/init"]
+ENTRYPOINT ["/sbin/docker-entrypoint.sh"]

 EXPOSE 8000

+CMD ["/usr/local/bin/paperless_cmd.sh"]

 HEALTHCHECK --interval=30s --timeout=10s --retries=5 CMD [ "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000" ]
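Both sides of this Dockerfile are ordinary multi-stage builds, so either revision can be exercised locally with a plain `docker build`; a minimal sketch (the tag and the overridden build arg are illustrative, not taken from the workflow):

```bash
# Build from the repository root. ARGs declared in the Dockerfile
# (QPDF_VERSION, GS_VERSION, JBIG2ENC_VERSION) keep their defaults
# unless overridden with --build-arg.
docker build \
  --file Dockerfile \
  --build-arg QPDF_VERSION=11.9.0 \
  --tag paperless-ngx:local \
  .
```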
96 Pipfile Normal file
@@ -0,0 +1,96 @@
+[[source]]
+url = "https://pypi.python.org/simple"
+verify_ssl = true
+name = "pypi"
+
+[packages]
+dateparser = "~=1.2"
+# WARNING: django does not use semver.
+# Only patch versions are guaranteed to not introduce breaking changes.
+django = "~=4.2.11"
+django-allauth = "*"
+django-auditlog = "*"
+django-celery-results = "*"
+django-compression-middleware = "*"
+django-cors-headers = "*"
+django-extensions = "*"
+django-filter = "~=24.1"
+django-guardian = "*"
+django-multiselectfield = "*"
+djangorestframework = "==3.14.0"
+djangorestframework-guardian = "*"
+drf-writable-nested = "*"
+bleach = "*"
+celery = {extras = ["redis"], version = "*"}
+channels = "~=4.0"
+channels-redis = "*"
+concurrent-log-handler = "*"
+filelock = "*"
+flower = "*"
+gotenberg-client = "*"
+gunicorn = "*"
+imap-tools = "*"
+inotifyrecursive = "~=0.3"
+langdetect = "*"
+mysqlclient = "*"
+nltk = "*"
+ocrmypdf = "~=15.4"
+pathvalidate = "*"
+pdf2image = "*"
+psycopg2 = "*"
+python-dateutil = "*"
+python-dotenv = "*"
+python-gnupg = "*"
+python-ipware = "*"
+python-magic = "*"
+pyzbar = "*"
+rapidfuzz = "*"
+redis = {extras = ["hiredis"], version = "*"}
+scikit-learn = "~=1.4"
+setproctitle = "*"
+tika-client = "*"
+tqdm = "*"
+uvicorn = {extras = ["standard"], version = "==0.25.0"}
+watchdog = "~=4.0"
+whitenoise = "~=6.6"
+whoosh="~=2.7"
+zxing-cpp = {version = "*", platform_machine = "== 'x86_64'"}
+
+[dev-packages]
+# Linting
+black = "*"
+pre-commit = "*"
+ruff = "*"
+# Testing
+factory-boy = "*"
+pytest = "*"
+pytest-cov = "*"
+pytest-django = "*"
+pytest-httpx = "*"
+pytest-env = "*"
+pytest-sugar = "*"
+pytest-xdist = "*"
+pytest-rerunfailures = "*"
+imagehash = "*"
+daphne = "*"
+# Documentation
+mkdocs-material = "*"
+mkdocs-glightbox = "*"
+
+[typing-dev]
+mypy = "*"
+types-Pillow = "*"
+django-filter-stubs = "*"
+types-python-dateutil = "*"
+djangorestframework-stubs = {extras= ["compatible-mypy"], version="*"}
+celery-types = "*"
+django-stubs = {extras= ["compatible-mypy"], version="*"}
+types-dateparser = "*"
+types-bleach = "*"
+types-redis = "*"
+types-tqdm = "*"
+types-Markdown = "*"
+types-Pygments = "*"
+types-colorama = "*"
+types-psycopg2 = "*"
+types-setuptools = "*"
4323 Pipfile.lock generated Normal file
File diff suppressed because it is too large.
14 README.md
@@ -30,14 +30,14 @@ Thanks to the generous folks at [DigitalOcean](https://m.do.co/c/8d70b916d462),
 - [Translation](#translation)
 - [Feature Requests](#feature-requests)
 - [Bugs](#bugs)
-- [Related Projects](#related-projects)
+- [Affiliated Projects](#affiliated-projects)
 - [Important Note](#important-note)

 <p align="right">This project is supported by:<br/>
 <a href="https://m.do.co/c/8d70b916d462" style="padding-top: 4px; display: block;">
 <picture>
 <source media="(prefers-color-scheme: dark)" srcset="https://opensource.nyc3.cdn.digitaloceanspaces.com/attribution/assets/SVG/DO_Logo_horizontal_white.svg" width="140px">
-<source media="(prefers-color-scheme: light)" srcset="https://opensource.nyc3.cdn.digitaloceanspaces.com/attribution/assets/SVG/DO_Logo_horizontal_blue.svg" width="140px">
+<source media="(prefers-color-scheme: light)" srcset="https://opensource.nyc3.cdn.digitaloceanspaces.com/attribution/assets/SVG/DO_Logo_horizontal_black_.svg" width="140px">
 <img src="https://opensource.nyc3.cdn.digitaloceanspaces.com/attribution/assets/SVG/DO_Logo_horizontal_black_.svg" width="140px">
 </picture>
 </a>
@@ -55,7 +55,7 @@ A full list of [features](https://docs.paperless-ngx.com/#features) and [screens

 # Getting started

-The easiest way to deploy paperless is `docker compose`. The files in the [`/docker/compose` directory](https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose) are configured to pull the image from the GitHub container registry.
+The easiest way to deploy paperless is `docker compose`. The files in the [`/docker/compose` directory](https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose) are configured to pull the image from GitHub Packages.

 If you'd like to jump right in, you can configure a `docker compose` environment with our install script:

@@ -63,7 +63,7 @@ If you'd like to jump right in, you can configure a `docker compose` environment
 bash -c "$(curl -L https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/install-paperless-ngx.sh)"
 ```

-More details and step-by-step guides for alternative installation methods can be found in [the documentation](https://docs.paperless-ngx.com/setup/#installation).
+Alternatively, you can install the dependencies and setup apache and a database server yourself. The [documentation](https://docs.paperless-ngx.com/setup/#installation) has a step by step guide on how to do it.

 Migrating from Paperless-ng is easy, just drop in the new docker image! See the [documentation on migrating](https://docs.paperless-ngx.com/setup/#migrating-to-paperless-ngx) for more details.

@@ -83,7 +83,7 @@ People interested in continuing the work on paperless-ngx are encouraged to reac

 ## Translation

Paperless-ngx is available in many languages that are coordinated on Crowdin. If you want to help out by translating paperless-ngx into your language, please head over to https://crowdin.com/project/paperless-ngx, and thank you! More details can be found in [CONTRIBUTING.md](https://github.com/paperless-ngx/paperless-ngx/blob/main/CONTRIBUTING.md#translating-paperless-ngx).
|
Paperless-ngx is available in many languages that are coordinated on Crowdin. If you want to help out by translating paperless-ngx into your language, please head over to https://crwd.in/paperless-ngx, and thank you! More details can be found in [CONTRIBUTING.md](https://github.com/paperless-ngx/paperless-ngx/blob/main/CONTRIBUTING.md#translating-paperless-ngx).
|
||||||
|
|
||||||
## Feature Requests
|
## Feature Requests
|
||||||
|
|
||||||
@ -93,9 +93,9 @@ Feature requests can be submitted via [GitHub Discussions](https://github.com/pa
|
|||||||
|
|
||||||
For bugs please [open an issue](https://github.com/paperless-ngx/paperless-ngx/issues) or [start a discussion](https://github.com/paperless-ngx/paperless-ngx/discussions) if you have questions.
|
For bugs please [open an issue](https://github.com/paperless-ngx/paperless-ngx/issues) or [start a discussion](https://github.com/paperless-ngx/paperless-ngx/discussions) if you have questions.
|
||||||
|
|
||||||
# Related Projects
|
# Affiliated Projects
|
||||||
|
|
||||||
Please see [the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Related-Projects) for a user-maintained list of related projects and software that is compatible with Paperless-ngx.
|
Please see [the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Affiliated-Projects) for a user-maintained list of affiliated projects and software that is compatible with Paperless-ngx.
|
||||||
|
|
||||||
# Important Note
|
# Important Note
|
||||||
|
|
||||||
|
@ -3,9 +3,10 @@
# Can be used locally or by the CI to start the necessary containers with the
# correct networking for the tests

version: "3.7"
services:
gotenberg:
image: docker.io/gotenberg/gotenberg:8.19
image: docker.io/gotenberg/gotenberg:7.10
hostname: gotenberg
container_name: gotenberg
network_mode: host
@ -19,7 +20,7 @@ services:
- "--log-level=warn"
- "--log-format=text"
tika:
image: docker.io/apache/tika:latest
image: ghcr.io/paperless-ngx/tika:latest
hostname: tika
container_name: tika
network_mode: host
@ -1,17 +1,26 @@
###############################################################################
# Paperless-ngx settings #
###############################################################################

# See http://docs.paperless-ngx.com/configuration/ for all available options.

# The UID and GID of the user used to run paperless in the container. Set this
# to your UID and GID on the host so that you have write access to the
# consumption directory.
#USERMAP_UID=1000
#USERMAP_GID=1000

# See the documentation linked above for all options. A few commonly adjusted settings
# are provided below.
# Additional languages to install for text recognition, separated by a
# whitespace. Note that this is
# different from PAPERLESS_OCR_LANGUAGE (default=eng), which defines the
# language used for OCR.
# The container installs English, German, Italian, Spanish and French by
# default.
# See https://packages.debian.org/search?keywords=tesseract-ocr-&searchon=names&suite=buster
# for available languages.
#PAPERLESS_OCR_LANGUAGES=tur ces

###############################################################################
# Paperless-specific settings #
###############################################################################

# All settings defined in the paperless.conf.example can be used here. The
# Docker setup does not use the configuration file.
# A few commonly adjusted settings are provided below.

# This is required if you will be exposing Paperless-ngx on a public domain
# (if doing so please consider security measures such as reverse proxy)
@ -21,17 +30,13 @@
# be a very long sequence of random characters. You don't need to remember it.
#PAPERLESS_SECRET_KEY=change-me

# Use this variable to set a timezone for the Paperless Docker containers. Defaults to UTC.
# Use this variable to set a timezone for the Paperless Docker containers. If not specified, defaults to UTC.
#PAPERLESS_TIME_ZONE=America/Los_Angeles

# The default language to use for OCR. Set this to the language most of your
# documents are written in.
#PAPERLESS_OCR_LANGUAGE=eng

# Additional languages to install for text recognition, separated by a whitespace.
# Note that this is different from PAPERLESS_OCR_LANGUAGE (default=eng), which defines
# the language used for OCR.
# The container installs English, German, Italian, Spanish and French by default.
# See https://packages.debian.org/search?keywords=tesseract-ocr-&searchon=names&suite=buster
# for available languages.
#PAPERLESS_OCR_LANGUAGES=tur ces
# Set if accessing paperless via a domain subpath e.g. https://domain.com/PATHPREFIX and using a reverse-proxy like traefik or nginx
#PAPERLESS_FORCE_SCRIPT_NAME=/PATHPREFIX
#PAPERLESS_STATIC_URL=/PATHPREFIX/static/ # trailing slash required
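A note on the USERMAP settings shown above: the values should match the IDs of the host user that owns the consumption directory. The following is only a hedged sketch of looking them up with the standard coreutils `id` command; the `docker-compose.env` file name follows the compose instructions elsewhere in this diff, and in practice you would uncomment and edit the existing `#USERMAP_UID` / `#USERMAP_GID` lines rather than append new ones.

```bash
# Print the current host user's numeric UID and GID (GNU coreutils `id`).
id -u
id -g

# Example only: write the values into a local docker-compose.env.
echo "USERMAP_UID=$(id -u)" >> docker-compose.env
echo "USERMAP_GID=$(id -g)" >> docker-compose.env
```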
@ -24,12 +24,13 @@
# - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
# and '.env' into a folder.
# - Run 'docker compose pull'.
# - Run 'docker compose run --rm webserver createsuperuser' to create a user.
# - Run 'docker compose up -d'.

#
# For more extensive installation and update instructions, refer to the
# documentation.

version: "3.4"
services:
broker:
image: docker.io/library/redis:7
@ -38,7 +39,7 @@ services:
- redisdata:/data

db:
image: docker.io/library/mariadb:11
image: docker.io/library/mariadb:10
restart: unless-stopped
volumes:
- dbdata:/var/lib/mysql
@ -77,7 +78,7 @@ services:
PAPERLESS_TIKA_ENDPOINT: http://tika:9998

gotenberg:
image: docker.io/gotenberg/gotenberg:8.19
image: docker.io/gotenberg/gotenberg:7.10
restart: unless-stopped
# The gotenberg chromium route is used to convert .eml files. We do not
# want to allow external content like tracking pixels or even javascript.
@ -87,7 +88,7 @@ services:
- "--chromium-allow-list=file:///tmp/.*"

tika:
image: docker.io/apache/tika:latest
image: ghcr.io/paperless-ngx/tika:latest
restart: unless-stopped

volumes:
@ -20,11 +20,13 @@
# - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
# and '.env' into a folder.
# - Run 'docker compose pull'.
# - Run 'docker compose run --rm webserver createsuperuser' to create a user.
# - Run 'docker compose up -d'.
#
# For more extensive installation and update instructions, refer to the
# documentation.

version: "3.4"
services:
broker:
image: docker.io/library/redis:7
@ -33,7 +35,7 @@ services:
- redisdata:/data

db:
image: docker.io/library/mariadb:11
image: docker.io/library/mariadb:10
restart: unless-stopped
volumes:
- dbdata:/var/lib/mysql
@ -19,13 +19,16 @@
#
# - Open portainer Stacks list and click 'Add stack'
# - Paste the contents of this file and assign a name, e.g. 'paperless'
# - Upload 'docker-compose.env' by clicking on 'Load variables from .env file'
# - Modify the environment variables as needed
# - Click 'Deploy the stack' and wait for it to be deployed
# - Open the list of containers, select paperless_webserver_1
# - Click 'Console' and then 'Connect' to open the command line inside the container
# - Run 'python3 manage.py createsuperuser' to create a user
# - Exit the console
#
# For more extensive installation and update instructions, refer to the
# documentation.

version: "3.4"
services:
broker:
image: docker.io/library/redis:7
@ -34,7 +37,7 @@ services:
- redisdata:/data

db:
image: docker.io/library/postgres:17
image: docker.io/library/postgres:15
restart: unless-stopped
volumes:
- pgdata:/var/lib/postgresql/data
@ -59,8 +62,28 @@ services:
environment:
PAPERLESS_REDIS: redis://broker:6379
PAPERLESS_DBHOST: db
env_file:
- stack.env
# The UID and GID of the user used to run paperless in the container. Set this
# to your UID and GID on the host so that you have write access to the
# consumption directory.
USERMAP_UID: 1000
USERMAP_GID: 100
# Additional languages to install for text recognition, separated by a
# whitespace. Note that this is
# different from PAPERLESS_OCR_LANGUAGE (default=eng), which defines the
# language used for OCR.
# The container installs English, German, Italian, Spanish and French by
# default.
# See https://packages.debian.org/search?keywords=tesseract-ocr-&searchon=names&suite=buster
# for available languages.
#PAPERLESS_OCR_LANGUAGES: tur ces
# Adjust this key if you plan to make paperless available publicly. It should
# be a very long sequence of random characters. You don't need to remember it.
#PAPERLESS_SECRET_KEY: change-me
# Use this variable to set a timezone for the Paperless Docker containers. If not specified, defaults to UTC.
#PAPERLESS_TIME_ZONE: America/Los_Angeles
# The default language to use for OCR. Set this to the language most of your
# documents are written in.
#PAPERLESS_OCR_LANGUAGE: eng

volumes:
data:
@ -24,11 +24,13 @@
# - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
# and '.env' into a folder.
# - Run 'docker compose pull'.
# - Run 'docker compose run --rm webserver createsuperuser' to create a user.
# - Run 'docker compose up -d'.
#
# For more extensive installation and update instructions, refer to the
# documentation.

version: "3.4"
services:
broker:
image: docker.io/library/redis:7
@ -37,7 +39,7 @@ services:
- redisdata:/data

db:
image: docker.io/library/postgres:17
image: docker.io/library/postgres:15
restart: unless-stopped
volumes:
- pgdata:/var/lib/postgresql/data
@ -70,7 +72,7 @@ services:
PAPERLESS_TIKA_ENDPOINT: http://tika:9998

gotenberg:
image: docker.io/gotenberg/gotenberg:8.19
image: docker.io/gotenberg/gotenberg:7.10
restart: unless-stopped

# The gotenberg chromium route is used to convert .eml files. We do not
@ -81,7 +83,7 @@ services:
- "--chromium-allow-list=file:///tmp/.*"

tika:
image: docker.io/apache/tika:latest
image: ghcr.io/paperless-ngx/tika:latest
restart: unless-stopped

volumes:
@ -20,11 +20,13 @@
# - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
# and '.env' into a folder.
# - Run 'docker compose pull'.
# - Run 'docker compose run --rm webserver createsuperuser' to create a user.
# - Run 'docker compose up -d'.
#
# For more extensive installation and update instructions, refer to the
# documentation.

version: "3.4"
services:
broker:
image: docker.io/library/redis:7
@ -33,7 +35,7 @@ services:
- redisdata:/data

db:
image: docker.io/library/postgres:17
image: docker.io/library/postgres:15
restart: unless-stopped
volumes:
- pgdata:/var/lib/postgresql/data
@ -24,11 +24,13 @@
# - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
# and '.env' into a folder.
# - Run 'docker compose pull'.
# - Run 'docker compose run --rm webserver createsuperuser' to create a user.
# - Run 'docker compose up -d'.
#
# For more extensive installation and update instructions, refer to the
# documentation.

version: "3.4"
services:
broker:
image: docker.io/library/redis:7
@ -58,7 +60,7 @@ services:
PAPERLESS_TIKA_ENDPOINT: http://tika:9998

gotenberg:
image: docker.io/gotenberg/gotenberg:8.19
image: docker.io/gotenberg/gotenberg:7.10
restart: unless-stopped

# The gotenberg chromium route is used to convert .eml files. We do not
@ -69,7 +71,7 @@ services:
- "--chromium-allow-list=file:///tmp/.*"

tika:
image: docker.io/apache/tika:latest
image: ghcr.io/paperless-ngx/tika:latest
restart: unless-stopped

volumes:
@ -17,11 +17,13 @@
# - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
# and '.env' into a folder.
# - Run 'docker compose pull'.
# - Run 'docker compose run --rm webserver createsuperuser' to create a user.
# - Run 'docker compose up -d'.
#
# For more extensive installation and update instructions, refer to the
# documentation.

version: "3.4"
services:
broker:
image: docker.io/library/redis:7
@ -10,8 +10,8 @@ map_uidgid() {
local -r usermap_new_gid=${USERMAP_GID:-${usermap_original_gid:-$usermap_new_uid}}
if [[ ${usermap_new_uid} != "${usermap_original_uid}" || ${usermap_new_gid} != "${usermap_original_gid}" ]]; then
echo "Mapping UID and GID for paperless:paperless to $usermap_new_uid:$usermap_new_gid"
usermod --non-unique --uid "${usermap_new_uid}" paperless
usermod -o -u "${usermap_new_uid}" paperless
groupmod --non-unique --gid "${usermap_new_gid}" paperless
groupmod -o -g "${usermap_new_gid}" paperless
fi
}

@ -42,7 +42,7 @@ custom_container_init() {
fi

# Make sure custom init directory has files in it
if [ -n "$(/bin/ls --almost-all "${custom_script_dir}" 2>/dev/null)" ]; then
if [ -n "$(/bin/ls -A "${custom_script_dir}" 2>/dev/null)" ]; then
echo "[custom-init] files found in ${custom_script_dir} executing"
# Loop over files in the directory
for SCRIPT in "${custom_script_dir}"/*; do
@ -86,13 +86,13 @@ initialize() {
"${CONSUME_DIR}"; do
if [[ ! -d "${dir}" ]]; then
echo "Creating directory ${dir}"
mkdir --parents --verbose "${dir}"
mkdir --parents "${dir}"
fi
done

local -r tmp_dir="${PAPERLESS_SCRATCH_DIR:=/tmp/paperless}"
echo "Creating directory scratch directory ${tmp_dir}"
mkdir --parents --verbose "${tmp_dir}"
mkdir --parents "${tmp_dir}"

set +e
echo "Adjusting permissions of paperless files. This may take a while."
@ -102,7 +102,7 @@ initialize() {
"${DATA_DIR}" \
"${MEDIA_ROOT_DIR}" \
"${CONSUME_DIR}"; do
find "${dir}" -not \( -user paperless -and -group paperless \) -exec chown --changes paperless:paperless {} +
find "${dir}" -not \( -user paperless -and -group paperless \) -exec chown paperless:paperless {} +
done
set -e

@ -122,44 +122,33 @@ install_languages() {
if [ ${#langs[@]} -eq 0 ]; then
return
fi
apt-get update

# Build list of packages to install
to_install=()
for lang in "${langs[@]}"; do
pkg="tesseract-ocr-$lang"

if dpkg --status "$pkg" &>/dev/null; then
if dpkg -s "$pkg" &>/dev/null; then
echo "Package $pkg already installed!"
continue
else
fi
to_install+=("$pkg")
if ! apt-cache show "$pkg" &>/dev/null; then
echo "Package $pkg not found! :("
continue
fi

echo "Installing package $pkg..."
if ! apt-get -y install "$pkg" &>/dev/null; then
echo "Could not install $pkg"
exit 1
fi
done

# Use apt only when we install packages
if [ ${#to_install[@]} -gt 0 ]; then
apt-get update

for pkg in "${to_install[@]}"; do

if ! apt-cache show "$pkg" &>/dev/null; then
echo "Skipped $pkg: Package not found! :("
continue
fi

echo "Installing package $pkg..."
if ! apt-get --assume-yes install "$pkg" &>/dev/null; then
echo "Could not install $pkg"
exit 1
fi
done
fi
}

echo "Paperless-ngx docker container starting..."

gosu_cmd=(gosu paperless)
if [ "$(id --user)" == "$(id --user paperless)" ]; then
if [ "$(id -u)" == "$(id -u paperless)" ]; then
gosu_cmd=()
fi
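For clarity, both versions of `install_languages` above map each entry of `PAPERLESS_OCR_LANGUAGES` onto a `tesseract-ocr-<lang>` Debian package before installing it. The following standalone sketch (not part of either script) only echoes the package names that would be requested for an example value:

```bash
#!/usr/bin/env bash
# Illustration only: show which Tesseract language packs the entrypoint
# would try to install for a given PAPERLESS_OCR_LANGUAGES value.
PAPERLESS_OCR_LANGUAGES="tur ces"   # example value, as in docker-compose.env

read -r -a langs <<<"${PAPERLESS_OCR_LANGUAGES}"
for lang in "${langs[@]}"; do
    echo "would install: tesseract-ocr-${lang}"
done
```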
118  docker/docker-prepare.sh  Executable file
@ -0,0 +1,118 @@
#!/usr/bin/env bash

set -e

wait_for_postgres() {
local attempt_num=1
local -r max_attempts=5

echo "Waiting for PostgreSQL to start..."

local -r host="${PAPERLESS_DBHOST:-localhost}"
local -r port="${PAPERLESS_DBPORT:-5432}"

# Disable warning, host and port can't have spaces
# shellcheck disable=SC2086
while [ ! "$(pg_isready -h ${host} -p ${port})" ]; do

if [ $attempt_num -eq $max_attempts ]; then
echo "Unable to connect to database."
exit 1
else
echo "Attempt $attempt_num failed! Trying again in 5 seconds..."
fi

attempt_num=$(("$attempt_num" + 1))
sleep 5
done
}

wait_for_mariadb() {
echo "Waiting for MariaDB to start..."

local -r host="${PAPERLESS_DBHOST:=localhost}"
local -r port="${PAPERLESS_DBPORT:=3306}"

local attempt_num=1
local -r max_attempts=5

# Disable warning, host and port can't have spaces
# shellcheck disable=SC2086
while ! true > /dev/tcp/$host/$port; do

if [ $attempt_num -eq $max_attempts ]; then
echo "Unable to connect to database."
exit 1
else
echo "Attempt $attempt_num failed! Trying again in 5 seconds..."

fi

attempt_num=$(("$attempt_num" + 1))
sleep 5
done
}

wait_for_redis() {
# We use a Python script to send the Redis ping
# instead of installing redis-tools just for 1 thing
if ! python3 /sbin/wait-for-redis.py; then
exit 1
fi
}

migrations() {
(
# flock is in place to prevent multiple containers from doing migrations
# simultaneously. This also ensures that the db is ready when the command
# of the current container starts.
flock 200
echo "Apply database migrations..."
python3 manage.py migrate --skip-checks --no-input
) 200>"${DATA_DIR}/migration_lock"
}

django_checks() {
# Explicitly run the Django system checks
echo "Running Django checks"
python3 manage.py check
}

search_index() {

local -r index_version=8
local -r index_version_file=${DATA_DIR}/.index_version

if [[ (! -f "${index_version_file}") || $(<"${index_version_file}") != "$index_version" ]]; then
echo "Search index out of date. Updating..."
python3 manage.py document_index reindex --no-progress-bar
echo ${index_version} | tee "${index_version_file}" >/dev/null
fi
}

superuser() {
if [[ -n "${PAPERLESS_ADMIN_USER}" ]]; then
python3 manage.py manage_superuser
fi
}

do_work() {
if [[ "${PAPERLESS_DBENGINE}" == "mariadb" ]]; then
wait_for_mariadb
elif [[ -n "${PAPERLESS_DBHOST}" ]]; then
wait_for_postgres
fi

wait_for_redis

migrations

django_checks

search_index

superuser

}

do_work
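The `migrations` function above serializes database migrations across containers by taking an exclusive `flock` on a file under `DATA_DIR`. A minimal, self-contained illustration of that locking pattern (independent of paperless, with an arbitrary lock file path chosen for the demo):

```bash
#!/usr/bin/env bash
# Run two copies of this script at the same time: the second one blocks on
# `flock 200` until the first releases the lock by closing the file descriptor.
(
    flock 200
    echo "$$ acquired the lock, doing one-time work..."
    sleep 2
    echo "$$ done"
) 200>/tmp/demo_migration_lock
```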
42  docker/env-from-file.sh  Normal file
@ -0,0 +1,42 @@
#!/usr/bin/env bash

# Scans the environment variables for those with the suffix _FILE
# When located, checks the file exists, and exports the contents
# of the file as the same name, minus the suffix
# This allows the use of Docker secrets or mounted files
# to fill in any of the settings configurable via environment
# variables

set -eu

for line in $(printenv)
do
# Extract the name of the environment variable
env_name=${line%%=*}
# Check if it starts with "PAPERLESS_" and ends in "_FILE"
if [[ ${env_name} == PAPERLESS_*_FILE ]]; then
# This should have been named different..
if [[ ${env_name} == "PAPERLESS_OCR_SKIP_ARCHIVE_FILE" ]]; then
continue
fi
# Extract the value of the environment
env_value=${line#*=}

# Check the file exists
if [[ -f ${env_value} ]]; then

# Trim off the _FILE suffix
non_file_env_name=${env_name%"_FILE"}
echo "Setting ${non_file_env_name} from file"

# Reads the value from the file
val="$(< "${!env_name}")"

# Sets the normal name to the read file contents
export "${non_file_env_name}"="${val}"

else
echo "File ${env_value} referenced by ${env_name} doesn't exist"
fi
fi
done
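To illustrate the `_FILE` convention implemented by the script above (a sketch only, not project documentation): if a `PAPERLESS_*_FILE` variable points at a readable file, sourcing the script exports the corresponding variable without the suffix. `PAPERLESS_DBPASS` is used here purely as an example name, and the paths are made up for the demo.

```bash
# Provide a value via a file, e.g. a mounted Docker secret.
echo 'supersecret' > /tmp/example_db_pass
export PAPERLESS_DBPASS_FILE=/tmp/example_db_pass

# The script uses `export`, so it must be sourced (the management script in
# this diff sources it from /sbin/env-from-file.sh inside the image).
source ./env-from-file.sh

echo "${PAPERLESS_DBPASS}"   # prints: supersecret
```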
12  docker/flower-conditional.sh  Normal file
@ -0,0 +1,12 @@
#!/usr/bin/env bash

echo "Checking if we should start flower..."

if [[ -n "${PAPERLESS_ENABLE_FLOWER}" ]]; then
# Small delay to allow celery to be up first
echo "Starting flower in 5s"
sleep 5
celery --app paperless flower --conf=/usr/src/paperless/src/paperless/flowerconfig.py
else
echo "Not starting flower"
fi
Binary file not shown (before: 30 KiB).
@ -1,7 +1,5 @@
#!/usr/bin/env bash

# Run this script to generate the management commands again (for example if a new command is created or the template is updated)

set -eu

for command in decrypt_documents \
@ -16,12 +14,9 @@ for command in decrypt_documents \
document_thumbnails \
document_sanity_checker \
document_fuzzy_match \
manage_superuser \
manage_superuser;
convert_mariadb_uuid \
prune_audit_logs \
createsuperuser;
do
echo "installing $command..."
sed "s/management_command/$command/g" management_script.sh >"$PWD/rootfs/usr/local/bin/$command"
sed "s/management_command/$command/g" management_script.sh > /usr/local/bin/$command
chmod u=rwx,g=rwx,o=rx "$PWD/rootfs/usr/local/bin/$command"
chmod +x /usr/local/bin/$command
done
@ -1,13 +1,17 @@
#!/command/with-contenv /usr/bin/bash
#!/usr/bin/env bash
# shellcheck shell=bash

set -e

cd "${PAPERLESS_SRC_DIR}"
cd /usr/src/paperless/src/
# This ensures environment is setup
# shellcheck disable=SC1091
source /sbin/env-from-file.sh

if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py management_command "$@"
elif [[ $(id -un) == "paperless" ]]; then
if [[ $(id -u) == 0 ]] ;
then
gosu paperless python3 manage.py management_command "$@"
elif [[ $(id -un) == "paperless" ]] ;
then
python3 manage.py management_command "$@"
else
echo "Unknown user."
16  docker/paperless_cmd.sh  Executable file
@ -0,0 +1,16 @@
#!/usr/bin/env bash

SUPERVISORD_WORKING_DIR="${PAPERLESS_SUPERVISORD_WORKING_DIR:-$PWD}"
rootless_args=()
if [ "$(id -u)" == "$(id -u paperless)" ]; then
rootless_args=(
--user
paperless
--logfile
"${SUPERVISORD_WORKING_DIR}/supervisord.log"
--pidfile
"${SUPERVISORD_WORKING_DIR}/supervisord.pid"
)
fi

exec /usr/local/bin/supervisord -c /etc/supervisord.conf "${rootless_args[@]}"
@ -1,8 +0,0 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash
declare -r log_prefix="[init-complete]"
declare -r end_time=$(date +%s)
declare -r start_time=${PAPERLESS_START_TIME_S}

echo "${log_prefix} paperless-ngx docker container init completed in $(($end_time-$start_time)) seconds"
echo "${log_prefix} Starting services"
@ -1 +0,0 @@
oneshot
@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/init-complete/run
@ -1,44 +0,0 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash

declare -r log_prefix="[custom-init]"

# Mostly borrowed from the LinuxServer.io base image
# https://github.com/linuxserver/docker-baseimage-ubuntu/tree/bionic/root/etc/cont-init.d
declare -r custom_script_dir="/custom-cont-init.d"

# Tamper checking.
# Don't run files which are owned by anyone except root
# Don't run files which are writeable by others
if [ -d "${custom_script_dir}" ]; then
if [ -n "$(/usr/bin/find "${custom_script_dir}" -maxdepth 1 ! -user root)" ]; then
echo "${log_prefix} **** Potential tampering with custom scripts detected ****"
echo "${log_prefix} **** The folder '${custom_script_dir}' must be owned by root ****"
exit 0
fi
if [ -n "$(/usr/bin/find "${custom_script_dir}" -maxdepth 1 -perm -o+w)" ]; then
echo "${log_prefix} **** The folder '${custom_script_dir}' or some of contents have write permissions for others, which is a security risk. ****"
echo "${log_prefix} **** Please review the permissions and their contents to make sure they are owned by root, and can only be modified by root. ****"
exit 0
fi

# Make sure custom init directory has files in it
if [ -n "$(/bin/ls --almost-all "${custom_script_dir}" 2>/dev/null)" ]; then
echo "${log_prefix} files found in ${custom_script_dir} executing"
# Loop over files in the directory
for SCRIPT in "${custom_script_dir}"/*; do
NAME="$(basename "${SCRIPT}")"
if [ -f "${SCRIPT}" ]; then
echo "${log_prefix} ${NAME}: executing..."
/command/with-contenv /bin/bash "${SCRIPT}"
echo "${log_prefix} ${NAME}: exited $?"
elif [ ! -f "${SCRIPT}" ]; then
echo "${log_prefix} ${NAME}: is not a file"
fi
done
else
echo "${log_prefix} no custom files found exiting..."
fi
else
echo "${log_prefix} ${custom_script_dir} doesn't exist, nothing to do"
fi
@ -1 +0,0 @@
oneshot
@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/init-custom-init/run
@ -1,33 +0,0 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash

declare -r log_prefix="[env-init]"

echo "${log_prefix} Checking for environment from files"

if find /run/s6/container_environment/*"_FILE" -maxdepth 1 > /dev/null 2>&1; then
for FILENAME in /run/s6/container_environment/*; do
if [[ "${FILENAME##*/}" == PAPERLESS_*_FILE ]]; then
# This should have been named different..
if [[ ${FILENAME} == "PAPERLESS_OCR_SKIP_ARCHIVE_FILE" || ${FILENAME} == "PAPERLESS_MODEL_FILE" ]]; then
continue
fi
SECRETFILE=$(cat "${FILENAME}")
# Check the file exists
if [[ -f ${SECRETFILE} ]]; then
# Trim off trailing _FILE
FILESTRIP=${FILENAME//_FILE/}
if [[ $(tail -n1 "${SECRETFILE}" | wc -l) != 0 ]]; then
echo "${log_prefix} Your secret: ${FILENAME##*/} contains a trailing newline and may not work as expected"
fi
# Set environment variable
cat "${SECRETFILE}" > "${FILESTRIP}"
echo "${log_prefix} ${FILESTRIP##*/} set from ${FILENAME##*/}"
else
echo "${log_prefix} cannot find secret in ${FILENAME##*/}"
fi
fi
done
else
echo "${log_prefix} No *_FILE environment found"
fi
@ -1 +0,0 @@
oneshot
@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/init-env-file/run
@ -1,33 +0,0 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash

declare -r log_prefix="[init-folders]"

declare -r export_dir="/usr/src/paperless/export"
declare -r data_dir="${PAPERLESS_DATA_DIR:-/usr/src/paperless/data}"
declare -r media_root_dir="${PAPERLESS_MEDIA_ROOT:-/usr/src/paperless/media}"
declare -r consume_dir="${PAPERLESS_CONSUMPTION_DIR:-/usr/src/paperless/consume}"
declare -r tmp_dir="${PAPERLESS_SCRATCH_DIR:=/tmp/paperless}"

echo "${log_prefix} Checking for folder existence"

for dir in \
"${export_dir}" \
"${data_dir}" "${data_dir}/index" \
"${media_root_dir}" "${media_root_dir}/documents" "${media_root_dir}/documents/originals" "${media_root_dir}/documents/thumbnails" \
"${consume_dir}" \
"${tmp_dir}"; do
if [[ ! -d "${dir}" ]]; then
mkdir --parents --verbose "${dir}"
fi
done

echo "${log_prefix} Adjusting file and folder permissions"
for dir in \
"${export_dir}" \
"${data_dir}" \
"${media_root_dir}" \
"${consume_dir}" \
"${tmp_dir}"; do
find "${dir}" -not \( -user paperless -and -group paperless \) -exec chown --changes paperless:paperless {} +
done
@ -1 +0,0 @@
oneshot
@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/init-folders/run
@ -1,7 +0,0 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash
declare -r data_dir="${PAPERLESS_DATA_DIR:-/usr/src/paperless/data}"

# shellcheck disable=SC2164
cd "${PAPERLESS_SRC_DIR}"
exec s6-setlock -n "${data_dir}/migration_lock" python3 manage.py migrate --skip-checks --no-input
@ -1,12 +0,0 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash
declare -r log_prefix="[init-migrations]"

echo "${log_prefix} Apply database migrations..."

# The whole migrate, with flock, needs to run as the right user
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
exec /etc/s6-overlay/s6-rc.d/init-migrations/migrate.sh
else
exec s6-setuidgid paperless /etc/s6-overlay/s6-rc.d/init-migrations/migrate.sh
fi
@ -1 +0,0 @@
oneshot
@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/init-migrations/run
@ -1,22 +0,0 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash
declare -r log_prefix="[init-user]"

declare -r usermap_original_uid=$(id -u paperless)
declare -r usermap_original_gid=$(id -g paperless)
declare -r usermap_new_uid=${USERMAP_UID:-$usermap_original_uid}
declare -r usermap_new_gid=${USERMAP_GID:-${usermap_original_gid:-$usermap_new_uid}}

if [[ ${usermap_new_uid} != "${usermap_original_uid}" ]]; then
echo "${log_prefix} Mapping UID for paperless to $usermap_new_uid"
usermod --non-unique --uid "${usermap_new_uid}" paperless
else
echo "${log_prefix} No UID changes for paperless"
fi

if [[ ${usermap_new_gid} != "${usermap_original_gid}" ]]; then
echo "${log_prefix} Mapping GID for paperless to $usermap_new_gid"
groupmod --non-unique --gid "${usermap_new_gid}" paperless
else
echo "${log_prefix} No GID changes for paperless"
fi
@ -1 +0,0 @@
oneshot
@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/init-modify-user/run
@ -1,28 +0,0 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash

declare -r log_prefix="[init-index]"

declare -r index_version=9
declare -r data_dir="${PAPERLESS_DATA_DIR:-/usr/src/paperless/data}"
declare -r index_version_file="${data_dir}/.index_version"

update_index () {
echo "${log_prefix} Search index out of date. Updating..."
cd "${PAPERLESS_SRC_DIR}"
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py document_index reindex --no-progress-bar
echo ${index_version} | tee "${index_version_file}" > /dev/null
else
s6-setuidgid paperless python3 manage.py document_index reindex --no-progress-bar
echo ${index_version} | s6-setuidgid paperless tee "${index_version_file}" > /dev/null
fi
}

if [[ (! -f "${index_version_file}") ]]; then
echo "${log_prefix} No index version file found"
update_index
elif [[ $(<"${index_version_file}") != "$index_version" ]]; then
echo "${log_prefix} index version updated"
update_index
fi
@ -1 +0,0 @@
oneshot
@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/init-search-index/run
@ -1,19 +0,0 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash

declare -r log_prefix="[init-start]"

echo "${log_prefix} paperless-ngx docker container starting..."

# Set some directories into environment for other steps to access via environment
# Sort of like variables for later
printf "/usr/src/paperless/src" > /var/run/s6/container_environment/PAPERLESS_SRC_DIR
echo $(date +%s) > /var/run/s6/container_environment/PAPERLESS_START_TIME_S

# Check if we're starting as a non-root user
if [ $(id -u) == $(id -u paperless) ]; then
printf "true" > /var/run/s6/container_environment/USER_IS_NON_ROOT
echo "${log_prefix} paperless-ngx docker container running under a user"
else
echo "${log_prefix} paperless-ngx docker container starting init as root"
fi
@ -1 +0,0 @@
oneshot
@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/init-start/run
Some files were not shown because too many files have changed in this diff.