Compare commits


1 Commit

Author: Jonas Winkler
SHA1: c6a51a1cdc
Message: Version bump
Date: 2018-12-12 13:25:28 +01:00
1623 changed files with 94003 additions and 813104 deletions


@@ -1,36 +0,0 @@
codecov:
require_ci_to_pass: true
# https://docs.codecov.com/docs/components
component_management:
individual_components:
- component_id: backend
paths:
- src/**
- component_id: frontend
paths:
- src-ui/**
# https://docs.codecov.com/docs/pull-request-comments
# codecov will only comment if coverage changes
comment:
layout: "header, diff, components, flags, files"
require_changes: true
# https://docs.codecov.com/docs/javascript-bundle-analysis
require_bundle_changes: true
bundle_change_threshold: "50Kb"
coverage:
status:
project:
default:
# https://docs.codecov.com/docs/commit-status#threshold
threshold: 1%
patch:
default:
# For the changed lines only, target 100% covered, but
# allow as low as 75%
target: 100%
threshold: 25%
# https://docs.codecov.com/docs/javascript-bundle-analysis
bundle_analysis:
# Fail if the bundle size increases by more than 1MB
warning_threshold: "1MB"
status: true


@@ -1,3 +0,0 @@
[codespell]
write-changes = True
ignore-words-list = criterias,afterall,valeu,ureue,equest,ure,assertIn


@@ -1,175 +0,0 @@
# syntax=docker/dockerfile:1
FROM --platform=$BUILDPLATFORM docker.io/node:20-bookworm-slim as main-app
ARG DEBIAN_FRONTEND=noninteractive
# Buildx provided, must be defined to use though
ARG TARGETARCH
# Can be workflow provided, defaults set for manual building
ARG JBIG2ENC_VERSION=0.29
ARG QPDF_VERSION=11.9.0
ARG GS_VERSION=10.03.1
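# Example (added here for illustration, not part of the original file): for a manual
# build from the repository root, these defaults can be overridden at build time, e.g.
#   docker buildx build -f .devcontainer/Dockerfile --build-arg QPDF_VERSION=11.9.0 --build-arg GS_VERSION=10.03.1 .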
# Set Python environment variables
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \
# Ignore warning from Whitenoise
PYTHONWARNINGS="ignore:::django.http.response:517" \
PNGX_CONTAINERIZED=1
#
# Begin installation and configuration
# Order the steps below from least often changed to most
#
# Packages needed for running
ARG RUNTIME_PACKAGES="\
# General utils
curl \
# Docker specific
gosu \
# Timezones support
tzdata \
# fonts for text file thumbnail generation
fonts-liberation \
gettext \
ghostscript \
gnupg \
icc-profiles-free \
imagemagick \
# PostgreSQL
postgresql-client \
# MySQL / MariaDB
mariadb-client \
# OCRmyPDF dependencies
tesseract-ocr \
tesseract-ocr-eng \
tesseract-ocr-deu \
tesseract-ocr-fra \
tesseract-ocr-ita \
tesseract-ocr-spa \
unpaper \
pngquant \
jbig2dec \
# lxml
libxml2 \
libxslt1.1 \
# itself
qpdf \
# Mime type detection
file \
libmagic1 \
media-types \
zlib1g \
# Barcode splitter
libzbar0 \
poppler-utils \
htop \
sudo"
# Install basic runtime packages.
# These change very infrequently
RUN set -eux \
echo "Installing system packages" \
&& apt-get update \
&& apt-get install --yes --quiet --no-install-recommends ${RUNTIME_PACKAGES}
ARG PYTHON_PACKAGES="ca-certificates"
RUN set -eux \
echo "Installing python packages" \
&& apt-get update \
&& apt-get install --yes --quiet ${PYTHON_PACKAGES}
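# uv itself is installed by copying the prebuilt binary from the pinned astral-sh/uv image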
COPY --from=ghcr.io/astral-sh/uv:0.7.8 /uv /bin/uv
RUN set -eux \
&& echo "Installing pre-built updates" \
&& echo "Installing qpdf ${QPDF_VERSION}" \
&& curl --fail --silent --show-error --location \
--output libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
&& curl --fail --silent --show-error --location \
--output qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
&& dpkg --install ./libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
&& dpkg --install ./qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
&& echo "Installing Ghostscript ${GS_VERSION}" \
&& curl --fail --silent --show-error --location \
--output libgs10_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
&& curl --fail --silent --show-error --location \
--output ghostscript_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/ghostscript_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
&& curl --fail --silent --show-error --location \
--output libgs10-common_${GS_VERSION}.dfsg-1_all.deb \
https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10-common_${GS_VERSION}.dfsg-1_all.deb \
&& dpkg --install ./libgs10-common_${GS_VERSION}.dfsg-1_all.deb \
&& dpkg --install ./libgs10_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
&& dpkg --install ./ghostscript_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
&& echo "Installing jbig2enc" \
&& curl --fail --silent --show-error --location \
--output jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
https://github.com/paperless-ngx/builder/releases/download/jbig2enc-${JBIG2ENC_VERSION}/jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
&& dpkg --install ./jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb
# setup docker-specific things
# These change sometimes, but rarely
WORKDIR /usr/src/paperless/src/docker/
COPY [ \
"docker/rootfs/etc/ImageMagick-6/paperless-policy.xml", \
"./" \
]
RUN set -eux \
&& echo "Configuring ImageMagick" \
&& mv paperless-policy.xml /etc/ImageMagick-6/policy.xml
# Packages needed only for building a few quick Python
# dependencies
ARG BUILD_PACKAGES="\
build-essential \
git \
# https://www.psycopg.org/docs/install.html#prerequisites
libpq-dev \
# https://github.com/PyMySQL/mysqlclient#linux
default-libmysqlclient-dev \
pkg-config"
# hadolint ignore=DL3042
RUN --mount=type=cache,target=/root/.cache/uv,id=pip-cache \
set -eux \
&& echo "Installing build system packages" \
&& apt-get update \
&& apt-get install --yes --quiet ${BUILD_PACKAGES}
RUN set -eux \
&& npm update -g pnpm
# add users, setup scripts
# Mount the compiled frontend to expected location
RUN set -eux \
&& echo "Setting up user/group" \
&& groupmod --new-name paperless node \
&& usermod --login paperless --home /usr/src/paperless node \
&& usermod -s /bin/bash paperless \
&& echo "paperless ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers \
&& echo "Creating volume directories" \
&& mkdir --parents --verbose /usr/src/paperless/paperless-ngx/data \
&& mkdir --parents --verbose /usr/src/paperless/paperless-ngx/media \
&& mkdir --parents --verbose /usr/src/paperless/paperless-ngx/consume \
&& mkdir --parents --verbose /usr/src/paperless/paperless-ngx/export \
&& mkdir --parents --verbose /usr/src/paperless/paperless-ngx/.venv \
&& echo "Adjusting all permissions" \
&& chown --from root:root --changes --recursive paperless:paperless /usr/src/paperless
VOLUME ["/usr/src/paperless/paperless-ngx/data", \
"/usr/src/paperless/paperless-ngx/media", \
"/usr/src/paperless/paperless-ngx/consume", \
"/usr/src/paperless/paperless-ngx/export", \
"/usr/src/paperless/paperless-ngx/.venv"]


@@ -1,94 +0,0 @@
# Paperless-ngx Development Environment
## Overview
Welcome to the Paperless-ngx development environment! This setup uses VSCode DevContainers to provide a consistent and seamless development experience.
### What are DevContainers?
DevContainers are a feature in VSCode that allows you to develop within a Docker container. This ensures that your development environment is consistent across different machines and setups. By defining a containerized environment, you can eliminate the "works on my machine" problem.
### Advantages of DevContainers
- **Consistency**: Same environment for all developers.
- **Isolation**: Separate development environment from your local machine.
- **Reproducibility**: Easily recreate the environment on any machine.
- **Pre-configured Tools**: Include all necessary tools and dependencies in the container.
## DevContainer Setup
The DevContainer configuration spins up all the necessary services for Paperless-ngx, including:
- Redis
- Gotenberg
- Tika
Data is stored using Docker volumes to ensure persistence across container restarts.
## Configuration Files
The setup includes debugging configurations (`launch.json`) and tasks (`tasks.json`) to help you manage and debug various parts of the project:
- **Backend Debugging:**
- `manage.py runserver`
- `manage.py document-consumer`
- `celery`
- **Maintenance Tasks:**
- Create superuser
- Run migrations
- Recreate virtual environment (`.venv` with `uv`)
- Compile frontend assets
## Getting Started
### Step 1: Running the DevContainer
To start the DevContainer:
1. Open VSCode.
2. Open the project folder.
3. Open the command palette and choose `Dev Containers: Rebuild and Reopen in Container`.
VSCode will build and start the DevContainer environment.
### Step 2: Initial Setup
Once the DevContainer is up and running, run the `Project Setup: Run all Init Tasks` task to initialize the project.
Alternatively, the project setup can be done with individual tasks (the equivalent shell commands are sketched after this list):
1. **Compile Frontend Assets**: `Maintenance: Compile frontend for production`.
2. **Run Database Migrations**: `Maintenance: manage.py migrate`.
3. **Create Superuser**: `Maintenance: manage.py createsuperuser`.
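For reference, these tasks map to roughly the following shell commands (a sketch based on the task definitions in `tasks.json`; paths are relative to the workspace root inside the container):
```bash
# Compile the frontend for production
cd src-ui && pnpm install && ./node_modules/.bin/ng build --configuration production
# Apply database migrations
cd ../src && uv run python manage.py migrate
# Create a superuser
uv run python manage.py createsuperuser
```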
### Debugging and Running Services
You can start and debug backend services either as debugging sessions via `launch.json` or as tasks; the underlying shell commands are sketched below.
#### Using `launch.json`
1. Press `F5` or go to the **Run and Debug** view in VSCode.
2. Select the desired configuration:
- `Runserver`
- `Document Consumer`
- `Celery`
#### Using Tasks
1. Open the command palette and select `Tasks: Run Task`.
2. Choose the desired task:
- `Runserver`
- `Document Consumer`
- `Celery`
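Under the hood, these tasks run roughly the following commands from the `src` directory (a sketch based on `tasks.json`):
```bash
uv run python manage.py runserver             # Backend Server
uv run python manage.py document_consumer     # Consumer Service
uv run celery --app paperless worker -l DEBUG # Celery Worker
```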
### Additional Maintenance Tasks
Additional tasks are available for common maintenance operations:
- **Recreate .venv**: For setting up the virtual environment using `uv`.
- **Migrate Database**: To apply database migrations.
- **Create Superuser**: To create an admin user for the application.
## Let's Get Started!
Follow the steps above to get your development environment up and running. Happy coding!


@@ -1,28 +0,0 @@
{
"name": "Paperless Development",
"dockerComposeFile": "docker-compose.devcontainer.sqlite-tika.yml",
"service": "paperless-development",
"workspaceFolder": "/usr/src/paperless/paperless-ngx",
"postCreateCommand": "/bin/bash -c 'uv sync --group dev && uv run pre-commit install'",
"customizations": {
"vscode": {
"extensions": [
"mhutchie.git-graph",
"ms-python.python",
"ms-vscode.js-debug-nightly",
"eamodio.gitlens",
"yzhang.markdown-all-in-one"
],
"settings": {
"python.defaultInterpreterPath": "/usr/src/paperless/paperless-ngx/.venv/bin/python",
"python.pythonPath": "/usr/src/paperless/paperless-ngx/.venv/bin/python",
"python.terminal.activateEnvInCurrentTerminal": true,
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true
}
}
},
"remoteUser": "paperless"
}


@@ -1,78 +0,0 @@
# Docker Compose file for developing Paperless NGX in VSCode DevContainers.
# This file contains everything Paperless NGX needs to run.
# Paperless supports amd64, arm, and arm64 hardware.
# All compose files of Paperless configure it in the following way:
#
# - Paperless is (re)started on system boot if it was running before shutdown.
# - Docker volumes for storing data are managed by Docker.
# - Folders for importing and exporting files are created in the same directory
# as this file and mounted to the correct folders inside the container.
# - Paperless listens on port 8000.
#
# SQLite is used as the database. The SQLite file is stored in the data volume.
#
# In addition, this Docker Compose file adds the following optional
# configurations:
#
# - Apache Tika and Gotenberg servers are started with Paperless NGX and Paperless
# is configured to use these services. These provide support for consuming
# Office documents (Word, Excel, PowerPoint, and their LibreOffice counterparts).
#
# This file is intended to be used only through VSCode DevContainers. See README.md
# in the .devcontainer folder.
services:
broker:
image: docker.io/library/redis:7
restart: unless-stopped
volumes:
- ./redisdata:/data
# No ports need to be exposed; the VSCode DevContainer plugin manages them.
paperless-development:
image: paperless-ngx
build:
context: ../ # Dockerfile cannot access files from parent directories if context is not set.
dockerfile: ./.devcontainer/Dockerfile
restart: unless-stopped
depends_on:
- broker
- gotenberg
- tika
volumes:
- ..:/usr/src/paperless/paperless-ngx:delegated
- ../.devcontainer/vscode:/usr/src/paperless/paperless-ngx/.vscode:delegated # VSCode config files
- virtualenv:/usr/src/paperless/paperless-ngx/.venv # Virtual environment persisted in volume
- /usr/src/paperless/paperless-ngx/src/documents/static/frontend # Static frontend files exist only in container
- /usr/src/paperless/paperless-ngx/src/.pytest_cache
- /usr/src/paperless/paperless-ngx/.ruff_cache
- /usr/src/paperless/paperless-ngx/htmlcov
- /usr/src/paperless/paperless-ngx/.coverage
- ./data:/usr/src/paperless/paperless-ngx/data
- ./media:/usr/src/paperless/paperless-ngx/media
- ./consume:/usr/src/paperless/paperless-ngx/consume
- ~/.gitconfig:/usr/src/paperless/.gitconfig:ro
environment:
PAPERLESS_REDIS: redis://broker:6379
PAPERLESS_TIKA_ENABLED: 1
PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
PAPERLESS_TIKA_ENDPOINT: http://tika:9998
PAPERLESS_STATICDIR: ./src/documents/static
PAPERLESS_DEBUG: true
# Overrides default command so things don't shut down after the process ends.
command: /bin/sh -c "chown -R paperless:paperless /usr/src/paperless/paperless-ngx/src/documents/static/frontend && chown -R paperless:paperless /usr/src/paperless/paperless-ngx/.ruff_cache && while sleep 1000; do :; done"
gotenberg:
image: docker.io/gotenberg/gotenberg:8.17
restart: unless-stopped
# The Gotenberg Chromium route is used to convert .eml files. We do not
# want to allow external content like tracking pixels or even JavaScript.
command:
- "gotenberg"
- "--chromium-disable-javascript=true"
- "--chromium-allow-list=file:///tmp/.*"
tika:
image: docker.io/apache/tika:latest
restart: unless-stopped
volumes:
data:
media:
redisdata:
virtualenv:


@@ -1,58 +0,0 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "Chrome: Debug Angular Frontend",
"description": "Debug the Angular Dev Frontend in Chrome",
"type": "chrome",
"request": "launch",
"url": "http://localhost:4200",
"webRoot": "${workspaceFolder}/src-ui",
"preLaunchTask": "Start: Frontend Angular"
},
{
"name": "Debug: Backend Server (manage.py runserver)",
"description": "Debug the Django Backend Server",
"type": "python",
"request": "launch",
"program": "${workspaceFolder}/src/manage.py",
"args": [
"runserver"
],
"django": true,
"console": "integratedTerminal",
"env": {
"PYTHONPATH": "${workspaceFolder}/src"
},
"python": "${workspaceFolder}/.venv/bin/python"
},
{
"name": "Debug: Consumer Service (manage.py document_consumer)",
"description": "Debug the Consumer Service which processes files from a directory",
"type": "python",
"request": "launch",
"program": "${workspaceFolder}/src/manage.py",
"args": [
"document_consumer"
],
"django": true,
"console": "integratedTerminal",
"env": {
"PYTHONPATH": "${workspaceFolder}/src"
},
"python": "${workspaceFolder}/.venv/bin/python"
}
],
"compounds": [
{
"name": "Debug: FullStack",
"description": "Debug run the Angular dev frontend, Django backend, and consumer service",
"configurations": [
"Chrome: Debug Angular Frontend",
"Debug: Backend Server (manage.py runserver)",
"Debug: Consumer Service (manage.py document_consumer)"
],
"preLaunchTask": "Start: Celery Worker"
}
]
}


@@ -1,11 +0,0 @@
{
"python.testing.pytestArgs": [
"src"
],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true,
"files.watcherExclude": {
"**/.venv/**": true,
"**/pytest_cache/**": true
}
}


@@ -1,223 +0,0 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "Start: Celery Worker",
"description": "Start the Celery Worker which processes background and consume tasks",
"type": "shell",
"command": "uv run celery --app paperless worker -l DEBUG",
"isBackground": true,
"options": {
"cwd": "${workspaceFolder}/src"
},
"problemMatcher": [
{
"owner": "custom",
"pattern": [
{
"regexp": ".",
"file": 1,
"location": 2,
"message": 3
}
],
"background": {
"activeOnStart": true,
"beginsPattern": "celery.*",
"endsPattern": "ready"
}
}
]
},
{
"label": "Start: Frontend Angular",
"description": "Start the Frontend Angular Dev Server",
"type": "shell",
"command": "pnpm start",
"isBackground": true,
"options": {
"cwd": "${workspaceFolder}/src-ui"
},
"problemMatcher": [
{
"owner": "custom",
"pattern": [
{
"regexp": ".",
"file": 1,
"location": 2,
"message": 3
}
],
"background": {
"activeOnStart": true,
"beginsPattern": ".*",
"endsPattern": "Compiled successfully"
}
}
]
},
{
"label": "Start: Consumer Service (manage.py document_consumer)",
"description": "Start the Consumer Service which processes files from a directory",
"type": "shell",
"command": "uv run python manage.py document_consumer",
"group": "build",
"presentation": {
"echo": true,
"reveal": "always",
"focus": false,
"panel": "shared",
"showReuseMessage": false,
"clear": true,
"revealProblems": "onProblem"
},
"options": {
"cwd": "${workspaceFolder}/src"
}
},
{
"label": "Start: Backend Server (manage.py runserver)",
"description": "Start the Backend Server which serves the Django API and the compiled Angular frontend",
"type": "shell",
"command": "uv run python manage.py runserver",
"group": "build",
"presentation": {
"echo": true,
"reveal": "always",
"focus": false,
"panel": "shared",
"showReuseMessage": false,
"clear": true,
"revealProblems": "onProblem"
},
"options": {
"cwd": "${workspaceFolder}/src"
}
},
{
"label": "Maintenance: manage.py migrate",
"description": "Apply database migrations",
"type": "shell",
"command": "uv run python manage.py migrate",
"group": "none",
"presentation": {
"echo": true,
"reveal": "always",
"focus": true,
"panel": "shared",
"showReuseMessage": false,
"clear": true,
"revealProblems": "onProblem"
},
"options": {
"cwd": "${workspaceFolder}/src"
}
},
{
"label": "Maintenance: Build Documentation",
"description": "Build the documentation with MkDocs",
"type": "shell",
"command": "uv run mkdocs build --config-file mkdocs.yml && uv run mkdocs serve",
"group": "none",
"presentation": {
"echo": true,
"reveal": "always",
"focus": true,
"panel": "shared",
"showReuseMessage": false,
"clear": true,
"revealProblems": "onProblem"
},
"options": {
"cwd": "${workspaceFolder}"
}
},
{
"label": "Maintenance: manage.py createsuperuser",
"description": "Create a superuser",
"type": "shell",
"command": "uv run python manage.py createsuperuser",
"group": "none",
"presentation": {
"echo": true,
"reveal": "always",
"focus": true,
"panel": "shared",
"showReuseMessage": false,
"clear": true,
"revealProblems": "onProblem"
},
"options": {
"cwd": "${workspaceFolder}/src"
}
},
{
"label": "Maintenance: recreate .venv",
"description": "Recreate the python virtual environment and install python dependencies",
"type": "shell",
"command": "rm -rf .venv && uv venv && uv sync --dev",
"group": "none",
"presentation": {
"echo": true,
"reveal": "always",
"focus": true,
"panel": "shared",
"showReuseMessage": false,
"clear": true,
"revealProblems": "onProblem"
},
"options": {
"cwd": "${workspaceFolder}"
}
},
{
"label": "Maintenance: Install Frontend Dependencies",
"description": "Install frontend (pnpm) dependencies",
"type": "pnpm",
"script": "install",
"path": "src-ui",
"group": "clean",
"problemMatcher": [],
"detail": "install dependencies from package"
},
{
"description": "Clean install frontend dependencies and build the frontend for production",
"label": "Maintenance: Compile frontend for production",
"type": "shell",
"command": "pnpm install && ./node_modules/.bin/ng build --configuration production",
"group": "none",
"presentation": {
"echo": true,
"reveal": "always",
"focus": true,
"panel": "shared",
"showReuseMessage": false,
"clear": true,
"revealProblems": "onProblem"
},
"options": {
"cwd": "${workspaceFolder}/src-ui"
}
},
{
"label": "Project Setup: Run all Init Tasks",
"description": "Runs all init tasks to setup the project including migrate the database, create a superuser and compile the frontend for production",
"dependsOrder": "sequence",
"dependsOn": [
"Maintenance: manage.py migrate",
"Maintenance: manage.py createsuperuser",
"Maintenance: Compile frontend for production"
]
},
{
"label": "Project Start: Run all Services",
"description": "Runs all services required to start the project including the Celery Worker, the Consumer Service and the Backend Server",
"dependsOn": [
"Start: Celery Worker",
"Start: Consumer Service (manage.py document_consumer)",
"Start: Backend Server (manage.py runserver)"
]
}
]
}


@@ -1,30 +0,0 @@
# Tool caches
**/__pycache__
**/.ruff_cache/
**/.mypy_cache/
# Virtual environment & similar
.venv/
./src-ui/node_modules
./src-ui/dist
# IDE folders
.idea/
.vscode/
./src-ui/.vscode
# VCS
.git
# Test related
**/.pytest_cache
**/tests
**/*.spec.ts
**/htmlcov
# Local folders
./export
./consume
./media
./data
./docs
./dist
./scripts
./resources
# Other stuff
**/*.drawio.png


@@ -18,20 +18,8 @@ max_line_length = off
indent_size = 4
indent_style = space
[*.{yml,yaml}]
indent_style = space
[*.rst]
indent_style = space
[*.md]
indent_style = space
# Tests don't get a line width restriction. It's still a good idea to follow
# the 79 character rule, but in the interests of clarity, tests often need to
# violate it.
[**/test_*.py]
max_line_length = off
[Dockerfile*]
indent_style = space

.env

@@ -1 +0,0 @@
COMPOSE_PROJECT_NAME=paperless

.gitattributes

@@ -0,0 +1 @@
THANKS.md merge=union
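# The union merge driver keeps lines from both sides when THANKS.md has merge conflicts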

View File

@@ -1,14 +0,0 @@
title: "[Feature Request] "
body:
- type: textarea
id: description
attributes:
label: Description
description: A clear and concise description of what you would like to see.
validations:
required: true
- type: textarea
id: other
attributes:
label: Other
description: Add any other context or information about the feature request here.


@@ -1,55 +0,0 @@
title: "[Support] "
body:
- type: textarea
id: description
attributes:
label: What's your question or issue?
description: Provide a clear and concise description of what you're trying to do, and what's going wrong.
placeholder: |
I'm trying to...
[Include screenshots if helpful]
validations:
required: true
- type: textarea
id: steps
attributes:
label: What have you tried?
description: Describe any steps you've already taken to troubleshoot or solve the issue.
placeholder: |
- I checked the logs and saw...
- I followed the install guide and tried...
- type: input
id: version
attributes:
label: Paperless-ngx version
placeholder: e.g. 1.14.0
validations:
required: true
- type: input
id: host-os
attributes:
label: Host OS
description: Include architecture if relevant.
placeholder: e.g. Ubuntu 22.04 / Raspberry Pi arm64
- type: dropdown
id: install-method
attributes:
label: Installation method
options:
- Docker - official image
- Docker - linuxserver.io image
- Bare metal
- Other (please describe above)
- type: textarea
id: system-status
attributes:
label: System status
description: If available, copy & paste the system status output from Settings > System Status > Copy
render: json
- type: textarea
id: logs
attributes:
label: Relevant logs or output
description: If you have logs or errors that might help, paste them here.
render: bash

.github/FUNDING.yml

@@ -1 +0,0 @@
github: [shamoon, stumpylog]


@@ -1,118 +0,0 @@
name: Bug report
description: Something is not working
title: "[BUG] Concise description of the issue"
labels: ["bug", "unconfirmed"]
body:
- type: markdown
attributes:
value: |
### ⚠️ Please remember: issues are for *bugs*
That is, something you believe affects every single user of Paperless-ngx, not just you. If you're not sure, start with one of the other options below.
Also, note that **Paperless-ngx does not perform OCR or archive file creation itself**; those are handled by other tools. Problems with OCR or archive versions of specific files should likely be raised 'upstream'; see https://github.com/ocrmypdf/OCRmyPDF/issues or https://github.com/tesseract-ocr/tesseract/issues
- type: markdown
attributes:
value: |
#### Have a question? 👉 [Start a new discussion](https://github.com/paperless-ngx/paperless-ngx/discussions/new) or [ask in chat](https://matrix.to/#/#paperlessngx:matrix.org).
#### Before opening an issue, please double check:
- [The troubleshooting documentation](https://docs.paperless-ngx.com/troubleshooting/).
- [The installation instructions](https://docs.paperless-ngx.com/setup/#installation).
- [Existing issues and discussions](https://github.com/paperless-ngx/paperless-ngx/search?q=&type=issues).
- Disable any custom container initialization scripts, if you are using any
If you encounter issues while installing or configuring Paperless-ngx, please post in the ["Support" section of the discussions](https://github.com/paperless-ngx/paperless-ngx/discussions/new?category=support).
- type: textarea
id: description
attributes:
label: Description
description: A clear and concise description of what the bug is. If applicable, add screenshots to help explain your problem.
placeholder: |
Currently Paperless does not work when...
[Screenshot if applicable]
validations:
required: true
- type: textarea
id: reproduction
attributes:
label: Steps to reproduce
description: Steps to reproduce the behavior.
placeholder: |
1. Go to '...'
2. Click on '....'
3. See error
validations:
required: true
- type: textarea
id: logs
attributes:
label: Webserver logs
description: Logs from the web server related to your issue.
render: bash
validations:
required: true
- type: textarea
id: logs_browser
attributes:
label: Browser logs
description: Logs from the web browser related to your issue, if needed
render: bash
- type: input
id: version
attributes:
label: Paperless-ngx version
placeholder: e.g. 1.6.0
validations:
required: true
- type: input
id: host-os
attributes:
label: Host OS
description: Host OS of the machine running paperless-ngx. Please add the architecture (uname -m) if applicable.
placeholder: e.g. Archlinux / Ubuntu 20.04 / Raspberry Pi `arm64`
validations:
required: true
- type: dropdown
id: install-method
attributes:
label: Installation method
options:
- Docker - official image
- Docker - linuxserver.io image
- Bare metal
- Other (please describe above)
description: Note there are significant differences between the official image and the linuxserver.io image; please check whether your issue is specific to the third-party image.
validations:
required: true
- type: textarea
id: system-status
attributes:
label: System status
description: If available, copy & paste the system status output from Settings > System Status > Copy
render: json
- type: input
id: browser
attributes:
label: Browser
description: Which browser you are using, if relevant.
placeholder: e.g. Chrome, Safari
- type: textarea
id: config-changes
attributes:
label: Configuration changes
description: Any configuration changes you made in `docker-compose.yml`, `docker-compose.env` or `paperless.conf`.
- type: checkboxes
id: required-checks
attributes:
label: Please confirm the following
options:
- label: I believe this issue is a bug that affects all users of Paperless-ngx, not something specific to my installation.
required: true
- label: This issue is not about the OCR or archive creation of specific files. Otherwise, please see above regarding OCR tools.
required: true
- label: I have already searched for relevant existing issues and discussions before opening this report.
required: true
- label: I have updated the title field above with a concise description.
required: true


@@ -1,11 +0,0 @@
blank_issues_enabled: false
contact_links:
- name: 🤔 Questions and Help
url: https://github.com/paperless-ngx/paperless-ngx/discussions
about: General questions or support for using Paperless-ngx.
- name: 💬 Chat
url: https://matrix.to/#/#paperlessngx:matrix.org
about: Want to discuss Paperless-ngx with others? Check out our chat.
- name: 🚀 Feature Request
url: https://github.com/paperless-ngx/paperless-ngx/discussions/new?category=feature-requests
about: Remember to search for existing feature requests and "up-vote" those that you like.


@@ -1,42 +0,0 @@
<!--
Note: All PRs with code changes should be targeted to the `dev` branch, pure documentation changes can target `main`
-->
## Proposed change
<!--
Please include a summary of the change and which issue is fixed (if any) and any relevant motivation / context. List any dependencies that are required for this change. If appropriate, please include an explanation of how your proposed change can be tested. Screenshots and / or videos can also be helpful if appropriate.
-->
<!--
⚠️ Important: Pull requests that implement a new feature or enhancement *should almost always target an existing feature request* with evidence of community interest and discussion. This is in order to balance the work of implementing and maintaining new features / enhancements. If that is not currently the case, please open a feature request instead of this PR to gather feedback from both users and the project maintainers.
-->
Closes #(issue or discussion)
## Type of change
<!--
What type of change does your PR introduce to Paperless-ngx?
NOTE: Please check only one box!
-->
- [ ] Bug fix: non-breaking change which fixes an issue.
- [ ] New feature / Enhancement: non-breaking change which adds functionality. _Please read the important note above._
- [ ] Breaking change: fix or feature that would cause existing functionality to not work as expected.
- [ ] Documentation only.
- [ ] Other. Please explain:
## Checklist:
<!--
NOTE: PRs that do not address the following will not be merged, please do not skip any relevant items.
-->
- [ ] I have read & agree with the [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/main/CONTRIBUTING.md).
- [ ] If applicable, I have included testing coverage for new code in this PR, for [backend](https://docs.paperless-ngx.com/development/#testing) and / or [front-end](https://docs.paperless-ngx.com/development/#testing-and-code-style) changes.
- [ ] If applicable, I have tested my code for new features & regressions on both mobile & desktop devices, using the latest version of major browsers.
- [ ] If applicable, I have checked that all tests pass, see [documentation](https://docs.paperless-ngx.com/development/#back-end-development).
- [ ] I have run all `pre-commit` hooks, see [documentation](https://docs.paperless-ngx.com/development/#code-formatting-with-pre-commit-hooks).
- [ ] I have made corresponding changes to the documentation as needed.
- [ ] I have checked my modifications for any breaking changes.

.github/dependabot.yml

@@ -1,124 +0,0 @@
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
# Required for uv support for now
enable-beta-ecosystems: true
updates:
# Enable version updates for pnpm
- package-ecosystem: "npm"
target-branch: "dev"
# Look for `pnpm-lock.yaml` file in the `/src-ui` directory
directory: "/src-ui"
open-pull-requests-limit: 10
schedule:
interval: "monthly"
labels:
- "frontend"
- "dependencies"
groups:
frontend-angular-dependencies:
patterns:
- "@angular*"
- "@ng-*"
- "ngx-*"
- "ng2-pdf-viewer"
frontend-jest-dependencies:
patterns:
- "@types/jest"
- "jest*"
frontend-eslint-dependencies:
patterns:
- "@typescript-eslint*"
- "eslint"
# Enable version updates for Python
- package-ecosystem: "uv"
target-branch: "dev"
directory: "/"
# Check for updates once a week
schedule:
interval: "weekly"
labels:
- "backend"
- "dependencies"
groups:
development:
patterns:
- "*pytest*"
- "ruff"
- "mkdocs-material"
- "pre-commit*"
django:
patterns:
- "*django*"
- "drf-*"
major-versions:
update-types:
- "major"
small-changes:
update-types:
- "minor"
- "patch"
exclude-patterns:
- "*django*"
- "drf-*"
pre-built:
patterns:
- psycopg*
- zxing-cpp
# Enable updates for GitHub Actions
- package-ecosystem: "github-actions"
target-branch: "dev"
directory: "/"
schedule:
# Check for updates to GitHub Actions every month
interval: "monthly"
labels:
- "ci-cd"
- "dependencies"
groups:
actions:
update-types:
- "major"
- "minor"
- "patch"
# Update Dockerfile in root directory
- package-ecosystem: "docker"
directories:
- "/"
- "/.devcontainer/"
schedule:
interval: "weekly"
open-pull-requests-limit: 5
labels:
- "dependencies"
commit-message:
prefix: "docker"
include: "scope"
# Update Docker Compose files in docker/compose directory
- package-ecosystem: "docker-compose"
directory: "/docker/compose/"
schedule:
interval: "weekly"
open-pull-requests-limit: 5
labels:
- "dependencies"
commit-message:
prefix: "docker-compose"
include: "scope"
groups:
# Individual groups for each image
gotenberg:
patterns:
- "docker.io/gotenberg/gotenberg*"
tika:
patterns:
- "docker.io/apache/tika*"
redis:
patterns:
- "docker.io/library/redis*"
mariadb:
patterns:
- "docker.io/library/mariadb*"
postgres:
patterns:
- "docker.io/library/postgres*"

.github/labeler.yml

@@ -1,26 +0,0 @@
backend:
- changed-files:
- any-glob-to-any-file:
- 'src/**'
- 'pyproject.toml'
- 'uv.lock'
- 'requirements.txt'
frontend:
- changed-files:
- any-glob-to-any-file:
- 'src-ui/**'
documentation:
- changed-files:
- any-glob-to-any-file:
- 'docs/**'
ci-cd:
- changed-files:
- any-glob-to-any-file:
- '.github/**'
# pr types
bug:
- head-branch:
- ['^fix']
enhancement:
- head-branch:
- ['^feature']


@@ -1,53 +0,0 @@
categories:
- title: 'Breaking Changes'
labels:
- 'breaking-change'
- title: 'Notable Changes'
labels:
- 'notable'
- title: 'Features / Enhancements'
labels:
- 'enhancement'
- title: 'Bug Fixes'
labels:
- 'bug'
- title: 'Documentation'
labels:
- 'documentation'
- title: 'Maintenance'
labels:
- 'chore'
- 'deployment'
- 'translation'
- 'ci-cd'
- title: 'Dependencies'
collapse-after: 3
labels:
- 'dependencies'
- title: 'All App Changes'
labels:
- 'frontend'
- 'backend'
collapse-after: 1
include-labels:
- 'enhancement'
- 'bug'
- 'chore'
- 'deployment'
- 'translation'
- 'dependencies'
- 'documentation'
- 'frontend'
- 'backend'
- 'ci-cd'
- 'breaking-change'
- 'notable'
exclude-labels:
- 'skip-changelog'
category-template: '### $TITLE'
change-template: '- $TITLE @$AUTHOR ([#$NUMBER]($URL))'
change-title-escapes: '\<*_&#@'
template: |
## paperless-ngx $RESOLVED_VERSION
$CHANGES


@@ -1,636 +0,0 @@
name: ci
on:
push:
tags:
# https://semver.org/#spec-item-2
- 'v[0-9]+.[0-9]+.[0-9]+'
# https://semver.org/#spec-item-9
- 'v[0-9]+.[0-9]+.[0-9]+-beta.rc[0-9]+'
branches-ignore:
- 'translations**'
pull_request:
branches-ignore:
- 'translations**'
env:
DEFAULT_UV_VERSION: "0.8.x"
# This is the default version of Python to use in most steps which aren't version-specific
DEFAULT_PYTHON_VERSION: "3.11"
NLTK_DATA: "/usr/share/nltk_data"
jobs:
pre-commit:
# We want to run on external PRs, but not on our own internal PRs as they'll be run
# by the push to the branch. Without this if check, checks are duplicated since
# internal PRs match both the push and pull_request events.
if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
name: Linting Checks
runs-on: ubuntu-24.04
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Install python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Check files
uses: pre-commit/action@v3.0.1
documentation:
name: "Build & Deploy Documentation"
runs-on: ubuntu-24.04
needs:
- pre-commit
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up Python
id: setup-python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v6
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install Python dependencies
run: |
uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
- name: Make documentation
run: |
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
--dev \
--frozen \
mkdocs build --config-file ./mkdocs.yml
- name: Deploy documentation
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
run: |
echo "docs.paperless-ngx.com" > "${{ github.workspace }}/docs/CNAME"
git config --global user.name "${{ github.actor }}"
git config --global user.email "${{ github.actor }}@users.noreply.github.com"
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
--dev \
--frozen \
mkdocs gh-deploy --force --no-history
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: documentation
path: site/
retention-days: 7
tests-backend:
name: "Backend Tests (Python ${{ matrix.python-version }})"
runs-on: ubuntu-24.04
needs:
- pre-commit
strategy:
matrix:
python-version: ['3.10', '3.11', '3.12']
fail-fast: false
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Start containers
run: |
docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml pull --quiet
docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml up --detach
- name: Set up Python
id: setup-python
uses: actions/setup-python@v5
with:
python-version: "${{ matrix.python-version }}"
- name: Install uv
uses: astral-sh/setup-uv@v6
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
python-version: ${{ steps.setup-python.outputs.python-version }}
- name: Install system dependencies
run: |
sudo apt-get update -qq
sudo apt-get install -qq --no-install-recommends unpaper tesseract-ocr imagemagick ghostscript libzbar0 poppler-utils
- name: Configure ImageMagick
run: |
sudo cp docker/rootfs/etc/ImageMagick-6/paperless-policy.xml /etc/ImageMagick-6/policy.xml
- name: Install Python dependencies
run: |
uv sync \
--python ${{ steps.setup-python.outputs.python-version }} \
--group testing \
--frozen
- name: List installed Python dependencies
run: |
uv pip list
- name: Install or update NLTK dependencies
run: uv run python -m nltk.downloader punkt punkt_tab snowball_data stopwords -d ${{ env.NLTK_DATA }}
- name: Tests
env:
NLTK_DATA: ${{ env.NLTK_DATA }}
PAPERLESS_CI_TEST: 1
# Enable paperless_mail testing against real server
PAPERLESS_MAIL_TEST_HOST: ${{ secrets.TEST_MAIL_HOST }}
PAPERLESS_MAIL_TEST_USER: ${{ secrets.TEST_MAIL_USER }}
PAPERLESS_MAIL_TEST_PASSWD: ${{ secrets.TEST_MAIL_PASSWD }}
run: |
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
--dev \
--frozen \
pytest
- name: Upload backend test results to Codecov
if: always()
uses: codecov/test-results-action@v1
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: backend-python-${{ matrix.python-version }}
files: junit.xml
- name: Upload backend coverage to Codecov
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: backend-python-${{ matrix.python-version }}
files: coverage.xml
- name: Stop containers
if: always()
run: |
docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml logs
docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml down
install-frontend-dependencies:
name: "Install Frontend Dependencies"
runs-on: ubuntu-24.04
needs:
- pre-commit
steps:
- uses: actions/checkout@v4
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 10
- name: Use Node.js 20
uses: actions/setup-node@v4
with:
node-version: 20.x
cache: 'pnpm'
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
- name: Cache frontend dependencies
id: cache-frontend-deps
uses: actions/cache@v4
with:
path: |
~/.pnpm-store
~/.cache
key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
- name: Install dependencies
run: cd src-ui && pnpm install
tests-frontend:
name: "Frontend Unit Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
runs-on: ubuntu-24.04
needs:
- install-frontend-dependencies
strategy:
fail-fast: false
matrix:
node-version: [20.x]
shard-index: [1, 2, 3, 4]
shard-count: [4]
steps:
- uses: actions/checkout@v4
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 10
- name: Use Node.js 20
uses: actions/setup-node@v4
with:
node-version: 20.x
cache: 'pnpm'
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
- name: Cache frontend dependencies
id: cache-frontend-deps
uses: actions/cache@v4
with:
path: |
~/.pnpm-store
~/.cache
key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
- name: Re-link Angular cli
run: cd src-ui && pnpm link @angular/cli
- name: Linting checks
run: cd src-ui && pnpm run lint
- name: Run Jest unit tests
run: cd src-ui && pnpm run test --max-workers=2 --shard=${{ matrix.shard-index }}/${{ matrix.shard-count }}
- name: Upload frontend test results to Codecov
uses: codecov/test-results-action@v1
if: always()
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: frontend-node-${{ matrix.node-version }}
directory: src-ui/
- name: Upload frontend coverage to Codecov
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: frontend-node-${{ matrix.node-version }}
directory: src-ui/coverage/
tests-frontend-e2e:
name: "Frontend E2E Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
runs-on: ubuntu-24.04
needs:
- install-frontend-dependencies
strategy:
fail-fast: false
matrix:
node-version: [20.x]
shard-index: [1, 2]
shard-count: [2]
steps:
- uses: actions/checkout@v4
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 10
- name: Use Node.js 20
uses: actions/setup-node@v4
with:
node-version: 20.x
cache: 'pnpm'
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
- name: Cache frontend dependencies
id: cache-frontend-deps
uses: actions/cache@v4
with:
path: |
~/.pnpm-store
~/.cache
key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
- name: Re-link Angular cli
run: cd src-ui && pnpm link @angular/cli
- name: Cache Playwright browsers
uses: actions/cache@v4
with:
path: ~/.cache/ms-playwright
key: ${{ runner.os }}-playwright-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-playwright-
- name: Install Playwright system dependencies
run: npx playwright install-deps
- name: Install dependencies
run: cd src-ui && pnpm install --no-frozen-lockfile
- name: Install Playwright
run: cd src-ui && pnpm exec playwright install
- name: Run Playwright e2e tests
run: cd src-ui && pnpm exec playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}
frontend-bundle-analysis:
name: "Frontend Bundle Analysis"
runs-on: ubuntu-24.04
needs:
- tests-frontend
- tests-frontend-e2e
steps:
- uses: actions/checkout@v4
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 10
- name: Use Node.js 20
uses: actions/setup-node@v4
with:
node-version: 20.x
cache: 'pnpm'
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
- name: Cache frontend dependencies
id: cache-frontend-deps
uses: actions/cache@v4
with:
path: |
~/.pnpm-store
~/.cache
key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/package-lock.json') }}
- name: Re-link Angular cli
run: cd src-ui && pnpm link @angular/cli
- name: Build frontend and upload analysis
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
run: cd src-ui && pnpm run build --configuration=production
build-docker-image:
name: Build Docker image for ${{ github.ref_name }}
runs-on: ubuntu-24.04
if: github.event_name == 'push' && (startsWith(github.ref, 'refs/heads/feature-') || startsWith(github.ref, 'refs/heads/fix-') || github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/beta' || contains(github.ref, 'beta.rc') || startsWith(github.ref, 'refs/tags/v'))
concurrency:
group: ${{ github.workflow }}-build-docker-image-${{ github.ref_name }}
cancel-in-progress: true
needs:
- tests-backend
- tests-frontend
- tests-frontend-e2e
steps:
- name: Check pushing to Docker Hub
id: push-other-places
# Only push to Dockerhub from the main repo AND the ref is either:
# main
# dev
# beta
# a tag
# Otherwise forks would require a Docker Hub account and secrets setup
run: |
if [[ ${{ github.repository_owner }} == "paperless-ngx" && ( ${{ github.ref_name }} == "dev" || ${{ github.ref_name }} == "beta" || ${{ startsWith(github.ref, 'refs/tags/v') }} == "true" ) ]] ; then
echo "Enabling DockerHub image push"
echo "enable=true" >> $GITHUB_OUTPUT
else
echo "Not pushing to DockerHub"
echo "enable=false" >> $GITHUB_OUTPUT
fi
- name: Set ghcr repository name
id: set-ghcr-repository
run: |
ghcr_name=$(echo "${{ github.repository }}" | awk '{ print tolower($0) }')
echo "Name is ${ghcr_name}"
echo "ghcr-repository=${ghcr_name}" >> $GITHUB_OUTPUT
- name: Gather Docker metadata
id: docker-meta
uses: docker/metadata-action@v5
with:
images: |
ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}
name=paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
name=quay.io/paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
tags: |
# Tag branches with branch name
type=ref,event=branch
# Process semver tags
# For a tag x.y.z or vX.Y.Z, output an x.y.z and x.y image tag
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
- name: Checkout
uses: actions/checkout@v4
# If https://github.com/docker/buildx/issues/1044 is resolved,
# the append input with a native arm64 arch could be used to
# significantly speed up building
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
platforms: arm64
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Docker Hub
uses: docker/login-action@v3
# Don't attempt to login if not pushing to Docker Hub
if: steps.push-other-places.outputs.enable == 'true'
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to Quay.io
uses: docker/login-action@v3
# Don't attempt to login if not pushing to Quay.io
if: steps.push-other-places.outputs.enable == 'true'
with:
registry: quay.io
username: ${{ secrets.QUAY_USERNAME }}
password: ${{ secrets.QUAY_ROBOT_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v6
with:
context: .
file: ./Dockerfile
platforms: linux/amd64,linux/arm64
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.docker-meta.outputs.tags }}
labels: ${{ steps.docker-meta.outputs.labels }}
build-args: |
PNGX_TAG_VERSION=${{ steps.docker-meta.outputs.version }}
# Get cache layers from this branch, then dev
# This allows new branches to get at least some cache benefits, generally from dev
cache-from: |
type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:dev
cache-to: |
type=registry,mode=max,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
- name: Export frontend artifact from docker
run: |
docker create --name frontend-extract ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
docker cp frontend-extract:/usr/src/paperless/src/documents/static/frontend src/documents/static/frontend/
- name: Upload frontend artifact
uses: actions/upload-artifact@v4
with:
name: frontend-compiled
path: src/documents/static/frontend/
retention-days: 7
build-release:
name: "Build Release"
needs:
- build-docker-image
- documentation
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up Python
id: setup-python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v6
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
python-version: ${{ steps.setup-python.outputs.python-version }}
- name: Install Python dependencies
run: |
uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
- name: Install system dependencies
run: |
sudo apt-get update -qq
sudo apt-get install -qq --no-install-recommends gettext liblept5
- name: Download frontend artifact
uses: actions/download-artifact@v4
with:
name: frontend-compiled
path: src/documents/static/frontend/
- name: Download documentation artifact
uses: actions/download-artifact@v4
with:
name: documentation
path: docs/_build/html/
- name: Generate requirements file
run: |
uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt
- name: Compile messages
run: |
cd src/
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
manage.py compilemessages
- name: Collect static files
run: |
cd src/
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
manage.py collectstatic --no-input
- name: Move files
run: |
echo "Making dist folders"
for directory in dist \
dist/paperless-ngx \
dist/paperless-ngx/scripts;
do
mkdir --verbose --parents ${directory}
done
echo "Copying basic files"
for file_name in .dockerignore \
.env \
Dockerfile \
pyproject.toml \
uv.lock \
requirements.txt \
LICENSE \
README.md \
paperless.conf.example
do
cp --verbose ${file_name} dist/paperless-ngx/
done
mv --verbose dist/paperless-ngx/paperless.conf.example dist/paperless-ngx/paperless.conf
echo "Copying Docker related files"
cp --recursive docker/ dist/paperless-ngx/docker
echo "Copying startup scripts"
cp --verbose scripts/*.service scripts/*.sh scripts/*.socket dist/paperless-ngx/scripts/
echo "Copying source files"
cp --recursive src/ dist/paperless-ngx/src
echo "Copying documentation"
cp --recursive docs/_build/html/ dist/paperless-ngx/docs
mv --verbose static dist/paperless-ngx
- name: Make release package
run: |
echo "Creating release archive"
cd dist
sudo chown -R 1000:1000 paperless-ngx/
tar -cJf paperless-ngx.tar.xz paperless-ngx/
- name: Upload release artifact
uses: actions/upload-artifact@v4
with:
name: release
path: dist/paperless-ngx.tar.xz
retention-days: 7
publish-release:
name: "Publish Release"
runs-on: ubuntu-24.04
outputs:
prerelease: ${{ steps.get_version.outputs.prerelease }}
changelog: ${{ steps.create-release.outputs.body }}
version: ${{ steps.get_version.outputs.version }}
needs:
- build-release
if: github.ref_type == 'tag' && (startsWith(github.ref_name, 'v') || contains(github.ref_name, '-beta.rc'))
steps:
- name: Download release artifact
uses: actions/download-artifact@v4
with:
name: release
path: ./
- name: Get version
id: get_version
run: |
echo "version=${{ github.ref_name }}" >> $GITHUB_OUTPUT
if [[ ${{ contains(github.ref_name, '-beta.rc') }} == 'true' ]]; then
echo "prerelease=true" >> $GITHUB_OUTPUT
else
echo "prerelease=false" >> $GITHUB_OUTPUT
fi
- name: Create Release and Changelog
id: create-release
uses: release-drafter/release-drafter@v6
with:
name: Paperless-ngx ${{ steps.get_version.outputs.version }}
tag: ${{ steps.get_version.outputs.version }}
version: ${{ steps.get_version.outputs.version }}
prerelease: ${{ steps.get_version.outputs.prerelease }}
publish: true # ensures release is not marked as draft
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload release archive
id: upload-release-asset
uses: shogo82148/actions-upload-release-asset@v1
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
upload_url: ${{ steps.create-release.outputs.upload_url }}
asset_path: ./paperless-ngx.tar.xz
asset_name: paperless-ngx-${{ steps.get_version.outputs.version }}.tar.xz
asset_content_type: application/x-xz
append-changelog:
name: "Append Changelog"
runs-on: ubuntu-24.04
needs:
- publish-release
if: needs.publish-release.outputs.prerelease == 'false'
steps:
- name: Checkout
uses: actions/checkout@v4
with:
ref: main
- name: Set up Python
id: setup-python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v6
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Append Changelog to docs
id: append-Changelog
working-directory: docs
run: |
git branch ${{ needs.publish-release.outputs.version }}-changelog
git checkout ${{ needs.publish-release.outputs.version }}-changelog
echo -e "# Changelog\n\n${{ needs.publish-release.outputs.changelog }}\n" > changelog-new.md
echo "Manually linking usernames"
sed -i -r 's|@([a-zA-Z0-9_]+) \(\[#|[@\1](https://github.com/\1) ([#|g' changelog-new.md
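# i.e. rewrite "@user ([#..." as "[@user](https://github.com/user) ([#..." so usernames become profile links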
echo "Removing unneeded comment tags"
sed -i -r 's|@<!---->|@|g' changelog-new.md
CURRENT_CHANGELOG=`tail --lines +2 changelog.md`
echo -e "$CURRENT_CHANGELOG" >> changelog-new.md
mv changelog-new.md changelog.md
uv run \
--python ${{ steps.setup-python.outputs.python-version }} \
--dev \
pre-commit run --files changelog.md || true
git config --global user.name "github-actions"
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git commit -am "Changelog ${{ needs.publish-release.outputs.version }} - GHA"
git push origin ${{ needs.publish-release.outputs.version }}-changelog
- name: Create Pull Request
uses: actions/github-script@v7
with:
script: |
const { repo, owner } = context.repo;
const result = await github.rest.pulls.create({
title: 'Documentation: Add ${{ needs.publish-release.outputs.version }} changelog',
owner,
repo,
head: '${{ needs.publish-release.outputs.version }}-changelog',
base: 'main',
body: 'This PR is auto-generated by CI.'
});
github.rest.issues.addLabels({
owner,
repo,
issue_number: result.data.number,
labels: ['documentation', 'skip-changelog']
});


@@ -1,63 +0,0 @@
# This workflow runs on certain conditions to check for and potentially
# delete container images from the GHCR which no longer have an associated
# code branch.
# Requires a PAT with the correct scope set in the secrets.
#
# This workflow will not trigger runs on forked repos.
name: Cleanup Image Tags
on:
delete:
push:
paths:
- ".github/workflows/cleanup-tags.yml"
concurrency:
group: registry-tags-cleanup
cancel-in-progress: false
jobs:
cleanup-images:
name: Cleanup Image Tags for ${{ matrix.primary-name }}
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-24.04
strategy:
fail-fast: false
matrix:
primary-name: ["paperless-ngx", "paperless-ngx/builder/cache/app"]
env:
# Requires a personal access token with the OAuth scope delete:packages
TOKEN: ${{ secrets.GHA_CONTAINER_DELETE_TOKEN }}
steps:
- name: Clean temporary images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/ephemeral@v0.10.0
with:
token: "${{ env.TOKEN }}"
owner: "${{ github.repository_owner }}"
is_org: "true"
package_name: "${{ matrix.primary-name }}"
scheme: "branch"
repo_name: "paperless-ngx"
match_regex: "(feature|fix)"
do_delete: "true"
cleanup-untagged-images:
name: Cleanup Untagged Image Tags for ${{ matrix.primary-name }}
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-24.04
needs:
- cleanup-images
strategy:
fail-fast: false
matrix:
primary-name: ["paperless-ngx", "paperless-ngx/builder/cache/app"]
env:
# Requires a personal access token with the OAuth scope delete:packages
TOKEN: ${{ secrets.GHA_CONTAINER_DELETE_TOKEN }}
steps:
- name: Clean untagged images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/untagged@v0.10.0
with:
token: "${{ env.TOKEN }}"
owner: "${{ github.repository_owner }}"
is_org: "true"
package_name: "${{ matrix.primary-name }}"
do_delete: "true"


@@ -1,48 +0,0 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
push:
branches: [main, dev]
pull_request:
# The branches below must be a subset of the branches above
branches: [dev]
schedule:
- cron: '28 13 * * 5'
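# (i.e. weekly, on Fridays at 13:28 UTC)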
jobs:
analyze:
name: Analyze
runs-on: ubuntu-24.04
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: ['javascript', 'python']
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Learn more about CodeQL language support at https://git.io/codeql-language-support
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3


@@ -1,30 +0,0 @@
name: Crowdin Action
on:
workflow_dispatch:
schedule:
- cron: '2 */12 * * *'
push:
paths: ['src/locale/**', 'src-ui/messages.xlf', 'src-ui/src/locale/**']
branches: [dev]
jobs:
synchronize-with-crowdin:
name: Crowdin Sync
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v4
with:
token: ${{ secrets.PNGX_BOT_PAT }}
- name: crowdin action
uses: crowdin/github-action@v2
with:
upload_translations: false
download_translations: true
crowdin_branch_name: 'dev'
localization_branch_name: l10n_dev
pull_request_labels: 'skip-changelog, translation'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CROWDIN_PROJECT_ID: ${{ secrets.CROWDIN_PROJECT_ID }}
CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}


@@ -1,112 +0,0 @@
name: PR Bot
on:
pull_request_target:
types: [opened]
permissions:
contents: read
pull-requests: write
jobs:
pr-bot:
name: Automated PR Bot
runs-on: ubuntu-latest
steps:
- name: Label PR by file path or branch name
# see .github/labeler.yml for the labeler config
uses: actions/labeler@v5
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
- name: Label by size
uses: Gascon1/pr-size-labeler@v1.3.0
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
xs_label: 'small-change'
xs_diff: '9'
s_label: 'non-trivial'
s_diff: '99999'
fail_if_xl: 'false'
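# Keep lockfiles and auto-generated translation files out of the PR size calculation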
excluded_files: /\.lock$/ /\.txt$/ ^src-ui/pnpm-lock\.yaml$ ^src-ui/messages\.xlf$ ^src/locale/en_US/LC_MESSAGES/django\.po$
- name: Label by PR title
uses: actions/github-script@v7
with:
script: |
const pr = context.payload.pull_request;
const title = pr.title.toLowerCase();
const labels = [];
if (/^(fix|bugfix)/i.test(title)) {
labels.push('bug');
} else if (/^feature/i.test(title)) {
labels.push('enhancement');
} else if (!/^(dependabot)/i.test(title)) {
labels.push('enhancement'); // Default fallback
}
if (labels.length) {
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: pr.number,
labels,
});
core.info(`Added labels based on title: ${labels.join(', ')}`);
}
- name: Label bot-generated PRs
if: ${{ contains(github.actor, 'dependabot') || contains(github.actor, 'crowdin-bot') }}
uses: actions/github-script@v7
with:
script: |
const pr = context.payload.pull_request;
const user = pr.user.login.toLowerCase();
const labels = [];
if (user.includes('dependabot')) {
labels.push('dependencies');
}
if (user.includes('crowdin-bot')) {
labels.push('translation', 'skip-changelog');
}
if (labels.length) {
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: pr.number,
labels,
});
}
- name: Welcome comment
if: ${{ !contains(github.actor, 'bot') }}
uses: actions/github-script@v7
with:
script: |
const pr = context.payload.pull_request;
const user = pr.user.login;
const { data: members } = await github.rest.orgs.listMembers({
org: 'paperless-ngx',
});
const memberLogins = members.map(m => m.login.toLowerCase());
if (memberLogins.includes(user.toLowerCase())) {
core.info('Skipping comment: user is org member');
return;
}
const body =
"Hello @" + user + ",\n\n" +
"Thank you very much for submitting this PR to us!\n\n" +
"This is what will happen next:\n\n" +
"1. CI tests will run against your PR to ensure quality and consistency.\n" +
"2. Next, human contributors from paperless-ngx review your changes.\n" +
"3. Please address any issues that come up during the review as soon as you are able to.\n" +
"4. If accepted, your pull request will be merged into the `dev` branch and changes there will be tested further.\n" +
"5. Eventually, changes from you and other contributors will be merged into `main` and a new release will be made.\n\n" +
"You'll be hearing from us soon, and thank you again for contributing to our project.";
await github.rest.issues.createComment({
issue_number: pr.number,
owner: context.repo.owner,
repo: context.repo.repo,
body,
});


@@ -1,24 +0,0 @@
name: Project Automations
on:
pull_request_target: #_target allows access to secrets
types:
- opened
- reopened
branches:
- main
- dev
permissions:
contents: read
jobs:
pr_opened_or_reopened:
name: pr_opened_or_reopened
runs-on: ubuntu-24.04
permissions:
# write permission is required for autolabeler
pull-requests: write
if: github.event_name == 'pull_request_target' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request.user.login != 'dependabot'
steps:
- name: Label PR with release-drafter
uses: release-drafter/release-drafter@v6
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@@ -1,290 +0,0 @@
name: 'Repository Maintenance'
on:
schedule:
- cron: '0 3 * * *'
workflow_dispatch:
permissions:
issues: write
pull-requests: write
discussions: write
concurrency:
group: lock
jobs:
stale:
name: 'Stale'
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-24.04
steps:
- uses: actions/stale@v9
with:
days-before-stale: 7
days-before-close: 14
any-of-issue-labels: 'cant-reproduce,not a bug'
stale-issue-label: stale
stale-issue-message: >
This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions. See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
days-before-pr-stale: 14
days-before-pr-close: 7
stale-pr-message: ""
stale-pr-label: stale
exempt-pr-labels: 'notable'
close-pr-message: >
This pull request has been automatically closed because it has not had recent activity. Thank you for your contributions. Please open a new pull request or discussion if you would like to continue working on this change.
lock-threads:
name: 'Lock Old Threads'
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-24.04
steps:
- uses: dessant/lock-threads@v5
with:
issue-inactive-days: '30'
pr-inactive-days: '30'
discussion-inactive-days: '30'
log-output: true
issue-comment: >
This issue has been automatically locked since there has not been any recent activity after it was closed. Please open a new discussion or issue for related concerns. See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
pr-comment: >
This pull request has been automatically locked since there has not been any recent activity after it was closed. Please open a new discussion or issue for related concerns. See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
discussion-comment: >
This discussion has been automatically locked since there has not been any recent activity after it was closed. Please open a new discussion for related concerns. See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
close-answered-discussions:
name: 'Close Answered Discussions'
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-24.04
steps:
- uses: actions/github-script@v7
with:
script: |
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
const query = `query($owner:String!, $name:String!) {
repository(owner:$owner, name:$name){
discussions(first:100, answered:true, states:[OPEN]) {
nodes {
id,
number
}
}
}
}`;
const variables = {
owner: context.repo.owner,
name: context.repo.repo,
}
const result = await github.graphql(query, variables)
console.log(`Found ${result.repository.discussions.nodes.length} open answered discussions`)
for (const discussion of result.repository.discussions.nodes) {
console.log(`Closing discussion #${discussion.number} (${discussion.id})`)
const addCommentMutation = `mutation($discussion:ID!, $body:String!) {
addDiscussionComment(input:{discussionId:$discussion, body:$body}) {
clientMutationId
}
}`;
const commentVariables = {
discussion: discussion.id,
body: 'This discussion has been automatically closed because it was marked as answered. Please see our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.',
}
await github.graphql(addCommentMutation, commentVariables)
const closeDiscussionMutation = `mutation($discussion:ID!, $reason:DiscussionCloseReason!) {
closeDiscussion(input:{discussionId:$discussion, reason:$reason}) {
clientMutationId
}
}`;
const closeVariables = {
discussion: discussion.id,
reason: "RESOLVED",
}
await github.graphql(closeDiscussionMutation, closeVariables)
await sleep(1000)
}
close-outdated-discussions:
name: 'Close Outdated Discussions'
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-24.04
steps:
- uses: actions/github-script@v7
with:
script: |
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
const CUTOFF_DAYS = 180;
const cutoff = new Date();
cutoff.setDate(cutoff.getDate() - CUTOFF_DAYS);
const query = `query(
$owner:String!,
$name:String!,
$supportCategory:ID!,
$generalCategory:ID!,
) {
supportDiscussions: repository(owner:$owner, name:$name){
discussions(
categoryId:$supportCategory,
last:50,
answered:false,
states:[OPEN],
) {
nodes {
id,
number,
updatedAt
}
},
},
generalDiscussions: repository(owner:$owner, name:$name){
discussions(
categoryId:$generalCategory,
last:50,
states:[OPEN],
) {
nodes {
id,
number,
updatedAt
}
}
}
}`;
const variables = {
owner: context.repo.owner,
name: context.repo.repo,
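// GraphQL node IDs of this repository's 'Support' and 'General' discussion categories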
supportCategory: "DIC_kwDOG1Zs184CBKWK",
generalCategory: "DIC_kwDOG1Zs184CBKWJ"
}
const result = await github.graphql(query, variables);
const combinedDiscussions = [
...result.supportDiscussions.discussions.nodes,
...result.generalDiscussions.discussions.nodes,
]
console.log(`Checking ${combinedDiscussions.length} open discussions`);
for (const discussion of combinedDiscussions) {
if (new Date(discussion.updatedAt) < cutoff) {
console.log(`Closing outdated discussion #${discussion.number} (${discussion.id}), last updated at ${discussion.updatedAt}`);
const addCommentMutation = `mutation($discussion:ID!, $body:String!) {
addDiscussionComment(input:{discussionId:$discussion, body:$body}) {
clientMutationId
}
}`;
const commentVariables = {
discussion: discussion.id,
body: 'This discussion has been automatically closed due to inactivity. Please see our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.',
}
await github.graphql(addCommentMutation, commentVariables);
const closeDiscussionMutation = `mutation($discussion:ID!, $reason:DiscussionCloseReason!) {
closeDiscussion(input:{discussionId:$discussion, reason:$reason}) {
clientMutationId
}
}`;
const closeVariables = {
discussion: discussion.id,
reason: "OUTDATED",
}
await github.graphql(closeDiscussionMutation, closeVariables);
await sleep(1000);
}
}
close-unsupported-feature-requests:
name: 'Close Unsupported Feature Requests'
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-24.04
steps:
- uses: actions/github-script@v7
with:
script: |
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
const CUTOFF_MAX_COUNT = 80;
const CUTOFF_1_DAYS = 180;
const CUTOFF_1_COUNT = 5;
const CUTOFF_2_DAYS = 365;
const CUTOFF_2_COUNT = 20;
const CUTOFF_3_DAYS = 730;
const CUTOFF_3_COUNT = 40;
const cutoff1Date = new Date();
cutoff1Date.setDate(cutoff1Date.getDate() - CUTOFF_1_DAYS);
const cutoff2Date = new Date();
cutoff2Date.setDate(cutoff2Date.getDate() - CUTOFF_2_DAYS);
const cutoff3Date = new Date();
cutoff3Date.setDate(cutoff3Date.getDate() - CUTOFF_3_DAYS);
const query = `query(
$owner:String!,
$name:String!,
$featureRequestsCategory:ID!,
) {
repository(owner:$owner, name:$name){
discussions(
categoryId:$featureRequestsCategory,
last:100,
states:[OPEN],
) {
nodes {
id,
number,
updatedAt,
createdAt,
upvoteCount,
}
},
}
}`;
const variables = {
owner: context.repo.owner,
name: context.repo.repo,
featureRequestsCategory: "DIC_kwDOG1Zs184CBNr4"
}
const result = await github.graphql(query, variables);
for (const discussion of result.repository.discussions.nodes) {
const discussionUpdatedDate = new Date(discussion.updatedAt);
const discussionCreatedDate = new Date(discussion.createdAt);
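// Close a feature request when it has gone quiet without reaching the overall vote cap,
// or when it has aged past a cutoff without collecting the corresponding number of up-votes.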
if ((discussionUpdatedDate < cutoff1Date && discussion.upvoteCount < CUTOFF_MAX_COUNT) ||
(discussionCreatedDate < cutoff1Date && discussion.upvoteCount < CUTOFF_1_COUNT) ||
(discussionCreatedDate < cutoff2Date && discussion.upvoteCount < CUTOFF_2_COUNT) ||
(discussionCreatedDate < cutoff3Date && discussion.upvoteCount < CUTOFF_3_COUNT)) {
console.log(`Closing discussion #${discussion.number} (${discussion.id}), last updated at ${discussion.updatedAt} with votes ${discussion.upvoteCount}`);
const addCommentMutation = `mutation($discussion:ID!, $body:String!) {
addDiscussionComment(input:{discussionId:$discussion, body:$body}) {
clientMutationId
}
}`;
const commentVariables = {
discussion: discussion.id,
body: 'This discussion has been automatically closed due to lack of community support. Please see our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.',
}
await github.graphql(addCommentMutation, commentVariables);
const closeDiscussionMutation = `mutation($discussion:ID!, $reason:DiscussionCloseReason!) {
closeDiscussion(input:{discussionId:$discussion, reason:$reason}) {
clientMutationId
}
}`;
const closeVariables = {
discussion: discussion.id,
reason: "OUTDATED",
}
await github.graphql(closeDiscussionMutation, closeVariables);
await sleep(1000);
}
}


@@ -1,69 +0,0 @@
name: Generate Translation Strings
on:
push:
branches:
- dev
jobs:
generate-translate-strings:
name: Generate Translation Strings
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
token: ${{ secrets.PNGX_BOT_PAT }}
ref: ${{ github.head_ref }}
- name: Set up Python
id: setup-python
uses: actions/setup-python@v5
- name: Install system dependencies
run: |
sudo apt-get update -qq
sudo apt-get install -qq --no-install-recommends gettext
- name: Install uv
uses: astral-sh/setup-uv@v6
with:
enable-cache: true
- name: Install backend python dependencies
run: |
uv sync \
--group dev \
--frozen
- name: Generate backend translation strings
run: cd src/ && uv run manage.py makemessages -l en_US -i "samples*"
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 10
- name: Use Node.js 20
uses: actions/setup-node@v4
with:
node-version: 20.x
cache: 'pnpm'
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
- name: Cache frontend dependencies
id: cache-frontend-deps
uses: actions/cache@v4
with:
path: |
~/.pnpm-store
~/.cache
key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
- name: Install frontend dependencies
if: steps.cache-frontend-deps.outputs.cache-hit != 'true'
run: cd src-ui && pnpm install
- name: Re-link Angular cli
run: cd src-ui && pnpm link @angular/cli
- name: Generate frontend translation strings
run: |
cd src-ui
pnpm run ng extract-i18n
- name: Commit changes
uses: stefanzweifel/git-auto-commit-action@v6
with:
file_pattern: 'src-ui/messages.xlf src/locale/en_US/LC_MESSAGES/django.po'
commit_message: "Auto translate strings"
commit_user_name: "GitHub Actions"
commit_author: "GitHub Actions <41898282+github-actions[bot]@users.noreply.github.com>"

.gitignore

@@ -22,7 +22,6 @@ var/
*.egg-info/
.installed.cfg
*.egg
/src/paperless_mail/templates/node_modules
# PyInstaller
# Usually these files are written by a python script from a template
@@ -44,7 +43,6 @@ nosetests.xml
coverage.xml
*,cover
.pytest_cache
junit.xml
# Translations
*.mo
@@ -53,57 +51,37 @@ junit.xml
# Django stuff:
*.log
# MkDocs documentation
site/
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Stored PDFs
media/documents/*.gpg
media/documents/thumbnails/*
media/documents/originals/*
media/overrides.css
media/overrides.js
# Sqlite database
db.sqlite3
# PyCharm
.idea
# VS Code
.vscode
/src-ui/.vscode
/docs/.vscode
.vscode-server
*CommandMarker
# Other stuff that doesn't belong
.virtualenv
virtualenv
/venv
.venv/
/docker-compose.env
/docker-compose.yml
.ruff_cache/
docker-compose.yml
docker-compose.env
# Used for development
scripts/import-for-development
scripts/nuke
# Static files collected by the collectstatic command
/static/
static/
# Stored PDFs
/media/
/data/
/paperless.conf
/consume/
/export/
# this is where the compiled frontend is moved to.
/src/documents/static/frontend/
# mac os
.DS_Store
# celery schedule file
celerybeat-schedule*
# ignore .devcontainer sub folders
/.devcontainer/consume/
/.devcontainer/data/
/.devcontainer/media/
/.devcontainer/redisdata/
# Classification Models
models/


@@ -1,8 +0,0 @@
failure-threshold: warning
ignored:
# https://github.com/hadolint/hadolint/wiki/DL3008
- DL3008
# https://github.com/hadolint/hadolint/wiki/DL3013
- DL3013
# https://github.com/hadolint/hadolint/wiki/DL3003
- DL3003


@@ -1,82 +0,0 @@
# This file configures pre-commit hooks.
# See https://pre-commit.com/ for general information
# See https://pre-commit.com/hooks.html for a listing of possible hooks
repos:
# General hooks
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
- id: check-docstring-first
- id: check-json
exclude: "tsconfig.*json"
- id: check-yaml
args:
- "--unsafe"
- id: check-toml
- id: check-executables-have-shebangs
- id: end-of-file-fixer
exclude_types:
- svg
- pofile
exclude: "(^LICENSE$)"
- id: mixed-line-ending
args:
- "--fix=lf"
- id: trailing-whitespace
exclude_types:
- svg
- id: check-case-conflict
- id: detect-private-key
- repo: https://github.com/codespell-project/codespell
rev: v2.4.1
hooks:
- id: codespell
exclude: "(^src-ui/src/locale/)|(^src-ui/pnpm-lock.yaml)|(^src-ui/e2e/)|(^src/paperless_mail/tests/samples/)|(^src/documents/tests/samples/)"
exclude_types:
- pofile
- json
# See https://github.com/prettier/prettier/issues/15742 for the fork reason
- repo: https://github.com/rbubley/mirrors-prettier
rev: 'v3.6.2'
hooks:
- id: prettier
types_or:
- javascript
- ts
- markdown
additional_dependencies:
- prettier@3.3.3
- 'prettier-plugin-organize-imports@4.1.0'
# Python hooks
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.12.2
hooks:
- id: ruff
- id: ruff-format
- repo: https://github.com/tox-dev/pyproject-fmt
rev: "v2.6.0"
hooks:
- id: pyproject-fmt
# Dockerfile hooks
- repo: https://github.com/AleksaC/hadolint-py
rev: v2.12.1b3
hooks:
- id: hadolint
# Shell script hooks
- repo: https://github.com/lovesegfault/beautysh
rev: v6.2.1
hooks:
- id: beautysh
additional_dependencies:
- setuptools
args:
- "--tab"
- repo: https://github.com/shellcheck-py/shellcheck-py
rev: "v0.10.0.1"
hooks:
- id: shellcheck
- repo: https://github.com/google/yamlfmt
rev: v0.17.2
hooks:
- id: yamlfmt
exclude: "^src-ui/pnpm-lock.yaml"


@@ -1,19 +0,0 @@
const config = {
// https://prettier.io/docs/en/options.html#semicolons
semi: false,
// https://prettier.io/docs/en/options.html#quotes
singleQuote: true,
// https://prettier.io/docs/en/options.html#trailing-commas
trailingComma: 'es5',
overrides: [
{
files: ['docs/*.md'],
options: {
tabWidth: 4,
},
},
],
plugins: [require('prettier-plugin-organize-imports')],
}
module.exports = config

.travis.yml

@@ -0,0 +1,25 @@
language: python
before_install:
- sudo apt-get update -qq
- sudo apt-get install -qq libpoppler-cpp-dev unpaper tesseract-ocr tesseract-ocr-eng tesseract-ocr-cat tesseract-ocr-deu
sudo: false
matrix:
include:
- python: 3.4
- python: 3.5
- python: 3.6
install:
- pip install --requirement requirements.txt
- pip install sphinx
script:
- cd src/
- pytest --cov
- pycodestyle
- sphinx-build -b html ../docs ../docs/_build -W
after_success:
- coveralls


@@ -1,10 +0,0 @@
/.github/workflows/ @paperless-ngx/ci-cd
/docker/ @paperless-ngx/ci-cd
/scripts/ @paperless-ngx/ci-cd
/src-ui/ @paperless-ngx/frontend
/src/ @paperless-ngx/backend
pyproject.toml @paperless-ngx/backend
uv.lock @paperless-ngx/backend
*.py @paperless-ngx/backend


@@ -2,127 +2,45 @@
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socioeconomic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
Examples of behavior that contributes to creating a positive environment include:
- Demonstrating empathy and kindness toward other people
- Being respectful of differing opinions, viewpoints, and experiences
- Giving and gracefully accepting constructive feedback
- Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
- Focusing on what is best not just for us as individuals, but for the
overall community
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior include:
Examples of unacceptable behavior by participants include:
- The use of sexualized language or imagery, and sexual attention or
advances of any kind
- Trolling, insulting or derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or email
address, without their explicit permission
- Other conduct which could reasonably be considered inappropriate in a
professional setting
* Unwelcome sexual attention or advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting
## Enforcement Responsibilities
## Our Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
hello@paperless-ngx.com.
All complaints will be reviewed and investigated promptly and fairly.
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at code@danielquinn.org. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4 to remove puritanical language. The original is available at [http://contributor-covenant.org/version/1/4][version]
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/


@@ -1,155 +0,0 @@
# Contributing
If you feel like contributing to the project, please do! Bug fixes and improvements are always welcome.
If you want to implement something big:
- Please start a discussion about that in the issues! Maybe something similar is already in development and we can make it happen together.
- When making additions to the project, consider if the majority of users will benefit from your change. If not, you're probably better off forking the project.
- Also consider if your change will get in the way of other users. A good change is a change that enhances the experience of some users who want that change and does not affect users who do not care about the change.
- Please see the [paperless-ngx merge process](#merging-prs) below.
## Python
Paperless supports python 3.10 - 3.12 at this time. We format Python code with [ruff](https://docs.astral.sh/ruff/formatter/).
## Branches
`main` always reflects the latest release. Apart from changes to the documentation or readme, absolutely no functional changes on this branch in between releases.
`dev` contains all changes that will be part of the next release. Use this branch to start making your changes.
`feature-X` branches are for experimental stuff that will eventually be merged into dev.
## Testing:
Please format and test your code! I know it's a hassle, but it makes sure that your code works now and will allow us to detect regressions easily.
To test your code, execute `pytest` in the src/ directory. This also generates an HTML coverage report, which you can use to see if you missed anything important during testing.
Before you can run `pytest`, ensure to [properly set up your local environment](https://docs.paperless-ngx.com/development/#initial-setup-and-first-start).
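For example, a minimal test module picked up by that `pytest` run could look like the sketch below (the file path and the assertion are purely illustrative and not taken from the code base; most real tests in src/ use Django's `TestCase` and exercise actual paperless code):

```python
# src/documents/tests/test_example.py -- hypothetical file, for illustration only
def test_basic_sanity():
    # Replace this with assertions against the code you actually changed.
    assert 1 + 1 == 2
```

Running `pytest` from src/ collects such a module together with the existing suite and includes it in the HTML coverage report mentioned above.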
## More info:
... is available [in the documentation](https://docs.paperless-ngx.com/development).
# Merging PRs
Once you have submitted a **P**ull **R**equest it will be reviewed, approved, and merged by one or more community members of any team. Automated code tests and formatting checks must be passed.
## Non-Trivial Requests
PRs deemed `non-trivial` will go through a stricter review process before being merged into `dev`. This is to ensure code quality and complete functionality (free of side effects).
Examples of `non-trivial` PRs might include:
- Additional features
- Large changes to many distinct files
- Breaking or deprecation of existing features
Our community review process for `non-trivial` PRs is the following:
1. Must pass usual automated code tests and formatting checks.
2. The PR will be assigned and pinged to the appropriately experienced team (i.e. @paperless-ngx/backend for backend changes).
3. Development team will check and test code manually (possibly over several days).
- You may be asked to make changes or rebase.
- The team may ask for additional testing done by @paperless-ngx/test
4. **At least two** members of the team will approve and finally merge the request into `dev` 🎉.
This process might be slow as community members have different schedules and time to dedicate to the Paperless project. However it ensures community code reviews are as brilliantly thorough as they once were with @jonaswinkler.
# AI-Generated Code
This project does not specifically prohibit the use of AI-generated code _during the process_ of creating a PR, however:
1. Any code present in the final PR that was generated using AI sources should be clearly attributed as such and must not violate copyright protections.
2. We will not accept PRs that are entirely or mostly AI-derived.
# Translating Paperless-ngx
Some notes about translation:
- There are two resources:
- `src-ui/messages.xlf` contains the translation strings for the front end. This is the most important.
- `django.po` contains strings for the administration section of paperless, which is nice to have translated.
- Most of the front-end strings are used on buttons, menu items, etc., so ideally the translated string should not be much longer than the English original.
- Translation units may contain placeholders. These usually mean that there's a name of a tag or document or something in the string. You can click on the placeholders to copy them.
- Translation units may contain plural expressions such as `{PLURAL_VAR, plural, =1 {one result} =0 {no results} other {<placeholder> results}}`. Copy these verbatim and translate only the content in the inner `{}` brackets. Example: `{PLURAL_VAR, plural, =1 {Ein Ergebnis} =0 {Keine Ergebnisse} other {<placeholder> Ergebnisse}}`
- Changes to translations on Crowdin will get pushed into the repository automatically.
## Adding new languages to the codebase
If a language has already been added, and you would like to contribute new translations or change existing translations, please read the "Translation" section in the README.md file for further details on that.
If you would like the project to be translated to another language, first head over to https://crowdin.com/project/paperless-ngx to check if that language has already been enabled for translation.
If not, please request the language to be added by creating an issue on GitHub. The issue should contain:
- English name of the language (the localized name can be added on Crowdin).
- ISO language code. A list of those can be found here: https://support.crowdin.com/enterprise/language-codes/
- Date format commonly used for the language, e.g. dd/mm/yyyy, mm/dd/yyyy, etc.
After the language has been added and some translations have been made on Crowdin, the language needs to be enabled in the code.
Note that there is no need to manually add a .po or .xlf file as those will be automatically generated and imported from Crowdin.
The following files need to be changed:
- src-ui/angular.json (under the _projects/paperless-ui/i18n/locales_ JSON key)
- src/paperless/settings.py (in the _LANGUAGES_ array; see the sketch after this list)
- src-ui/src/app/services/settings.service.ts (inside the _LANGUAGE_OPTIONS_ array)
- src-ui/src/app/app.module.ts (import locale from _angular/common/locales_ and call _registerLocaleData_)
Please add the language in the correct order, alphabetically by locale.
Note that _en-us_ needs to stay on top of the list, as it is the default project language.
If you are familiar with Git, feel free to send a Pull Request with those changes.
If not, let us know in the issue you created for the language, so that another developer can make these changes.
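As a rough sketch of the backend side (the locale codes and surrounding entries below are placeholders, not the actual file contents), the _LANGUAGES_ change in src/paperless/settings.py would look something like this; the corresponding front-end entries go into the TypeScript and JSON files listed above:

```python
# src/paperless/settings.py (excerpt) -- illustrative only, not the real list
from django.utils.translation import gettext_lazy as _

LANGUAGES = [
    ("en-us", _("English (US)")),  # must stay first: default project language
    ("de-de", _("German")),
    ("pt-pt", _("Portuguese")),    # new entry, inserted alphabetically by locale
]
```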
# Organization Structure & Membership
Paperless-ngx is a community project. We do our best to delegate permission and responsibility among a team of people to ensure the longevity of the project.
## Structure
As of writing, there are 21 members in paperless-ngx. 4 of these people have complete administrative privileges to the repo:
- [@shamoon](https://github.com/shamoon)
- [@bauerj](https://github.com/bauerj)
- [@qcasey](https://github.com/qcasey)
- [@FrankStrieter](https://github.com/FrankStrieter)
There are 5 teams collaborating on specific tasks within paperless-ngx:
- @paperless-ngx/backend (Python / django)
- @paperless-ngx/frontend (JavaScript / Typescript)
- @paperless-ngx/ci-cd (GitHub Actions / Deployment)
- @paperless-ngx/issues (Issue triage)
- @paperless-ngx/test (General testing for larger PRs)
## Permissions
All team members are notified when mentioned or assigned to a relevant issue or pull request. Additionally, each team has slightly different access to paperless-ngx:
- The **test** team has no special permissions.
- The **issues** team has `triage` access. This means they can organize issues and pull requests.
- The **backend**, **frontend**, and **ci-cd** teams have `write` access. This means they can approve PRs and push code, containers, releases, and more.
## Joining
We are not overly strict with inviting people to the organization. If you have read the [team permissions](#permissions) and think having additional access would enhance your contributions, please reach out to an [admin](#structure) of the team.
The admins occasionally invite contributors directly if we believe having them on a team will accelerate their work.
# Automatic Repository Maintenance
The Paperless-ngx team appreciates all effort and interest from the community in filing bug reports, creating feature requests, sharing ideas and helping other
community members. That said, in an effort to keep the repository organized and manageable the project uses automatic handling of certain areas:
- Issues that cannot be reproduced will be marked 'stale' after 7 days of inactivity and closed after 14 further days of inactivity.
- Issues, pull requests and discussions that are closed will be locked after 30 days of inactivity.
- Discussions with a marked answer will be automatically closed.
- Discussions in the 'General' or 'Support' categories will be closed after 180 days of inactivity.
- Feature requests that do not meet the following thresholds will be closed: 180 days of inactivity, < 5 "up-votes" after 180 days, < 20 "up-votes" after 1 year or < 80 "up-votes" at 2 years.
In all cases, threads can be re-opened by project maintainers and, of course, users can always create a new discussion for related concerns.
Finally, remember that all information remains searchable and 'closed' feature requests can still serve as inspiration for new features.
Thank you all for your contributions.


@@ -1,268 +1,47 @@
# syntax=docker/dockerfile:1
# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/reference.md
FROM alpine:3.8
# Stage: compile-frontend
# Purpose: Compiles the frontend
# Notes:
# - Does PNPM stuff with Typescript and such
FROM --platform=$BUILDPLATFORM docker.io/node:20-bookworm-slim AS compile-frontend
LABEL maintainer="The Paperless Project https://github.com/danielquinn/paperless" \
contributors="Guy Addadi <addadi@gmail.com>, Pit Kleyersburg <pitkley@googlemail.com>, \
Sven Fischer <git-dev@linux4tw.de>"
COPY ./src-ui /src/src-ui
# Copy requirements file and init script
COPY requirements.txt /usr/src/paperless/
COPY scripts/docker-entrypoint.sh /sbin/docker-entrypoint.sh
WORKDIR /src/src-ui
RUN set -eux \
&& npm update -g pnpm \
&& npm install -g corepack@latest \
&& corepack enable \
&& pnpm install
# Set export and consumption directories
ENV PAPERLESS_EXPORT_DIR=/export \
PAPERLESS_CONSUMPTION_DIR=/consume
ARG PNGX_TAG_VERSION=
# Add the tag to the environment file if it's a tagged dev build
RUN set -eux && \
case "${PNGX_TAG_VERSION}" in \
dev|beta|fix*|feature*) \
sed -i -E "s/tag: '([a-z\.]+)'/tag: '${PNGX_TAG_VERSION}'/g" /src/src-ui/src/environments/environment.prod.ts \
;; \
esac
RUN set -eux \
&& ./node_modules/.bin/ng build --configuration production
RUN apk update --no-cache && apk add python3 gnupg libmagic libpq bash shadow curl \
sudo poppler tesseract-ocr imagemagick ghostscript unpaper optipng && \
apk add --virtual .build-dependencies \
python3-dev poppler-dev postgresql-dev gcc g++ musl-dev zlib-dev jpeg-dev && \
# Install python dependencies
python3 -m ensurepip && \
rm -r /usr/lib/python*/ensurepip && \
cd /usr/src/paperless && \
pip3 install --no-cache-dir -r requirements.txt && \
# Remove build dependencies
apk del .build-dependencies && \
# Create the consumption directory
mkdir -p $PAPERLESS_CONSUMPTION_DIR && \
# Create user
addgroup -g 1000 paperless && \
adduser -D -u 1000 -G paperless -h /usr/src/paperless paperless && \
chown -Rh paperless:paperless /usr/src/paperless && \
mkdir -p $PAPERLESS_EXPORT_DIR && \
# Setup entrypoint
chmod 755 /sbin/docker-entrypoint.sh
# Stage: s6-overlay-base
# Purpose: Installs s6-overlay and rootfs
# Comments:
# - Don't leave anything extra in here either
FROM ghcr.io/astral-sh/uv:0.8.4-python3.12-bookworm-slim AS s6-overlay-base
WORKDIR /usr/src/paperless/src
# Mount volumes and set Entrypoint
VOLUME ["/usr/src/paperless/data", "/usr/src/paperless/media", "/consume", "/export"]
ENTRYPOINT ["/sbin/docker-entrypoint.sh"]
CMD ["--help"]
WORKDIR /usr/src/s6
# Copy application
COPY src/ /usr/src/paperless/src/
COPY data/ /usr/src/paperless/data/
COPY media/ /usr/src/paperless/media/
# https://github.com/just-containers/s6-overlay#customizing-s6-overlay-behaviour
ENV \
S6_BEHAVIOUR_IF_STAGE2_FAILS=2 \
S6_CMD_WAIT_FOR_SERVICES_MAXTIME=0 \
S6_VERBOSITY=1 \
PATH=/command:$PATH
# Buildx provided, must be defined to use though
ARG TARGETARCH
ARG TARGETVARIANT
# Lock this version
ARG S6_OVERLAY_VERSION=3.2.1.0
ARG S6_BUILD_TIME_PKGS="curl \
xz-utils"
RUN set -eux \
&& echo "Installing build time packages" \
&& apt-get update \
&& apt-get install --yes --quiet --no-install-recommends ${S6_BUILD_TIME_PKGS} \
&& echo "Determining arch" \
&& S6_ARCH="" \
&& if [ "${TARGETARCH}${TARGETVARIANT}" = "amd64" ]; then S6_ARCH="x86_64"; \
elif [ "${TARGETARCH}${TARGETVARIANT}" = "arm64" ]; then S6_ARCH="aarch64"; fi\
&& if [ -z "${S6_ARCH}" ]; then { echo "Error: Not able to determine arch"; exit 1; }; fi \
&& echo "Installing s6-overlay for ${S6_ARCH}" \
&& curl --fail --silent --no-progress-meter --show-error --location --remote-name-all --parallel --parallel-max 4 \
"https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-noarch.tar.xz" \
"https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-noarch.tar.xz.sha256" \
"https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-${S6_ARCH}.tar.xz" \
"https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-${S6_ARCH}.tar.xz.sha256" \
&& echo "Validating s6-archive checksums" \
&& sha256sum --check ./*.sha256 \
&& echo "Unpacking archives" \
&& tar --directory / -Jxpf s6-overlay-noarch.tar.xz \
&& tar --directory / -Jxpf s6-overlay-${S6_ARCH}.tar.xz \
&& echo "Removing downloaded archives" \
&& rm ./*.tar.xz \
&& rm ./*.sha256 \
&& echo "Cleaning up image" \
&& apt-get --yes purge ${S6_BUILD_TIME_PKGS} \
&& apt-get --yes autoremove --purge \
&& rm -rf /var/lib/apt/lists/*
# Copy our service defs and filesystem
COPY ./docker/rootfs /
# Stage: main-app
# Purpose: The final image
# Comments:
# - Don't leave anything extra in here
FROM s6-overlay-base AS main-app
LABEL org.opencontainers.image.authors="paperless-ngx team <hello@paperless-ngx.com>"
LABEL org.opencontainers.image.documentation="https://docs.paperless-ngx.com/"
LABEL org.opencontainers.image.source="https://github.com/paperless-ngx/paperless-ngx"
LABEL org.opencontainers.image.url="https://github.com/paperless-ngx/paperless-ngx"
LABEL org.opencontainers.image.licenses="GPL-3.0-only"
ARG DEBIAN_FRONTEND=noninteractive
# Buildx provided, must be defined to use though
ARG TARGETARCH
# Can be workflow provided, defaults set for manual building
ARG JBIG2ENC_VERSION=0.30
ARG QPDF_VERSION=11.9.0
ARG GS_VERSION=10.03.1
# Set Python environment variables
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \
# Ignore warning from Whitenoise about async iterators
PYTHONWARNINGS="ignore:::django.http.response:517" \
PNGX_CONTAINERIZED=1 \
# https://docs.astral.sh/uv/reference/settings/#link-mode
UV_LINK_MODE=copy \
UV_CACHE_DIR=/cache/uv/
#
# Begin installation and configuration
# Order the steps below from least often changed to most
#
# Packages needed for running
ARG RUNTIME_PACKAGES="\
# General utils
curl \
# Docker specific
gosu \
# Timezones support
tzdata \
# fonts for text file thumbnail generation
fonts-liberation \
gettext \
ghostscript \
gnupg \
icc-profiles-free \
imagemagick \
# PostgreSQL
postgresql-client \
# MySQL / MariaDB
mariadb-client \
# OCRmyPDF dependencies
tesseract-ocr \
tesseract-ocr-eng \
tesseract-ocr-deu \
tesseract-ocr-fra \
tesseract-ocr-ita \
tesseract-ocr-spa \
unpaper \
pngquant \
jbig2dec \
# lxml
libxml2 \
libxslt1.1 \
# itself
qpdf \
# Mime type detection
file \
libmagic1 \
media-types \
zlib1g \
# Barcode splitter
libzbar0 \
poppler-utils"
# Install basic runtime packages.
# These change very infrequently
RUN set -eux \
echo "Installing system packages" \
&& apt-get update \
&& apt-get install --yes --quiet --no-install-recommends ${RUNTIME_PACKAGES} \
&& echo "Installing pre-built updates" \
&& curl --fail --silent --no-progress-meter --show-error --location --remote-name-all --parallel --parallel-max 4 \
https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/ghostscript_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10-common_${GS_VERSION}.dfsg-1_all.deb \
https://github.com/paperless-ngx/builder/releases/download/jbig2enc-${JBIG2ENC_VERSION}/jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
&& echo "Installing qpdf ${QPDF_VERSION}" \
&& dpkg --install ./libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
&& dpkg --install ./qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
&& echo "Installing Ghostscript ${GS_VERSION}" \
&& dpkg --install ./libgs10-common_${GS_VERSION}.dfsg-1_all.deb \
&& dpkg --install ./libgs10_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
&& dpkg --install ./ghostscript_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
&& echo "Installing jbig2enc" \
&& dpkg --install ./jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
&& echo "Configuring imagemagick" \
&& cp /etc/ImageMagick-6/paperless-policy.xml /etc/ImageMagick-6/policy.xml \
&& echo "Cleaning up image layer" \
&& rm --force --verbose *.deb \
&& rm --recursive --force --verbose /var/lib/apt/lists/*
WORKDIR /usr/src/paperless/src/
# Python dependencies
# Change pretty frequently
COPY --chown=1000:1000 ["pyproject.toml", "uv.lock", "/usr/src/paperless/src/"]
# Packages needed only for building a few quick Python
# dependencies
ARG BUILD_PACKAGES="\
build-essential \
# https://github.com/PyMySQL/mysqlclient#linux
default-libmysqlclient-dev \
pkg-config"
# hadolint ignore=DL3042
RUN --mount=type=cache,target=${UV_CACHE_DIR},id=python-cache \
set -eux \
&& echo "Installing build system packages" \
&& apt-get update \
&& apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
&& echo "Installing Python requirements" \
&& uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt \
&& uv pip install --system --no-python-downloads --python-preference system --requirements requirements.txt \
&& echo "Installing NLTK data" \
&& python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" snowball_data \
&& python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" stopwords \
&& python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" punkt_tab \
&& echo "Cleaning up image" \
&& apt-get --yes purge ${BUILD_PACKAGES} \
&& apt-get --yes autoremove --purge \
&& apt-get clean --yes \
&& rm --recursive --force --verbose *.whl \
&& rm --recursive --force --verbose /var/lib/apt/lists/* \
&& rm --recursive --force --verbose /tmp/* \
&& rm --recursive --force --verbose /var/tmp/* \
&& rm --recursive --force --verbose /var/cache/apt/archives/* \
&& truncate --size 0 /var/log/*log
# copy backend
COPY --chown=1000:1000 ./src ./
# copy frontend
COPY --from=compile-frontend --chown=1000:1000 /src/src/documents/static/frontend/ ./documents/static/frontend/
# add users, setup scripts
# Mount the compiled frontend to expected location
RUN set -eux \
&& sed -i '1s|^#!/usr/bin/env python3|#!/command/with-contenv python3|' manage.py \
&& echo "Setting up user/group" \
&& addgroup --gid 1000 paperless \
&& useradd --uid 1000 --gid paperless --home-dir /usr/src/paperless paperless \
&& echo "Creating volume directories" \
&& mkdir --parents --verbose /usr/src/paperless/data \
&& mkdir --parents --verbose /usr/src/paperless/media \
&& mkdir --parents --verbose /usr/src/paperless/consume \
&& mkdir --parents --verbose /usr/src/paperless/export \
&& echo "Creating gnupg directory" \
&& mkdir -m700 --verbose /usr/src/paperless/.gnupg \
&& echo "Adjusting all permissions" \
&& chown --from root:root --changes --recursive paperless:paperless /usr/src/paperless \
&& echo "Collecting static files" \
&& s6-setuidgid paperless python3 manage.py collectstatic --clear --no-input --link \
&& s6-setuidgid paperless python3 manage.py compilemessages
VOLUME ["/usr/src/paperless/data", \
"/usr/src/paperless/media", \
"/usr/src/paperless/consume", \
"/usr/src/paperless/export"]
ENTRYPOINT ["/init"]
EXPOSE 8000
HEALTHCHECK --interval=30s --timeout=10s --retries=5 CMD [ "curl", "-fs", "-S", "-L", "--max-time", "2", "http://localhost:8000" ]

Pipfile

@@ -0,0 +1,39 @@
[[source]]
url = "https://pypi.python.org/simple"
verify_ssl = true
name = "pypi"
[packages]
django = "<2.1,>=2.0"
pillow = "*"
coveralls = "*"
dateparser = "*"
django-cors-headers = "*"
django-crispy-forms = "*"
django-extensions = "*"
django-filter = "*"
djangorestframework = "*"
factory-boy = "*"
filemagic = "*"
fuzzywuzzy = {extras = ["speedup"], version = "==0.15.0"}
gunicorn = "*"
inotify-simple = "*"
langdetect = "*"
pdftotext = "*"
pyocr = "*"
python-dateutil = "*"
python-dotenv = "*"
python-gnupg = "*"
pytz = "*"
sphinx = "*"
tox = "*"
pycodestyle = "*"
pytest = "*"
pytest-cov = "*"
pytest-django = "*"
pytest-sugar = "*"
pytest-env = "*"
pytest-xdist = "*"
[dev-packages]
ipython = "*"

Pipfile.lock

@@ -0,0 +1,658 @@
{
"_meta": {
"hash": {
"sha256": "3782f7e6b5461c39c8fd0d0048a4622418f247439113bd3cdc91712fd47036f6"
},
"pipfile-spec": 6,
"requires": {},
"sources": [
{
"name": "pypi",
"url": "https://pypi.python.org/simple",
"verify_ssl": true
}
]
},
"default": {
"alabaster": {
"hashes": [
"sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359",
"sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"
],
"version": "==0.7.12"
},
"apipkg": {
"hashes": [
"sha256:37228cda29411948b422fae072f57e31d3396d2ee1c9783775980ee9c9990af6",
"sha256:58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c"
],
"version": "==1.5"
},
"atomicwrites": {
"hashes": [
"sha256:0312ad34fcad8fac3704d441f7b317e50af620823353ec657a53e981f92920c0",
"sha256:ec9ae8adaae229e4f8446952d204a3e4b5fdd2d099f9be3aaf556120135fb3ee"
],
"version": "==1.2.1"
},
"attrs": {
"hashes": [
"sha256:10cbf6e27dbce8c30807caf056c8eb50917e0eaafe86347671b57254006c3e69",
"sha256:ca4be454458f9dec299268d472aaa5a11f67a4ff70093396e1ceae9c76cf4bbb"
],
"version": "==18.2.0"
},
"babel": {
"hashes": [
"sha256:6778d85147d5d85345c14a26aada5e478ab04e39b078b0745ee6870c2b5cf669",
"sha256:8cba50f48c529ca3fa18cf81fa9403be176d374ac4d60738b839122dfaaa3d23"
],
"version": "==2.6.0"
},
"backcall": {
"hashes": [
"sha256:38ecd85be2c1e78f77fd91700c76e14667dc21e2713b63876c0eb901196e01e4",
"sha256:bbbf4b1e5cd2bdb08f915895b51081c041bac22394fdfcfdfbe9f14b77c08bf2"
],
"version": "==0.1.0"
},
"certifi": {
"hashes": [
"sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c",
"sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a"
],
"version": "==2018.10.15"
},
"chardet": {
"hashes": [
"sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
"sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
],
"version": "==3.0.4"
},
"coverage": {
"hashes": [
"sha256:03481e81d558d30d230bc12999e3edffe392d244349a90f4ef9b88425fac74ba",
"sha256:0b136648de27201056c1869a6c0d4e23f464750fd9a9ba9750b8336a244429ed",
"sha256:0bf8cbbd71adfff0ef1f3a1531e6402d13b7b01ac50a79c97ca15f030dba6306",
"sha256:10a46017fef60e16694a30627319f38a2b9b52e90182dddb6e37dcdab0f4bf95",
"sha256:198626739a79b09fa0a2f06e083ffd12eb55449b5f8bfdbeed1df4910b2ca640",
"sha256:23d341cdd4a0371820eb2b0bd6b88f5003a7438bbedb33688cd33b8eae59affd",
"sha256:28b2191e7283f4f3568962e373b47ef7f0392993bb6660d079c62bd50fe9d162",
"sha256:2a5b73210bad5279ddb558d9a2bfedc7f4bf6ad7f3c988641d83c40293deaec1",
"sha256:2eb564bbf7816a9d68dd3369a510be3327f1c618d2357fa6b1216994c2e3d508",
"sha256:337ded681dd2ef9ca04ef5d93cfc87e52e09db2594c296b4a0a3662cb1b41249",
"sha256:3a2184c6d797a125dca8367878d3b9a178b6fdd05fdc2d35d758c3006a1cd694",
"sha256:3c79a6f7b95751cdebcd9037e4d06f8d5a9b60e4ed0cd231342aa8ad7124882a",
"sha256:3d72c20bd105022d29b14a7d628462ebdc61de2f303322c0212a054352f3b287",
"sha256:3eb42bf89a6be7deb64116dd1cc4b08171734d721e7a7e57ad64cc4ef29ed2f1",
"sha256:4635a184d0bbe537aa185a34193898eee409332a8ccb27eea36f262566585000",
"sha256:56e448f051a201c5ebbaa86a5efd0ca90d327204d8b059ab25ad0f35fbfd79f1",
"sha256:5a13ea7911ff5e1796b6d5e4fbbf6952381a611209b736d48e675c2756f3f74e",
"sha256:69bf008a06b76619d3c3f3b1983f5145c75a305a0fea513aca094cae5c40a8f5",
"sha256:6bc583dc18d5979dc0f6cec26a8603129de0304d5ae1f17e57a12834e7235062",
"sha256:701cd6093d63e6b8ad7009d8a92425428bc4d6e7ab8d75efbb665c806c1d79ba",
"sha256:7608a3dd5d73cb06c531b8925e0ef8d3de31fed2544a7de6c63960a1e73ea4bc",
"sha256:76ecd006d1d8f739430ec50cc872889af1f9c1b6b8f48e29941814b09b0fd3cc",
"sha256:7aa36d2b844a3e4a4b356708d79fd2c260281a7390d678a10b91ca595ddc9e99",
"sha256:7d3f553904b0c5c016d1dad058a7554c7ac4c91a789fca496e7d8347ad040653",
"sha256:7e1fe19bd6dce69d9fd159d8e4a80a8f52101380d5d3a4d374b6d3eae0e5de9c",
"sha256:8c3cb8c35ec4d9506979b4cf90ee9918bc2e49f84189d9bf5c36c0c1119c6558",
"sha256:9d6dd10d49e01571bf6e147d3b505141ffc093a06756c60b053a859cb2128b1f",
"sha256:be6cfcd8053d13f5f5eeb284aa8a814220c3da1b0078fa859011c7fffd86dab9",
"sha256:c1bb572fab8208c400adaf06a8133ac0712179a334c09224fb11393e920abcdd",
"sha256:de4418dadaa1c01d497e539210cb6baa015965526ff5afc078c57ca69160108d",
"sha256:e05cb4d9aad6233d67e0541caa7e511fa4047ed7750ec2510d466e806e0255d6",
"sha256:f05a636b4564104120111800021a92e43397bc12a5c72fed7036be8556e0029e",
"sha256:f3f501f345f24383c0000395b26b726e46758b71393267aeae0bd36f8b3ade80"
],
"version": "==4.5.1"
},
"coveralls": {
"hashes": [
"sha256:ab638e88d38916a6cedbf80a9cd8992d5fa55c77ab755e262e00b36792b7cd6d",
"sha256:b2388747e2529fa4c669fb1e3e2756e4e07b6ee56c7d9fce05f35ccccc913aa0"
],
"index": "pypi",
"version": "==1.5.1"
},
"dateparser": {
"hashes": [
"sha256:940828183c937bcec530753211b70f673c0a9aab831e43273489b310538dff86",
"sha256:b452ef8b36cd78ae86a50721794bc674aa3994e19b570f7ba92810f4e0a2ae03"
],
"index": "pypi",
"version": "==0.7.0"
},
"decorator": {
"hashes": [
"sha256:2c51dff8ef3c447388fe5e4453d24a2bf128d3a4c32af3fabef1f01c6851ab82",
"sha256:c39efa13fbdeb4506c476c9b3babf6a718da943dab7811c206005a4a956c080c"
],
"version": "==4.3.0"
},
"django": {
"hashes": [
"sha256:25df265e1fdb74f7e7305a1de620a84681bcc9c05e84a3ed97e4a1a63024f18d",
"sha256:d6d94554abc82ca37e447c3d28958f5ac39bd7d4adaa285543ae97fb1129fd69"
],
"index": "pypi",
"version": "==2.0.9"
},
"django-cors-headers": {
"hashes": [
"sha256:5545009c9b233ea7e70da7dbab7cb1c12afa01279895086f98ec243d7eab46fa",
"sha256:c4c2ee97139d18541a1be7d96fe337d1694623816d83f53cb7c00da9b94acae1"
],
"index": "pypi",
"version": "==2.4.0"
},
"django-crispy-forms": {
"hashes": [
"sha256:5952bab971110d0b86c278132dae0aa095beee8f723e625c3d3fa28888f1675f",
"sha256:705ededc554ad8736157c666681165fe22ead2dec0d5446d65fc9dd976a5a876"
],
"index": "pypi",
"version": "==1.7.2"
},
"django-extensions": {
"hashes": [
"sha256:30cb6a8c7d6f75a55edf0c0c4491bd98f8264ae1616ce105f9cecac4387edd07",
"sha256:4ad86a7a5e84f1c77db030761ae87a600647250c652030a2b71a16e87f3a3d62"
],
"index": "pypi",
"version": "==2.1.3"
},
"django-filter": {
"hashes": [
"sha256:6f4e4bc1a11151178520567b50320e5c32f8edb552139d93ea3e30613b886f56",
"sha256:86c3925020c27d072cdae7b828aaa5d165c2032a629abbe3c3a1be1edae61c58"
],
"index": "pypi",
"version": "==2.0.0"
},
"djangorestframework": {
"hashes": [
"sha256:607865b0bb1598b153793892101d881466bd5a991de12bd6229abb18b1c86136",
"sha256:63f76cbe1e7d12b94c357d7e54401103b2e52aef0f7c1650d6c820ad708776e5"
],
"index": "pypi",
"version": "==3.9.0"
},
"docopt": {
"hashes": [
"sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"
],
"version": "==0.6.2"
},
"docutils": {
"hashes": [
"sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6",
"sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274",
"sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6"
],
"version": "==0.14"
},
"execnet": {
"hashes": [
"sha256:a7a84d5fa07a089186a329528f127c9d73b9de57f1a1131b82bb5320ee651f6a",
"sha256:fc155a6b553c66c838d1a22dba1dc9f5f505c43285a878c6f74a79c024750b83"
],
"version": "==1.5.0"
},
"factory-boy": {
"hashes": [
"sha256:6f25cc4761ac109efd503f096e2ad99421b1159f01a29dbb917359dcd68e08ca",
"sha256:d552cb872b310ae78bd7429bf318e42e1e903b1a109e899a523293dfa762ea4f"
],
"index": "pypi",
"version": "==2.11.1"
},
"faker": {
"hashes": [
"sha256:2621643b80a10b91999925cfd20f64d2b36f20bf22136bbdc749bb57d6ffe124",
"sha256:5ed822d31bd2d6edf10944d176d30dc9c886afdd381eefb7ba8b7aad86171646"
],
"version": "==0.9.2"
},
"filelock": {
"hashes": [
"sha256:b8d5ca5ca1c815e1574aee746650ea7301de63d87935b3463d26368b76e31633",
"sha256:d610c1bb404daf85976d7a82eb2ada120f04671007266b708606565dd03b5be6"
],
"version": "==3.0.10"
},
"filemagic": {
"hashes": [
"sha256:e684359ef40820fe406f0ebc5bf8a78f89717bdb7fed688af68082d991d6dbf3"
],
"index": "pypi",
"version": "==1.6"
},
"fuzzywuzzy": {
"extras": [
"speedup"
],
"hashes": [
"sha256:3759bc6859daa0eecef8c82b45404bdac20c23f23136cf4c18b46b426bbc418f",
"sha256:5b36957ccf836e700f4468324fa80ba208990385392e217be077d5cd738ae602"
],
"index": "pypi",
"version": "==0.15.0"
},
"gunicorn": {
"hashes": [
"sha256:aa8e0b40b4157b36a5df5e599f45c9c76d6af43845ba3b3b0efe2c70473c2471",
"sha256:fa2662097c66f920f53f70621c6c58ca4a3c4d3434205e608e121b5b3b71f4f3"
],
"index": "pypi",
"version": "==19.9.0"
},
"idna": {
"hashes": [
"sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e",
"sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16"
],
"version": "==2.7"
},
"imagesize": {
"hashes": [
"sha256:3f349de3eb99145973fefb7dbe38554414e5c30abd0c8e4b970a7c9d09f3a1d8",
"sha256:f3832918bc3c66617f92e35f5d70729187676313caa60c187eb0f28b8fe5e3b5"
],
"version": "==1.1.0"
},
"inotify-simple": {
"hashes": [
"sha256:fc2c10dd73278a1027d0663f2db51240af5946390f363a154361406ebdddd8dd"
],
"index": "pypi",
"version": "==1.1.8"
},
"ipython": {
"hashes": [
"sha256:a5781d6934a3341a1f9acb4ea5acdc7ea0a0855e689dbe755d070ca51e995435",
"sha256:b10a7ddd03657c761fc503495bc36471c8158e3fc948573fb9fe82a7029d8efd"
],
"index": "pypi",
"version": "==7.1.1"
},
"ipython-genutils": {
"hashes": [
"sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8",
"sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"
],
"version": "==0.2.0"
},
"jedi": {
"hashes": [
"sha256:0191c447165f798e6a730285f2eee783fff81b0d3df261945ecb80983b5c3ca7",
"sha256:b7493f73a2febe0dc33d51c99b474547f7f6c0b2c8fb2b21f453eef204c12148"
],
"version": "==0.13.1"
},
"jinja2": {
"hashes": [
"sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd",
"sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4"
],
"version": "==2.10"
},
"langdetect": {
"hashes": [
"sha256:91a170d5f0ade380db809b3ba67f08e95fe6c6c8641f96d67a51ff7e98a9bf30"
],
"index": "pypi",
"version": "==1.0.7"
},
"markupsafe": {
"hashes": [
"sha256:a6be69091dac236ea9c6bc7d012beab42010fa914c459791d627dad4910eb665"
],
"version": "==1.0"
},
"more-itertools": {
"hashes": [
"sha256:c187a73da93e7a8acc0001572aebc7e3c69daf7bf6881a2cea10650bd4420092",
"sha256:c476b5d3a34e12d40130bc2f935028b5f636df8f372dc2c1c01dc19681b2039e",
"sha256:fcbfeaea0be121980e15bc97b3817b5202ca73d0eae185b4550cbfce2a3ebb3d"
],
"version": "==4.3.0"
},
"packaging": {
"hashes": [
"sha256:0886227f54515e592aaa2e5a553332c73962917f2831f1b0f9b9f4380a4b9807",
"sha256:f95a1e147590f204328170981833854229bb2912ac3d5f89e2a8ccd2834800c9"
],
"version": "==18.0"
},
"parso": {
"hashes": [
"sha256:35704a43a3c113cce4de228ddb39aab374b8004f4f2407d070b6a2ca784ce8a2",
"sha256:895c63e93b94ac1e1690f5fdd40b65f07c8171e3e53cbd7793b5b96c0e0a7f24"
],
"version": "==0.3.1"
},
"pdftotext": {
"hashes": [
"sha256:e3ad11efe0aa22cbfc46aa1296b2ea5a52ad208b778288311f2801adef178ccb"
],
"index": "pypi",
"version": "==2.1.1"
},
"pexpect": {
"hashes": [
"sha256:2a8e88259839571d1251d278476f3eec5db26deb73a70be5ed5dc5435e418aba",
"sha256:3fbd41d4caf27fa4a377bfd16fef87271099463e6fa73e92a52f92dfee5d425b"
],
"markers": "sys_platform != 'win32'",
"version": "==4.6.0"
},
"pickleshare": {
"hashes": [
"sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca",
"sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"
],
"version": "==0.7.5"
},
"pillow": {
"hashes": [
"sha256:00203f406818c3f45d47bb8fe7e67d3feddb8dcbbd45a289a1de7dd789226360",
"sha256:0616f800f348664e694dddb0b0c88d26761dd5e9f34e1ed7b7a7d2da14b40cb7",
"sha256:1f7908aab90c92ad85af9d2fec5fc79456a89b3adcc26314d2cde0e238bd789e",
"sha256:2ea3517cd5779843de8a759c2349a3cd8d3893e03ab47053b66d5ec6f8bc4f93",
"sha256:48a9f0538c91fc136b3a576bee0e7cd174773dc9920b310c21dcb5519722e82c",
"sha256:5280ebc42641a1283b7b1f2c20e5b936692198b9dd9995527c18b794850be1a8",
"sha256:5e34e4b5764af65551647f5cc67cf5198c1d05621781d5173b342e5e55bf023b",
"sha256:63b120421ab85cad909792583f83b6ca3584610c2fe70751e23f606a3c2e87f0",
"sha256:696b5e0109fe368d0057f484e2e91717b49a03f1e310f857f133a4acec9f91dd",
"sha256:870ed021a42b1b02b5fe4a739ea735f671a84128c0a666c705db2cb9abd528eb",
"sha256:916da1c19e4012d06a372127d7140dae894806fad67ef44330e5600d77833581",
"sha256:9303a289fa0811e1c6abd9ddebfc770556d7c3311cb2b32eff72164ddc49bc64",
"sha256:9577888ecc0ad7d06c3746afaba339c94d62b59da16f7a5d1cff9e491f23dace",
"sha256:987e1c94a33c93d9b209315bfda9faa54b8edfce6438a1e93ae866ba20de5956",
"sha256:99a3bbdbb844f4fb5d6dd59fac836a40749781c1fa63c563bc216c27aef63f60",
"sha256:99db8dc3097ceafbcff9cb2bff384b974795edeb11d167d391a02c7bfeeb6e16",
"sha256:a5a96cf49eb580756a44ecf12949e52f211e20bffbf5a95760ac14b1e499cd37",
"sha256:aa6ca3eb56704cdc0d876fc6047ffd5ee960caad52452fbee0f99908a141a0ae",
"sha256:aade5e66795c94e4a2b2624affeea8979648d1b0ae3fcee17e74e2c647fc4a8a",
"sha256:b78905860336c1d292409e3df6ad39cc1f1c7f0964e66844bbc2ebfca434d073",
"sha256:b92f521cdc4e4a3041cc343625b699f20b0b5f976793fb45681aac1efda565f8",
"sha256:bfde84bbd6ae5f782206d454b67b7ee8f7f818c29b99fd02bf022fd33bab14cb",
"sha256:c2b62d3df80e694c0e4a0ed47754c9480521e25642251b3ab1dff050a4e60409",
"sha256:c5e2be6c263b64f6f7656e23e18a4a9980cffc671442795682e8c4e4f815dd9f",
"sha256:c99aa3c63104e0818ec566f8ff3942fb7c7a8f35f9912cb63fd8e12318b214b2",
"sha256:dae06620d3978da346375ebf88b9e2dd7d151335ba668c995aea9ed07af7add4",
"sha256:db5499d0710823fa4fb88206050d46544e8f0e0136a9a5f5570b026584c8fd74",
"sha256:f36baafd82119c4a114b9518202f2a983819101dcc14b26e43fc12cbefdce00e",
"sha256:f52b79c8796d81391ab295b04e520bda6feed54d54931708872e8f9ae9db0ea1",
"sha256:ff8cff01582fa1a7e533cb97f628531c4014af4b5f38e33cdcfe5eec29b6d888"
],
"index": "pypi",
"version": "==5.3.0"
},
"pluggy": {
"hashes": [
"sha256:447ba94990e8014ee25ec853339faf7b0fc8050cdc3289d4d71f7f410fb90095",
"sha256:bde19360a8ec4dfd8a20dcb811780a30998101f078fc7ded6162f0076f50508f"
],
"version": "==0.8.0"
},
"prompt-toolkit": {
"hashes": [
"sha256:c1d6aff5252ab2ef391c2fe498ed8c088066f66bc64a8d5c095bbf795d9fec34",
"sha256:d4c47f79b635a0e70b84fdb97ebd9a274203706b1ee5ed44c10da62755cf3ec9",
"sha256:fd17048d8335c1e6d5ee403c3569953ba3eb8555d710bfc548faf0712666ea39"
],
"version": "==2.0.7"
},
"ptyprocess": {
"hashes": [
"sha256:923f299cc5ad920c68f2bc0bc98b75b9f838b93b599941a6b63ddbc2476394c0",
"sha256:d7cc528d76e76342423ca640335bd3633420dc1366f258cb31d05e865ef5ca1f"
],
"version": "==0.6.0"
},
"py": {
"hashes": [
"sha256:bf92637198836372b520efcba9e020c330123be8ce527e535d185ed4b6f45694",
"sha256:e76826342cefe3c3d5f7e8ee4316b80d1dd8a300781612ddbc765c17ba25a6c6"
],
"version": "==1.7.0"
},
"pycodestyle": {
"hashes": [
"sha256:cbc619d09254895b0d12c2c691e237b2e91e9b2ecf5e84c26b35400f93dcfb83",
"sha256:cbfca99bd594a10f674d0cd97a3d802a1fdef635d4361e1a2658de47ed261e3a"
],
"index": "pypi",
"version": "==2.4.0"
},
"pygments": {
"hashes": [
"sha256:78f3f434bcc5d6ee09020f92ba487f95ba50f1e3ef83ae96b9d5ffa1bab25c5d",
"sha256:dbae1046def0efb574852fab9e90209b23f556367b5a320c0bcb871c77c3e8cc"
],
"version": "==2.2.0"
},
"pyocr": {
"hashes": [
"sha256:b6ba6263fd92da56627dff6d263d991a2246aacd117d1788f11b93f419ca395f"
],
"index": "pypi",
"version": "==0.5.3"
},
"pyparsing": {
"hashes": [
"sha256:40856e74d4987de5d01761a22d1621ae1c7f8774585acae358aa5c5936c6c90b",
"sha256:f353aab21fd474459d97b709e527b5571314ee5f067441dc9f88e33eecd96592"
],
"version": "==2.3.0"
},
"pytest": {
"hashes": [
"sha256:a9e5e8d7ab9d5b0747f37740276eb362e6a76275d76cebbb52c6049d93b475db",
"sha256:bf47e8ed20d03764f963f0070ff1c8fda6e2671fc5dd562a4d3b7148ad60f5ca"
],
"index": "pypi",
"version": "==3.9.3"
},
"pytest-cov": {
"hashes": [
"sha256:513c425e931a0344944f84ea47f3956be0e416d95acbd897a44970c8d926d5d7",
"sha256:e360f048b7dae3f2f2a9a4d067b2dd6b6a015d384d1577c994a43f3f7cbad762"
],
"index": "pypi",
"version": "==2.6.0"
},
"pytest-django": {
"hashes": [
"sha256:49e9ffc856bc6a1bec1c26c5c7b7213dff7cc8bc6b64d624c4d143d04aff0bcf",
"sha256:b379282feaf89069cb790775ab6bbbd2bd2038a68c7ef9b84a41898e0b551081"
],
"index": "pypi",
"version": "==3.4.3"
},
"pytest-env": {
"hashes": [
"sha256:7e94956aef7f2764f3c147d216ce066bf6c42948bb9e293169b1b1c880a580c2"
],
"index": "pypi",
"version": "==0.6.2"
},
"pytest-forked": {
"hashes": [
"sha256:e4500cd0509ec4a26535f7d4112a8cc0f17d3a41c29ffd4eab479d2a55b30805",
"sha256:f275cb48a73fc61a6710726348e1da6d68a978f0ec0c54ece5a5fae5977e5a08"
],
"version": "==0.2"
},
"pytest-sugar": {
"hashes": [
"sha256:ab8cc42faf121344a4e9b13f39a51257f26f410e416c52ea11078cdd00d98a2c"
],
"index": "pypi",
"version": "==0.9.1"
},
"pytest-xdist": {
"hashes": [
"sha256:3bc9dcb6ff47e607d3c710727cd9996fd7ac1466d405c3b40bb495da99b6b669",
"sha256:8e188d13ce6614c7a678179a76f46231199ffdfe6163de031c17e62ffa256917"
],
"index": "pypi",
"version": "==1.24.0"
},
"python-dateutil": {
"hashes": [
"sha256:063df5763652e21de43de7d9e00ccf239f953a832941e37be541614732cdfc93",
"sha256:88f9287c0174266bb0d8cedd395cfba9c58e87e5ad86b2ce58859bc11be3cf02"
],
"index": "pypi",
"version": "==2.7.5"
},
"python-dotenv": {
"hashes": [
"sha256:122290a38ece9fe4f162dc7c95cae3357b983505830a154d3c98ef7f6c6cea77",
"sha256:4a205787bc829233de2a823aa328e44fd9996fedb954989a21f1fc67c13d7a77"
],
"index": "pypi",
"version": "==0.9.1"
},
"python-gnupg": {
"hashes": [
"sha256:2d158dfc6b54927752b945ebe57e6a0c45da27747fa3b9ae66eccc0d2147ac0d",
"sha256:faa69bab58ed0936f0ccf96c99b92369b7a1819305d37dfe5c927d21a437a09d"
],
"index": "pypi",
"version": "==0.4.3"
},
"python-levenshtein": {
"hashes": [
"sha256:033a11de5e3d19ea25c9302d11224e1a1898fe5abd23c61c7c360c25195e3eb1"
],
"markers": "extra == 'speedup'",
"version": "==0.12.0"
},
"pytz": {
"hashes": [
"sha256:31cb35c89bd7d333cd32c5f278fca91b523b0834369e757f4c5641ea252236ca",
"sha256:8e0f8568c118d3077b46be7d654cc8167fa916092e28320cde048e54bfc9f1e6"
],
"index": "pypi",
"version": "==2018.7"
},
"regex": {
"hashes": [
"sha256:0ef96690c3d2294155b7d44187ca4a151e45c931cb768e106ba464a9fa64c5da",
"sha256:251683e01a3bcacd9188acf0d4caf7b29a3b963c843159311825613ae144cddb",
"sha256:3fe15a75fe00f04d1ec16713d55cf1e206077c450267a10b33318756fb8b3f99",
"sha256:53a962f9dc28cdf403978a142cb1e054479759ad64d312a999f9f042c25b5c9a",
"sha256:8bd1da6a93d32336a5e5432886dd8543004f0591c39b83dbfa60705cccdf414d",
"sha256:b5423061918f602e9342b54d746ac31c598d328ecaf4ef0618763e960c926fd4",
"sha256:d80ebc65b1f7d0403117f59309c16eac24be6a0bc730b593a79f703462858d94",
"sha256:fd8419979639b7de7fb964a13bce3ac47e6fe33043b83de0398c3067986e5659",
"sha256:ff2f15b2b0b4b58ba8a1de651780a0d3fd54f96ad6b77dceb77695220e5d7b7a"
],
"version": "==2018.11.2"
},
"requests": {
"hashes": [
"sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c",
"sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279"
],
"version": "==2.20.0"
},
"six": {
"hashes": [
"sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9",
"sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb"
],
"version": "==1.11.0"
},
"snowballstemmer": {
"hashes": [
"sha256:919f26a68b2c17a7634da993d91339e288964f93c274f1343e3bbbe2096e1128",
"sha256:9f3bcd3c401c3e862ec0ebe6d2c069ebc012ce142cce209c098ccb5b09136e89"
],
"version": "==1.2.1"
},
"sphinx": {
"hashes": [
"sha256:652eb8c566f18823a022bb4b6dbc868d366df332a11a0226b5bc3a798a479f17",
"sha256:d222626d8356de702431e813a05c68a35967e3d66c6cd1c2c89539bb179a7464"
],
"index": "pypi",
"version": "==1.8.1"
},
"sphinxcontrib-websupport": {
"hashes": [
"sha256:68ca7ff70785cbe1e7bccc71a48b5b6d965d79ca50629606c7861a21b206d9dd",
"sha256:9de47f375baf1ea07cdb3436ff39d7a9c76042c10a769c52353ec46e4e8fc3b9"
],
"version": "==1.1.0"
},
"termcolor": {
"hashes": [
"sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b"
],
"version": "==1.1.0"
},
"text-unidecode": {
"hashes": [
"sha256:5a1375bb2ba7968740508ae38d92e1f889a0832913cb1c447d5e2046061a396d",
"sha256:801e38bd550b943563660a91de8d4b6fa5df60a542be9093f7abf819f86050cc"
],
"version": "==1.2"
},
"toml": {
"hashes": [
"sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c",
"sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e"
],
"version": "==0.10.0"
},
"tox": {
"hashes": [
"sha256:513e32fdf2f9e2d583c2f248f47ba9886428c949f068ac54a0469cac55df5862",
"sha256:75fa30e8329b41b664585f5fb837e23ce1d7e6fa1f7811f2be571c990f9d911b"
],
"index": "pypi",
"version": "==3.5.3"
},
"traitlets": {
"hashes": [
"sha256:9c4bd2d267b7153df9152698efb1050a5d84982d3384a37b2c1f7723ba3e7835",
"sha256:c6cb5e6f57c5a9bdaa40fa71ce7b4af30298fbab9ece9815b5d995ab6217c7d9"
],
"version": "==4.3.2"
},
"tzlocal": {
"hashes": [
"sha256:4ebeb848845ac898da6519b9b31879cf13b6626f7184c496037b818e238f2c4e"
],
"version": "==1.5.1"
},
"urllib3": {
"hashes": [
"sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39",
"sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"
],
"version": "==1.24.1"
},
"virtualenv": {
"hashes": [
"sha256:686176c23a538ecc56d27ed9d5217abd34644823d6391cbeb232f42bf722baad",
"sha256:f899fafcd92e1150f40c8215328be38ff24b519cd95357fa6e78e006c7638208"
],
"version": "==16.1.0"
},
"wcwidth": {
"hashes": [
"sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
"sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
],
"version": "==0.1.7"
}
},
"develop": {}
}
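
The lock file above pins every dependency to an exact version and a set of sha256 hashes. A minimal sketch of how such a lock file is typically consumed (assuming Pipenv is installed and the command is run from the directory containing the accompanying Pipfile):

```bash
# Sketch: install the exact, hash-verified dependency set from Pipfile.lock
# (assumes Pipenv is available and the accompanying Pipfile/Pipfile.lock are present)
pip install --user pipenv
pipenv sync          # installs only what Pipfile.lock specifies, verifying the hashes
pipenv run pytest    # run the test suite inside the resulting virtualenv
```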

84
README-de.md Normal file
View File

@@ -0,0 +1,84 @@
*[English](README.md)*<br/>
*[Greek](README-el.md)*
# Paperless
[![Documentation](https://readthedocs.org/projects/paperless/badge/?version=latest)](https://paperless.readthedocs.org/) [![Chat](https://badges.gitter.im/danielquinn/paperless.svg)](https://gitter.im/danielquinn/paperless) [![Travis](https://travis-ci.org/danielquinn/paperless.svg?branch=master)](https://travis-ci.org/danielquinn/paperless) [![Coverage Status](https://coveralls.io/repos/github/danielquinn/paperless/badge.svg?branch=master)](https://coveralls.io/github/danielquinn/paperless?branch=master) [![Thanks](https://img.shields.io/badge/THANKS-md-ff69b4.svg)](https://github.com/danielquinn/paperless/blob/master/THANKS.md)
Index and archive all of your scanned paper documents
I hate paper. Environmental issues aside, it's a tech person's nightmare:
* There's no search feature
* It takes up physical space
* Backups mean more paper
In the past few months I've run into the problem of not having the right document at hand more than once. Sometimes I threw away documents I still needed (who keeps water bills for two years?), other times I simply lost them... because paper. I wrote this to make my life easier.
## How it Works
Paperless does not control your scanner, it only helps you deal with what your scanner produces.
1. Buy a document scanner that can write to a place on your network. If you need some inspiration, have a look at the [scanner recommendations](https://paperless.readthedocs.io/en/latest/scanners.html).
2. Set it up to "scan to FTP" or something similar. It should be possible to upload scanned images to a server without you having to do anything. Of course, you can also upload the scanned file manually if your scanner doesn't support automatic uploads. Paperless doesn't care how the documents get into its local consumption directory.
3. Have the target server run the Paperless consumption script to OCR the file and index it into a local database.
4. Use the web frontend to sift through the database and find what you're looking for.
5. Download the PDF you need/want via the web interface and do whatever you like with it. You can even print it and send it as if it were the original. In most cases, no one will care or notice.
Here's what you get:
![The before and after](https://raw.githubusercontent.com/danielquinn/paperless/master/docs/_static/screenshot.png)
## Documentation
It's all available on [ReadTheDocs](https://paperless.readthedocs.org/).
## Requirements
This is all really a quite simple, shiny, user-friendly wrapper around some very powerful tools.
* [ImageMagick](http://imagemagick.org/) converts the images between colour and greyscale.
* [Tesseract](https://github.com/tesseract-ocr) does the character recognition.
* [Unpaper](https://www.flameeyes.eu/projects/unpaper) despeckles and deskews the scanned image.
* [GNU Privacy Guard](https://gnupg.org/) is used as the encryption backend.
* [Python 3](https://python.org/) is the language of the project.
* [Pillow](https://pypi.python.org/pypi/pillowfight/) loads the image data as a Python object to be used with PyOCR.
* [PyOCR](https://github.com/jflesch/pyocr) is a slick programmatic wrapper around Tesseract.
* [Django](https://www.djangoproject.com/) is the framework this project is built on.
* [Python-GNUPG](http://pythonhosted.org/python-gnupg/) decrypts the PDFs on the fly so you can download unencrypted files, while the encrypted ones stay on disk.
## Project Status
This project was started around 2015 and there are lots of people using it. For some reason it's really popular in Germany -- maybe someone over there can clue me in as to why?
I am no longer developing new features for Paperless because it does exactly what I need and my attention has turned to my latest project, [Aletheia](https://github.com/danielquinn/aletheia). I'm not abandoning the project, though. I'm happy to review pull requests and answer questions in the issue queue. If you're a developer and want a new feature, file it in the issues and/or send a PR! I'm happy to add new things, but I simply don't have the time to build them myself.
## Affiliated Projects
Paperless has been around a while now, and people have started building things around it. If you're one of those people, you can add your project to this list:
* [Paperless Desktop](https://github.com/thomasbrueggemann/paperless-desktop): A desktop UI for your Paperless installation. Runs on Mac, Linux and Windows.
* [ansible-role-paperless](https://github.com/ovv/ansible-role-paperless): An easy way to get Paperless running via Ansible.
## Similar Projects
There's also the [Mayan EDMS](https://mayan.readthedocs.org/en/latest/) project out there, which surprisingly has a great deal of technical overlap with Paperless. Mayan EDMS is *much* more featureful and also comes with a slick UI, but it's still on Python 2; like Paperless, it's based on Django and uses a consumer model with Tesseract and Unpaper. It may be that Paperless consumes fewer resources, but to be honest, I haven't tested that myself. One thing is clear though: *Paperless* is a **much** better name.
## Important Note
Document scanners are typically used to scan sensitive documents. Things like your social insurance number, tax records, invoices, etc. While Paperless encrypts the original files via the consumption script, the OCR'd text is *not* encrypted and is therefore stored in the clear (it needs to be searchable, so if someone has an idea of how to do that with encrypted data, I'm all ears). This means that Paperless should never be run on an untrusted host. Instead, I recommend that if you do want to use it, you run it locally on a server in your own home.
## Donations
As with all Free Software, the power lies less in the finances and more in the collective effort. I really appreciate every pull request and bug report offered up by Paperless' users, so please keep that coming. If, however, you're not one for coding/design/documentation and you really want to support me financially, I won't say no ;-)
The thing is, I'm doing OK for money, so I would instead ask you to donate to the [United Nations High Commissioner for Refugees](https://donate.unhcr.org/int-en/general). They're doing important work and need the money a lot more than I do.

81
README-el.md Normal file
View File

@@ -0,0 +1,81 @@
*[English](README.md)*<br/>
*[German](README-de.md)*
# Paperless
[![Documentation](https://readthedocs.org/projects/paperless/badge/?version=latest)](https://paperless.readthedocs.org/) [![Chat](https://badges.gitter.im/danielquinn/paperless.svg)](https://gitter.im/danielquinn/paperless) [![Travis](https://travis-ci.org/danielquinn/paperless.svg?branch=master)](https://travis-ci.org/danielquinn/paperless) [![Coverage Status](https://coveralls.io/repos/github/danielquinn/paperless/badge.svg?branch=master)](https://coveralls.io/github/danielquinn/paperless?branch=master) [![Thanks](https://img.shields.io/badge/THANKS-md-ff69b4.svg)](https://github.com/danielquinn/paperless/blob/master/THANKS.md)
Index and archive all of your scanned documents
I hate paper. Environmental issues aside, it's a tech person's nightmare.
* There's no way to search it
* It takes up a lot of space
* Backups mean more paper
Over the past few months I've found myself unable to locate the right document several times. Sometimes I had recycled the document I needed (who keeps water bills for 2 years?) and sometimes I simply lost it... because that's how paper is. I made this to make my life easier.
## How it works
Paperless doesn't control your scanner, it helps you deal with what your scanner produces.
1. Buy a scanner that can reach your network. If you need inspiration, see the [scanner recommendations](https://paperless.readthedocs.io/en/latest/scanners.html) page.
2. Set up "scan to FTP" or something similar. The scanner should be able to store the scanned images on a server without you having to do anything. Of course, if your scanner can't store your images somewhere automatically, you can always do it manually. Paperless doesn't care how the files end up there.
3. Have the server running the Paperless OCR script index the files into the local database.
4. Use the web frontend to search the database and find what you want.
5. Download the PDF you want/need through the web interface and do whatever you like with it. You can even print it and send it, as if it were the original. In most cases no one will notice or care.
Here's what you get:
![The before and after](https://raw.githubusercontent.com/danielquinn/paperless/master/docs/_static/screenshot.png)
## Documentation
It's all available on [ReadTheDocs](https://paperless.readthedocs.org/).
## Requirements
All of this is really quite simple and user-friendly: a collection of very powerful tools.
* [ImageMagick](http://imagemagick.org/) converts the images between colour and greyscale.
* [Tesseract](https://github.com/tesseract-ocr) does the character recognition.
* [Unpaper](https://www.flameeyes.eu/projects/unpaper) despeckles and deskews the scanned image.
* [GNU Privacy Guard](https://gnupg.org/) is used as the encryption backend.
* [Python 3](https://python.org/) is the language of the project.
* [Pillow](https://pypi.python.org/pypi/pillowfight/) loads the image data as a Python object so it can be used with PyOCR.
* [PyOCR](https://github.com/jflesch/pyocr) is a slick programmatic wrapper around tesseract.
* [Django](https://www.djangoproject.com/) is the framework the project is built with.
* [Python-GNUPG](http://pythonhosted.org/python-gnupg/) decrypts the PDF files on the fly so you download decrypted files, leaving the encrypted ones on disk.
## Stability
This project has existed since 2015 and quite a few people use it; nevertheless it is under constant development (just look at when commits were made in the git history), so don't expect it to be 100% stable. You can back up the sqlite3 database, the media folder and your configuration file to stay safe.
## Affiliated Projects
Paperless has been around for a while now and people have started building things around it. If you're one of those people, we can add your project to this list:
* [Paperless Desktop](https://github.com/thomasbrueggemann/paperless-desktop): A desktop UI for your Paperless installation. Runs on Mac, Linux, and Windows.
* [ansible-role-paperless](https://github.com/ovv/ansible-role-paperless): An easy way to get Paperless running via Ansible.
## Similar Projects
There is another project out there called [Mayan EDMS](https://mayan.readthedocs.org/en/latest/) that has a surprising amount of technical overlap with Paperless. Also based on Django and using a consumer model with Tesseract and Unpaper, Mayan EDMS has *many* more features and comes with a slick UI, but it is still on Python 2. It may be that Paperless consumes fewer resources, but to be honest, that's a guess I haven't verified myself. One thing is certain: *Paperless* has a **much** better name.
## Important Note
Document scanners are typically used for sensitive documents. Things like your social security number, tax records, invoices, etc. While Paperless encrypts the original files via the consumption script, the OCR'd text is *not* encrypted and is therefore stored in the clear (it needs to be searchable, so if someone knows how to do that with encrypted data, I'm all ears). This means that Paperless should never run on an untrusted host. For that reason, I recommend that if you want to run it, you run it on a local server in your own home.
## Donations
As with all free software, the power lies not in the finances but in the collective effort. I truly appreciate every pull request and bug report offered up by Paperless' users, so please keep them coming. If, however, you can't contribute code/design/documentation and you'd like to contribute financially, I won't say no ;-)
The thing is, I'm doing OK financially, so I'd ask you to donate your money to the [United Nations High Commissioner for Refugees](https://donate.unhcr.org/int-en/general). They do important work and they need the money a lot more than I do.

118
README.md
View File

@@ -1,103 +1,83 @@
[![ci](https://github.com/paperless-ngx/paperless-ngx/workflows/ci/badge.svg)](https://github.com/paperless-ngx/paperless-ngx/actions)
[![Crowdin](https://badges.crowdin.net/paperless-ngx/localized.svg)](https://crowdin.com/project/paperless-ngx)
[![Documentation Status](https://img.shields.io/github/deployments/paperless-ngx/paperless-ngx/github-pages?label=docs)](https://docs.paperless-ngx.com)
[![codecov](https://codecov.io/gh/paperless-ngx/paperless-ngx/branch/main/graph/badge.svg?token=VK6OUPJ3TY)](https://codecov.io/gh/paperless-ngx/paperless-ngx)
[![Chat on Matrix](https://matrix.to/img/matrix-badge.svg)](https://matrix.to/#/%23paperlessngx%3Amatrix.org)
[![demo](https://cronitor.io/badges/ve7ItY/production/W5E_B9jkelG9ZbDiNHUPQEVH3MY.svg)](https://demo.paperless-ngx.com)
*[German](README-de.md)*<br/>
*[Greek](README-el.md)*
<p align="center">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://github.com/paperless-ngx/paperless-ngx/blob/main/resources/logo/web/png/White%20logo%20-%20no%20background.png" width="50%">
<source media="(prefers-color-scheme: light)" srcset="https://github.com/paperless-ngx/paperless-ngx/raw/main/resources/logo/web/png/Black%20logo%20-%20no%20background.png" width="50%">
<img src="https://github.com/paperless-ngx/paperless-ngx/raw/main/resources/logo/web/png/Black%20logo%20-%20no%20background.png" width="50%">
</picture>
</p>
# Paperless
<!-- omit in toc -->
[![Documentation](https://readthedocs.org/projects/paperless/badge/?version=latest)](https://paperless.readthedocs.org/) [![Chat](https://badges.gitter.im/danielquinn/paperless.svg)](https://gitter.im/danielquinn/paperless) [![Travis](https://travis-ci.org/danielquinn/paperless.svg?branch=master)](https://travis-ci.org/danielquinn/paperless) [![Coverage Status](https://coveralls.io/repos/github/danielquinn/paperless/badge.svg?branch=master)](https://coveralls.io/github/danielquinn/paperless?branch=master) [![Thanks](https://img.shields.io/badge/THANKS-md-ff69b4.svg)](https://github.com/danielquinn/paperless/blob/master/THANKS.md)
# Paperless-ngx
Index and archive all of your scanned paper documents
Paperless-ngx is a document management system that transforms your physical documents into a searchable online archive so you can keep, well, _less paper_.
I hate paper. Environmental issues aside, it's a tech person's nightmare:
Paperless-ngx is the official successor to the original [Paperless](https://github.com/the-paperless-project/paperless) & [Paperless-ng](https://github.com/jonaswinkler/paperless-ng) projects and is designed to distribute the responsibility of advancing and supporting the project among a team of people. [Consider joining us!](#community-support)
* There's no search feature
* It takes up physical space
* Backups mean more paper
Thanks to the generous folks at [DigitalOcean](https://m.do.co/c/8d70b916d462), a demo is available at [demo.paperless-ngx.com](https://demo.paperless-ngx.com) using login `demo` / `demo`. _Note: demo content is reset frequently and confidential information should not be uploaded._
In the past few months I've been bitten more than a few times by the problem of not having the right document around. Sometimes I recycled a document I needed (who keeps water bills for two years?) and other times I just lost it... because paper. I wrote this to make my life easier.
- [Features](#features)
- [Getting started](#getting-started)
- [Contributing](#contributing)
- [Community Support](#community-support)
- [Translation](#translation)
- [Feature Requests](#feature-requests)
- [Bugs](#bugs)
- [Related Projects](#related-projects)
- [Important Note](#important-note)
<p align="right">This project is supported by:<br/>
<a href="https://m.do.co/c/8d70b916d462" style="padding-top: 4px; display: block;">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://opensource.nyc3.cdn.digitaloceanspaces.com/attribution/assets/SVG/DO_Logo_horizontal_white.svg" width="140px">
<source media="(prefers-color-scheme: light)" srcset="https://opensource.nyc3.cdn.digitaloceanspaces.com/attribution/assets/SVG/DO_Logo_horizontal_blue.svg" width="140px">
<img src="https://opensource.nyc3.cdn.digitaloceanspaces.com/attribution/assets/SVG/DO_Logo_horizontal_black_.svg" width="140px">
</picture>
</a>
</p>
## How it Works
# Features
Paperless does not control your scanner, it only helps you deal with what your scanner produces
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/docs/assets/screenshots/documents-smallcards-dark.png">
<source media="(prefers-color-scheme: light)" srcset="https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/docs/assets/screenshots/documents-smallcards.png">
<img src="https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/docs/assets/screenshots/documents-smallcards.png">
</picture>
1. Buy a document scanner that can write to a place on your network. If you need some inspiration, have a look at the [scanner recommendations](https://paperless.readthedocs.io/en/latest/scanners.html) page.
2. Set it up to "scan to FTP" or something similar. It should be able to push scanned images to a server without you having to do anything. Of course if your scanner doesn't know how to automatically upload the file somewhere, you can always do that manually. Paperless doesn't care how the documents get into its local consumption directory.
3. Have the target server run the Paperless consumption script to OCR the file and index it into a local database.
4. Use the web frontend to sift through the database and find what you want.
5. Download the PDF you need/want via the web interface and do whatever you like with it. You can even print it and send it as if it's the original. In most cases, no one will care or notice.
A full list of [features](https://docs.paperless-ngx.com/#features) and [screenshots](https://docs.paperless-ngx.com/#screenshots) are available in the [documentation](https://docs.paperless-ngx.com/).
Here's what you get:
# Getting started
![The before and after](https://raw.githubusercontent.com/danielquinn/paperless/master/docs/_static/screenshot.png)
The easiest way to deploy paperless is `docker compose`. The files in the [`/docker/compose` directory](https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose) are configured to pull the image from the GitHub container registry.
If you'd like to jump right in, you can configure a `docker compose` environment with our install script:
## Documentation
```bash
bash -c "$(curl -L https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/install-paperless-ngx.sh)"
```
It's all available on [ReadTheDocs](https://paperless.readthedocs.org/).
More details and step-by-step guides for alternative installation methods can be found in [the documentation](https://docs.paperless-ngx.com/setup/#installation).
Migrating from Paperless-ng is easy, just drop in the new docker image! See the [documentation on migrating](https://docs.paperless-ngx.com/setup/#migrating-to-paperless-ngx) for more details.
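For reference, the manual route described in the comments of the compose files further down boils down to a few commands (a sketch; it assumes one of those compose files has been copied into an empty directory as `docker-compose.yml` together with its `docker-compose.env`):
```bash
# Sketch: manual deployment, assuming a compose file and docker-compose.env
# have already been copied into the current directory as described in the
# comments of the compose files further down.
docker compose pull                # fetch the images from the container registry
docker compose up -d               # start the stack in the background; listens on port 8000
docker compose logs -f webserver   # optional: follow the webserver logs
```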
## Requirements
<!-- omit in toc -->
This is all really a quite simple, shiny, user-friendly wrapper around some very powerful tools.
### Documentation
* [ImageMagick](http://imagemagick.org/) converts the images between colour and greyscale.
* [Tesseract](https://github.com/tesseract-ocr) does the character recognition.
* [Unpaper](https://www.flameeyes.eu/projects/unpaper) despeckles and deskews the scanned image.
* [GNU Privacy Guard](https://gnupg.org/) is used as the encryption backend.
* [Python 3](https://python.org/) is the language of the project.
* [Pillow](https://pypi.python.org/pypi/pillowfight/) loads the image data as a python object to be used with PyOCR.
* [PyOCR](https://github.com/jflesch/pyocr) is a slick programmatic wrapper around tesseract.
* [Django](https://www.djangoproject.com/) is the framework this project is written against.
* [Python-GNUPG](http://pythonhosted.org/python-gnupg/) decrypts the PDFs on-the-fly to allow you to download unencrypted files, leaving the encrypted ones on-disk.
The documentation for Paperless-ngx is available at [https://docs.paperless-ngx.com](https://docs.paperless-ngx.com/).
# Contributing
## Project Status
If you feel like contributing to the project, please do! Bug fixes, enhancements, visual fixes etc. are always welcome. If you want to implement something big: Please start a discussion about that! The [documentation](https://docs.paperless-ngx.com/development/) has some basic information on how to get started.
This project has been around since 2015, and there's lots of people using it. For some reason, it's really popular in Germany -- maybe someone over there can clue me in as to why?
## Community Support
I am no longer doing new development on Paperless as it does exactly what I need it to and have since turned my attention to my latest project, [Aletheia](https://github.com/danielquinn/aletheia). However, I'm not abandoning this project. I am happy to field pull requests and answer questions in the issue queue. If you're a developer yourself and want a new feature, float it in the issue queue and/or send me a pull request! I'm happy to add new stuff, but I just don't have the time to do that work myself.
People interested in continuing the work on paperless-ngx are encouraged to reach out here on github and in the [Matrix Room](https://matrix.to/#/#paperless:matrix.org). If you would like to contribute to the project on an ongoing basis there are multiple [teams](https://github.com/orgs/paperless-ngx/people) (frontend, ci/cd, etc) that could use your help so please reach out!
## Translation
## Affiliated Projects
Paperless-ngx is available in many languages that are coordinated on Crowdin. If you want to help out by translating paperless-ngx into your language, please head over to https://crowdin.com/project/paperless-ngx, and thank you! More details can be found in [CONTRIBUTING.md](https://github.com/paperless-ngx/paperless-ngx/blob/main/CONTRIBUTING.md#translating-paperless-ngx).
Paperless has been around a while now, and people are starting to build stuff on top of it. If you're one of those people, we can add your project to this list:
## Feature Requests
* [Paperless Desktop](https://github.com/thomasbrueggemann/paperless-desktop): A desktop UI for your Paperless installation. Runs on Mac, Linux, and Windows.
* [ansible-role-paperless](https://github.com/ovv/ansible-role-paperless): An easy way to get Paperless running via Ansible.
Feature requests can be submitted via [GitHub Discussions](https://github.com/paperless-ngx/paperless-ngx/discussions/categories/feature-requests), you can search for existing ideas, add your own and vote for the ones you care about.
## Bugs
## Similar Projects
For bugs please [open an issue](https://github.com/paperless-ngx/paperless-ngx/issues) or [start a discussion](https://github.com/paperless-ngx/paperless-ngx/discussions) if you have questions.
There's another project out there called [Mayan EDMS](https://mayan.readthedocs.org/en/latest/) that has a surprising amount of technical overlap with Paperless. Also based on Django and using a consumer model with Tesseract and Unpaper, Mayan EDMS is *much* more featureful and comes with a slick UI as well, but still in Python 2. It may be that Paperless consumes fewer resources, but to be honest, this is just a guess as I haven't tested this myself. One thing's for certain though, *Paperless* is a **way** better name.
# Related Projects
Please see [the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Related-Projects) for a user-maintained list of related projects and software that is compatible with Paperless-ngx.
## Important Note
# Important Note
Document scanners are typically used to scan sensitive documents. Things like your social insurance number, tax records, invoices, etc. While Paperless encrypts the original files via the consumption script, the OCR'd text is *not* encrypted and is therefore stored in the clear (it needs to be searchable, so if someone has ideas on how to do that on encrypted data, I'm all ears). This means that Paperless should never be run on an untrusted host. Instead, I recommend that if you do want to use it, run it locally on a server in your own home.
> Document scanners are typically used to scan sensitive documents like your social insurance number, tax records, invoices, etc. **Paperless-ngx should never be run on an untrusted host** because information is stored in clear text without encryption. No guarantees are made regarding security (but we do try!) and you use the app at your own risk.
> **The safest way to run Paperless-ngx is on a local server in your own home with backups in place**.
## Donations
As with all Free software, the power is less in the finances and more in the collective efforts. I really appreciate every pull request and bug report offered up by Paperless' users, so please keep that stuff coming. If however, you're not one for coding/design/documentation, and would like to contribute financially, I won't say no ;-)
The thing is, I'm doing ok for money, so I would instead ask you to donate to the [United Nations High Commissioner for Refugees](https://donate.unhcr.org/int-en/general). They're doing important work and they need the money a lot more than I do.

View File

@@ -1,9 +0,0 @@
# Security Policy
## Reporting a Vulnerability
The Paperless-ngx team and community take security bugs seriously. We appreciate your efforts to responsibly disclose your findings, and will make every effort to acknowledge your contributions.
To report a security issue, please use the GitHub Security Advisory ["Report a Vulnerability"](https://github.com/paperless-ngx/paperless-ngx/security/advisories/new) tab.
The team will send a response indicating the next steps in handling your report. After the initial reply to your report, the security team will keep you informed of the progress towards a fix and full announcement, and may ask for additional information or guidance.

19
THANKS.md Normal file
View File

@@ -0,0 +1,19 @@
# Thanks for using Paperless!
Working on this project has been exhausting, but rewarding at the same time.
It's just wonderful that so many people are using this thing, and in so many
crazy ways.
This file is here for everyone to post their own stories about how you use this
code. It helps me to understand who's using it and why, and maybe to give
others an idea of how it might be used. It's based on a Twitter exchange
between [John Glanville](https://twitter.com/hexapodium) and
[Julia Evans](https://github.com/jvns) and later better defined [here](https://github.com/paulmolluzzo/thanks-md).
To contribute, simply issue a pull request that appends to this file something
like this:
```
### Your Name
Some friendly message
```

View File

@@ -1,8 +0,0 @@
project_id_env: CROWDIN_PROJECT_ID
api_token_env: CROWDIN_PERSONAL_TOKEN
preserve_hierarchy: true
files:
- source: /src/locale/en_US/LC_MESSAGES/django.po
translation: /src/locale/%locale_with_underscore%/LC_MESSAGES/django.po
- source: /src-ui/messages.xlf
translation: /src-ui/src/locale/messages.%locale_with_underscore%.xlf
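The removed Crowdin configuration above maps the Django and Angular source strings to their translated counterparts. A minimal sketch of how such a mapping is typically driven with the Crowdin CLI (the CLI invocation and the placeholder values are assumptions, not part of this repository):
```bash
# Sketch: push sources and pull translations using the mapping above.
# CROWDIN_PROJECT_ID and CROWDIN_PERSONAL_TOKEN are read via the
# project_id_env / api_token_env keys; the values here are placeholders.
export CROWDIN_PROJECT_ID="123456"
export CROWDIN_PERSONAL_TOKEN="replace-with-a-real-token"
crowdin upload sources   # send the django.po / messages.xlf sources to Crowdin
crowdin download         # fetch translated files back into the configured paths
```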

View File

@@ -0,0 +1,22 @@
# Environment variables to set for Paperless
# Commented out variables will be replaced with a default within Paperless.
#
# In addition to what you see here, you can also define any values you find in
# paperless.conf.example here. Values like:
#
# * PAPERLESS_PASSPHRASE
# * PAPERLESS_CONSUMPTION_DIR
# * PAPERLESS_CONSUME_MAIL_HOST
#
# ...are all explained in that file but can be defined here, since the Docker
# installation doesn't make use of paperless.conf.
# Additional languages to install for text recognition. Note that this is
# different from PAPERLESS_OCR_LANGUAGE (default=eng), which defines the
# default language used when guessing the language from the OCR output.
# PAPERLESS_OCR_LANGUAGES=deu ita
# You can change the default user and group id to a custom one
# USERMAP_UID=1000
# USERMAP_GID=1000
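Uncommented, a working version of this file might look like the following (a sketch only; every value shown is an illustrative assumption):
```bash
# Sketch of a filled-in docker-compose.env; the values below are illustrative.
# Passphrase used by GnuPG to encrypt the original documents.
PAPERLESS_PASSPHRASE=a-long-random-passphrase
# Extra Tesseract languages to install in the container.
PAPERLESS_OCR_LANGUAGES=deu ita
# Match your host user so the consumption directory stays writable.
USERMAP_UID=1000
USERMAP_GID=1000
```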

View File

@@ -0,0 +1,53 @@
version: '2.1'
services:
webserver:
build: ./
# uncomment the following line to start automatically on system boot
# restart: always
ports:
# You can adapt the port you want Paperless to listen on by
# modifying the part before the `:`.
- "8000:8000"
healthcheck:
test: ["CMD", "curl" , "-f", "http://localhost:8000"]
interval: 30s
timeout: 10s
retries: 5
volumes:
- data:/usr/src/paperless/data
- media:/usr/src/paperless/media
env_file: docker-compose.env
# The line below is here so that the webserver, which doesn't do any
# text recognition, doesn't have to install unnecessary languages the
# user might have set in the env file; it does this by overwriting the
# value with nothing.
environment:
- PAPERLESS_OCR_LANGUAGES=
command: ["runserver", "--insecure", "--noreload", "0.0.0.0:8000"]
consumer:
build: ./
# uncomment the following line to start automatically on system boot
# restart: always
depends_on:
webserver:
condition: service_healthy
volumes:
- data:/usr/src/paperless/data
- media:/usr/src/paperless/media
# You have to adapt the local path you want the consumption
# directory to mount to by modifying the part before the ':'.
- ./consume:/consume
# Likewise, you can add a local path to mount a directory for
# exporting. This is not strictly needed for paperless to
# function, only if you're exporting your files: uncomment
# it and fill in a local path if you know you're going to
# want to export your documents.
# - /path/to/another/arbitrary/place:/export
env_file: docker-compose.env
command: ["document_consumer"]
volumes:
data:
media:
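With the two files above in place, bringing this two-container stack up is roughly the following (a sketch, using the classic `docker-compose` binary that this 2.1-format file targets):
```bash
# Sketch: build and start the webserver + consumer pair defined above.
docker-compose build              # build the image from the local Dockerfile
docker-compose up -d              # start webserver (port 8000) and consumer
docker-compose logs -f consumer   # optional: watch documents being consumed
```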

View File

@@ -1 +0,0 @@
COMPOSE_PROJECT_NAME=paperless

View File

@@ -1,25 +0,0 @@
# Docker Compose file for running paperless testing with actual Gotenberg
# and Tika containers for a more end to end test of the Tika related functionality
# Can be used locally or by the CI to start the necessary containers with the
# correct networking for the tests
services:
gotenberg:
image: docker.io/gotenberg/gotenberg:8.20
hostname: gotenberg
container_name: gotenberg
network_mode: host
restart: unless-stopped
# The gotenberg chromium route is used to convert .eml files. We do not
# want to allow external content like tracking pixels or even javascript.
command:
- "gotenberg"
- "--chromium-disable-javascript=true"
- "--chromium-allow-list=file:///tmp/.*"
- "--log-level=warn"
- "--log-format=text"
tika:
image: docker.io/apache/tika:latest
hostname: tika
container_name: tika
network_mode: host
restart: unless-stopped
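Because both services use `network_mode: host`, they are reachable on the host's own ports once started. A minimal sketch (the compose file name below is an assumption for illustration; substitute whatever this file is actually saved as):
```bash
# Sketch: start the test-only Gotenberg and Tika containers for local testing.
# The file name is an assumption; use the real name of this compose file.
docker compose -f docker-compose.ci-test.yml up -d
curl -s http://localhost:3000/health    # Gotenberg health endpoint
curl -s http://localhost:9998/version   # Tika server version string
docker compose -f docker-compose.ci-test.yml down
```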

View File

@@ -1,37 +0,0 @@
###############################################################################
# Paperless-ngx settings #
###############################################################################
# See http://docs.paperless-ngx.com/configuration/ for all available options.
# The UID and GID of the user used to run paperless in the container. Set this
# to your UID and GID on the host so that you have write access to the
# consumption directory.
#USERMAP_UID=1000
#USERMAP_GID=1000
# See the documentation linked above for all options. A few commonly adjusted settings
# are provided below.
# This is required if you will be exposing Paperless-ngx on a public domain
# (if doing so please consider security measures such as reverse proxy)
#PAPERLESS_URL=https://paperless.example.com
# Adjust this key if you plan to make paperless available publicly. It should
# be a very long sequence of random characters. You don't need to remember it.
#PAPERLESS_SECRET_KEY=change-me
# Use this variable to set a timezone for the Paperless Docker containers. Defaults to UTC.
#PAPERLESS_TIME_ZONE=America/Los_Angeles
# The default language to use for OCR. Set this to the language most of your
# documents are written in.
#PAPERLESS_OCR_LANGUAGE=eng
# Additional languages to install for text recognition, separated by a whitespace.
# Note that this is different from PAPERLESS_OCR_LANGUAGE (default=eng), which defines
# the language used for OCR.
# The container installs English, German, Italian, Spanish and French by default.
# See https://packages.debian.org/search?keywords=tesseract-ocr-&searchon=names
# for available languages.
#PAPERLESS_OCR_LANGUAGES=tur ces
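A filled-in version of this file might look like the sketch below; the secret key is generated rather than remembered, and all values shown are illustrative assumptions:
```bash
# Sketch of a populated docker-compose.env; values are illustrative only.
USERMAP_UID=1000
USERMAP_GID=1000
PAPERLESS_URL=https://paperless.example.com
# Generate a long random value once, e.g. with: openssl rand -base64 48
PAPERLESS_SECRET_KEY=change-me-to-the-generated-value
PAPERLESS_TIME_ZONE=America/Los_Angeles
PAPERLESS_OCR_LANGUAGE=eng
PAPERLESS_OCR_LANGUAGES=tur ces
```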

View File

@@ -1,90 +0,0 @@
# docker compose file for running paperless from the Docker Hub.
# This file contains everything paperless needs to run.
# Paperless supports amd64, arm and arm64 hardware.
#
# All compose files of paperless configure paperless in the following way:
#
# - Paperless is (re)started on system boot, if it was running before shutdown.
# - Docker volumes for storing data are managed by Docker.
# - Folders for importing and exporting files are created in the same directory
# as this file and mounted to the correct folders inside the container.
# - Paperless listens on port 8000.
#
# In addition to that, this Docker Compose file adds the following optional
# configurations:
#
# - Instead of SQLite (default), MariaDB is used as the database server.
# - Apache Tika and Gotenberg servers are started with paperless and paperless
# is configured to use these services. These provide support for consuming
# Office documents (Word, Excel, PowerPoint and their LibreOffice counter-
# parts).
#
# To install and update paperless with this file, do the following:
#
# - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
# and '.env' into a folder.
# - Run 'docker compose pull'.
# - Run 'docker compose up -d'.
#
# For more extensive installation and update instructions, refer to the
# documentation.
services:
broker:
image: docker.io/library/redis:8
restart: unless-stopped
volumes:
- redisdata:/data
db:
image: docker.io/library/mariadb:11
restart: unless-stopped
volumes:
- dbdata:/var/lib/mysql
environment:
MARIADB_HOST: paperless
MARIADB_DATABASE: paperless
MARIADB_USER: paperless
MARIADB_PASSWORD: paperless
MARIADB_ROOT_PASSWORD: paperless
webserver:
image: ghcr.io/paperless-ngx/paperless-ngx:latest
restart: unless-stopped
depends_on:
- db
- broker
- gotenberg
- tika
ports:
- "8000:8000"
volumes:
- data:/usr/src/paperless/data
- media:/usr/src/paperless/media
- ./export:/usr/src/paperless/export
- ./consume:/usr/src/paperless/consume
env_file: docker-compose.env
environment:
PAPERLESS_REDIS: redis://broker:6379
PAPERLESS_DBENGINE: mariadb
PAPERLESS_DBHOST: db
PAPERLESS_DBUSER: paperless # only needed if non-default username
PAPERLESS_DBPASS: paperless # only needed if non-default password
PAPERLESS_DBPORT: 3306
PAPERLESS_TIKA_ENABLED: 1
PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
PAPERLESS_TIKA_ENDPOINT: http://tika:9998
gotenberg:
image: docker.io/gotenberg/gotenberg:8.20
restart: unless-stopped
# The gotenberg chromium route is used to convert .eml files. We do not
# want to allow external content like tracking pixels or even javascript.
command:
- "gotenberg"
- "--chromium-disable-javascript=true"
- "--chromium-allow-list=file:///tmp/.*"
tika:
image: docker.io/apache/tika:latest
restart: unless-stopped
volumes:
data:
media:
dbdata:
redisdata:
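Once this stack is running, a quick way to confirm that the webserver can actually reach the Tika and Gotenberg endpoints it was configured with is a couple of in-container requests (a sketch; service names and ports come from the compose file above, and `curl` being available inside the image is an assumption):
```bash
# Sketch: verify Tika/Gotenberg connectivity from inside the webserver container.
docker compose exec webserver curl -s -o /dev/null -w "gotenberg: %{http_code}\n" http://gotenberg:3000/health
docker compose exec webserver curl -s http://tika:9998/version
```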

View File

@@ -1,69 +0,0 @@
# Docker Compose file for running paperless from the Docker Hub.
# This file contains everything paperless needs to run.
# Paperless supports amd64, arm and arm64 hardware.
#
# All compose files of paperless configure paperless in the following way:
#
# - Paperless is (re)started on system boot, if it was running before shutdown.
# - Docker volumes for storing data are managed by Docker.
# - Folders for importing and exporting files are created in the same directory
# as this file and mounted to the correct folders inside the container.
# - Paperless listens on port 8000.
#
# In addition to that, this Docker Compose file adds the following optional
# configurations:
#
# - Instead of SQLite (default), MariaDB is used as the database server.
#
# To install and update paperless with this file, do the following:
#
# - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
# and '.env' into a folder.
# - Run 'docker compose pull'.
# - Run 'docker compose up -d'.
#
# For more extensive installation and update instructions, refer to the
# documentation.
services:
broker:
image: docker.io/library/redis:8
restart: unless-stopped
volumes:
- redisdata:/data
db:
image: docker.io/library/mariadb:11
restart: unless-stopped
volumes:
- dbdata:/var/lib/mysql
environment:
MARIADB_HOST: paperless
MARIADB_DATABASE: paperless
MARIADB_USER: paperless
MARIADB_PASSWORD: paperless
MARIADB_ROOT_PASSWORD: paperless
webserver:
image: ghcr.io/paperless-ngx/paperless-ngx:latest
restart: unless-stopped
depends_on:
- db
- broker
ports:
- "8000:8000"
volumes:
- data:/usr/src/paperless/data
- media:/usr/src/paperless/media
- ./export:/usr/src/paperless/export
- ./consume:/usr/src/paperless/consume
env_file: docker-compose.env
environment:
PAPERLESS_REDIS: redis://broker:6379
PAPERLESS_DBENGINE: mariadb
PAPERLESS_DBHOST: db
PAPERLESS_DBUSER: paperless # only needed if non-default username
PAPERLESS_DBPASS: paperless # only needed if non-default password
PAPERLESS_DBPORT: 3306
volumes:
data:
media:
dbdata:
redisdata:
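To confirm the MariaDB service is up and the `paperless` schema has been populated, the client bundled with the image can be used (a sketch; the credentials are the ones defined in the compose file above):
```bash
# Sketch: list the tables Paperless created in the MariaDB database defined above.
docker compose exec db mariadb -upaperless -ppaperless paperless -e "SHOW TABLES;"
```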

View File

@@ -1,65 +0,0 @@
# Docker Compose file for running paperless from the Docker Hub.
# This file contains everything paperless needs to run.
# Paperless supports amd64, arm and arm64 hardware.
#
# All compose files of paperless configure paperless in the following way:
#
# - Paperless is (re)started on system boot, if it was running before shutdown.
# - Docker volumes for storing data are managed by Docker.
# - Folders for importing and exporting files are created in the same directory
# as this file and mounted to the correct folders inside the container.
# - Paperless listens on port 8010.
#
# In addition to that, this Docker Compose file adds the following optional
# configurations:
#
# - Instead of SQLite (default), PostgreSQL is used as the database server.
#
# To install and update paperless with this file, do the following:
#
# - Open portainer Stacks list and click 'Add stack'
# - Paste the contents of this file and assign a name, e.g. 'paperless'
# - Upload 'docker-compose.env' by clicking on 'Load variables from .env file'
# - Modify the environment variables as needed
# - Click 'Deploy the stack' and wait for it to be deployed
#
# For more extensive installation and update instructions, refer to the
# documentation.
services:
broker:
image: docker.io/library/redis:8
restart: unless-stopped
volumes:
- redisdata:/data
db:
image: docker.io/library/postgres:17
restart: unless-stopped
volumes:
- pgdata:/var/lib/postgresql/data
environment:
POSTGRES_DB: paperless
POSTGRES_USER: paperless
POSTGRES_PASSWORD: paperless
webserver:
image: ghcr.io/paperless-ngx/paperless-ngx:latest
restart: unless-stopped
depends_on:
- db
- broker
ports:
- "8010:8000"
volumes:
- data:/usr/src/paperless/data
- media:/usr/src/paperless/media
- ./export:/usr/src/paperless/export
- ./consume:/usr/src/paperless/consume
environment:
PAPERLESS_REDIS: redis://broker:6379
PAPERLESS_DBHOST: db
env_file:
- stack.env
volumes:
data:
media:
pgdata:
redisdata:
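After the stack is deployed from Portainer, the instance is published on host port 8010 rather than 8000; a quick reachability check from the Docker host looks roughly like this (a sketch):
```bash
# Sketch: confirm the Portainer-deployed stack answers on the remapped port 8010.
curl -s -o /dev/null -w "paperless: HTTP %{http_code}\n" http://localhost:8010
```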

View File

@@ -1,84 +0,0 @@
# Docker Compose file for running paperless from the docker container registry.
# This file contains everything paperless needs to run.
# Paperless supports amd64, arm and arm64 hardware.
#
# All compose files of paperless configure paperless in the following way:
#
# - Paperless is (re)started on system boot, if it was running before shutdown.
# - Docker volumes for storing data are managed by Docker.
# - Folders for importing and exporting files are created in the same directory
# as this file and mounted to the correct folders inside the container.
# - Paperless listens on port 8000.
#
# In addition to that, this Docker Compose file adds the following optional
# configurations:
#
# - Instead of SQLite (default), PostgreSQL is used as the database server.
# - Apache Tika and Gotenberg servers are started with paperless and paperless
# is configured to use these services. These provide support for consuming
# Office documents (Word, Excel, PowerPoint and their LibreOffice counter-
# parts).
#
# To install and update paperless with this file, do the following:
#
# - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
# and '.env' into a folder.
# - Run 'docker compose pull'.
# - Run 'docker compose up -d'.
#
# For more extensive installation and update instructions, refer to the
# documentation.
services:
broker:
image: docker.io/library/redis:8
restart: unless-stopped
volumes:
- redisdata:/data
db:
image: docker.io/library/postgres:17
restart: unless-stopped
volumes:
- pgdata:/var/lib/postgresql/data
environment:
POSTGRES_DB: paperless
POSTGRES_USER: paperless
POSTGRES_PASSWORD: paperless
webserver:
image: ghcr.io/paperless-ngx/paperless-ngx:latest
restart: unless-stopped
depends_on:
- db
- broker
- gotenberg
- tika
ports:
- "8000:8000"
volumes:
- data:/usr/src/paperless/data
- media:/usr/src/paperless/media
- ./export:/usr/src/paperless/export
- ./consume:/usr/src/paperless/consume
env_file: docker-compose.env
environment:
PAPERLESS_REDIS: redis://broker:6379
PAPERLESS_DBHOST: db
PAPERLESS_TIKA_ENABLED: 1
PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
PAPERLESS_TIKA_ENDPOINT: http://tika:9998
gotenberg:
image: docker.io/gotenberg/gotenberg:8.20
restart: unless-stopped
# The gotenberg chromium route is used to convert .eml files. We do not
# want to allow external content like tracking pixels or even javascript.
command:
- "gotenberg"
- "--chromium-disable-javascript=true"
- "--chromium-allow-list=file:///tmp/.*"
tika:
image: docker.io/apache/tika:latest
restart: unless-stopped
volumes:
data:
media:
pgdata:
redisdata:
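The header comments above give the installation steps in prose. The same steps as a shell sketch, assuming the three files have been copied into an otherwise empty folder (the folder name paperless is only illustrative):
mkdir paperless && cd paperless
# place docker-compose.yml, docker-compose.env and .env here, then:
docker compose pull    # fetch the images referenced in the compose file
docker compose up -d   # start the stack in the background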

View File

@@ -1,63 +0,0 @@
# Docker Compose file for running paperless from the Docker Hub.
# This file contains everything paperless needs to run.
# Paperless supports amd64, arm and arm64 hardware.
#
# All compose files of paperless configure paperless in the following way:
#
# - Paperless is (re)started on system boot, if it was running before shutdown.
# - Docker volumes for storing data are managed by Docker.
# - Folders for importing and exporting files are created in the same directory
# as this file and mounted to the correct folders inside the container.
# - Paperless listens on port 8000.
#
# In addition to that, this Docker Compose file adds the following optional
# configurations:
#
# - Instead of SQLite (default), PostgreSQL is used as the database server.
#
# To install and update paperless with this file, do the following:
#
# - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
# and '.env' into a folder.
# - Run 'docker compose pull'.
# - Run 'docker compose up -d'.
#
# For more extensive installation and update instructions, refer to the
# documentation.
services:
broker:
image: docker.io/library/redis:8
restart: unless-stopped
volumes:
- redisdata:/data
db:
image: docker.io/library/postgres:17
restart: unless-stopped
volumes:
- pgdata:/var/lib/postgresql/data
environment:
POSTGRES_DB: paperless
POSTGRES_USER: paperless
POSTGRES_PASSWORD: paperless
webserver:
image: ghcr.io/paperless-ngx/paperless-ngx:latest
restart: unless-stopped
depends_on:
- db
- broker
ports:
- "8000:8000"
volumes:
- data:/usr/src/paperless/data
- media:/usr/src/paperless/media
- ./export:/usr/src/paperless/export
- ./consume:/usr/src/paperless/consume
env_file: docker-compose.env
environment:
PAPERLESS_REDIS: redis://broker:6379
PAPERLESS_DBHOST: db
volumes:
data:
media:
pgdata:
redisdata:

View File

@@ -1,72 +0,0 @@
# Docker Compose file for running paperless from the docker container registry.
# This file contains everything paperless needs to run.
# Paperless supports amd64, arm and arm64 hardware.
# All compose files of paperless configure paperless in the following way:
#
# - Paperless is (re)started on system boot, if it was running before shutdown.
# - Docker volumes for storing data are managed by Docker.
# - Folders for importing and exporting files are created in the same directory
# as this file and mounted to the correct folders inside the container.
# - Paperless listens on port 8000.
#
# SQLite is used as the database. The SQLite file is stored in the data volume.
#
# In addition to that, this Docker Compose file adds the following optional
# configurations:
#
# - Apache Tika and Gotenberg servers are started with paperless and paperless
# is configured to use these services. These provide support for consuming
# Office documents (Word, Excel, PowerPoint and their LibreOffice counter-
# parts).
#
# To install and update paperless with this file, do the following:
#
# - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
# and '.env' into a folder.
# - Run 'docker compose pull'.
# - Run 'docker compose up -d'.
#
# For more extensive installation and update instructions, refer to the
# documentation.
services:
broker:
image: docker.io/library/redis:8
restart: unless-stopped
volumes:
- redisdata:/data
webserver:
image: ghcr.io/paperless-ngx/paperless-ngx:latest
restart: unless-stopped
depends_on:
- broker
- gotenberg
- tika
ports:
- "8000:8000"
volumes:
- data:/usr/src/paperless/data
- media:/usr/src/paperless/media
- ./export:/usr/src/paperless/export
- ./consume:/usr/src/paperless/consume
env_file: docker-compose.env
environment:
PAPERLESS_REDIS: redis://broker:6379
PAPERLESS_TIKA_ENABLED: 1
PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
PAPERLESS_TIKA_ENDPOINT: http://tika:9998
gotenberg:
image: docker.io/gotenberg/gotenberg:8.20
restart: unless-stopped
# The gotenberg chromium route is used to convert .eml files. We do not
# want to allow external content like tracking pixels or even javascript.
command:
- "gotenberg"
- "--chromium-disable-javascript=true"
- "--chromium-allow-list=file:///tmp/.*"
tika:
image: docker.io/apache/tika:latest
restart: unless-stopped
volumes:
data:
media:
redisdata:
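Tika and Gotenberg are not published on the host in the compose file above, so a reachability check has to run from inside the webserver container. A sketch, assuming curl is available in the image and the services still expose their default /tika and /health endpoints:
# Hypothetical connectivity check from within the paperless container.
docker compose exec webserver curl --fail --silent http://tika:9998/tika        # Tika greeting
docker compose exec webserver curl --fail --silent http://gotenberg:3000/health # Gotenberg health probe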

View File

@@ -1,48 +0,0 @@
# Docker Compose file for running paperless from the Docker Hub.
# This file contains everything paperless needs to run.
# Paperless supports amd64, arm and arm64 hardware.
#
# All compose files of paperless configure paperless in the following way:
#
# - Paperless is (re)started on system boot, if it was running before shutdown.
# - Docker volumes for storing data are managed by Docker.
# - Folders for importing and exporting files are created in the same directory
# as this file and mounted to the correct folders inside the container.
# - Paperless listens on port 8000.
#
# SQLite is used as the database. The SQLite file is stored in the data volume.
#
# To install and update paperless with this file, do the following:
#
# - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
# and '.env' into a folder.
# - Run 'docker compose pull'.
# - Run 'docker compose up -d'.
#
# For more extensive installation and update instructions, refer to the
# documentation.
services:
broker:
image: docker.io/library/redis:8
restart: unless-stopped
volumes:
- redisdata:/data
webserver:
image: ghcr.io/paperless-ngx/paperless-ngx:latest
restart: unless-stopped
depends_on:
- broker
ports:
- "8000:8000"
volumes:
- data:/usr/src/paperless/data
- media:/usr/src/paperless/media
- ./export:/usr/src/paperless/export
- ./consume:/usr/src/paperless/consume
env_file: docker-compose.env
environment:
PAPERLESS_REDIS: redis://broker:6379
volumes:
data:
media:
redisdata:

Binary file not shown (removed image, 30 KiB)

View File

@@ -1,27 +0,0 @@
#!/usr/bin/env bash
# Run this script to generate the management commands again (for example if a new command is created or the template is updated)
set -eu
for command in decrypt_documents \
document_archiver \
document_exporter \
document_importer \
mail_fetcher \
document_create_classifier \
document_index \
document_renamer \
document_retagger \
document_thumbnails \
document_sanity_checker \
document_fuzzy_match \
manage_superuser \
convert_mariadb_uuid \
prune_audit_logs \
createsuperuser;
do
echo "installing $command..."
sed "s/management_command/$command/g" management_script.sh >"$PWD/rootfs/usr/local/bin/$command"
chmod u=rwx,g=rwx,o=rx "$PWD/rootfs/usr/local/bin/$command"
done
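The loop above stamps out one wrapper per management command from a shared template, and each wrapper simply forwards its arguments to manage.py. A usage sketch for one generated wrapper, assuming a running compose stack whose Paperless service is named webserver:
# Hypothetical invocation of a generated wrapper inside the container.
docker compose exec webserver document_retagger --help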

View File

@@ -1,14 +0,0 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash
set -e
cd "${PAPERLESS_SRC_DIR}"
if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py management_command "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py management_command "$@"
else
echo "Unknown user."
fi

View File

@@ -1,96 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE policymap [
<!ELEMENT policymap (policy)+>
<!ATTLIST policymap xmlns CDATA #FIXED ''>
<!ELEMENT policy EMPTY>
<!ATTLIST policy xmlns CDATA #FIXED '' domain NMTOKEN #REQUIRED
name NMTOKEN #IMPLIED pattern CDATA #IMPLIED rights NMTOKEN #IMPLIED
stealth NMTOKEN #IMPLIED value CDATA #IMPLIED>
]>
<!--
Configure ImageMagick policies.
Domains include system, delegate, coder, filter, path, or resource.
Rights include none, read, write, execute and all. Use | to combine them,
for example: "read | write" to permit read from, or write to, a path.
Use a glob expression as a pattern.
Suppose we do not want users to process MPEG video images:
<policy domain="delegate" rights="none" pattern="mpeg:decode" />
Here we do not want users reading images from HTTP:
<policy domain="coder" rights="none" pattern="HTTP" />
The /repository file system is restricted to read only. We use a glob
expression to match all paths that start with /repository:
<policy domain="path" rights="read" pattern="/repository/*" />
Let's prevent users from executing any image filters:
<policy domain="filter" rights="none" pattern="*" />
Any large image is cached to disk rather than memory:
<policy domain="resource" name="area" value="1GP"/>
Define arguments for the memory, map, area, width, height and disk resources
with SI prefixes (e.g. 100MB). In addition, resource policies are maximums
for each instance of ImageMagick (e.g. policy memory limit 1GB, -limit 2GB
exceeds policy maximum so memory limit is 1GB).
Rules are processed in order. Here we want to restrict ImageMagick to only
read or write a small subset of proven web-safe image types:
<policy domain="delegate" rights="none" pattern="*" />
<policy domain="filter" rights="none" pattern="*" />
<policy domain="coder" rights="none" pattern="*" />
<policy domain="coder" rights="read|write" pattern="{GIF,JPEG,PNG,WEBP}" />
-->
<policymap>
<!-- <policy domain="system" name="shred" value="2"/> -->
<!-- <policy domain="system" name="precision" value="6"/> -->
<!-- <policy domain="system" name="memory-map" value="anonymous"/> -->
<!-- <policy domain="system" name="max-memory-request" value="256MiB"/> -->
<!-- <policy domain="resource" name="temporary-path" value="/tmp"/> -->
<policy domain="resource" name="memory" value="256MiB"/>
<policy domain="resource" name="map" value="512MiB"/>
<policy domain="resource" name="width" value="16KP"/>
<policy domain="resource" name="height" value="16KP"/>
<!-- <policy domain="resource" name="list-length" value="128"/> -->
<policy domain="resource" name="area" value="128MB"/>
<policy domain="resource" name="disk" value="1GiB"/>
<!-- <policy domain="resource" name="file" value="768"/> -->
<!-- <policy domain="resource" name="thread" value="4"/> -->
<!-- <policy domain="resource" name="throttle" value="0"/> -->
<!-- <policy domain="resource" name="time" value="3600"/> -->
<!-- <policy domain="coder" rights="none" pattern="MVG" /> -->
<!-- <policy domain="module" rights="none" pattern="{PS,PDF,XPS}" /> -->
<!-- <policy domain="delegate" rights="none" pattern="HTTPS" /> -->
<!-- <policy domain="path" rights="none" pattern="@*" /> -->
<!-- <policy domain="cache" name="memory-map" value="anonymous"/> -->
<!-- <policy domain="cache" name="synchronize" value="True"/> -->
<!-- <policy domain="cache" name="shared-secret" value="passphrase" stealth="true"/> -->
<!-- <policy domain="system" name="pixel-cache-memory" value="anonymous"/> -->
<!-- <policy domain="system" name="shred" value="2"/> -->
<!-- <policy domain="system" name="precision" value="6"/> -->
<!-- not needed, since MVG has to be requested explicitly: -->
<!-- <policy domain="delegate" rights="none" pattern="MVG" /> -->
<!-- use curl -->
<policy domain="delegate" rights="none" pattern="URL" />
<policy domain="delegate" rights="none" pattern="HTTPS" />
<policy domain="delegate" rights="none" pattern="HTTP" />
<!-- in order to avoid to get image with password text -->
<policy domain="path" rights="none" pattern="@*"/>
<!-- disable ghostscript format types -->
<policy domain="coder" rights="none" pattern="PS" />
<policy domain="coder" rights="none" pattern="PS2" />
<policy domain="coder" rights="none" pattern="PS3" />
<policy domain="coder" rights="none" pattern="EPS" />
<policy domain="coder" rights="read|write" pattern="PDF" />
<policy domain="coder" rights="none" pattern="XPS" />
</policymap>
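The policy above disables the Ghostscript-backed coders (PS, PS2, PS3, EPS, XPS) while keeping PDF readable and writable. A sketch for confirming which policy ImageMagick actually enforces, using the standard -list query:
# The output begins with the path of each policy.xml that was loaded,
# followed by the rights and resource limits in effect.
identify -list policy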

View File

@@ -1,8 +0,0 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash
declare -r log_prefix="[init-complete]"
declare -r end_time=$(date +%s)
declare -r start_time=${PAPERLESS_START_TIME_S}
echo "${log_prefix} paperless-ngx docker container init completed in $(($end_time-$start_time)) seconds"
echo "${log_prefix} Starting services"

View File

@@ -1 +0,0 @@
oneshot

View File

@@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/init-complete/run

View File

@@ -1,44 +0,0 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash
declare -r log_prefix="[custom-init]"
# Mostly borrowed from the LinuxServer.io base image
# https://github.com/linuxserver/docker-baseimage-ubuntu/tree/bionic/root/etc/cont-init.d
declare -r custom_script_dir="/custom-cont-init.d"
# Tamper checking.
# Don't run files which are owned by anyone except root
# Don't run files which are writeable by others
if [ -d "${custom_script_dir}" ]; then
if [ -n "$(/usr/bin/find "${custom_script_dir}" -maxdepth 1 ! -user root)" ]; then
echo "${log_prefix} **** Potential tampering with custom scripts detected ****"
echo "${log_prefix} **** The folder '${custom_script_dir}' must be owned by root ****"
exit 0
fi
if [ -n "$(/usr/bin/find "${custom_script_dir}" -maxdepth 1 -perm -o+w)" ]; then
echo "${log_prefix} **** The folder '${custom_script_dir}' or some of contents have write permissions for others, which is a security risk. ****"
echo "${log_prefix} **** Please review the permissions and their contents to make sure they are owned by root, and can only be modified by root. ****"
exit 0
fi
# Make sure custom init directory has files in it
if [ -n "$(/bin/ls --almost-all "${custom_script_dir}" 2>/dev/null)" ]; then
echo "${log_prefix} files found in ${custom_script_dir} executing"
# Loop over files in the directory
for SCRIPT in "${custom_script_dir}"/*; do
NAME="$(basename "${SCRIPT}")"
if [ -f "${SCRIPT}" ]; then
echo "${log_prefix} ${NAME}: executing..."
/command/with-contenv /bin/bash "${SCRIPT}"
echo "${log_prefix} ${NAME}: exited $?"
elif [ ! -f "${SCRIPT}" ]; then
echo "${log_prefix} ${NAME}: is not a file"
fi
done
else
echo "${log_prefix} no custom files found exiting..."
fi
else
echo "${log_prefix} ${custom_script_dir} doesn't exist, nothing to do"
fi
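The init script above only executes root-owned, non-world-writable files mounted at /custom-cont-init.d. A sketch of wiring up such a script with docker run; the folder and script names are illustrative:
mkdir custom-init
printf '#!/usr/bin/env bash\necho "hello from custom init"\n' > custom-init/01-hello.sh
chmod 755 custom-init/01-hello.sh
sudo chown root:root custom-init custom-init/01-hello.sh   # must be owned by root, per the tamper checks
docker run --rm -v "$PWD/custom-init:/custom-cont-init.d:ro" ghcr.io/paperless-ngx/paperless-ngx:latest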

View File

@@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/init-custom-init/run

View File

@@ -1,33 +0,0 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash
declare -r log_prefix="[env-init]"
echo "${log_prefix} Checking for environment from files"
if find /run/s6/container_environment/*"_FILE" -maxdepth 1 > /dev/null 2>&1; then
for FILENAME in /run/s6/container_environment/*; do
if [[ "${FILENAME##*/}" == PAPERLESS_*_FILE ]]; then
# This should have been named differently...
if [[ "${FILENAME##*/}" == "PAPERLESS_OCR_SKIP_ARCHIVE_FILE" || "${FILENAME##*/}" == "PAPERLESS_MODEL_FILE" ]]; then
continue
fi
SECRETFILE=$(cat "${FILENAME}")
# Check the file exists
if [[ -f ${SECRETFILE} ]]; then
# Trim off trailing _FILE
FILESTRIP=${FILENAME//_FILE/}
if [[ $(tail -n1 "${SECRETFILE}" | wc -l) != 0 ]]; then
echo "${log_prefix} Your secret: ${FILENAME##*/} contains a trailing newline and may not work as expected"
fi
# Set environment variable
cat "${SECRETFILE}" > "${FILESTRIP}"
echo "${log_prefix} ${FILESTRIP##*/} set from ${FILENAME##*/}"
else
echo "${log_prefix} cannot find secret in ${FILENAME##*/}"
fi
fi
done
else
echo "${log_prefix} No *_FILE environment found"
fi
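The loop above lets any PAPERLESS_* setting be supplied indirectly through a companion *_FILE variable pointing at a file inside the container. A sketch using a plain bind mount and docker run; the file name and mount point are arbitrary choices for illustration:
# Keep the database password out of the environment by pointing
# PAPERLESS_DBPASS_FILE at a mounted file (written without a trailing
# newline, as the script above warns).
printf '%s' 'super-secret' > ./dbpass.txt
docker run --rm \
  -v "$PWD/dbpass.txt:/run/secrets/dbpass:ro" \
  -e PAPERLESS_DBPASS_FILE=/run/secrets/dbpass \
  ghcr.io/paperless-ngx/paperless-ngx:latest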

View File

@@ -1 +0,0 @@
oneshot

View File

@@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/init-env-file/run

View File

@@ -1,65 +0,0 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash
declare -r log_prefix="[init-folders]"
declare -r export_dir="/usr/src/paperless/export"
declare -r data_dir="${PAPERLESS_DATA_DIR:-/usr/src/paperless/data}"
declare -r media_root_dir="${PAPERLESS_MEDIA_ROOT:-/usr/src/paperless/media}"
declare -r consume_dir="${PAPERLESS_CONSUMPTION_DIR:-/usr/src/paperless/consume}"
declare -r tmp_dir="${PAPERLESS_SCRATCH_DIR:=/tmp/paperless}"
declare -r main_dirs=(
"${export_dir}"
"${data_dir}"
"${media_root_dir}"
"${consume_dir}"
"${tmp_dir}"
)
declare -r extra_dirs=(
"${main_dirs[@]}"
"${data_dir}/index"
"${media_root_dir}/documents"
"${media_root_dir}/documents/originals"
"${media_root_dir}/documents/thumbnails"
)
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
# Non-root mode: Create directories as current user, warn about permission issues
echo "${log_prefix} Running in non-root mode, checking directories"
current_uid=$(id --user)
current_gid=$(id --group)
for dir in "${extra_dirs[@]}"; do
if [[ ! -d "${dir}" ]]; then
mkdir --parents --verbose "${dir}" || echo "${log_prefix} WARNING: Could not create ${dir} - permission denied"
fi
# Check permissions on existing directories too
if [[ -d "${dir}" && ! -w "${dir}" ]]; then
echo "${log_prefix} WARNING: No write permission to ${dir}"
fi
done
# Warn about ownership issues
for dir in "${main_dirs[@]}"; do
if [[ -d "${dir}" ]]; then
find "${dir}" -not \( -user ${current_uid} -and -group ${current_gid} \) -exec echo "${log_prefix} WARNING: Permission issue on {}: not owned by current user (${current_uid}:${current_gid})" \; 2>/dev/null || echo "${log_prefix} WARNING: Cannot check permissions on ${dir}"
fi
done
else
# Root mode: Create and fix permissions as needed
echo "${log_prefix} Running with root privileges, adjusting directories and permissions"
# First create directories
for dir in "${extra_dirs[@]}"; do
if [[ ! -d "${dir}" ]]; then
mkdir --parents --verbose "${dir}"
fi
done
# Then fix permissions on all directories
for dir in "${main_dirs[@]}"; do
find "${dir}" -not \( -user paperless -and -group paperless \) -exec chown --changes paperless:paperless {} +
done
fi
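The directory list above is built from a handful of PAPERLESS_* variables with container-internal defaults. A sketch of overriding them to relocate the data, media and consumption directories, assuming matching mounts at the chosen paths (all paths are illustrative):
# The init script creates the index/ and documents/ subdirectories
# underneath whatever data and media roots it is given.
docker run --rm \
  -e PAPERLESS_DATA_DIR=/paperless/data \
  -e PAPERLESS_MEDIA_ROOT=/paperless/media \
  -e PAPERLESS_CONSUMPTION_DIR=/paperless/consume \
  -v paperless_data:/paperless/data \
  -v paperless_media:/paperless/media \
  -v "$PWD/consume:/paperless/consume" \
  ghcr.io/paperless-ngx/paperless-ngx:latest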

View File

@@ -1 +0,0 @@
oneshot

View File

@@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/init-folders/run

View File

@@ -1,18 +0,0 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash
declare -r log_prefix="[init-migrations]"
declare -r data_dir="${PAPERLESS_DATA_DIR:-/usr/src/paperless/data}"
echo "${log_prefix} Apply database migrations..."
cd "${PAPERLESS_SRC_DIR}"
# The whole migrate, with flock, needs to run as the right user
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
exec s6-setlock -n "${data_dir}/migration_lock" python3 manage.py migrate --skip-checks --no-input
else
exec s6-setuidgid paperless \
s6-setlock -n "${data_dir}/migration_lock" \
python3 manage.py migrate --skip-checks --no-input
fi

View File

@@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/init-migrations/run

View File

@@ -1,22 +0,0 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash
declare -r log_prefix="[init-user]"
declare -r usermap_original_uid=$(id -u paperless)
declare -r usermap_original_gid=$(id -g paperless)
declare -r usermap_new_uid=${USERMAP_UID:-$usermap_original_uid}
declare -r usermap_new_gid=${USERMAP_GID:-${usermap_original_gid:-$usermap_new_uid}}
if [[ ${usermap_new_uid} != "${usermap_original_uid}" ]]; then
echo "${log_prefix} Mapping UID for paperless to $usermap_new_uid"
usermod --non-unique --uid "${usermap_new_uid}" paperless
else
echo "${log_prefix} No UID changes for paperless"
fi
if [[ ${usermap_new_gid} != "${usermap_original_gid}" ]]; then
echo "${log_prefix} Mapping GID for paperless to $usermap_new_gid"
groupmod --non-unique --gid "${usermap_new_gid}" paperless
else
echo "${log_prefix} No GID changes for paperless"
fi
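USERMAP_UID and USERMAP_GID remap the in-container paperless user so that files written to bind mounts end up owned by a chosen host account. A sketch that maps it onto the invoking host user (illustrative invocation):
# Run the container with the paperless user remapped to the current host user.
docker run --rm \
  -e USERMAP_UID="$(id -u)" \
  -e USERMAP_GID="$(id -g)" \
  -v "$PWD/consume:/usr/src/paperless/consume" \
  ghcr.io/paperless-ngx/paperless-ngx:latest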

View File

@@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/init-modify-user/run

Some files were not shown because too many files have changed in this diff.