Mirror of https://github.com/paperless-ngx/paperless-ngx.git, synced 2025-04-13 10:03:49 -05:00.

Compare commits: 308 commits.
Files:

- .codecov.yml
- .devcontainer/
- .dockerignore, .editorconfig, .github/, .gitignore, .pre-commit-config.yaml, .python-version, .ruff.toml
- CODEOWNERS, CONTRIBUTING.md, Dockerfile, Pipfile, Pipfile.lock, README.md
- docker/
  - compose/: docker-compose.ci-test.yml, docker-compose.mariadb-tika.yml, docker-compose.mariadb.yml, docker-compose.portainer.yml, docker-compose.postgres-tika.yml, docker-compose.postgres.yml, docker-compose.sqlite-tika.yml, docker-compose.sqlite.yml
  - docker-prepare.sh, env-from-file.sh, flower-conditional.sh, init-flow.drawio.png, install_management_commands.sh, management_script.sh, paperless_cmd.sh
  - rootfs/etc/
    - ImageMagick-6
    - s6-overlay/s6-rc.d/
      - init-complete/ (dependencies.d: init-custom-init, init-env-file, init-folders, init-migrations, init-modify-user, init-search-index, init-start, init-superuser, init-system-checks, init-tesseract-langs, init-wait-for-db, init-wait-for-redis; run, type, up)
      - init-custom-init, init-env-file, init-folders, init-migrations, init-modify-user, init-search-index, init-start, init-superuser, init-system-checks, init-tesseract-langs
.codecov.yml (16 changed lines)

@@ -1,18 +1,18 @@
 codecov:
   require_ci_to_pass: true
-# https://docs.codecov.com/docs/flags#recommended-automatic-flag-management
-# Require each flag to have 1 upload before notification
-flag_management:
-  individual_flags:
-    - name: backend
-      paths:
-        - src/
-    - name: frontend
-      paths:
-        - src-ui/
+# https://docs.codecov.com/docs/components
+component_management:
+  individual_components:
+    - component_id: backend
+      paths:
+        - src/**
+    - component_id: frontend
+      paths:
+        - src-ui/**
 # https://docs.codecov.com/docs/pull-request-comments
 # codecov will only comment if coverage changes
 comment:
+  layout: "header, diff, components, flags, files"
   require_changes: true
   # https://docs.codecov.com/docs/javascript-bundle-analysis
   require_bundle_changes: true
Dockerfile

@@ -76,18 +76,15 @@ RUN set -eux \
   && apt-get update \
   && apt-get install --yes --quiet --no-install-recommends ${RUNTIME_PACKAGES}

-ARG PYTHON_PACKAGES="\
-  python3 \
-  python3-pip \
-  python3-wheel \
-  pipenv \
-  ca-certificates"
+ARG PYTHON_PACKAGES="ca-certificates"

 RUN set -eux \
   echo "Installing python packages" \
   && apt-get update \
   && apt-get install --yes --quiet ${PYTHON_PACKAGES}

+COPY --from=ghcr.io/astral-sh/uv:0.6 /uv /bin/uv

 RUN set -eux \
   && echo "Installing pre-built updates" \
   && echo "Installing qpdf ${QPDF_VERSION}" \

@@ -123,13 +120,15 @@ RUN set -eux \
 WORKDIR /usr/src/paperless/src/docker/

 COPY [ \
-  "docker/imagemagick-policy.xml", \
+  "docker/rootfs/etc/ImageMagick-6/paperless-policy.xml", \
   "./" \
 ]

 RUN set -eux \
   && echo "Configuring ImageMagick" \
-  && mv imagemagick-policy.xml /etc/ImageMagick-6/policy.xml
+  && mv paperless-policy.xml /etc/ImageMagick-6/policy.xml

+COPY --from=ghcr.io/astral-sh/uv:0.6 /uv /bin/uv

 # Packages needed only for building a few quick Python
 # dependencies

@@ -140,18 +139,17 @@ ARG BUILD_PACKAGES="\
   libpq-dev \
   # https://github.com/PyMySQL/mysqlclient#linux
   default-libmysqlclient-dev \
-  pkg-config \
-  pre-commit"
+  pkg-config"

 # hadolint ignore=DL3042
-RUN --mount=type=cache,target=/root/.cache/pip/,id=pip-cache \
+RUN --mount=type=cache,target=/root/.cache/uv,id=pip-cache \
   set -eux \
   && echo "Installing build system packages" \
   && apt-get update \
   && apt-get install --yes --quiet ${BUILD_PACKAGES}

 RUN set -eux \
-  && npm update npm -g
+  && npm update -g pnpm

 # add users, setup scripts
 # Mount the compiled frontend to expected location

@@ -169,9 +167,6 @@ RUN set -eux \
   && mkdir --parents --verbose /usr/src/paperless/paperless-ngx/.venv \
   && echo "Adjusting all permissions" \
   && chown --from root:root --changes --recursive paperless:paperless /usr/src/paperless
-  # && echo "Collecting static files" \
-  # && gosu paperless python3 manage.py collectstatic --clear --no-input --link \
-  # && gosu paperless python3 manage.py compilemessages

 VOLUME ["/usr/src/paperless/paperless-ngx/data", \
   "/usr/src/paperless/paperless-ngx/media", \
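The hunks above replace the pipenv-era Python tooling in the image with the `uv` binary copied from `ghcr.io/astral-sh/uv:0.6`, rename the ImageMagick policy file, and switch the global Node tooling update from npm to pnpm. For reference, the renamed ImageMagick policy can be applied on a plain Debian/Ubuntu host the same way the CI workflow further down does it; this is a minimal sketch, assuming ImageMagick 6 is installed and the command is run from the repository root.

```bash
# Apply the repository's ImageMagick policy system-wide (path taken from this diff).
sudo cp docker/rootfs/etc/ImageMagick-6/paperless-policy.xml /etc/ImageMagick-6/policy.xml
```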
.devcontainer/README.md (new file, 117 lines)

@@ -0,0 +1,117 @@

# Paperless-ngx Development Environment

## Overview

Welcome to the Paperless-ngx development environment! This setup uses VSCode DevContainers to provide a consistent and seamless development experience.

### What are DevContainers?

DevContainers are a feature in VSCode that allows you to develop within a Docker container. This ensures that your development environment is consistent across different machines and setups. By defining a containerized environment, you can eliminate the "works on my machine" problem.

### Advantages of DevContainers

- **Consistency**: Same environment for all developers.
- **Isolation**: Separate development environment from your local machine.
- **Reproducibility**: Easily recreate the environment on any machine.
- **Pre-configured Tools**: Include all necessary tools and dependencies in the container.

## DevContainer Setup

The DevContainer configuration provides up all the necessary services for Paperless-ngx, including:

- Redis
- Gotenberg
- Tika

Data is stored using Docker volumes to ensure persistence across container restarts.

## Configuration Files

The setup includes debugging configurations (`launch.json`) and tasks (`tasks.json`) to help you manage and debug various parts of the project:

- **Backend Debugging:**
  - `manage.py runserver`
  - `manage.py document-consumer`
  - `celery`
- **Maintenance Tasks:**
  - Create superuser
  - Run migrations
  - Recreate virtual environment (`.venv` with `uv`)
  - Compile frontend assets

## Getting Started

### Step 1: Running the DevContainer

To start the DevContainer:

1. Open VSCode.
2. Open the project folder.
3. Open the command palette:
   - **Windows/Linux**: `Ctrl+Shift+P`
   - **Mac**: `Cmd+Shift+P`
4. Type and select `Dev Containers: Rebuild and Reopen in Container`.

VSCode will build and start the DevContainer environment.

### Step 2: Initial Setup

Once the DevContainer is up and running, perform the following steps:

1. **Compile Frontend Assets**:
   - Open the command palette:
     - **Windows/Linux**: `Ctrl+Shift+P`
     - **Mac**: `Cmd+Shift+P`
   - Select `Tasks: Run Task`.
   - Choose `Frontend Compile`.

2. **Run Database Migrations**:
   - Open the command palette:
     - **Windows/Linux**: `Ctrl+Shift+P`
     - **Mac**: `Cmd+Shift+P`
   - Select `Tasks: Run Task`.
   - Choose `Migrate Database`.

3. **Create Superuser**:
   - Open the command palette:
     - **Windows/Linux**: `Ctrl+Shift+P`
     - **Mac**: `Cmd+Shift+P`
   - Select `Tasks: Run Task`.
   - Choose `Create Superuser`.

### Debugging and Running Services

You can start and debug backend services either as debugging sessions via `launch.json` or as tasks.

#### Using `launch.json`

1. Press `F5` or go to the **Run and Debug** view in VSCode.
2. Select the desired configuration:
   - `Runserver`
   - `Document Consumer`
   - `Celery`

#### Using Tasks

1. Open the command palette:
   - **Windows/Linux**: `Ctrl+Shift+P`
   - **Mac**: `Cmd+Shift+P`
2. Select `Tasks: Run Task`.
3. Choose the desired task:
   - `Runserver`
   - `Document Consumer`
   - `Celery`

### Additional Maintenance Tasks

Additional tasks are available for common maintenance operations:

- **Recreate .venv**: For setting up the virtual environment using `uv`.
- **Migrate Database**: To apply database migrations.
- **Create Superuser**: To create an admin user for the application.

## Let's Get Started!

Follow the steps above to get your development environment up and running. Happy coding!
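The VSCode tasks and launch configurations described above are thin wrappers around a handful of backend commands. The sketch below lists those underlying commands as they appear elsewhere in this change set (tasks.json and devcontainer.json); the task labels shown in the UI differ slightly, and the sketch assumes you are inside the dev container with the repository root as the working directory.

```bash
# Recreate the virtual environment and install Python dependencies
uv sync --group dev

# Backend management commands (run from the src/ directory)
cd src
uv run python manage.py migrate                # apply database migrations
uv run python manage.py createsuperuser        # create an admin user
uv run python manage.py runserver              # Django API + compiled frontend
uv run python manage.py document_consumer      # consume documents from a directory
uv run celery --app paperless worker -l DEBUG  # background task worker
```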
.devcontainer/devcontainer.json

@@ -3,7 +3,7 @@
   "dockerComposeFile": "docker-compose.devcontainer.sqlite-tika.yml",
   "service": "paperless-development",
   "workspaceFolder": "/usr/src/paperless/paperless-ngx",
-  "postCreateCommand": "pipenv install --dev && pipenv run pre-commit install",
+  "postCreateCommand": "/bin/bash -c 'uv sync --group dev && uv run pre-commit install'",
   "customizations": {
     "vscode": {
       "extensions": [
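The new `postCreateCommand` replaces the pipenv bootstrap with uv. Run by hand it amounts to the two commands below, a sketch of what the container executes once after creation:

```bash
uv sync --group dev          # install backend dependencies into .venv
uv run pre-commit install    # register the git pre-commit hooks
```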
.devcontainer/docker-compose.devcontainer.sqlite-tika.yml

@@ -43,7 +43,7 @@ services:
     volumes:
       - ..:/usr/src/paperless/paperless-ngx:delegated
       - ../.devcontainer/vscode:/usr/src/paperless/paperless-ngx/.vscode:delegated # VSCode config files
-      - pipenv:/usr/src/paperless/paperless-ngx/.venv
+      - virtualenv:/usr/src/paperless/paperless-ngx/.venv # Virtual environment persisted in volume
      - /usr/src/paperless/paperless-ngx/src/documents/static/frontend # Static frontend files exist only in container
      - /usr/src/paperless/paperless-ngx/src/.pytest_cache
      - /usr/src/paperless/paperless-ngx/.ruff_cache

@@ -65,7 +65,7 @@ services:
     command: /bin/sh -c "chown -R paperless:paperless /usr/src/paperless/paperless-ngx/src/documents/static/frontend && chown -R paperless:paperless /usr/src/paperless/paperless-ngx/.ruff_cache && while sleep 1000; do :; done"

   gotenberg:
-    image: docker.io/gotenberg/gotenberg:7.10
+    image: docker.io/gotenberg/gotenberg:8.17
     restart: unless-stopped

     # The Gotenberg Chromium route is used to convert .eml files. We do not

@@ -80,4 +80,7 @@ services:
     restart: unless-stopped

 volumes:
-  pipenv:
+  data:
+  media:
+  redisdata:
+  virtualenv:
.devcontainer/vscode/tasks.json

@@ -5,7 +5,7 @@
   "label": "Start: Celery Worker",
   "description": "Start the Celery Worker which processes background and consume tasks",
   "type": "shell",
-  "command": "pipenv run celery --app paperless worker -l DEBUG",
+  "command": "uv run celery --app paperless worker -l DEBUG",
   "isBackground": true,
   "options": {
     "cwd": "${workspaceFolder}/src"

@@ -33,7 +33,7 @@
   "label": "Start: Frontend Angular",
   "description": "Start the Frontend Angular Dev Server",
   "type": "shell",
-  "command": "npm start",
+  "command": "pnpm start",
   "isBackground": true,
   "options": {
     "cwd": "${workspaceFolder}/src-ui"

@@ -61,7 +61,7 @@
   "label": "Start: Consumer Service (manage.py document_consumer)",
   "description": "Start the Consumer Service which processes files from a directory",
   "type": "shell",
-  "command": "pipenv run python manage.py document_consumer",
+  "command": "uv run python manage.py document_consumer",
   "group": "build",
   "presentation": {
     "echo": true,

@@ -80,7 +80,7 @@
   "label": "Start: Backend Server (manage.py runserver)",
   "description": "Start the Backend Server which serves the Django API and the compiled Angular frontend",
   "type": "shell",
-  "command": "pipenv run python manage.py runserver",
+  "command": "uv run python manage.py runserver",
   "group": "build",
   "presentation": {
     "echo": true,

@@ -99,7 +99,7 @@
   "label": "Maintenance: manage.py migrate",
   "description": "Apply database migrations",
   "type": "shell",
-  "command": "pipenv run python manage.py migrate",
+  "command": "uv run python manage.py migrate",
   "group": "none",
   "presentation": {
     "echo": true,

@@ -118,7 +118,7 @@
   "label": "Maintenance: Build Documentation",
   "description": "Build the documentation with MkDocs",
   "type": "shell",
-  "command": "pipenv run mkdocs build --config-file mkdocs.yml && pipenv run mkdocs serve",
+  "command": "uv run mkdocs build --config-file mkdocs.yml && uv run mkdocs serve",
   "group": "none",
   "presentation": {
     "echo": true,

@@ -137,7 +137,7 @@
   "label": "Maintenance: manage.py createsuperuser",
   "description": "Create a superuser",
   "type": "shell",
-  "command": "pipenv run python manage.py createsuperuser",
+  "command": "uv run python manage.py createsuperuser",
   "group": "none",
   "presentation": {
     "echo": true,

@@ -156,7 +156,7 @@
   "label": "Maintenance: recreate .venv",
   "description": "Recreate the python virtual environment and install python dependencies",
   "type": "shell",
-  "command": "rm -R -v .venv/* || pipenv install --dev",
+  "command": "rm -R -v .venv/* || uv install --dev",
   "group": "none",
   "presentation": {
     "echo": true,

@@ -173,8 +173,8 @@
   },
   {
     "label": "Maintenance: Install Frontend Dependencies",
-    "description": "Install frontend (npm) dependencies",
-    "type": "npm",
+    "description": "Install frontend (pnpm) dependencies",
+    "type": "pnpm",
     "script": "install",
     "path": "src-ui",
     "group": "clean",

@@ -185,7 +185,7 @@
   "description": "Clean install frontend dependencies and build the frontend for production",
   "label": "Maintenance: Compile frontend for production",
   "type": "shell",
-  "command": "npm ci && ./node_modules/.bin/ng build --configuration production",
+  "command": "pnpm install && ./node_modules/.bin/ng build --configuration production",
   "group": "none",
   "presentation": {
     "echo": true,
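On the frontend side, the tasks above swap npm for pnpm. The equivalent shell commands, taken from the changed task definitions, are roughly:

```bash
cd src-ui
pnpm install    # install frontend dependencies (replaces `npm ci`)
pnpm start      # Angular dev server
pnpm install && ./node_modules/.bin/ng build --configuration production   # production build
```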
.dockerignore

@@ -26,3 +26,5 @@
 ./dist
 ./scripts
 ./resources
+# Other stuff
+**/*.drawio.png
.editorconfig

@@ -27,9 +27,6 @@ indent_style = space
 [*.md]
 indent_style = space

-[Pipfile.lock]
-indent_style = space
-
 # Tests don't get a line width restriction. It's still a good idea to follow
 # the 79 character rule, but in the interests of clarity, tests often need to
 # violate it.
.github/FUNDING.yml (new file, 1 line)

@@ -0,0 +1 @@
+github: [shamoon, stumpylog]
.github/ISSUE_TEMPLATE/config.yml (4 changed lines)

@@ -2,10 +2,10 @@ blank_issues_enabled: false
 contact_links:
   - name: 🤔 Questions and Help
     url: https://github.com/paperless-ngx/paperless-ngx/discussions
-    about: This issue tracker is not for support questions. Please refer to our Discussions.
+    about: General questions or support for using Paperless-ngx.
   - name: 💬 Chat
     url: https://matrix.to/#/#paperlessngx:matrix.org
     about: Want to discuss Paperless-ngx with others? Check out our chat.
   - name: 🚀 Feature Request
     url: https://github.com/paperless-ngx/paperless-ngx/discussions/new?category=feature-requests
-    about: Remember to search for existing feature requests and "up-vote" any you like
+    about: Remember to search for existing feature requests and "up-vote" those that you like.
.github/dependabot.yml (66 changed lines)

@@ -1,12 +1,15 @@
-# https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/configuration-options-for-dependency-updates#package-ecosystem
+# Please see the documentation for all configuration options:
+# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

 version: 2
+# Required for uv support for now
+enable-beta-ecosystems: true
 updates:

-  # Enable version updates for npm
+  # Enable version updates for pnpm
   - package-ecosystem: "npm"
     target-branch: "dev"
-    # Look for `package.json` and `lock` files in the `/src-ui` directory
+    # Look for `pnpm-lock.yaml` file in the `/src-ui` directory
     directory: "/src-ui"
     open-pull-requests-limit: 10
     schedule:

@@ -34,9 +37,8 @@ updates:
           - "eslint"

   # Enable version updates for Python
-  - package-ecosystem: "pip"
+  - package-ecosystem: "uv"
     target-branch: "dev"
-    # Look for a `Pipfile` in the `root` directory
     directory: "/"
     # Check for updates once a week
     schedule:

@@ -47,14 +49,13 @@ updates:
     # Add reviewers
     reviewers:
       - "paperless-ngx/backend"
-    ignore:
-      - dependency-name: "uvicorn"
     groups:
       development:
         patterns:
           - "*pytest*"
           - "ruff"
           - "mkdocs-material"
+          - "pre-commit*"
       django:
         patterns:
           - "*django*"

@@ -65,6 +66,10 @@ updates:
         update-types:
           - "minor"
           - "patch"
+      pre-built:
+        patterns:
+          - psycopg*
+          - zxing-cpp

   # Enable updates for GitHub Actions
   - package-ecosystem: "github-actions"

@@ -85,3 +90,50 @@ updates:
           - "major"
           - "minor"
           - "patch"
+
+  # Update Dockerfile in root directory
+  - package-ecosystem: "docker"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+    open-pull-requests-limit: 5
+    reviewers:
+      - "paperless-ngx/ci-cd"
+    labels:
+      - "ci-cd"
+      - "dependencies"
+    commit-message:
+      prefix: "docker"
+      include: "scope"
+
+  # Update Docker Compose files in docker/compose directory
+  - package-ecosystem: "docker-compose"
+    directory: "/docker/compose/"
+    schedule:
+      interval: "weekly"
+    open-pull-requests-limit: 5
+    reviewers:
+      - "paperless-ngx/ci-cd"
+    labels:
+      - "ci-cd"
+      - "dependencies"
+    commit-message:
+      prefix: "docker-compose"
+      include: "scope"
+    groups:
+      # Individual groups for each image
+      gotenberg:
+        patterns:
+          - "docker.io/gotenberg/gotenberg*"
+      tika:
+        patterns:
+          - "docker.io/apache/tika*"
+      redis:
+        patterns:
+          - "docker.io/library/redis*"
+      mariadb:
+        patterns:
+          - "docker.io/library/mariadb*"
+      postgres:
+        patterns:
+          - "docker.io/library/postgres*"
.github/workflows/ci.yml (254 changed lines)

@@ -14,9 +14,7 @@ on:
       - 'translations**'

 env:
-  # This is the version of pipenv all the steps will use
-  # If changing this, change Dockerfile
-  DEFAULT_PIP_ENV_VERSION: "2024.4.0"
+  DEFAULT_UV_VERSION: "0.6.x"
   # This is the default version of Python to use in most steps which aren't specific
   DEFAULT_PYTHON_VERSION: "3.11"

@@ -59,24 +57,25 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
-          cache: "pipenv"
-          cache-dependency-path: 'Pipfile.lock'
       -
-        name: Install pipenv
-        run: |
-          pip install --user pipenv==${{ env.DEFAULT_PIP_ENV_VERSION }}
+        name: Install uv
+        uses: astral-sh/setup-uv@v5
+        with:
+          version: ${{ env.DEFAULT_UV_VERSION }}
+          enable-cache: true
+          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
       -
-        name: Install dependencies
+        name: Install Python dependencies
         run: |
-          pipenv --python ${{ steps.setup-python.outputs.python-version }} sync --dev
-      -
-        name: List installed Python dependencies
-        run: |
-          pipenv --python ${{ steps.setup-python.outputs.python-version }} run pip list
+          uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
       -
         name: Make documentation
         run: |
-          pipenv --python ${{ steps.setup-python.outputs.python-version }} run mkdocs build --config-file ./mkdocs.yml
+          uv run \
+            --python ${{ steps.setup-python.outputs.python-version }} \
+            --dev \
+            --frozen \
+            mkdocs build --config-file ./mkdocs.yml
       -
         name: Deploy documentation
         if: github.event_name == 'push' && github.ref == 'refs/heads/main'

@@ -84,7 +83,11 @@ jobs:
           echo "docs.paperless-ngx.com" > "${{ github.workspace }}/docs/CNAME"
           git config --global user.name "${{ github.actor }}"
           git config --global user.email "${{ github.actor }}@users.noreply.github.com"
-          pipenv --python ${{ steps.setup-python.outputs.python-version }} run mkdocs gh-deploy --force --no-history
+          uv run \
+            --python ${{ steps.setup-python.outputs.python-version }} \
+            --dev \
+            --frozen \
+            mkdocs gh-deploy --force --no-history
       -
         name: Upload artifact
         uses: actions/upload-artifact@v4

@@ -117,12 +120,13 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: "${{ matrix.python-version }}"
-          cache: "pipenv"
-          cache-dependency-path: 'Pipfile.lock'
       -
-        name: Install pipenv
-        run: |
-          pip install --user pipenv==${{ env.DEFAULT_PIP_ENV_VERSION }}
+        name: Install uv
+        uses: astral-sh/setup-uv@v5
+        with:
+          version: ${{ env.DEFAULT_UV_VERSION }}
+          enable-cache: true
+          python-version: ${{ steps.setup-python.outputs.python-version }}
       -
         name: Install system dependencies
         run: |

@@ -131,16 +135,18 @@ jobs:
       -
         name: Configure ImageMagick
         run: |
-          sudo cp docker/imagemagick-policy.xml /etc/ImageMagick-6/policy.xml
+          sudo cp docker/rootfs/etc/ImageMagick-6/paperless-policy.xml /etc/ImageMagick-6/policy.xml
       -
         name: Install Python dependencies
         run: |
-          pipenv --python ${{ steps.setup-python.outputs.python-version }} run python --version
-          pipenv --python ${{ steps.setup-python.outputs.python-version }} sync --dev
+          uv sync \
+            --python ${{ steps.setup-python.outputs.python-version }} \
+            --group testing \
+            --frozen
       -
         name: List installed Python dependencies
         run: |
-          pipenv --python ${{ steps.setup-python.outputs.python-version }} run pip list
+          uv pip list
       -
         name: Tests
         env:

@@ -150,17 +156,26 @@ jobs:
           PAPERLESS_MAIL_TEST_USER: ${{ secrets.TEST_MAIL_USER }}
           PAPERLESS_MAIL_TEST_PASSWD: ${{ secrets.TEST_MAIL_PASSWD }}
         run: |
-          cd src/
-          pipenv --python ${{ steps.setup-python.outputs.python-version }} run pytest -ra
+          uv run \
+            --python ${{ steps.setup-python.outputs.python-version }} \
+            --dev \
+            --frozen \
+            pytest
       -
-        name: Upload coverage
-        if: ${{ matrix.python-version == env.DEFAULT_PYTHON_VERSION }}
-        uses: actions/upload-artifact@v4
+        name: Upload backend test results to Codecov
+        if: always()
+        uses: codecov/test-results-action@v1
         with:
-          name: backend-coverage-report
-          path: src/coverage.xml
-          retention-days: 7
-          if-no-files-found: warn
+          token: ${{ secrets.CODECOV_TOKEN }}
+          flags: backend-python-${{ matrix.python-version }}
+          files: junit.xml
+      -
+        name: Upload backend coverage to Codecov
+        uses: codecov/codecov-action@v5
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          flags: backend-python-${{ matrix.python-version }}
+          files: coverage.xml
       -
         name: Stop containers
         if: always()

@@ -168,42 +183,46 @@ jobs:
           docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml logs
           docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml down

-  install-frontend-depedendencies:
+  install-frontend-dependencies:
     name: "Install Frontend Dependencies"
     runs-on: ubuntu-24.04
     needs:
       - pre-commit
     steps:
       - uses: actions/checkout@v4
+      - name: Install pnpm
+        uses: pnpm/action-setup@v4
+        with:
+          version: 10
       -
         name: Use Node.js 20
         uses: actions/setup-node@v4
         with:
           node-version: 20.x
-          cache: 'npm'
-          cache-dependency-path: 'src-ui/package-lock.json'
+          cache: 'pnpm'
+          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
       - name: Cache frontend dependencies
         id: cache-frontend-deps
         uses: actions/cache@v4
         with:
           path: |
-            ~/.npm
+            ~/.pnpm-store
             ~/.cache
-          key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/package-lock.json') }}
+          key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
       -
         name: Install dependencies
         if: steps.cache-frontend-deps.outputs.cache-hit != 'true'
-        run: cd src-ui && npm ci
+        run: cd src-ui && pnpm install
       -
         name: Install Playwright
         if: steps.cache-frontend-deps.outputs.cache-hit != 'true'
-        run: cd src-ui && npx playwright install --with-deps
+        run: cd src-ui && pnpm playwright install --with-deps

   tests-frontend:
     name: "Frontend Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
     runs-on: ubuntu-24.04
     needs:
-      - install-frontend-depedendencies
+      - install-frontend-dependencies
     strategy:
       fail-fast: false
       matrix:

@@ -212,124 +231,88 @@ jobs:
         shard-count: [4]
     steps:
       - uses: actions/checkout@v4
+      - name: Install pnpm
+        uses: pnpm/action-setup@v4
+        with:
+          version: 10
       -
         name: Use Node.js 20
         uses: actions/setup-node@v4
         with:
           node-version: 20.x
-          cache: 'npm'
-          cache-dependency-path: 'src-ui/package-lock.json'
+          cache: 'pnpm'
+          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
       - name: Cache frontend dependencies
         id: cache-frontend-deps
         uses: actions/cache@v4
         with:
           path: |
-            ~/.npm
+            ~/.pnpm-store
             ~/.cache
-          key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/package-lock.json') }}
+          key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
       - name: Re-link Angular cli
-        run: cd src-ui && npm link @angular/cli
+        run: cd src-ui && pnpm link @angular/cli
       -
         name: Linting checks
-        run: cd src-ui && npm run lint
+        run: cd src-ui && pnpm run lint
       -
         name: Run Jest unit tests
-        run: cd src-ui && npm run test -- --max-workers=2 --shard=${{ matrix.shard-index }}/${{ matrix.shard-count }}
-      -
-        name: Upload Jest coverage
-        if: always()
-        uses: actions/upload-artifact@v4
-        with:
-          name: jest-coverage-report-${{ matrix.shard-index }}
-          path: |
-            src-ui/coverage/coverage-final.json
-            src-ui/coverage/lcov.info
-            src-ui/coverage/clover.xml
-          retention-days: 7
-          if-no-files-found: warn
+        run: cd src-ui && pnpm run test --max-workers=2 --shard=${{ matrix.shard-index }}/${{ matrix.shard-count }}
       -
         name: Run Playwright e2e tests
-        run: cd src-ui && npx playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}
+        run: cd src-ui && pnpm exec playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}
       -
-        name: Upload Playwright test results
+        name: Upload frontend test results to Codecov
+        uses: codecov/test-results-action@v1
         if: always()
-        uses: actions/upload-artifact@v4
         with:
-          name: playwright-report-${{ matrix.shard-index }}
-          path: src-ui/playwright-report
-          retention-days: 7
-
-  tests-coverage-upload:
-    name: "Upload to Codecov"
-    runs-on: ubuntu-24.04
-    needs:
-      - tests-backend
-      - tests-frontend
-    steps:
-      -
-        uses: actions/checkout@v4
-      -
-        name: Download frontend jest coverage
-        uses: actions/download-artifact@v4
-        with:
-          path: src-ui/coverage/
-          pattern: jest-coverage-report-*
-      -
-        name: Download frontend playwright coverage
-        uses: actions/download-artifact@v4
-        with:
-          path: src-ui/coverage/
-          pattern: playwright-report-*
-          merge-multiple: true
+          token: ${{ secrets.CODECOV_TOKEN }}
+          flags: frontend-node-${{ matrix.node-version }}
+          directory: src-ui/
       -
         name: Upload frontend coverage to Codecov
         uses: codecov/codecov-action@v5
         with:
-          # not required for public repos, but intermittently fails otherwise
           token: ${{ secrets.CODECOV_TOKEN }}
-          flags: frontend
+          flags: frontend-node-${{ matrix.node-version }}
           directory: src-ui/coverage/
-          # dont include backend coverage files here
-          files: '!coverage.xml'
-      -
-        name: Download backend coverage
-        uses: actions/download-artifact@v4
-        with:
-          name: backend-coverage-report
-          path: src/
-      -
-        name: Upload coverage to Codecov
-        uses: codecov/codecov-action@v5
-        with:
-          # not required for public repos, but intermittently fails otherwise
-          token: ${{ secrets.CODECOV_TOKEN }}
-          # future expansion
-          flags: backend
-          directory: src/
+
+  frontend-bundle-analysis:
+    name: "Frontend Bundle Analysis"
+    runs-on: ubuntu-24.04
+    needs:
+      - tests-frontend
+    steps:
+      - uses: actions/checkout@v4
+      -
+        name: Install pnpm
+        uses: pnpm/action-setup@v4
+        with:
+          version: 10
       -
         name: Use Node.js 20
         uses: actions/setup-node@v4
         with:
           node-version: 20.x
-          cache: 'npm'
-          cache-dependency-path: 'src-ui/package-lock.json'
+          cache: 'pnpm'
+          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
       -
         name: Cache frontend dependencies
         id: cache-frontend-deps
         uses: actions/cache@v4
         with:
           path: |
-            ~/.npm
+            ~/.pnpm-store
             ~/.cache
           key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/package-lock.json') }}
       -
         name: Re-link Angular cli
-        run: cd src-ui && npm link @angular/cli
+        run: cd src-ui && pnpm link @angular/cli
       -
         name: Build frontend and upload analysis
         env:
           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
-        run: cd src-ui && ng build --configuration=production
+        run: cd src-ui && pnpm run build --configuration=production

   build-docker-image:
     name: Build Docker image for ${{ github.ref_name }}

@@ -472,16 +455,17 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
-          cache: "pipenv"
-          cache-dependency-path: 'Pipfile.lock'
       -
-        name: Install pipenv + tools
-        run: |
-          pip install --upgrade --user pipenv==${{ env.DEFAULT_PIP_ENV_VERSION }} setuptools wheel
+        name: Install uv
+        uses: astral-sh/setup-uv@v5
+        with:
+          version: ${{ env.DEFAULT_UV_VERSION }}
+          enable-cache: true
+          python-version: ${{ steps.setup-python.outputs.python-version }}
       -
         name: Install Python dependencies
         run: |
-          pipenv --python ${{ steps.setup-python.outputs.python-version }} sync --dev
+          uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
       -
         name: Install system dependencies
         run: |

@@ -502,17 +486,21 @@ jobs:
       -
         name: Generate requirements file
         run: |
-          pipenv --python ${{ steps.setup-python.outputs.python-version }} requirements > requirements.txt
+          uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt
       -
         name: Compile messages
         run: |
           cd src/
-          pipenv --python ${{ steps.setup-python.outputs.python-version }} run python3 manage.py compilemessages
+          uv run \
+            --python ${{ steps.setup-python.outputs.python-version }} \
+            manage.py compilemessages
       -
         name: Collect static files
         run: |
           cd src/
-          pipenv --python ${{ steps.setup-python.outputs.python-version }} run python3 manage.py collectstatic --no-input
+          uv run \
+            --python ${{ steps.setup-python.outputs.python-version }} \
+            manage.py collectstatic --no-input
       -
         name: Move files
         run: |

@@ -528,13 +516,12 @@ jobs:
           for file_name in .dockerignore \
             .env \
             Dockerfile \
-            Pipfile \
-            Pipfile.lock \
+            pyproject.toml \
+            uv.lock \
             requirements.txt \
             LICENSE \
             README.md \
-            paperless.conf.example \
-            gunicorn.conf.py
+            paperless.conf.example
           do
             cp --verbose ${file_name} dist/paperless-ngx/
           done

@@ -631,15 +618,17 @@ jobs:
           ref: main
       -
         name: Set up Python
+        id: setup-python
         uses: actions/setup-python@v5
         with:
           python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
-          cache: "pipenv"
-          cache-dependency-path: 'Pipfile.lock'
       -
-        name: Install pipenv + tools
-        run: |
-          pip install --upgrade --user pipenv==${{ env.DEFAULT_PIP_ENV_VERSION }} setuptools wheel
+        name: Install uv
+        uses: astral-sh/setup-uv@v5
+        with:
+          version: ${{ env.DEFAULT_UV_VERSION }}
+          enable-cache: true
+          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
       -
         name: Append Changelog to docs
         id: append-Changelog

@@ -655,7 +644,10 @@ jobs:
           CURRENT_CHANGELOG=`tail --lines +2 changelog.md`
           echo -e "$CURRENT_CHANGELOG" >> changelog-new.md
           mv changelog-new.md changelog.md
-          pipenv run pre-commit run --files changelog.md || true
+          uv run \
+            --python ${{ steps.setup-python.outputs.python-version }} \
+            --dev \
+            pre-commit run --files changelog.md || true
           git config --global user.name "github-actions"
           git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
           git commit -am "Changelog ${{ needs.publish-release.outputs.version }} - GHA"
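The backend test job now installs and runs everything through uv instead of pipenv, and uploads junit.xml and coverage.xml to Codecov afterwards. A rough local equivalent of those steps, assuming uv and the workflow's system packages are already installed, is:

```bash
uv sync --group testing --frozen   # install locked Python dependencies
uv pip list                        # show what was installed
uv run --dev --frozen pytest       # run the backend test suite
```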
.github/workflows/cleanup-tags.yml (4 changed lines)

@@ -33,7 +33,7 @@ jobs:
       -
         name: Clean temporary images
         if: "${{ env.TOKEN != '' }}"
-        uses: stumpylog/image-cleaner-action/ephemeral@v0.9.0
+        uses: stumpylog/image-cleaner-action/ephemeral@v0.10.0
         with:
           token: "${{ env.TOKEN }}"
           owner: "${{ github.repository_owner }}"

@@ -61,7 +61,7 @@ jobs:
       -
         name: Clean untagged images
         if: "${{ env.TOKEN != '' }}"
-        uses: stumpylog/image-cleaner-action/untagged@v0.9.0
+        uses: stumpylog/image-cleaner-action/untagged@v0.10.0
         with:
           token: "${{ env.TOKEN }}"
           owner: "${{ github.repository_owner }}"
.gitignore (1 changed line)

@@ -44,6 +44,7 @@ nosetests.xml
 coverage.xml
 *,cover
 .pytest_cache
+junit.xml

 # Translations
 *.mo
@@ -29,10 +29,10 @@ repos:
 - id: check-case-conflict
 - id: detect-private-key
 - repo: https://github.com/codespell-project/codespell
-rev: v2.3.0
+rev: v2.4.0
 hooks:
 - id: codespell
-exclude: "(^src-ui/src/locale/)|(^src-ui/e2e/)|(^src/paperless_mail/tests/samples/)"
+exclude: "(^src-ui/src/locale/)|(^src-ui/pnpm-lock.yaml)|(^src-ui/e2e/)|(^src/paperless_mail/tests/samples/)"
 exclude_types:
 - pofile
 - json
@@ -45,16 +45,19 @@ repos:
 - javascript
 - ts
 - markdown
-exclude: "(^Pipfile\\.lock$)"
 additional_dependencies:
 - prettier@3.3.3
 - 'prettier-plugin-organize-imports@4.1.0'
 # Python hooks
 - repo: https://github.com/astral-sh/ruff-pre-commit
-rev: v0.8.6
+rev: v0.9.9
 hooks:
 - id: ruff
 - id: ruff-format
+- repo: https://github.com/tox-dev/pyproject-fmt
+rev: "v2.5.1"
+hooks:
+- id: pyproject-fmt
 # Dockerfile hooks
 - repo: https://github.com/AleksaC/hadolint-py
 rev: v2.12.0.3
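As a side note, these hook bumps only take effect the next time the hooks are built and run; a minimal sketch of exercising them locally, assuming pre-commit is installed in the development environment:

    # Rebuild the hook environments after the rev bumps and run everything once
    pre-commit clean
    pre-commit install
    pre-commit run --all-files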
@@ -1 +0,0 @@
-3.10.15
.ruff.toml (85 changes, file removed)

@@ -1,85 +0,0 @@
-fix = true
-line-length = 88
-respect-gitignore = true
-src = ["src"]
-target-version = "py310"
-output-format = "grouped"
-show-fixes = true
-
-# https://docs.astral.sh/ruff/settings/
-# https://docs.astral.sh/ruff/rules/
-[lint]
-extend-select = [
-"W", # https://docs.astral.sh/ruff/rules/#pycodestyle-e-w
-"I", # https://docs.astral.sh/ruff/rules/#isort-i
-"UP", # https://docs.astral.sh/ruff/rules/#pyupgrade-up
-"COM", # https://docs.astral.sh/ruff/rules/#flake8-commas-com
-"DJ", # https://docs.astral.sh/ruff/rules/#flake8-django-dj
-"EXE", # https://docs.astral.sh/ruff/rules/#flake8-executable-exe
-"ISC", # https://docs.astral.sh/ruff/rules/#flake8-implicit-str-concat-isc
-"ICN", # https://docs.astral.sh/ruff/rules/#flake8-import-conventions-icn
-"G201", # https://docs.astral.sh/ruff/rules/#flake8-logging-format-g
-"INP", # https://docs.astral.sh/ruff/rules/#flake8-no-pep420-inp
-"PIE", # https://docs.astral.sh/ruff/rules/#flake8-pie-pie
-"Q", # https://docs.astral.sh/ruff/rules/#flake8-quotes-q
-"RSE", # https://docs.astral.sh/ruff/rules/#flake8-raise-rse
-"T20", # https://docs.astral.sh/ruff/rules/#flake8-print-t20
-"SIM", # https://docs.astral.sh/ruff/rules/#flake8-simplify-sim
-"TID", # https://docs.astral.sh/ruff/rules/#flake8-tidy-imports-tid
-"TCH", # https://docs.astral.sh/ruff/rules/#flake8-type-checking-tch
-"PLC", # https://docs.astral.sh/ruff/rules/#pylint-pl
-"PLE", # https://docs.astral.sh/ruff/rules/#pylint-pl
-"RUF", # https://docs.astral.sh/ruff/rules/#ruff-specific-rules-ruf
-"FLY", # https://docs.astral.sh/ruff/rules/#flynt-fly
-"PTH", # https://docs.astral.sh/ruff/rules/#flake8-use-pathlib-pth
-]
-ignore = ["DJ001", "SIM105", "RUF012"]
-
-[lint.per-file-ignores]
-".github/scripts/*.py" = ["E501", "INP001", "SIM117"]
-"docker/wait-for-redis.py" = ["INP001", "T201"]
-"src/documents/consumer.py" = ["PTH"] # TODO Enable & remove
-"src/documents/file_handling.py" = ["PTH"] # TODO Enable & remove
-"src/documents/management/commands/document_consumer.py" = ["PTH"] # TODO Enable & remove
-"src/documents/management/commands/document_exporter.py" = ["PTH"] # TODO Enable & remove
-"src/documents/migrations/0012_auto_20160305_0040.py" = ["PTH"] # TODO Enable & remove
-"src/documents/migrations/0014_document_checksum.py" = ["PTH"] # TODO Enable & remove
-"src/documents/migrations/1003_mime_types.py" = ["PTH"] # TODO Enable & remove
-"src/documents/migrations/1012_fix_archive_files.py" = ["PTH"] # TODO Enable & remove
-"src/documents/models.py" = ["SIM115", "PTH"] # TODO PTH Enable & remove
-"src/documents/parsers.py" = ["PTH"] # TODO Enable & remove
-"src/documents/signals/handlers.py" = ["PTH"] # TODO Enable & remove
-"src/documents/tasks.py" = ["PTH"] # TODO Enable & remove
-"src/documents/tests/test_api_app_config.py" = ["PTH"] # TODO Enable & remove
-"src/documents/tests/test_api_bulk_download.py" = ["PTH"] # TODO Enable & remove
-"src/documents/tests/test_api_documents.py" = ["PTH"] # TODO Enable & remove
-"src/documents/tests/test_classifier.py" = ["PTH"] # TODO Enable & remove
-"src/documents/tests/test_consumer.py" = ["PTH"] # TODO Enable & remove
-"src/documents/tests/test_file_handling.py" = ["PTH"] # TODO Enable & remove
-"src/documents/tests/test_management.py" = ["PTH"] # TODO Enable & remove
-"src/documents/tests/test_management_consumer.py" = ["PTH"] # TODO Enable & remove
-"src/documents/tests/test_management_exporter.py" = ["PTH"] # TODO Enable & remove
-"src/documents/tests/test_management_thumbnails.py" = ["PTH"] # TODO Enable & remove
-"src/documents/tests/test_migration_archive_files.py" = ["PTH"] # TODO Enable & remove
-"src/documents/tests/test_migration_document_pages_count.py" = ["PTH"] # TODO Enable & remove
-"src/documents/tests/test_migration_mime_type.py" = ["PTH"] # TODO Enable & remove
-"src/documents/tests/test_sanity_check.py" = ["PTH"] # TODO Enable & remove
-"src/documents/tests/test_tasks.py" = ["PTH"] # TODO Enable & remove
-"src/documents/tests/test_views.py" = ["PTH"] # TODO Enable & remove
-"src/documents/views.py" = ["PTH"] # TODO Enable & remove
-"src/paperless/checks.py" = ["PTH"] # TODO Enable & remove
-"src/paperless/settings.py" = ["PTH"] # TODO Enable & remove
-"src/paperless/tests/test_checks.py" = ["PTH"] # TODO Enable & remove
-"src/paperless/urls.py" = ["PTH"] # TODO Enable & remove
-"src/paperless/views.py" = ["PTH"] # TODO Enable & remove
-"src/paperless_mail/mail.py" = ["PTH"] # TODO Enable & remove
-"src/paperless_mail/preprocessor.py" = ["PTH"] # TODO Enable & remove
-"src/paperless_tesseract/parsers.py" = ["PTH"] # TODO Enable & remove
-"src/paperless_tesseract/tests/test_parser.py" = ["RUF001", "PTH"] # TODO PTH Enable & remove
-"src/paperless_tika/tests/test_live_tika.py" = ["PTH"] # TODO Enable & remove
-"src/paperless_tika/tests/test_tika_parser.py" = ["PTH"] # TODO Enable & remove
-"*/tests/*.py" = ["E501", "SIM117"]
-"*/migrations/*.py" = ["E501", "SIM", "T201"]
-
-[lint.isort]
-force-single-line = true
@@ -5,5 +5,6 @@
 /src-ui/ @paperless-ngx/frontend

 /src/ @paperless-ngx/backend
-Pipfile* @paperless-ngx/backend
+pyproject.toml @paperless-ngx/backend
+uv.lock @paperless-ngx/backend
 *.py @paperless-ngx/backend
@@ -81,7 +81,7 @@ Some notes about translation:

 If a language has already been added, and you would like to contribute new translations or change existing translations, please read the "Translation" section in the README.md file for further details on that.

-If you would like the project to be translated to another language, first head over to https://crwd.in/paperless-ngx to check if that language has already been enabled for translation.
+If you would like the project to be translated to another language, first head over to https://crowdin.com/project/paperless-ngx to check if that language has already been enabled for translation.
 If not, please request the language to be added by creating an issue on GitHub. The issue should contain:

 - English name of the language (the localized name can be added on Crowdin).
Dockerfile (183 changes)

@@ -4,15 +4,17 @@
 # Stage: compile-frontend
 # Purpose: Compiles the frontend
 # Notes:
-# - Does NPM stuff with Typescript and such
+# - Does PNPM stuff with Typescript and such
 FROM --platform=$BUILDPLATFORM docker.io/node:20-bookworm-slim AS compile-frontend

 COPY ./src-ui /src/src-ui

 WORKDIR /src/src-ui
 RUN set -eux \
-&& npm update npm -g \
-&& npm ci
+&& npm update -g pnpm \
+&& npm install -g corepack@latest \
+&& corepack enable \
+&& pnpm install

 ARG PNGX_TAG_VERSION=
 # Add the tag to the environment file if its a tagged dev build
@@ -26,28 +28,66 @@ esac
 RUN set -eux \
 && ./node_modules/.bin/ng build --configuration production

-# Stage: pipenv-base
+# Stage: s6-overlay-base
-# Purpose: Generates a requirements.txt file for building
+# Purpose: Installs s6-overlay and rootfs
 # Comments:
-# - pipenv dependencies are not left in the final image
+# - Don't leave anything extra in here either
-# - pipenv can't touch the final image somehow
-FROM --platform=$BUILDPLATFORM docker.io/python:3.12-alpine AS pipenv-base
+FROM ghcr.io/astral-sh/uv:0.6.13-python3.12-bookworm-slim AS s6-overlay-base

-WORKDIR /usr/src/pipenv
+WORKDIR /usr/src/s6

-COPY Pipfile* ./
+# https://github.com/just-containers/s6-overlay#customizing-s6-overlay-behaviour
+ENV \
+S6_BEHAVIOUR_IF_STAGE2_FAILS=2 \
+S6_CMD_WAIT_FOR_SERVICES_MAXTIME=0 \
+S6_VERBOSITY=1 \
+PATH=/command:$PATH
+
+# Buildx provided, must be defined to use though
+ARG TARGETARCH
+ARG TARGETVARIANT
+# Lock this version
+ARG S6_OVERLAY_VERSION=3.2.0.2
+
+ARG S6_BUILD_TIME_PKGS="curl \
+xz-utils"
+
 RUN set -eux \
-&& echo "Installing pipenv" \
+&& echo "Installing build time packages" \
-&& python3 -m pip install --no-cache-dir --upgrade pipenv==2024.4.0 \
+&& apt-get update \
-&& echo "Generating requirement.txt" \
+&& apt-get install --yes --quiet --no-install-recommends ${S6_BUILD_TIME_PKGS} \
-&& pipenv requirements > requirements.txt
+&& echo "Determining arch" \
+&& S6_ARCH="" \
+&& if [ "${TARGETARCH}${TARGETVARIANT}" = "amd64" ]; then S6_ARCH="x86_64"; \
+elif [ "${TARGETARCH}${TARGETVARIANT}" = "arm64" ]; then S6_ARCH="aarch64"; fi\
+&& if [ -z "${S6_ARCH}" ]; then { echo "Error: Not able to determine arch"; exit 1; }; fi \
+&& echo "Installing s6-overlay for ${S6_ARCH}" \
+&& curl --fail --silent --no-progress-meter --show-error --location --remote-name-all --parallel --parallel-max 4 \
+"https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-noarch.tar.xz" \
+"https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-noarch.tar.xz.sha256" \
+"https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-${S6_ARCH}.tar.xz" \
+"https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-${S6_ARCH}.tar.xz.sha256" \
+&& echo "Validating s6-archive checksums" \
+&& sha256sum --check ./*.sha256 \
+&& echo "Unpacking archives" \
+&& tar --directory / -Jxpf s6-overlay-noarch.tar.xz \
+&& tar --directory / -Jxpf s6-overlay-${S6_ARCH}.tar.xz \
+&& echo "Removing downloaded archives" \
+&& rm ./*.tar.xz \
+&& rm ./*.sha256 \
+&& echo "Cleaning up image" \
+&& apt-get --yes purge ${S6_BUILD_TIME_PKGS} \
+&& apt-get --yes autoremove --purge \
+&& rm -rf /var/lib/apt/lists/*
+
+# Copy our service defs and filesystem
+COPY ./docker/rootfs /

 # Stage: main-app
 # Purpose: The final image
 # Comments:
 # - Don't leave anything extra in here
-FROM docker.io/python:3.12-slim-bookworm AS main-app
+FROM s6-overlay-base AS main-app

 LABEL org.opencontainers.image.authors="paperless-ngx team <hello@paperless-ngx.com>"
 LABEL org.opencontainers.image.documentation="https://docs.paperless-ngx.com/"
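A rough standalone sketch of the same architecture mapping and download flow, runnable outside the image build for inspection; the version pin mirrors the ARG above, while the target platform values here are hand-supplied assumptions rather than the values buildx would inject:

    S6_OVERLAY_VERSION=3.2.0.2
    TARGETARCH=amd64        # normally provided by docker buildx
    TARGETVARIANT=""

    case "${TARGETARCH}${TARGETVARIANT}" in
      amd64) S6_ARCH=x86_64 ;;
      arm64) S6_ARCH=aarch64 ;;
      *) echo "Error: Not able to determine arch"; exit 1 ;;
    esac

    base="https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}"
    curl --fail --location --remote-name-all \
      "${base}/s6-overlay-noarch.tar.xz" \
      "${base}/s6-overlay-${S6_ARCH}.tar.xz"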
@@ -61,16 +101,19 @@ ARG DEBIAN_FRONTEND=noninteractive
 ARG TARGETARCH

 # Can be workflow provided, defaults set for manual building
-ARG JBIG2ENC_VERSION=0.29
+ARG JBIG2ENC_VERSION=0.30
 ARG QPDF_VERSION=11.9.0
 ARG GS_VERSION=10.03.1

 # Set Python environment variables
 ENV PYTHONDONTWRITEBYTECODE=1 \
 PYTHONUNBUFFERED=1 \
-# Ignore warning from Whitenoise
+# Ignore warning from Whitenoise about async iterators
 PYTHONWARNINGS="ignore:::django.http.response:517" \
-PNGX_CONTAINERIZED=1
+PNGX_CONTAINERIZED=1 \
+# https://docs.astral.sh/uv/reference/settings/#link-mode
+UV_LINK_MODE=copy \
+UV_CACHE_DIR=/cache/uv/

 #
 # Begin installation and configuration
@@ -127,118 +170,51 @@ RUN set -eux \
 && apt-get update \
 && apt-get install --yes --quiet --no-install-recommends ${RUNTIME_PACKAGES} \
 && echo "Installing pre-built updates" \
+&& curl --fail --silent --no-progress-meter --show-error --location --remote-name-all --parallel --parallel-max 4 \
+https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
+https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
+https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
+https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/ghostscript_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
+https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10-common_${GS_VERSION}.dfsg-1_all.deb \
+https://github.com/paperless-ngx/builder/releases/download/jbig2enc-${JBIG2ENC_VERSION}/jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
 && echo "Installing qpdf ${QPDF_VERSION}" \
-&& curl --fail --silent --show-error --location \
---output libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
-https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
-&& curl --fail --silent --show-error --location \
---output qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
-https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
 && dpkg --install ./libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
 && dpkg --install ./qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
 && echo "Installing Ghostscript ${GS_VERSION}" \
-&& curl --fail --silent --show-error --location \
---output libgs10_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
-https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
-&& curl --fail --silent --show-error --location \
---output ghostscript_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
-https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/ghostscript_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
-&& curl --fail --silent --show-error --location \
---output libgs10-common_${GS_VERSION}.dfsg-1_all.deb \
-https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10-common_${GS_VERSION}.dfsg-1_all.deb \
 && dpkg --install ./libgs10-common_${GS_VERSION}.dfsg-1_all.deb \
 && dpkg --install ./libgs10_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
 && dpkg --install ./ghostscript_${GS_VERSION}.dfsg-1_${TARGETARCH}.deb \
 && echo "Installing jbig2enc" \
-&& curl --fail --silent --show-error --location \
---output jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
-https://github.com/paperless-ngx/builder/releases/download/jbig2enc-${JBIG2ENC_VERSION}/jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
 && dpkg --install ./jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
+&& echo "Configuring imagemagick" \
+&& cp /etc/ImageMagick-6/paperless-policy.xml /etc/ImageMagick-6/policy.xml \
 && echo "Cleaning up image layer" \
 && rm --force --verbose *.deb \
-&& rm --recursive --force --verbose /var/lib/apt/lists/* \
+&& rm --recursive --force --verbose /var/lib/apt/lists/*
-&& echo "Installing supervisor" \
-&& python3 -m pip install --default-timeout=1000 --upgrade --no-cache-dir supervisor==4.2.5
-
-# Copy gunicorn config
-# Changes very infrequently
-WORKDIR /usr/src/paperless/
-
-COPY gunicorn.conf.py .
-
-# setup docker-specific things
-# These change sometimes, but rarely
-WORKDIR /usr/src/paperless/src/docker/
-
-COPY [ \
-"docker/imagemagick-policy.xml", \
-"docker/supervisord.conf", \
-"docker/docker-entrypoint.sh", \
-"docker/docker-prepare.sh", \
-"docker/paperless_cmd.sh", \
-"docker/wait-for-redis.py", \
-"docker/env-from-file.sh", \
-"docker/management_script.sh", \
-"docker/flower-conditional.sh", \
-"docker/install_management_commands.sh", \
-"/usr/src/paperless/src/docker/" \
-]
-
-RUN set -eux \
-&& echo "Configuring ImageMagick" \
-&& mv imagemagick-policy.xml /etc/ImageMagick-6/policy.xml \
-&& echo "Configuring supervisord" \
-&& mkdir /var/log/supervisord /var/run/supervisord \
-&& mv supervisord.conf /etc/supervisord.conf \
-&& echo "Setting up Docker scripts" \
-&& mv docker-entrypoint.sh /sbin/docker-entrypoint.sh \
-&& chmod 755 /sbin/docker-entrypoint.sh \
-&& mv docker-prepare.sh /sbin/docker-prepare.sh \
-&& chmod 755 /sbin/docker-prepare.sh \
-&& mv wait-for-redis.py /sbin/wait-for-redis.py \
-&& chmod 755 /sbin/wait-for-redis.py \
-&& mv env-from-file.sh /sbin/env-from-file.sh \
-&& chmod 755 /sbin/env-from-file.sh \
-&& mv paperless_cmd.sh /usr/local/bin/paperless_cmd.sh \
-&& chmod 755 /usr/local/bin/paperless_cmd.sh \
-&& mv flower-conditional.sh /usr/local/bin/flower-conditional.sh \
-&& chmod 755 /usr/local/bin/flower-conditional.sh \
-&& echo "Installing management commands" \
-&& chmod +x install_management_commands.sh \
-&& ./install_management_commands.sh

 WORKDIR /usr/src/paperless/src/

 # Python dependencies
 # Change pretty frequently
-COPY --from=pipenv-base /usr/src/pipenv/requirements.txt ./
+COPY --chown=1000:1000 ["pyproject.toml", "uv.lock", "/usr/src/paperless/src/"]

 # Packages needed only for building a few quick Python
 # dependencies
 ARG BUILD_PACKAGES="\
 build-essential \
-git \
-# https://www.psycopg.org/docs/install.html#prerequisites
-libpq-dev \
 # https://github.com/PyMySQL/mysqlclient#linux
 default-libmysqlclient-dev \
 pkg-config"

 # hadolint ignore=DL3042
-RUN --mount=type=cache,target=/root/.cache/pip/,id=pip-cache \
+RUN --mount=type=cache,target=${UV_CACHE_DIR},id=python-cache \
 set -eux \
 && echo "Installing build system packages" \
 && apt-get update \
 && apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
-&& python3 -m pip install --no-cache-dir --upgrade wheel \
 && echo "Installing Python requirements" \
-&& curl --fail --silent --show-error --location \
---output psycopg_c-3.2.3-cp312-cp312-linux_x86_64.whl \
-https://github.com/paperless-ngx/builder/releases/download/psycopg-3.2.3/psycopg_c-3.2.3-cp312-cp312-linux_x86_64.whl \
-&& curl --fail --silent --show-error --location \
---output psycopg_c-3.2.3-cp312-cp312-linux_aarch64.whl \
-https://github.com/paperless-ngx/builder/releases/download/psycopg-3.2.3/psycopg_c-3.2.3-cp312-cp312-linux_aarch64.whl \
-&& python3 -m pip install --default-timeout=1000 --find-links . --requirement requirements.txt \
+&& uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt \
+&& uv pip install --system --no-python-downloads --python-preference system --requirements requirements.txt \
 && echo "Installing NLTK data" \
 && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" snowball_data \
 && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" stopwords \
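For readers less familiar with the uv-based flow that replaces the old requirements.txt copy, a minimal sketch of the same two steps run on a workstation, assuming a checkout containing pyproject.toml and uv.lock and an interpreter you are willing to install into:

    # Resolve the locked, non-dev dependency set into a requirements file
    uv export --no-dev --all-extras --format requirements-txt --output-file requirements.txt

    # Install exactly that set into the current (system) interpreter
    uv pip install --system --requirements requirements.txt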
@@ -263,6 +239,7 @@ COPY --from=compile-frontend --chown=1000:1000 /src/src/documents/static/fronten
 # add users, setup scripts
 # Mount the compiled frontend to expected location
 RUN set -eux \
+&& sed -i '1s|^#!/usr/bin/env python3|#!/command/with-contenv python3|' manage.py \
 && echo "Setting up user/group" \
 && addgroup --gid 1000 paperless \
 && useradd --uid 1000 --gid paperless --home-dir /usr/src/paperless paperless \
@@ -276,18 +253,16 @@ RUN set -eux \
 && echo "Adjusting all permissions" \
 && chown --from root:root --changes --recursive paperless:paperless /usr/src/paperless \
 && echo "Collecting static files" \
-&& gosu paperless python3 manage.py collectstatic --clear --no-input --link \
+&& s6-setuidgid paperless python3 manage.py collectstatic --clear --no-input --link \
-&& gosu paperless python3 manage.py compilemessages
+&& s6-setuidgid paperless python3 manage.py compilemessages

 VOLUME ["/usr/src/paperless/data", \
 "/usr/src/paperless/media", \
 "/usr/src/paperless/consume", \
 "/usr/src/paperless/export"]

-ENTRYPOINT ["/sbin/docker-entrypoint.sh"]
+ENTRYPOINT ["/init"]

 EXPOSE 8000

-CMD ["/usr/local/bin/paperless_cmd.sh"]

 HEALTHCHECK --interval=30s --timeout=10s --retries=5 CMD [ "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000" ]
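With the entrypoint switched to the s6-overlay /init binary and the old CMD gone, a quick local smoke test of the built image might look like the sketch below; the image tag and port mapping are illustrative, and the container will still expect its usual Redis and database configuration to become fully functional:

    docker build --tag paperless-ngx:local .
    # /init (s6-overlay stage 1) now supervises the service tree inside the container
    docker run --rm --publish 8000:8000 paperless-ngx:local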
Pipfile (100 changes, file removed)

@@ -1,100 +0,0 @@
-[[source]]
-url = "https://pypi.python.org/simple"
-verify_ssl = true
-name = "pypi"
-
-[packages]
-dateparser = "~=1.2"
-# WARNING: django does not use semver.
-# Only patch versions are guaranteed to not introduce breaking changes.
-django = "~=5.1.4"
-django-allauth = {extras = ["mfa", "socialaccount"], version = "*"}
-django-auditlog = "*"
-django-celery-results = "*"
-django-compression-middleware = "*"
-django-cors-headers = "*"
-django-extensions = "*"
-django-filter = "~=24.3"
-django-guardian = "*"
-django-multiselectfield = "*"
-django-soft-delete = "*"
-djangorestframework = "~=3.15.2"
-djangorestframework-guardian = "*"
-drf-writable-nested = "*"
-bleach = "*"
-celery = {extras = ["redis"], version = "*"}
-channels = "~=4.2"
-channels-redis = "*"
-concurrent-log-handler = "*"
-filelock = "*"
-flower = "*"
-gotenberg-client = "*"
-gunicorn = "*"
-httpx-oauth = "*"
-imap-tools = "*"
-inotifyrecursive = "~=0.3"
-jinja2 = "~=3.1"
-langdetect = "*"
-mysqlclient = "*"
-nltk = "*"
-ocrmypdf = "~=16.8"
-pathvalidate = "*"
-pdf2image = "*"
-psycopg = {version = "*", extras = ["c"]}
-python-dateutil = "*"
-python-dotenv = "*"
-python-gnupg = "*"
-python-ipware = "*"
-python-magic = "*"
-pyzbar = "*"
-rapidfuzz = "*"
-redis = {extras = ["hiredis"], version = "*"}
-scikit-learn = "~=1.6"
-setproctitle = "*"
-tika-client = "*"
-tqdm = "*"
-# See https://github.com/paperless-ngx/paperless-ngx/issues/5494
-uvicorn = {extras = ["standard"], version = "==0.25.0"}
-watchdog = "~=6.0"
-whitenoise = "~=6.8"
-whoosh = "~=2.7"
-zxing-cpp = {version = "*", platform_machine = "== 'x86_64'"}
-
-
-[dev-packages]
-# Linting
-pre-commit = "*"
-ruff = "*"
-factory-boy = "*"
-# Testing
-pytest = "*"
-pytest-cov = "*"
-pytest-django = "*"
-pytest-httpx = "*"
-pytest-env = "*"
-pytest-sugar = "*"
-pytest-xdist = "*"
-pytest-mock = "*"
-pytest-rerunfailures = "*"
-imagehash = "*"
-daphne = "*"
-# Documentation
-mkdocs-material = "*"
-mkdocs-glightbox = "*"
-
-[typing-dev]
-mypy = "*"
-types-Pillow = "*"
-django-filter-stubs = "*"
-types-python-dateutil = "*"
-djangorestframework-stubs = {extras= ["compatible-mypy"], version="*"}
-celery-types = "*"
-django-stubs = {extras= ["compatible-mypy"], version="*"}
-types-dateparser = "*"
-types-bleach = "*"
-types-redis = "*"
-types-tqdm = "*"
-types-Markdown = "*"
-types-Pygments = "*"
-types-colorama = "*"
-types-setuptools = "*"

Pipfile.lock (generated, 4839 changes)
File diff suppressed because it is too large.
@@ -83,7 +83,7 @@ People interested in continuing the work on paperless-ngx are encouraged to reac

 ## Translation

-Paperless-ngx is available in many languages that are coordinated on Crowdin. If you want to help out by translating paperless-ngx into your language, please head over to https://crwd.in/paperless-ngx, and thank you! More details can be found in [CONTRIBUTING.md](https://github.com/paperless-ngx/paperless-ngx/blob/main/CONTRIBUTING.md#translating-paperless-ngx).
+Paperless-ngx is available in many languages that are coordinated on Crowdin. If you want to help out by translating paperless-ngx into your language, please head over to https://crowdin.com/project/paperless-ngx, and thank you! More details can be found in [CONTRIBUTING.md](https://github.com/paperless-ngx/paperless-ngx/blob/main/CONTRIBUTING.md#translating-paperless-ngx).

 ## Feature Requests

@@ -5,7 +5,7 @@

 services:
 gotenberg:
-image: docker.io/gotenberg/gotenberg:8.7
+image: docker.io/gotenberg/gotenberg:8.19
 hostname: gotenberg
 container_name: gotenberg
 network_mode: host
@@ -24,8 +24,8 @@
 # - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
 # and '.env' into a folder.
 # - Run 'docker compose pull'.
-# - Run 'docker compose run --rm webserver createsuperuser' to create a user.
 # - Run 'docker compose up -d'.
+
 #
 # For more extensive installation and update instructions, refer to the
 # documentation.
@@ -77,7 +77,7 @@ services:
 PAPERLESS_TIKA_ENDPOINT: http://tika:9998

 gotenberg:
-image: docker.io/gotenberg/gotenberg:8.7
+image: docker.io/gotenberg/gotenberg:8.19
 restart: unless-stopped
 # The gotenberg chromium route is used to convert .eml files. We do not
 # want to allow external content like tracking pixels or even javascript.
@@ -20,7 +20,6 @@
 # - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
 # and '.env' into a folder.
 # - Run 'docker compose pull'.
-# - Run 'docker compose run --rm webserver createsuperuser' to create a user.
 # - Run 'docker compose up -d'.
 #
 # For more extensive installation and update instructions, refer to the
@@ -22,10 +22,6 @@
 # - Upload 'docker-compose.env' by clicking on 'Load variables from .env file'
 # - Modify the environment variables as needed
 # - Click 'Deploy the stack' and wait for it to be deployed
-# - Open the list of containers, select paperless_webserver_1
-# - Click 'Console' and then 'Connect' to open the command line inside the container
-# - Run 'python3 manage.py createsuperuser' to create a user
-# - Exit the console
 #
 # For more extensive installation and update instructions, refer to the
 # documentation.
@@ -38,7 +34,7 @@ services:
 - redisdata:/data

 db:
-image: docker.io/library/postgres:16
+image: docker.io/library/postgres:17
 restart: unless-stopped
 volumes:
 - pgdata:/var/lib/postgresql/data
@@ -24,7 +24,6 @@
 # - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
 # and '.env' into a folder.
 # - Run 'docker compose pull'.
-# - Run 'docker compose run --rm webserver createsuperuser' to create a user.
 # - Run 'docker compose up -d'.
 #
 # For more extensive installation and update instructions, refer to the
@@ -38,7 +37,7 @@ services:
 - redisdata:/data

 db:
-image: docker.io/library/postgres:16
+image: docker.io/library/postgres:17
 restart: unless-stopped
 volumes:
 - pgdata:/var/lib/postgresql/data
@@ -71,7 +70,7 @@ services:
 PAPERLESS_TIKA_ENDPOINT: http://tika:9998

 gotenberg:
-image: docker.io/gotenberg/gotenberg:8.7
+image: docker.io/gotenberg/gotenberg:8.19
 restart: unless-stopped

 # The gotenberg chromium route is used to convert .eml files. We do not
@@ -20,7 +20,6 @@
 # - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
 # and '.env' into a folder.
 # - Run 'docker compose pull'.
-# - Run 'docker compose run --rm webserver createsuperuser' to create a user.
 # - Run 'docker compose up -d'.
 #
 # For more extensive installation and update instructions, refer to the
@@ -34,7 +33,7 @@ services:
 - redisdata:/data

 db:
-image: docker.io/library/postgres:16
+image: docker.io/library/postgres:17
 restart: unless-stopped
 volumes:
 - pgdata:/var/lib/postgresql/data
@@ -24,7 +24,6 @@
 # - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
 # and '.env' into a folder.
 # - Run 'docker compose pull'.
-# - Run 'docker compose run --rm webserver createsuperuser' to create a user.
 # - Run 'docker compose up -d'.
 #
 # For more extensive installation and update instructions, refer to the
@@ -59,7 +58,7 @@ services:
 PAPERLESS_TIKA_ENDPOINT: http://tika:9998

 gotenberg:
-image: docker.io/gotenberg/gotenberg:8.7
+image: docker.io/gotenberg/gotenberg:8.19
 restart: unless-stopped

 # The gotenberg chromium route is used to convert .eml files. We do not
@@ -17,7 +17,6 @@
 # - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
 # and '.env' into a folder.
 # - Run 'docker compose pull'.
-# - Run 'docker compose run --rm webserver createsuperuser' to create a user.
 # - Run 'docker compose up -d'.
 #
 # For more extensive installation and update instructions, refer to the
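Taken together, these compose comment changes drop the separate createsuperuser step from the quickstart text; the remaining flow described in those comments, run from the folder holding the compose files, is simply:

    docker compose pull
    docker compose up -d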
@@ -1,120 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-wait_for_postgres() {
-local attempt_num=1
-local -r max_attempts=5
-
-echo "Waiting for PostgreSQL to start..."
-
-local -r host="${PAPERLESS_DBHOST:-localhost}"
-local -r port="${PAPERLESS_DBPORT:-5432}"
-
-# Disable warning, host and port can't have spaces
-# shellcheck disable=SC2086
-while [ ! "$(pg_isready --host ${host} --port ${port})" ]; do
-
-if [ $attempt_num -eq $max_attempts ]; then
-echo "Unable to connect to database."
-exit 1
-else
-echo "Attempt $attempt_num failed! Trying again in 5 seconds..."
-fi
-
-attempt_num=$(("$attempt_num" + 1))
-sleep 5
-done
-echo "Connected to PostgreSQL"
-}
-
-wait_for_mariadb() {
-echo "Waiting for MariaDB to start..."
-
-local -r host="${PAPERLESS_DBHOST:=localhost}"
-local -r port="${PAPERLESS_DBPORT:=3306}"
-
-local attempt_num=1
-local -r max_attempts=5
-
-# Disable warning, host and port can't have spaces
-# shellcheck disable=SC2086
-while ! true > /dev/tcp/$host/$port; do
-
-if [ $attempt_num -eq $max_attempts ]; then
-echo "Unable to connect to database."
-exit 1
-else
-echo "Attempt $attempt_num failed! Trying again in 5 seconds..."
-
-fi
-
-attempt_num=$(("$attempt_num" + 1))
-sleep 5
-done
-echo "Connected to MariaDB"
-}
-
-wait_for_redis() {
-# We use a Python script to send the Redis ping
-# instead of installing redis-tools just for 1 thing
-if ! python3 /sbin/wait-for-redis.py; then
-exit 1
-fi
-}
-
-migrations() {
-(
-# flock is in place to prevent multiple containers from doing migrations
-# simultaneously. This also ensures that the db is ready when the command
-# of the current container starts.
-flock 200
-echo "Apply database migrations..."
-python3 manage.py migrate --skip-checks --no-input
-) 200>"${DATA_DIR}/migration_lock"
-}
-
-django_checks() {
-# Explicitly run the Django system checks
-echo "Running Django checks"
-python3 manage.py check
-}
-
-search_index() {
-
-local -r index_version=9
-local -r index_version_file=${DATA_DIR}/.index_version
-
-if [[ (! -f "${index_version_file}") || $(<"${index_version_file}") != "$index_version" ]]; then
-echo "Search index out of date. Updating..."
-python3 manage.py document_index reindex --no-progress-bar
-echo ${index_version} | tee "${index_version_file}" >/dev/null
-fi
-}
-
-superuser() {
-if [[ -n "${PAPERLESS_ADMIN_USER}" ]]; then
-python3 manage.py manage_superuser
-fi
-}
-
-do_work() {
-if [[ "${PAPERLESS_DBENGINE}" == "mariadb" ]]; then
-wait_for_mariadb
-elif [[ -n "${PAPERLESS_DBHOST}" ]]; then
-wait_for_postgres
-fi
-
-wait_for_redis
-
-migrations
-
-django_checks
-
-search_index
-
-superuser
-
-}
-
-do_work
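The removed migrations() helper relied on a flock-guarded subshell so that only one container applies migrations at a time; a compact standalone sketch of that locking pattern, with the lock path and guarded command as illustrative placeholders:

    #!/usr/bin/env bash
    lock_file="/tmp/example-migration.lock"   # placeholder path
    (
      # Block until this shell holds the lock on file descriptor 200
      flock 200
      echo "Running the step that must not run concurrently..."
    ) 200>"${lock_file}"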
@@ -1,42 +0,0 @@
-#!/usr/bin/env bash
-
-# Scans the environment variables for those with the suffix _FILE
-# When located, checks the file exists, and exports the contents
-# of the file as the same name, minus the suffix
-# This allows the use of Docker secrets or mounted files
-# to fill in any of the settings configurable via environment
-# variables
-
-set -eu
-
-for line in $(printenv)
-do
-# Extract the name of the environment variable
-env_name=${line%%=*}
-# Check if it starts with "PAPERLESS_" and ends in "_FILE"
-if [[ ${env_name} == PAPERLESS_*_FILE ]]; then
-# This should have been named different..
-if [[ ${env_name} == "PAPERLESS_OCR_SKIP_ARCHIVE_FILE" || ${env_name} == "PAPERLESS_MODEL_FILE" ]]; then
-continue
-fi
-# Extract the value of the environment
-env_value=${line#*=}
-
-# Check the file exists
-if [[ -f ${env_value} ]]; then
-
-# Trim off the _FILE suffix
-non_file_env_name=${env_name%"_FILE"}
-echo "Setting ${non_file_env_name} from file"
-
-# Reads the value from th file
-val="$(< "${!env_name}")"
-
-# Sets the normal name to the read file contents
-export "${non_file_env_name}"="${val}"
-
-else
-echo "File ${env_value} referenced by ${env_name} doesn't exist"
-fi
-fi
-done
@@ -1,12 +0,0 @@
-#!/usr/bin/env bash
-
-echo "Checking if we should start flower..."
-
-if [[ -n "${PAPERLESS_ENABLE_FLOWER}" ]]; then
-# Small delay to allow celery to be up first
-echo "Starting flower in 5s"
-sleep 5
-celery --app paperless flower --conf=/usr/src/paperless/src/paperless/flowerconfig.py
-else
-echo "Not starting flower"
-fi

docker/init-flow.drawio.png (BIN, new file): Binary file not shown. Size: 30 KiB
@@ -1,5 +1,7 @@
 #!/usr/bin/env bash

+# Run this script to generate the management commands again (for example if a new command is create or the template is updated)
+
 set -eu

 for command in decrypt_documents \
@@ -16,9 +18,10 @@ for command in decrypt_documents \
 document_fuzzy_match \
 manage_superuser \
 convert_mariadb_uuid \
-prune_audit_logs;
+prune_audit_logs \
+createsuperuser;
 do
 echo "installing $command..."
-sed "s/management_command/$command/g" management_script.sh > /usr/local/bin/$command
+sed "s/management_command/$command/g" management_script.sh >"$PWD/rootfs/usr/local/bin/$command"
-chmod +x /usr/local/bin/$command
+chmod u=rwx,g=rwx,o=rx "$PWD/rootfs/usr/local/bin/$command"
 done
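As a side illustration of the template substitution in that loop, a small sketch with a made-up template file and output directory (both placeholders, not paths from the repository layout):

    # template.sh contains the literal token "management_command"
    printf '#!/usr/bin/env bash\npython3 manage.py management_command "$@"\n' > template.sh

    # Stamp out a wrapper for one command name
    mkdir -p ./out
    command=document_exporter
    sed "s/management_command/$command/g" template.sh > "./out/$command"
    chmod u=rwx,g=rwx,o=rx "./out/$command"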
@@ -1,17 +1,13 @@
-#!/usr/bin/env bash
+#!/command/with-contenv /usr/bin/bash
+# shellcheck shell=bash

 set -e

-cd /usr/src/paperless/src/
+cd "${PAPERLESS_SRC_DIR}"
-# This ensures environment is setup
-# shellcheck disable=SC1091
-source /sbin/env-from-file.sh

-if [[ $(id -u) == 0 ]] ;
-then
-gosu paperless python3 manage.py management_command "$@"
-elif [[ $(id -un) == "paperless" ]] ;
-then
+if [[ $(id -u) == 0 ]]; then
+s6-setuidgid paperless python3 manage.py management_command "$@"
+elif [[ $(id -un) == "paperless" ]]; then
 python3 manage.py management_command "$@"
 else
 echo "Unknown user."
@ -1,16 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
SUPERVISORD_WORKING_DIR="${PAPERLESS_SUPERVISORD_WORKING_DIR:-$PWD}"
|
|
||||||
rootless_args=()
|
|
||||||
if [ "$(id -u)" == "$(id -u paperless)" ]; then
|
|
||||||
rootless_args=(
|
|
||||||
--user
|
|
||||||
paperless
|
|
||||||
--logfile
|
|
||||||
"${SUPERVISORD_WORKING_DIR}/supervisord.log"
|
|
||||||
--pidfile
|
|
||||||
"${SUPERVISORD_WORKING_DIR}/supervisord.pid"
|
|
||||||
)
|
|
||||||
fi
|
|
||||||
|
|
||||||
exec /usr/local/bin/supervisord -c /etc/supervisord.conf "${rootless_args[@]}"
|
|
docker/rootfs/etc/s6-overlay/s6-rc.d/init-complete/dependencies.d/init-tesseract-langs (new empty file)

docker/rootfs/etc/s6-overlay/s6-rc.d/init-complete/run (new executable file, 8 lines)
@@ -0,0 +1,8 @@
+#!/command/with-contenv /usr/bin/bash
+# shellcheck shell=bash
+declare -r log_prefix="[init-complete]"
+declare -r end_time=$(date +%s)
+declare -r start_time=${PAPERLESS_START_TIME_S}
+
+echo "${log_prefix} paperless-ngx docker container init completed in $(($end_time-$start_time)) seconds"
+echo "${log_prefix} Starting services"

docker/rootfs/etc/s6-overlay/s6-rc.d/init-complete/type (new file, 1 line)
@@ -0,0 +1 @@
+oneshot

docker/rootfs/etc/s6-overlay/s6-rc.d/init-complete/up (new file, 1 line)
@@ -0,0 +1 @@
+/etc/s6-overlay/s6-rc.d/init-complete/run
docker/rootfs/etc/s6-overlay/s6-rc.d/init-custom-init/dependencies.d/init-search-index (new empty file)
docker/rootfs/etc/s6-overlay/s6-rc.d/init-custom-init/dependencies.d/init-system-checks (new empty file)
docker/rootfs/etc/s6-overlay/s6-rc.d/init-custom-init/dependencies.d/init-tesseract-langs (new empty file)
docker/rootfs/etc/s6-overlay/s6-rc.d/init-custom-init/dependencies.d/init-wait-for-redis (new empty file)

docker/rootfs/etc/s6-overlay/s6-rc.d/init-custom-init/run (new executable file, 44 lines)
@@ -0,0 +1,44 @@
+#!/command/with-contenv /usr/bin/bash
+# shellcheck shell=bash
+
+declare -r log_prefix="[custom-init]"
+
+# Mostly borrowed from the LinuxServer.io base image
+# https://github.com/linuxserver/docker-baseimage-ubuntu/tree/bionic/root/etc/cont-init.d
+declare -r custom_script_dir="/custom-cont-init.d"
+
+# Tamper checking.
+# Don't run files which are owned by anyone except root
+# Don't run files which are writeable by others
+if [ -d "${custom_script_dir}" ]; then
+if [ -n "$(/usr/bin/find "${custom_script_dir}" -maxdepth 1 ! -user root)" ]; then
+echo "${log_prefix} **** Potential tampering with custom scripts detected ****"
+echo "${log_prefix} **** The folder '${custom_script_dir}' must be owned by root ****"
+exit 0
+fi
+if [ -n "$(/usr/bin/find "${custom_script_dir}" -maxdepth 1 -perm -o+w)" ]; then
+echo "${log_prefix} **** The folder '${custom_script_dir}' or some of contents have write permissions for others, which is a security risk. ****"
+echo "${log_prefix} **** Please review the permissions and their contents to make sure they are owned by root, and can only be modified by root. ****"
+exit 0
+fi
+
+# Make sure custom init directory has files in it
+if [ -n "$(/bin/ls --almost-all "${custom_script_dir}" 2>/dev/null)" ]; then
+echo "${log_prefix} files found in ${custom_script_dir} executing"
+# Loop over files in the directory
+for SCRIPT in "${custom_script_dir}"/*; do
+NAME="$(basename "${SCRIPT}")"
+if [ -f "${SCRIPT}" ]; then
+echo "${log_prefix} ${NAME}: executing..."
+/command/with-contenv /bin/bash "${SCRIPT}"
+echo "${log_prefix} ${NAME}: exited $?"
+elif [ ! -f "${SCRIPT}" ]; then
+echo "${log_prefix} ${NAME}: is not a file"
+fi
+done
+else
+echo "${log_prefix} no custom files found exiting..."
+fi
+else
+echo "${log_prefix} ${custom_script_dir} doesn't exist, nothing to do"
+fi

@@ -0,0 +1 @@
+oneshot

docker/rootfs/etc/s6-overlay/s6-rc.d/init-custom-init/up (new file, 1 line)
@@ -0,0 +1 @@
+/etc/s6-overlay/s6-rc.d/init-custom-init/run
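For operators, the practical effect of this service is that root-owned, non-world-writable scripts placed in /custom-cont-init.d run once during container startup. A rough sketch of preparing such a script on the host (the host path and script body are purely illustrative); it would then be bind mounted to /custom-cont-init.d, for example through a volumes entry in the compose file:

    # Create a root-owned custom init script on the host
    mkdir -p ./custom-init
    cat > ./custom-init/10-hello.sh << 'EOF'
    #!/usr/bin/env bash
    echo "hello from a custom init script"
    EOF
    chmod 755 ./custom-init/10-hello.sh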
docker/rootfs/etc/s6-overlay/s6-rc.d/init-env-file/run (new executable file, 33 lines)
@@ -0,0 +1,33 @@
+#!/command/with-contenv /usr/bin/bash
+# shellcheck shell=bash
+
+declare -r log_prefix="[env-init]"
+
+echo "${log_prefix} Checking for environment from files"
+
+if find /run/s6/container_environment/*"_FILE" -maxdepth 1 > /dev/null 2>&1; then
+for FILENAME in /run/s6/container_environment/*; do
+if [[ "${FILENAME##*/}" == PAPERLESS_*_FILE ]]; then
+# This should have been named different..
+if [[ ${FILENAME} == "PAPERLESS_OCR_SKIP_ARCHIVE_FILE" || ${FILENAME} == "PAPERLESS_MODEL_FILE" ]]; then
+continue
+fi
+SECRETFILE=$(cat "${FILENAME}")
+# Check the file exists
+if [[ -f ${SECRETFILE} ]]; then
+# Trim off trailing _FILE
+FILESTRIP=${FILENAME//_FILE/}
+if [[ $(tail -n1 "${SECRETFILE}" | wc -l) != 0 ]]; then
+echo "${log_prefix} Your secret: ${FILENAME##*/} contains a trailing newline and may not work as expected"
+fi
+# Set environment variable
+cat "${SECRETFILE}" > "${FILESTRIP}"
+echo "${log_prefix} ${FILESTRIP##*/} set from ${FILENAME##*/}"
+else
+echo "${log_prefix} cannot find secret in ${FILENAME##*/}"
+fi
+fi
+done
+else
+echo "${log_prefix} No *_FILE environment found"
+fi

docker/rootfs/etc/s6-overlay/s6-rc.d/init-env-file/type (new file, 1 line)
@@ -0,0 +1 @@
+oneshot

docker/rootfs/etc/s6-overlay/s6-rc.d/init-env-file/up (new file, 1 line)
@@ -0,0 +1 @@
+/etc/s6-overlay/s6-rc.d/init-env-file/run
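From the user's side, the *_FILE convention handled above works the same way as with the removed env-from-file.sh: point a PAPERLESS_*_FILE variable at a mounted secret and the plain variable is derived from its contents. A rough illustration, using PAPERLESS_DBPASS as the example variable and made-up paths:

    # Host side: write the secret without a trailing newline
    printf '%s' 'super-secret-password' > ./dbpass.secret

    # Container side (conceptually): the _FILE variable points at the mounted file,
    # and init derives PAPERLESS_DBPASS from it at startup.
    export PAPERLESS_DBPASS_FILE=/run/secrets/dbpass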
docker/rootfs/etc/s6-overlay/s6-rc.d/init-folders/run (new executable file, 33 lines)
@@ -0,0 +1,33 @@
+#!/command/with-contenv /usr/bin/bash
+# shellcheck shell=bash
+
+declare -r log_prefix="[init-folders]"
+
+declare -r export_dir="/usr/src/paperless/export"
+declare -r data_dir="${PAPERLESS_DATA_DIR:-/usr/src/paperless/data}"
+declare -r media_root_dir="${PAPERLESS_MEDIA_ROOT:-/usr/src/paperless/media}"
+declare -r consume_dir="${PAPERLESS_CONSUMPTION_DIR:-/usr/src/paperless/consume}"
+declare -r tmp_dir="${PAPERLESS_SCRATCH_DIR:=/tmp/paperless}"
+
+echo "${log_prefix} Checking for folder existence"
+
+for dir in \
+"${export_dir}" \
+"${data_dir}" "${data_dir}/index" \
+"${media_root_dir}" "${media_root_dir}/documents" "${media_root_dir}/documents/originals" "${media_root_dir}/documents/thumbnails" \
+"${consume_dir}" \
+"${tmp_dir}"; do
+if [[ ! -d "${dir}" ]]; then
+mkdir --parents --verbose "${dir}"
+fi
+done
+
+echo "${log_prefix} Adjusting file and folder permissions"
+for dir in \
+"${export_dir}" \
+"${data_dir}" \
+"${media_root_dir}" \
+"${consume_dir}" \
+"${tmp_dir}"; do
+find "${dir}" -not \( -user paperless -and -group paperless \) -exec chown --changes paperless:paperless {} +
+done

docker/rootfs/etc/s6-overlay/s6-rc.d/init-folders/type (new file, 1 line)
@@ -0,0 +1 @@
+oneshot

docker/rootfs/etc/s6-overlay/s6-rc.d/init-folders/up (new file, 1 line)
@@ -0,0 +1 @@
+/etc/s6-overlay/s6-rc.d/init-folders/run
7
docker/rootfs/etc/s6-overlay/s6-rc.d/init-migrations/migrate.sh
Executable file
7
docker/rootfs/etc/s6-overlay/s6-rc.d/init-migrations/migrate.sh
Executable file
@ -0,0 +1,7 @@
|
|||||||
|
#!/command/with-contenv /usr/bin/bash
|
||||||
|
# shellcheck shell=bash
|
||||||
|
declare -r data_dir="${PAPERLESS_DATA_DIR:-/usr/src/paperless/data}"
|
||||||
|
|
||||||
|
# shellcheck disable=SC2164
|
||||||
|
cd "${PAPERLESS_SRC_DIR}"
|
||||||
|
exec s6-setlock -n "${data_dir}/migration_lock" python3 manage.py migrate --skip-checks --no-input
|
docker/rootfs/etc/s6-overlay/s6-rc.d/init-migrations/run (Executable file, 12 lines)
@@ -0,0 +1,12 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash
declare -r log_prefix="[init-migrations]"

echo "${log_prefix} Apply database migrations..."

# The whole migrate, with flock, needs to run as the right user
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
    exec /etc/s6-overlay/s6-rc.d/init-migrations/migrate.sh
else
    exec s6-setuidgid paperless /etc/s6-overlay/s6-rc.d/init-migrations/migrate.sh
fi
docker/rootfs/etc/s6-overlay/s6-rc.d/init-migrations/type
@@ -0,0 +1 @@
oneshot
docker/rootfs/etc/s6-overlay/s6-rc.d/init-migrations/up (Normal file, 1 line)
@@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/init-migrations/run
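
Should a migration ever need to be re-run by hand, the wrapper above can be reused so the migration lock and user drop still apply. The container name and the /command binary path are assumptions typical of an s6-overlay v3 image, not something this diff guarantees.

# Hypothetical manual invocation (container name is a placeholder):
docker exec -it paperless-webserver \
  /command/s6-setuidgid paperless /etc/s6-overlay/s6-rc.d/init-migrations/migrate.sh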
docker/rootfs/etc/s6-overlay/s6-rc.d/init-modify-user/run (Executable file, 22 lines)
@@ -0,0 +1,22 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash
declare -r log_prefix="[init-user]"

declare -r usermap_original_uid=$(id -u paperless)
declare -r usermap_original_gid=$(id -g paperless)
declare -r usermap_new_uid=${USERMAP_UID:-$usermap_original_uid}
declare -r usermap_new_gid=${USERMAP_GID:-${usermap_original_gid:-$usermap_new_uid}}

if [[ ${usermap_new_uid} != "${usermap_original_uid}" ]]; then
    echo "${log_prefix} Mapping UID for paperless to $usermap_new_uid"
    usermod --non-unique --uid "${usermap_new_uid}" paperless
else
    echo "${log_prefix} No UID changes for paperless"
fi

if [[ ${usermap_new_gid} != "${usermap_original_gid}" ]]; then
    echo "${log_prefix} Mapping GID for paperless to $usermap_new_gid"
    groupmod --non-unique --gid "${usermap_new_gid}" paperless
else
    echo "${log_prefix} No GID changes for paperless"
fi
docker/rootfs/etc/s6-overlay/s6-rc.d/init-modify-user/type
@@ -0,0 +1 @@
oneshot
docker/rootfs/etc/s6-overlay/s6-rc.d/init-modify-user/up (Normal file, 1 line)
@@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/init-modify-user/run
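
A short example of the two knobs init-modify-user consumes; the 1000:1000 values and image tag are placeholders and should match the owner of any bind-mounted folders.

# Illustrative only: remap the in-container paperless user to the host user.
docker run -d \
  -e USERMAP_UID=1000 \
  -e USERMAP_GID=1000 \
  ghcr.io/paperless-ngx/paperless-ngx:latest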
docker/rootfs/etc/s6-overlay/s6-rc.d/init-search-index/run (Executable file, 28 lines)
@@ -0,0 +1,28 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash

declare -r log_prefix="[init-index]"

declare -r index_version=9
declare -r data_dir="${PAPERLESS_DATA_DIR:-/usr/src/paperless/data}"
declare -r index_version_file="${data_dir}/.index_version"

update_index () {
    echo "${log_prefix} Search index out of date. Updating..."
    cd "${PAPERLESS_SRC_DIR}"
    if [[ -n "${USER_IS_NON_ROOT}" ]]; then
        python3 manage.py document_index reindex --no-progress-bar
        echo ${index_version} | tee "${index_version_file}" > /dev/null
    else
        s6-setuidgid paperless python3 manage.py document_index reindex --no-progress-bar
        echo ${index_version} | s6-setuidgid paperless tee "${index_version_file}" > /dev/null
    fi
}

if [[ (! -f "${index_version_file}") ]]; then
    echo "${log_prefix} No index version file found"
    update_index
elif [[ $(<"${index_version_file}") != "$index_version" ]]; then
    echo "${log_prefix} index version updated"
    update_index
fi
docker/rootfs/etc/s6-overlay/s6-rc.d/init-search-index/type
@@ -0,0 +1 @@
oneshot
docker/rootfs/etc/s6-overlay/s6-rc.d/init-search-index/up
@@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/init-search-index/run
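
If the search index ever needs a rebuild outside this oneshot, the same management command can be invoked directly; the container name and the /command path are assumptions, while the command and flags are the ones used in the script above.

# Hypothetical manual rebuild (container name is a placeholder):
docker exec -it paperless-webserver \
  /command/s6-setuidgid paperless \
  python3 /usr/src/paperless/src/manage.py document_index reindex --no-progress-bar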
docker/rootfs/etc/s6-overlay/s6-rc.d/init-start/run (Executable file, 19 lines)
@@ -0,0 +1,19 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash

declare -r log_prefix="[init-start]"

echo "${log_prefix} paperless-ngx docker container starting..."

# Set some directories into environment for other steps to access via environment
# Sort of like variables for later
printf "/usr/src/paperless/src" > /var/run/s6/container_environment/PAPERLESS_SRC_DIR
echo $(date +%s) > /var/run/s6/container_environment/PAPERLESS_START_TIME_S

# Check if we're starting as a non-root user
if [ $(id -u) == $(id -u paperless) ]; then
    printf "true" > /var/run/s6/container_environment/USER_IS_NON_ROOT
    echo "${log_prefix} paperless-ngx docker container running under a user"
else
    echo "${log_prefix} paperless-ngx docker container starting init as root"
fi
docker/rootfs/etc/s6-overlay/s6-rc.d/init-start/type (Normal file, 1 line)
@@ -0,0 +1 @@
oneshot
docker/rootfs/etc/s6-overlay/s6-rc.d/init-start/up (Normal file, 1 line)
@@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/init-start/run
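
To show why init-start writes into /var/run/s6/container_environment: any later service launched through with-contenv sees those entries as ordinary environment variables. The snippet below is only a sketch of such a consumer, not a service from this diff.

#!/command/with-contenv /usr/bin/bash
# Sketch of a downstream service: the values exported by init-start
# arrive here as plain variables thanks to with-contenv.
echo "sources in ${PAPERLESS_SRC_DIR}, started at ${PAPERLESS_START_TIME_S}"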
docker/rootfs/etc/s6-overlay/s6-rc.d/init-superuser/run (Executable file, 20 lines)
@@ -0,0 +1,20 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash

declare -r log_prefix="[init-superuser]"

if [[ -n "${PAPERLESS_ADMIN_USER}" ]]; then
    echo "${log_prefix} Creating superuser..."
    cd "${PAPERLESS_SRC_DIR}"

    if [[ -n "${USER_IS_NON_ROOT}" ]]; then
        python3 manage.py manage_superuser
    else
        s6-setuidgid paperless python3 manage.py manage_superuser
    fi

    echo "${log_prefix} Superuser creation done"

else
    echo "${log_prefix} Not creating superuser"
fi
docker/rootfs/etc/s6-overlay/s6-rc.d/init-superuser/type (Normal file, 1 line)
@@ -0,0 +1 @@
oneshot
docker/rootfs/etc/s6-overlay/s6-rc.d/init-superuser/up (Normal file, 1 line)
@@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/init-superuser/run
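
A hedged example of triggering the superuser oneshot. Only PAPERLESS_ADMIN_USER appears in this diff; the password variable is the usual companion of manage_superuser but should be treated as an assumption here, as should the image tag.

# Illustrative only (values are placeholders):
docker run -d \
  -e PAPERLESS_ADMIN_USER=admin \
  -e PAPERLESS_ADMIN_PASSWORD=change-me \
  ghcr.io/paperless-ngx/paperless-ngx:latest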
docker/rootfs/etc/s6-overlay/s6-rc.d/init-system-checks/dependencies.d/init-tesseract-langs (Normal file, empty)
docker/rootfs/etc/s6-overlay/s6-rc.d/init-system-checks/run (Executable file, 15 lines)
@@ -0,0 +1,15 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash

declare -r log_prefix="[init-checks]"

# Explicitly run the Django system checks
echo "${log_prefix} Running Django checks"

cd "${PAPERLESS_SRC_DIR}"

if [[ -n "${USER_IS_NON_ROOT}" ]]; then
    python3 manage.py check
else
    s6-setuidgid paperless python3 manage.py check
fi
docker/rootfs/etc/s6-overlay/s6-rc.d/init-system-checks/type
@@ -0,0 +1 @@
oneshot
docker/rootfs/etc/s6-overlay/s6-rc.d/init-system-checks/up
@@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/init-system-checks/run
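
The same Django system checks can be run by hand when debugging a configuration problem; the container name is a placeholder, and manage.py is addressed via the source path that init-start exports.

# Hypothetical manual run of the checks above (container name is a placeholder):
docker exec -it paperless-webserver \
  python3 /usr/src/paperless/src/manage.py check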
docker/rootfs/etc/s6-overlay/s6-rc.d/init-tesseract-langs/dependencies.d/init-env-file (Normal file, empty)
docker/rootfs/etc/s6-overlay/s6-rc.d/init-tesseract-langs/run (Executable file, 65 lines)
@@ -0,0 +1,65 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash

declare -r log_prefix="[init-tesseract-langs]"

install_languages() {
    echo "Installing languages..."

    read -ra langs <<<"$1"

    # Check that it is not empty
    if [ ${#langs[@]} -eq 0 ]; then
        return
    fi

    # Build list of packages to install
    to_install=()
    for lang in "${langs[@]}"; do
        pkg="tesseract-ocr-$lang"

        if dpkg --status "$pkg" &>/dev/null; then
            echo "${log_prefix} Package $pkg already installed!"
            continue
        else
            to_install+=("$pkg")
        fi
    done

    # Use apt only when we install packages
    if [ ${#to_install[@]} -gt 0 ]; then

        # Warn the user if they're not root, but try anyway
        if [[ -n "${USER_IS_NON_ROOT}" ]]; then
            echo "${log_prefix} ERROR: Unable to install languages as non-root, startup may fail"
        fi

        apt-get --quiet update &>/dev/null

        for pkg in "${to_install[@]}"; do
            if ! apt-cache --quiet show "$pkg" &>/dev/null; then
                echo "${log_prefix} Skipped $pkg: Package not found! :("
                continue
            fi
            echo "${log_prefix} Installing package $pkg..."
            if ! apt-get --quiet --assume-yes install "$pkg" &>/dev/null; then
                echo "${log_prefix} Could not install $pkg"
                exit 1
            else
                echo "${log_prefix} Installed $pkg"
            fi
        done

    fi
}

echo "${log_prefix} Checking if additional tesseract languages needed"

# Install additional languages if specified
if [[ -n "$PAPERLESS_OCR_LANGUAGES" ]]; then

    install_languages "$PAPERLESS_OCR_LANGUAGES"
    echo "${log_prefix} Additional packages installed"
else
    echo "${log_prefix} No additional installs requested"
fi
docker/rootfs/etc/s6-overlay/s6-rc.d/init-tesseract-langs/type
@@ -0,0 +1 @@
oneshot
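
Finally, a hedged example of driving install_languages; the language codes and image tag are placeholders, and each space-separated code becomes a tesseract-ocr-<code> apt package as the loop above shows.

# Illustrative only: request extra OCR languages at container start.
docker run -d \
  -e PAPERLESS_OCR_LANGUAGES="deu fra" \
  ghcr.io/paperless-ngx/paperless-ngx:latest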
Some files were not shown because too many files have changed in this diff.