Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2025-08-05 18:58:34 -05:00)

Compare commits: ngx-1.7.0-...v1.9.0-bet (1306 commits)
.build-config.json (new file, 9 lines)

```json
{
  "qpdf": {
    "version": "10.6.3"
  },
  "jbig2enc": {
    "version": "0.29",
    "git_tag": "0.29"
  }
}
```
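The build scripts added later in this diff appear to use this file as the single source of pinned third-party tool versions, so reading it is a one-liner. A minimal sketch (the path resolution is an assumption; the real scripts may differ):

```python
import json
from pathlib import Path

# Minimal sketch: load the pinned versions from the repository root.
config = json.loads(Path(".build-config.json").read_text())

qpdf_version = config["qpdf"]["version"]      # "10.6.3"
jbig2enc_tag = config["jbig2enc"]["git_tag"]  # "0.29"
print(f"qpdf={qpdf_version} jbig2enc={jbig2enc_tag}")
```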
```diff
@@ -17,3 +17,5 @@
 **/htmlcov
 /src/.pytest_cache
 .idea
+.venv/
+.vscode/
```
```diff
@@ -18,14 +18,23 @@ max_line_length = off
 indent_size = 4
 indent_style = space
 
-[*.yml]
+[*.{yml,yaml}]
 indent_style = space
 
 [*.rst]
 indent_style = space
 
+[*.md]
+indent_style = space
+
+[Pipfile.lock]
+indent_style = space
+
 # Tests don't get a line width restriction. It's still a good idea to follow
 # the 79 character rule, but in the interests of clarity, tests often need to
 # violate it.
 [**/test_*.py]
 max_line_length = off
+
+[Dockerfile*]
+indent_style = space
```
.env (2 changed lines)

```diff
@@ -1,2 +1,2 @@
 COMPOSE_PROJECT_NAME=paperless
 export PROMPT="(pipenv-projectname)$P$G"
```
.github/ISSUE_TEMPLATE/bug-report.yml (vendored, 53 changed lines)

```diff
@@ -7,34 +7,34 @@ body:
     attributes:
       value: |
         Have a question? 👉 [Start a new discussion](https://github.com/paperless-ngx/paperless-ngx/discussions/new) or [ask in chat](https://matrix.to/#/#paperless:adnidor.de).
 
-        Before opening an issue, please check [the documentation](https://paperless-ngx.readthedocs.io/en/latest/troubleshooting.html) and see if it helps you resolve your issue. Please also make sure that you followed the installation instructions.
+        Before opening an issue, please double check:
 
-        If you encounter issues while installing or configuring Paperless-ngx, please post in the ["Support" section of the discussions](https://github.com/paperless-ngx/paperless-ngx/discussions/new?category=support). Remember that Paperless successfully runs on a variety of different systems. If Paperless-ngx does not start, it's likely an issue with your system, not an issue of Paperless-ngx.
-        Finally, please search issues and discussions before opening a new bug report.
+        - [The troubleshooting documentation](https://paperless-ngx.readthedocs.io/en/latest/troubleshooting.html).
+        - [The installation instructions](https://paperless-ngx.readthedocs.io/en/latest/setup.html#installation).
+        - [Existing issues and discussions](https://github.com/paperless-ngx/paperless-ngx/search?q=&type=issues).
 
+        If you encounter issues while installing or configuring Paperless-ngx, please post in the ["Support" section of the discussions](https://github.com/paperless-ngx/paperless-ngx/discussions/new?category=support).
   - type: textarea
     id: description
     attributes:
       label: Description
-      description: A clear and concise description of what the bug is.
-      placeholder: Currently...
-    validations:
-      required: true
-  - type: textarea
-    id: expected-behavior
-    attributes:
-      label: Expected behavior
-      description: A clear and concise description of what you expected to happen.
-      placeholder: In this situation...
+      description: A clear and concise description of what the bug is. If applicable, add screenshots to help explain your problem.
+      placeholder: |
+        Currently Paperless does not work when...
+
+        [Screenshot if applicable]
     validations:
       required: true
   - type: textarea
     id: reproduction
     attributes:
       label: Steps to reproduce
-      description: Steps to reproduce the behavior
-      placeholder: "1. Go to '...', 2. Click on '....', 3. See error"
+      description: Steps to reproduce the behavior.
+      placeholder: |
+        1. Go to '...'
+        2. Click on '....'
+        3. See error
     validations:
       required: true
   - type: textarea
@@ -43,11 +43,6 @@ body:
       label: Webserver logs
       description: If available, post any logs from the web server related to your issue.
       render: bash
-  - type: textarea
-    id: screenshots
-    attributes:
-      label: Screenshots
-      description: If applicable, add screenshots to help explain your problem.
   - type: input
     id: version
     attributes:
@@ -59,8 +54,8 @@ body:
     id: host-os
     attributes:
       label: Host OS
-      description: Host OS of the machine running paperless-ngx
-      placeholder: e.g. Archlinux / Ubuntu 20.04
+      description: Host OS of the machine running paperless-ngx. Please add the architecture (uname -m) if applicable.
+      placeholder: e.g. Archlinux / Ubuntu 20.04 / Raspberry Pi `arm64`
     validations:
       required: true
   - type: dropdown
@@ -68,16 +63,18 @@ body:
     attributes:
       label: Installation method
       options:
-        - Docker
+        - Docker - official image
+        - Docker - linuxserver.io image
         - Bare metal
         - Other (please describe above)
+      description: Note there are significant differences from the official image and linuxserver.io, please check if your issue is specific to the third-party image.
     validations:
       required: true
   - type: input
     id: browser
     attributes:
       label: Browser
-      description: Which browser you are using, if relevant
+      description: Which browser you are using, if relevant.
       placeholder: e.g. Chrome, Safari
   - type: input
     id: config-changes
@@ -88,4 +85,4 @@ body:
     id: other
     attributes:
       label: Other
-      description: Any other relevant details
+      description: Any other relevant details.
```
.github/dependabot.yml (vendored, 24 changed lines)

```diff
@@ -6,11 +6,14 @@ updates:
   # Enable version updates for npm
   - package-ecosystem: "npm"
     target-branch: "dev"
-    # Look for `package.json` and `lock` files in the `root` directory
+    # Look for `package.json` and `lock` files in the `/src-ui` directory
     directory: "/src-ui"
-    # Check the npm registry for updates every week
+    # Check the npm registry for updates every month
     schedule:
       interval: "monthly"
+    labels:
+      - "frontend"
+      - "dependencies"
     # Add reviewers
     reviewers:
       - "paperless-ngx/frontend"
@@ -23,6 +26,23 @@ updates:
     # Check for updates once a week
     schedule:
       interval: "weekly"
+    labels:
+      - "backend"
+      - "dependencies"
     # Add reviewers
     reviewers:
       - "paperless-ngx/backend"
+
+  # Enable updates for Github Actions
+  - package-ecosystem: "github-actions"
+    target-branch: "dev"
+    directory: "/"
+    schedule:
+      # Check for updates to GitHub Actions every month
+      interval: "monthly"
+    labels:
+      - "ci-cd"
+      - "dependencies"
+    # Add reviewers
+    reviewers:
+      - "paperless-ngx/ci-cd"
```
.github/release-drafter.yml (vendored, 33 changed lines)

```diff
@@ -1,4 +1,18 @@
+autolabeler:
+  - label: "bug"
+    branch:
+      - '/^fix/'
+    title:
+      - "/^fix/i"
+  - label: "enhancement"
+    branch:
+      - '/^feature/'
+    title:
+      - "/^feature/i"
 categories:
+  - title: 'Breaking Changes'
+    labels:
+      - 'breaking-change'
   - title: 'Features'
     labels:
       - 'enhancement'
@@ -12,9 +26,15 @@ categories:
       - 'chore'
       - 'deployment'
       - 'translation'
+      - 'ci-cd'
   - title: 'Dependencies'
     collapse-after: 3
     label: 'dependencies'
+  - title: 'All App Changes'
+    labels:
+      - 'frontend'
+      - 'backend'
+    collapse-after: 0
 include-labels:
   - 'enhancement'
   - 'bug'
@@ -22,13 +42,14 @@ include-labels:
   - 'deployment'
   - 'translation'
   - 'dependencies'
-replacers: # Changes "Feature: Update checker" to "Update checker"
-  - search: '/Feature:|Feat:|\[feature\]/gi'
-    replace: ''
-change-template: '- $TITLE @$AUTHOR (#$NUMBER)'
+  - 'documentation'
+  - 'frontend'
+  - 'backend'
+  - 'ci-cd'
+category-template: '### $TITLE'
+change-template: '- $TITLE @$AUTHOR ([#$NUMBER]($URL))'
 change-title-escapes: '\<*_&#@'
-tag-prefix: "ngx-"
 template: |
-  ## Changelog
+  ## paperless-ngx $RESOLVED_VERSION
 
   $CHANGES
```
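The `autolabeler` patterns above are regular expressions; the `/.../i` form is case-insensitive, and they are tested against branch names and PR titles. A small sketch of the matching semantics only, with hypothetical titles (this is not release-drafter's own code):

```python
import re

# Mirror of the two autolabeler rules above; the titles are hypothetical.
patterns = {
    "bug": re.compile(r"^fix", re.IGNORECASE),
    "enhancement": re.compile(r"^feature", re.IGNORECASE),
}

for title in ["Fix: broken thumbnails", "Feature: update checker", "Chore: bump deps"]:
    labels = [label for label, pattern in patterns.items() if pattern.match(title)]
    print(f"{title!r} -> {labels or ['(no autolabel)']}")
```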
388
.github/scripts/cleanup-tags.py
vendored
Normal file
388
.github/scripts/cleanup-tags.py
vendored
Normal file
@@ -0,0 +1,388 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import functools
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
from argparse import ArgumentParser
|
||||||
|
from typing import Dict
|
||||||
|
from typing import Final
|
||||||
|
from typing import List
|
||||||
|
from urllib.parse import quote
|
||||||
|
|
||||||
|
import requests
|
||||||
|
from common import get_log_level
|
||||||
|
|
||||||
|
logger = logging.getLogger("cleanup-tags")
|
||||||
|
|
||||||
|
|
||||||
|
class ContainerPackage:
|
||||||
|
def __init__(self, data: Dict):
|
||||||
|
self._data = data
|
||||||
|
self.name = self._data["name"]
|
||||||
|
self.id = self._data["id"]
|
||||||
|
self.url = self._data["url"]
|
||||||
|
self.tags = self._data["metadata"]["container"]["tags"]
|
||||||
|
|
||||||
|
@functools.cached_property
|
||||||
|
def untagged(self) -> bool:
|
||||||
|
return len(self.tags) == 0
|
||||||
|
|
||||||
|
@functools.cache
|
||||||
|
def tag_matches(self, pattern: str) -> bool:
|
||||||
|
for tag in self.tags:
|
||||||
|
if re.match(pattern, tag) is not None:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return f"Package {self.name}"
|
||||||
|
|
||||||
|
|
||||||
|
class GithubContainerRegistry:
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
session: requests.Session,
|
||||||
|
token: str,
|
||||||
|
owner_or_org: str,
|
||||||
|
):
|
||||||
|
self._session: requests.Session = session
|
||||||
|
self._token = token
|
||||||
|
self._owner_or_org = owner_or_org
|
||||||
|
# https://docs.github.com/en/rest/branches/branches
|
||||||
|
self._BRANCHES_ENDPOINT = "https://api.github.com/repos/{OWNER}/{REPO}/branches"
|
||||||
|
if self._owner_or_org == "paperless-ngx":
|
||||||
|
# https://docs.github.com/en/rest/packages#get-all-package-versions-for-a-package-owned-by-an-organization
|
||||||
|
self._PACKAGES_VERSIONS_ENDPOINT = "https://api.github.com/orgs/{ORG}/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions"
|
||||||
|
# https://docs.github.com/en/rest/packages#delete-package-version-for-an-organization
|
||||||
|
self._PACKAGE_VERSION_DELETE_ENDPOINT = "https://api.github.com/orgs/{ORG}/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions/{PACKAGE_VERSION_ID}"
|
||||||
|
else:
|
||||||
|
# https://docs.github.com/en/rest/packages#get-all-package-versions-for-a-package-owned-by-the-authenticated-user
|
||||||
|
self._PACKAGES_VERSIONS_ENDPOINT = "https://api.github.com/user/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions"
|
||||||
|
# https://docs.github.com/en/rest/packages#delete-a-package-version-for-the-authenticated-user
|
||||||
|
self._PACKAGE_VERSION_DELETE_ENDPOINT = "https://api.github.com/user/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions/{PACKAGE_VERSION_ID}"
|
||||||
|
|
||||||
|
def __enter__(self):
|
||||||
|
"""
|
||||||
|
Sets up the required headers for auth and response
|
||||||
|
type from the API
|
||||||
|
"""
|
||||||
|
self._session.headers.update(
|
||||||
|
{
|
||||||
|
"Accept": "application/vnd.github.v3+json",
|
||||||
|
"Authorization": f"token {self._token}",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
return self
|
||||||
|
|
||||||
|
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||||
|
"""
|
||||||
|
Ensures the authorization token is cleaned up no matter
|
||||||
|
the reason for the exit
|
||||||
|
"""
|
||||||
|
if "Accept" in self._session.headers:
|
||||||
|
del self._session.headers["Accept"]
|
||||||
|
if "Authorization" in self._session.headers:
|
||||||
|
del self._session.headers["Authorization"]
|
||||||
|
|
||||||
|
def _read_all_pages(self, endpoint):
|
||||||
|
"""
|
||||||
|
Internal function to read all pages of an endpoint, utilizing the
|
||||||
|
next.url until exhausted
|
||||||
|
"""
|
||||||
|
internal_data = []
|
||||||
|
|
||||||
|
while True:
|
||||||
|
resp = self._session.get(endpoint)
|
||||||
|
if resp.status_code == 200:
|
||||||
|
internal_data += resp.json()
|
||||||
|
if "next" in resp.links:
|
||||||
|
endpoint = resp.links["next"]["url"]
|
||||||
|
else:
|
||||||
|
logger.debug("Exiting pagination loop")
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
logger.warning(f"Request to {endpoint} return HTTP {resp.status_code}")
|
||||||
|
break
|
||||||
|
|
||||||
|
return internal_data
|
||||||
|
|
||||||
|
def get_branches(self, repo: str):
|
||||||
|
"""
|
||||||
|
Returns all current branches of the given repository
|
||||||
|
"""
|
||||||
|
endpoint = self._BRANCHES_ENDPOINT.format(OWNER=self._owner_or_org, REPO=repo)
|
||||||
|
internal_data = self._read_all_pages(endpoint)
|
||||||
|
return internal_data
|
||||||
|
|
||||||
|
def filter_branches_by_name_pattern(self, branch_data, pattern: str):
|
||||||
|
"""
|
||||||
|
Filters the given list of branches to those which start with the given
|
||||||
|
pattern. Future enhancement could use regex patterns instead.
|
||||||
|
"""
|
||||||
|
matches = {}
|
||||||
|
|
||||||
|
for branch in branch_data:
|
||||||
|
if branch["name"].startswith(pattern):
|
||||||
|
matches[branch["name"]] = branch
|
||||||
|
|
||||||
|
return matches
|
||||||
|
|
||||||
|
def get_package_versions(
|
||||||
|
self,
|
||||||
|
package_name: str,
|
||||||
|
package_type: str = "container",
|
||||||
|
) -> List[ContainerPackage]:
|
||||||
|
"""
|
||||||
|
Returns all the versions of a given package (container images) from
|
||||||
|
the API
|
||||||
|
"""
|
||||||
|
package_name = quote(package_name, safe="")
|
||||||
|
endpoint = self._PACKAGES_VERSIONS_ENDPOINT.format(
|
||||||
|
ORG=self._owner_or_org,
|
||||||
|
PACKAGE_TYPE=package_type,
|
||||||
|
PACKAGE_NAME=package_name,
|
||||||
|
)
|
||||||
|
|
||||||
|
pkgs = []
|
||||||
|
|
||||||
|
for data in self._read_all_pages(endpoint):
|
||||||
|
pkgs.append(ContainerPackage(data))
|
||||||
|
|
||||||
|
return pkgs
|
||||||
|
|
||||||
|
def delete_package_version(self, package_data: ContainerPackage):
|
||||||
|
"""
|
||||||
|
Deletes the given package version from the GHCR
|
||||||
|
"""
|
||||||
|
resp = self._session.delete(package_data.url)
|
||||||
|
if resp.status_code != 204:
|
||||||
|
logger.warning(
|
||||||
|
f"Request to delete {package_data.url} returned HTTP {resp.status_code}",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _main():
|
||||||
|
parser = ArgumentParser(
|
||||||
|
description="Using the GitHub API locate and optionally delete container"
|
||||||
|
" tags which no longer have an associated feature branch",
|
||||||
|
)
|
||||||
|
|
||||||
|
# Requires an affirmative command to actually do a delete
|
||||||
|
parser.add_argument(
|
||||||
|
"--delete",
|
||||||
|
action="store_true",
|
||||||
|
default=False,
|
||||||
|
help="If provided, actually delete the container tags",
|
||||||
|
)
|
||||||
|
|
||||||
|
# When a tagged image is updated, the previous version remains, but it no longer tagged
|
||||||
|
# Add this option to remove them as well
|
||||||
|
parser.add_argument(
|
||||||
|
"--untagged",
|
||||||
|
action="store_true",
|
||||||
|
default=False,
|
||||||
|
help="If provided, delete untagged containers as well",
|
||||||
|
)
|
||||||
|
|
||||||
|
# Allows configuration of log level for debugging
|
||||||
|
parser.add_argument(
|
||||||
|
"--loglevel",
|
||||||
|
default="info",
|
||||||
|
help="Configures the logging level",
|
||||||
|
)
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
logging.basicConfig(
|
||||||
|
level=get_log_level(args),
|
||||||
|
datefmt="%Y-%m-%d %H:%M:%S",
|
||||||
|
format="%(asctime)s %(levelname)-8s %(message)s",
|
||||||
|
)
|
||||||
|
|
||||||
|
# Must be provided in the environment
|
||||||
|
repo_owner: Final[str] = os.environ["GITHUB_REPOSITORY_OWNER"]
|
||||||
|
repo: Final[str] = os.environ["GITHUB_REPOSITORY"]
|
||||||
|
gh_token: Final[str] = os.environ["TOKEN"]
|
||||||
|
|
||||||
|
with requests.session() as sess:
|
||||||
|
with GithubContainerRegistry(sess, gh_token, repo_owner) as gh_api:
|
||||||
|
|
||||||
|
# Step 1 - Get branch information
|
||||||
|
|
||||||
|
# Step 1.1 - Locate all branches of the repo
|
||||||
|
all_branches = gh_api.get_branches("paperless-ngx")
|
||||||
|
logger.info(f"Located {len(all_branches)} branches of {repo_owner}/{repo} ")
|
||||||
|
|
||||||
|
# Step 1.2 - Filter branches to those starting with "feature-"
|
||||||
|
feature_branches = gh_api.filter_branches_by_name_pattern(
|
||||||
|
all_branches,
|
||||||
|
"feature-",
|
||||||
|
)
|
||||||
|
logger.info(f"Located {len(feature_branches)} feature branches")
|
||||||
|
|
||||||
|
# Step 2 - Deal with package information
|
||||||
|
for package_name in ["paperless-ngx", "paperless-ngx/builder/cache/app"]:
|
||||||
|
|
||||||
|
# Step 2.1 - Location all versions of the given package
|
||||||
|
all_package_versions = gh_api.get_package_versions(package_name)
|
||||||
|
|
||||||
|
# Faster lookup, map the tag to their container
|
||||||
|
all_pkgs_tags_to_version = {}
|
||||||
|
for pkg in all_package_versions:
|
||||||
|
for tag in pkg.tags:
|
||||||
|
all_pkgs_tags_to_version[tag] = pkg
|
||||||
|
logger.info(
|
||||||
|
f"Located {len(all_package_versions)} versions of package {package_name}",
|
||||||
|
)
|
||||||
|
|
||||||
|
# Step 2.2 - Location package versions which have a tag of "feature-"
|
||||||
|
packages_tagged_feature = []
|
||||||
|
for package in all_package_versions:
|
||||||
|
if package.tag_matches("feature-"):
|
||||||
|
packages_tagged_feature.append(package)
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f'Located {len(packages_tagged_feature)} versions of package {package_name} tagged "feature-"',
|
||||||
|
)
|
||||||
|
|
||||||
|
# Faster lookup, map feature- tags to their container
|
||||||
|
feature_pkgs_tags_to_versions = {}
|
||||||
|
for pkg in packages_tagged_feature:
|
||||||
|
for tag in pkg.tags:
|
||||||
|
feature_pkgs_tags_to_versions[tag] = pkg
|
||||||
|
|
||||||
|
# Step 2.3 - Determine which package versions have no matching branch and which tags we're keeping
|
||||||
|
tags_to_delete = list(
|
||||||
|
set(feature_pkgs_tags_to_versions.keys())
|
||||||
|
- set(feature_branches.keys()),
|
||||||
|
)
|
||||||
|
tags_to_keep = list(
|
||||||
|
set(all_pkgs_tags_to_version.keys()) - set(tags_to_delete),
|
||||||
|
)
|
||||||
|
logger.info(
|
||||||
|
f"Located {len(tags_to_delete)} versions of package {package_name} to delete",
|
||||||
|
)
    # Step 2.4 - Delete certain package versions
    for tag_to_delete in tags_to_delete:
        package_version_info = feature_pkgs_tags_to_versions[tag_to_delete]

        if args.delete:
            logger.info(
                f"Deleting {tag_to_delete} (id {package_version_info.id})",
            )
            gh_api.delete_package_version(
                package_version_info,
            )

        else:
            logger.info(
                f"Would delete {tag_to_delete} (id {package_version_info.id})",
            )

    # Step 3 - Deal with untagged and dangling packages
    if args.untagged:

        """
        Ok, bear with me, these are annoying.

        Our images are multi-arch, so the manifest is more like a pointer to a sha256 digest.
        These images are untagged, but pointed to, and so should not be removed (or every pull fails).

        So for each image getting kept, parse the manifest to find the digest(s) it points to. Then
        remove those from the list of untagged images. The final result is the untagged, not pointed to
        version which should be safe to remove.

        Example:
            Tag: ghcr.io/paperless-ngx/paperless-ngx:1.7.1 refers to
                amd64: sha256:b9ed4f8753bbf5146547671052d7e91f68cdfc9ef049d06690b2bc866fec2690
                armv7: sha256:81605222df4ba4605a2ba4893276e5d08c511231ead1d5da061410e1bbec05c3
                arm64: sha256:374cd68db40734b844705bfc38faae84cc4182371de4bebd533a9a365d5e8f3b
            each of which appears as an untagged image
        """

        # Step 3.1 - Simplify the untagged data, mapping name (which is a digest) to the version
        untagged_versions = {}
        for x in all_package_versions:
            if x.untagged:
                untagged_versions[x.name] = x

        skips = 0
        # Extra safety: don't delete anything if an unexpected error occurs
        actually_delete = True

        logger.info(
            f"Located {len(tags_to_keep)} tags of package {package_name} to keep",
        )

        # Step 3.2 - Parse manifests to locate digests pointed to
        for tag in tags_to_keep:
            full_name = f"ghcr.io/{repo_owner}/{package_name}:{tag}"
            logger.info(f"Checking manifest for {full_name}")
            try:
                proc = subprocess.run(
                    [
                        shutil.which("docker"),
                        "manifest",
                        "inspect",
                        full_name,
                    ],
                    capture_output=True,
                )

                manifest_list = json.loads(proc.stdout)
                for manifest in manifest_list["manifests"]:
                    digest = manifest["digest"]
                    platform_data_os = manifest["platform"]["os"]
                    platform_arch = manifest["platform"]["architecture"]
                    platform_variant = manifest["platform"].get(
                        "variant",
                        "",
                    )
                    platform = f"{platform_data_os}/{platform_arch}{platform_variant}"

                    if digest in untagged_versions:
                        logger.debug(
                            f"Skipping deletion of {digest}, referred to by {full_name} for {platform}",
                        )
                        del untagged_versions[digest]
                        skips += 1

            except json.decoder.JSONDecodeError as err:
                # This is probably a cache image, which isn't a multi-arch digest
                # These are fine to delete
                logger.debug(f"{err} on {full_name}")
                continue
            except Exception as err:
                actually_delete = False
                logger.exception(err)
                continue

        logger.info(f"Skipping deletion of {skips} packages")

        # Step 3.3 - Delete the untagged and not pointed at packages
        logger.info(f"Deleting untagged packages of {package_name}")
        for to_delete_name in untagged_versions:
            to_delete_version = untagged_versions[to_delete_name]

            if args.delete and actually_delete:
                logger.info(
                    f"Deleting id {to_delete_version.id} named {to_delete_version.name}",
                )
                gh_api.delete_package_version(
                    to_delete_version,
                )
            else:
                logger.info(
                    f"Would delete {to_delete_name} (id {to_delete_version.id})",
                )
    else:
        logger.info("Leaving untagged images untouched")


if __name__ == "__main__":
    _main()
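For reference, a minimal sketch of the manifest-list shape the Step 3.2 loop above parses. The digest values are taken from the docstring example; the surrounding JSON layout and the `variant` values are assumptions about what `docker manifest inspect` typically prints for a multi-arch image:

#!/usr/bin/env python3
import json

# A trimmed, hypothetical manifest list, shaped like the `docker manifest inspect`
# output the cleanup script feeds to json.loads() above
sample = json.loads(
    """
    {
      "manifests": [
        {"digest": "sha256:b9ed4f8753bbf5146547671052d7e91f68cdfc9ef049d06690b2bc866fec2690",
         "platform": {"os": "linux", "architecture": "amd64"}},
        {"digest": "sha256:81605222df4ba4605a2ba4893276e5d08c511231ead1d5da061410e1bbec05c3",
         "platform": {"os": "linux", "architecture": "arm", "variant": "v7"}},
        {"digest": "sha256:374cd68db40734b844705bfc38faae84cc4182371de4bebd533a9a365d5e8f3b",
         "platform": {"os": "linux", "architecture": "arm64", "variant": "v8"}}
      ]
    }
    """,
)

for manifest in sample["manifests"]:
    # Same field access as the cleanup loop: digest plus os/architecture/variant
    platform = manifest["platform"]
    print(
        manifest["digest"],
        f"{platform['os']}/{platform['architecture']}{platform.get('variant', '')}",
    )

Every digest printed here would be removed from the untagged_versions dict, since the tagged manifest still points at it.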
44 .github/scripts/common.py vendored Normal file
@@ -0,0 +1,44 @@
#!/usr/bin/env python3
import logging
from argparse import ArgumentError


def get_image_tag(
    repo_name: str,
    pkg_name: str,
    pkg_version: str,
) -> str:
    """
    Returns a string representing the normal image for a given package
    """
    return f"ghcr.io/{repo_name.lower()}/builder/{pkg_name}:{pkg_version}"


def get_cache_image_tag(
    repo_name: str,
    pkg_name: str,
    pkg_version: str,
    branch_name: str,
) -> str:
    """
    Returns a string representing the expected image cache tag for a given package

    Registry type caching is utilized for the builder images, to allow fast
    rebuilds, generally almost instant for the same version
    """
    return f"ghcr.io/{repo_name.lower()}/builder/cache/{pkg_name}:{pkg_version}"


def get_log_level(args) -> int:
    levels = {
        "critical": logging.CRITICAL,
        "error": logging.ERROR,
        "warn": logging.WARNING,
        "warning": logging.WARNING,
        "info": logging.INFO,
        "debug": logging.DEBUG,
    }
    level = levels.get(args.loglevel.lower())
    if level is None:
        level = logging.INFO
    return level
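A quick usage sketch for these helpers. The package name and version below are hypothetical; note also that get_cache_image_tag() accepts branch_name but does not currently include it in the tag it returns:

from common import get_cache_image_tag, get_image_tag

# Hypothetical inputs, mirroring what get-build-json.py passes in
repo_name = "paperless-ngx/paperless-ngx"

print(get_image_tag(repo_name, "qpdf", "10.6.3"))
# -> ghcr.io/paperless-ngx/paperless-ngx/builder/qpdf:10.6.3

print(get_cache_image_tag(repo_name, "qpdf", "10.6.3", "dev"))
# -> ghcr.io/paperless-ngx/paperless-ngx/builder/cache/qpdf:10.6.3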
92 .github/scripts/get-build-json.py vendored Executable file
@@ -0,0 +1,92 @@
#!/usr/bin/env python3
"""
This is a helper script for the multi-stage Docker image builder.
It provides a single point of configuration for package version control.
The output JSON object is used by the CI workflow to determine what versions
to build and pull into the final Docker image.

Python package information is obtained from the Pipfile.lock. As this is
kept updated by dependabot, it usually will need no further configuration.
The sole exception currently is pikepdf, which has a dependency on qpdf,
and is configured here to use the latest version of qpdf built by the workflow.

Other package version information is configured directly below, generally by
setting the version and Git information, if any.

"""
import argparse
import json
import os
from pathlib import Path
from typing import Final

from common import get_cache_image_tag
from common import get_image_tag


def _main():
    parser = argparse.ArgumentParser(
        description="Generate a JSON object of information required to build the given package, based on the Pipfile.lock",
    )
    parser.add_argument(
        "package",
        help="The name of the package to generate JSON for",
    )

    PIPFILE_LOCK_PATH: Final[Path] = Path("Pipfile.lock")
    BUILD_CONFIG_PATH: Final[Path] = Path(".build-config.json")

    # Read the main config file
    build_json: Final = json.loads(BUILD_CONFIG_PATH.read_text())

    # Read Pipfile.lock file
    pipfile_data: Final = json.loads(PIPFILE_LOCK_PATH.read_text())

    args: Final = parser.parse_args()

    # Read from environment variables set by GitHub Actions
    repo_name: Final[str] = os.environ["GITHUB_REPOSITORY"]
    branch_name: Final[str] = os.environ["GITHUB_REF_NAME"]

    # Default output values
    version = None
    extra_config = {}

    if args.package in pipfile_data["default"]:
        # Read the version from Pipfile.lock
        pkg_data = pipfile_data["default"][args.package]
        pkg_version = pkg_data["version"].split("==")[-1]
        version = pkg_version

        # Any extra/special values needed
        if args.package == "pikepdf":
            extra_config["qpdf_version"] = build_json["qpdf"]["version"]

    elif args.package in build_json:
        version = build_json[args.package]["version"]

    else:
        raise NotImplementedError(args.package)

    # The JSON object we'll output
    output = {
        "name": args.package,
        "version": version,
        "image_tag": get_image_tag(repo_name, args.package, version),
        "cache_tag": get_cache_image_tag(
            repo_name,
            args.package,
            version,
            branch_name,
        ),
    }

    # Add anything special a package may need
    output.update(extra_config)

    # Output the JSON info to stdout
    print(json.dumps(output))


if __name__ == "__main__":
    _main()
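For illustration, a sketch of the JSON object the script would print for pikepdf. The keys follow directly from the code above, while the version numbers are hypothetical:

import json

# Hypothetical output of `get-build-json.py pikepdf`
expected = {
    "name": "pikepdf",
    "version": "5.1.1",  # would come from Pipfile.lock
    "image_tag": "ghcr.io/paperless-ngx/paperless-ngx/builder/pikepdf:5.1.1",
    "cache_tag": "ghcr.io/paperless-ngx/paperless-ngx/builder/cache/pikepdf:5.1.1",
    "qpdf_version": "10.6.3",  # extra_config entry added only for pikepdf
}
print(json.dumps(expected))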
15 .github/stale.yml vendored
@@ -1,18 +1,23 @@
 # Number of days of inactivity before an issue becomes stale
 daysUntilStale: 30

 # Number of days of inactivity before a stale issue is closed
 daysUntilClose: 7
-# Issues with these labels will never be considered stale
-exemptLabels:
-  - pinned
-  - security
-  - fixpending
+# Only issues or pull requests with all of these labels are checked if stale. Defaults to `[]` (disabled)
+onlyLabels: [cant-reproduce]
 # Label to use when marking an issue as stale
 staleLabel: stale

 # Comment to post when marking an issue as stale. Set to `false` to disable
 markComment: >
   This issue has been automatically marked as stale because it has not had
   recent activity. It will be closed if no further activity occurs. Thank you
   for your contributions.

 # Comment to post when closing a stale issue. Set to `false` to disable
 closeComment: false
+
+# See https://github.com/marketplace/stale for more info on the app
+# and https://github.com/probot/stale for the configuration docs
517 .github/workflows/ci.yml vendored
@@ -3,8 +3,10 @@ name: ci
 on:
   push:
     tags:
-      - ngx-*
-      - beta-*
+      # https://semver.org/#spec-item-2
+      - 'v[0-9]+.[0-9]+.[0-9]+'
+      # https://semver.org/#spec-item-9
+      - 'v[0-9]+.[0-9]+.[0-9]+-beta.rc[0-9]+'
     branches-ignore:
       - 'translations**'
   pull_request:
@@ -12,281 +14,428 @@ on:
       - 'translations**'
 
 jobs:
+  pre-commit:
+    name: Linting Checks
+    runs-on: ubuntu-latest
+    steps:
+      -
+        name: Checkout repository
+        uses: actions/checkout@v3
+      -
+        name: Install tools
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.9"
+      -
+        name: Check files
+        uses: pre-commit/action@v3.0.0
+
   documentation:
+    name: "Build Documentation"
     runs-on: ubuntu-20.04
+    needs:
+      - pre-commit
     steps:
       -
         name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
+      -
+        name: Install pipenv
+        run: |
+          pipx install pipenv==2022.8.5
+          pipenv --version
       -
         name: Set up Python
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v4
         with:
           python-version: 3.9
-      -
-        name: Get pip cache dir
-        id: pip-cache
-        run: |
-          echo "::set-output name=dir::$(pip cache dir)"
-      -
-        name: Persistent Github pip cache
-        uses: actions/cache@v2
-        with:
-          path: ${{ steps.pip-cache.outputs.dir }}
-          key: ${{ runner.os }}-pip3.8}
+          cache: "pipenv"
+          cache-dependency-path: 'Pipfile.lock'
       -
         name: Install dependencies
         run: |
-          pip install --upgrade pipenv
-          pipenv install --system --dev --ignore-pipfile
+          pipenv sync --dev
+      -
+        name: List installed Python dependencies
+        run: |
+          pipenv run pip list
       -
         name: Make documentation
         run: |
           cd docs/
-          make html
+          pipenv run make html
       -
         name: Upload artifact
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
        with:
           name: documentation
           path: docs/_build/html/
 
-  codestyle:
-    runs-on: ubuntu-20.04
-    steps:
-      -
-        name: Checkout
-        uses: actions/checkout@v2
-      -
-        name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: 3.9
-      -
-        name: Get pip cache dir
-        id: pip-cache
-        run: |
-          echo "::set-output name=dir::$(pip cache dir)"
-      -
-        name: Persistent Github pip cache
-        uses: actions/cache@v2
-        with:
-          path: ${{ steps.pip-cache.outputs.dir }}
-          key: ${{ runner.os }}-pip${{ matrix.python-version }}
-      -
-        name: Install dependencies
-        run: |
-          pip install --upgrade pipenv
-          pipenv install --system --dev --ignore-pipfile
-      -
-        name: Codestyle
-        run: |
-          cd src/
-          pycodestyle --max-line-length=88 --ignore=E121,E123,E126,E226,E24,E704,W503,W504,E203
-
-  codeformatting:
-    runs-on: ubuntu-20.04
-    steps:
-      -
-        name: Checkout
-        uses: actions/checkout@v2
-      -
-        name: Run black
-        uses: psf/black@stable
-        with:
-          options: "--check --diff"
-          version: "22.3.0"
-
-  tests:
+  tests-backend:
+    name: "Tests (${{ matrix.python-version }})"
     runs-on: ubuntu-20.04
+    needs:
+      - pre-commit
     strategy:
       matrix:
-        python-version: ['3.8', '3.9']
+        python-version: ['3.8', '3.9', '3.10']
       fail-fast: false
     steps:
       -
         name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
+        with:
+          fetch-depth: 2
+      -
+        name: Install pipenv
+        run: |
+          pipx install pipenv==2022.8.5
+          pipenv --version
       -
         name: Set up Python
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v4
         with:
           python-version: "${{ matrix.python-version }}"
+          cache: "pipenv"
+          cache-dependency-path: 'Pipfile.lock'
       -
-        name: Get pip cache dir
-        id: pip-cache
-        run: |
-          echo "::set-output name=dir::$(pip cache dir)"
-      -
-        name: Persistent Github pip cache
-        uses: actions/cache@v2
-        with:
-          path: ${{ steps.pip-cache.outputs.dir }}
-          key: ${{ runner.os }}-pip${{ matrix.python-version }}
-      -
-        name: Install dependencies
+        name: Install system dependencies
         run: |
           sudo apt-get update -qq
-          sudo apt-get install -qq --no-install-recommends unpaper tesseract-ocr imagemagick ghostscript optipng
-          pip install --upgrade pipenv
-          pipenv install --system --dev --ignore-pipfile
+          sudo apt-get install -qq --no-install-recommends unpaper tesseract-ocr imagemagick ghostscript libzbar0 poppler-utils
+      -
+        name: Install Python dependencies
+        run: |
+          pipenv sync --dev
+      -
+        name: List installed Python dependencies
+        run: |
+          pipenv run pip list
       -
         name: Tests
         run: |
           cd src/
-          pytest
+          pipenv run pytest
+      -
+        name: Get changed files
+        id: changed-files-specific
+        uses: tj-actions/changed-files@v29.0.2
+        with:
+          files: |
+            src/**
+      -
+        name: List all changed files
+        run: |
+          for file in ${{ steps.changed-files-specific.outputs.all_changed_files }}; do
+            echo "${file} was changed"
+          done
       -
         name: Publish coverage results
-        if: matrix.python-version == '3.9'
+        if: matrix.python-version == '3.9' && steps.changed-files-specific.outputs.any_changed == 'true'
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         # https://github.com/coveralls-clients/coveralls-python/issues/251
         run: |
           cd src/
-          coveralls --service=github
+          pipenv run coveralls --service=github
+
+  tests-frontend:
+    name: "Tests Frontend"
+    runs-on: ubuntu-20.04
+    needs:
+      - pre-commit
+    strategy:
+      matrix:
+        node-version: [16.x]
+    steps:
+      - uses: actions/checkout@v3
+      -
+        name: Use Node.js ${{ matrix.node-version }}
+        uses: actions/setup-node@v3
+        with:
+          node-version: ${{ matrix.node-version }}
+      - run: cd src-ui && npm ci
+      - run: cd src-ui && npm run test
+      - run: cd src-ui && npm run e2e:ci
+
+  prepare-docker-build:
+    name: Prepare Docker Pipeline Data
+    if: github.event_name == 'push' && (startsWith(github.ref, 'refs/heads/feature-') || github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/beta' || contains(github.ref, 'beta.rc') || startsWith(github.ref, 'refs/tags/v'))
+    runs-on: ubuntu-20.04
+    # If the push triggered the installer library workflow, wait for it to
+    # complete here. This ensures the required versions for the final
+    # image have been built, while not waiting at all if the versions haven't changed
+    concurrency:
+      group: build-installer-library
+      cancel-in-progress: false
+    needs:
+      - documentation
+      - tests-backend
+      - tests-frontend
+    steps:
+      -
+        name: Set ghcr repository name
+        id: set-ghcr-repository
+        run: |
+          ghcr_name=$(echo "${GITHUB_REPOSITORY}" | awk '{ print tolower($0) }')
+          echo ::set-output name=repository::${ghcr_name}
+      -
+        name: Checkout
+        uses: actions/checkout@v3
+      -
+        name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.9"
+      -
+        name: Setup qpdf image
+        id: qpdf-setup
+        run: |
+          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py qpdf)
+
+          echo ${build_json}
+
+          echo ::set-output name=qpdf-json::${build_json}
+      -
+        name: Setup psycopg2 image
+        id: psycopg2-setup
+        run: |
+          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py psycopg2)
+
+          echo ${build_json}
+
+          echo ::set-output name=psycopg2-json::${build_json}
+      -
+        name: Setup pikepdf image
+        id: pikepdf-setup
+        run: |
+          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py pikepdf)
+
+          echo ${build_json}
+
+          echo ::set-output name=pikepdf-json::${build_json}
+      -
+        name: Setup jbig2enc image
+        id: jbig2enc-setup
+        run: |
+          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py jbig2enc)
+
+          echo ${build_json}
+
+          echo ::set-output name=jbig2enc-json::${build_json}
+
+    outputs:
+      ghcr-repository: ${{ steps.set-ghcr-repository.outputs.repository }}
+      qpdf-json: ${{ steps.qpdf-setup.outputs.qpdf-json }}
+      pikepdf-json: ${{ steps.pikepdf-setup.outputs.pikepdf-json }}
+      psycopg2-json: ${{ steps.psycopg2-setup.outputs.psycopg2-json }}
+      jbig2enc-json: ${{ steps.jbig2enc-setup.outputs.jbig2enc-json}}
 
   # build and push image to docker hub.
   build-docker-image:
-    if: github.event_name == 'push' && (startsWith(github.ref, 'refs/heads/feature-') || github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/beta' || startsWith(github.ref, 'refs/tags/ngx-') || startsWith(github.ref, 'refs/tags/beta-'))
-    runs-on: ubuntu-latest
-    needs: [tests, codeformatting, codestyle]
+    runs-on: ubuntu-20.04
+    concurrency:
+      group: ${{ github.workflow }}-build-docker-image-${{ github.ref_name }}
+      cancel-in-progress: true
+    needs:
+      - prepare-docker-build
     steps:
       -
-        name: Prepare
-        id: prepare
+        name: Check pushing to Docker Hub
+        id: docker-hub
+        # Only push to Dockerhub from the main repo AND the ref is either:
+        #  main
+        #  dev
+        #  beta
+        #  a tag
+        # Otherwise forks would require a Docker Hub account and secrets setup
         run: |
-          IMAGE_NAME=ghcr.io/${{ github.repository }}
-          if [[ $GITHUB_REF == refs/tags/ngx-* ]]; then
-            TAGS=${IMAGE_NAME}:${GITHUB_REF#refs/tags/ngx-},${IMAGE_NAME}:latest
-            INSPECT_TAG=${IMAGE_NAME}:latest
-          elif [[ $GITHUB_REF == refs/tags/beta-* ]]; then
-            TAGS=${IMAGE_NAME}:beta
-            INSPECT_TAG=${TAGS}
-          elif [[ $GITHUB_REF == refs/heads/* ]]; then
-            TAGS=${IMAGE_NAME}:${GITHUB_REF#refs/heads/}
-            INSPECT_TAG=${TAGS}
+          if [[ ${{ needs.prepare-docker-build.outputs.ghcr-repository }} == "paperless-ngx/paperless-ngx" && ( ${{ github.ref_name }} == "main" || ${{ github.ref_name }} == "dev" || ${{ github.ref_name }} == "beta" || ${{ startsWith(github.ref, 'refs/tags/v') }} == "true" ) ]] ; then
+            echo "Enabling DockerHub image push"
+            echo ::set-output name=enable::"true"
           else
-            exit 1
+            echo "Not pushing to DockerHub"
+            echo ::set-output name=enable::"false"
           fi
-          echo ::set-output name=tags::${TAGS}
-          echo ::set-output name=inspect_tag::${INSPECT_TAG}
+      -
+        name: Gather Docker metadata
+        id: docker-meta
+        uses: docker/metadata-action@v4
+        with:
+          images: |
+            ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}
+            name=paperlessngx/paperless-ngx,enable=${{ steps.docker-hub.outputs.enable }}
+          tags: |
+            # Tag branches with branch name
+            type=ref,event=branch
+            # Process semver tags
+            # For a tag x.y.z or vX.Y.Z, output an x.y.z and x.y image tag
+            type=semver,pattern={{version}}
+            type=semver,pattern={{major}}.{{minor}}
       -
         name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
      -
         name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v1
+        uses: docker/setup-buildx-action@v2
       -
         name: Set up QEMU
-        uses: docker/setup-qemu-action@v1
+        uses: docker/setup-qemu-action@v2
       -
         name: Login to Github Container Registry
-        uses: docker/login-action@v1
+        uses: docker/login-action@v2
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
+      -
+        name: Login to Docker Hub
+        uses: docker/login-action@v2
+        # Don't attempt to login if not pushing to Docker Hub
+        if: steps.docker-hub.outputs.enable == 'true'
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
       -
         name: Build and push
-        uses: docker/build-push-action@v2
+        uses: docker/build-push-action@v3
         with:
           context: .
           file: ./Dockerfile
           platforms: linux/amd64,linux/arm/v7,linux/arm64
-          push: true
-          tags: ${{ steps.prepare.outputs.tags }}
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
+          push: ${{ github.event_name != 'pull_request' }}
+          tags: ${{ steps.docker-meta.outputs.tags }}
+          labels: ${{ steps.docker-meta.outputs.labels }}
+          build-args: |
+            JBIG2ENC_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.jbig2enc-json).version }}
+            QPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.qpdf-json).version }}
+            PIKEPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.pikepdf-json).version }}
+            PSYCOPG2_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.psycopg2-json).version }}
+          # Get cache layers from this branch, then dev, then main
+          # This allows new branches to get at least some cache benefits, generally from dev
+          cache-from: |
+            type=registry,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
+            type=registry,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:dev
+            type=registry,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:main
+          cache-to: |
+            type=registry,mode=max,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
       -
         name: Inspect image
         run: |
-          docker buildx imagetools inspect ${{ steps.prepare.outputs.inspect_tag }}
+          docker buildx imagetools inspect ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
       -
         name: Export frontend artifact from docker
         run: |
-          docker run -d --name frontend-extract ${{ steps.prepare.outputs.inspect_tag }}
+          docker create --name frontend-extract ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
           docker cp frontend-extract:/usr/src/paperless/src/documents/static/frontend src/documents/static/frontend/
       -
         name: Upload frontend artifact
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         with:
           name: frontend-compiled
           path: src/documents/static/frontend/
 
   build-release:
-    needs: [build-docker-image, documentation, tests, codeformatting, codestyle]
+    needs:
+      - build-docker-image
     runs-on: ubuntu-20.04
     steps:
       -
         name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
+      -
+        name: Install pipenv
+        run: |
+          pip3 install --upgrade pip setuptools wheel pipx
+          pipx install pipenv
       -
         name: Set up Python
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v4
         with:
           python-version: 3.9
+          cache: "pipenv"
+          cache-dependency-path: 'Pipfile.lock'
       -
-        name: Install dependencies
+        name: Install Python dependencies
+        run: |
+          pipenv sync --dev
+      -
+        name: Install system dependencies
         run: |
           sudo apt-get update -qq
           sudo apt-get install -qq --no-install-recommends gettext liblept5
-          pip3 install --upgrade pip setuptools wheel
-          pip3 install -r requirements.txt
       -
         name: Download frontend artifact
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v3
         with:
           name: frontend-compiled
           path: src/documents/static/frontend/
       -
         name: Download documentation artifact
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v3
         with:
           name: documentation
           path: docs/_build/html/
+      -
+        name: Generate requirements file
+        run: |
+          pipenv requirements > requirements.txt
+      -
+        name: Compile messages
+        run: |
+          cd src/
+          pipenv run python3 manage.py compilemessages
+      -
+        name: Collect static files
+        run: |
+          cd src/
+          pipenv run python3 manage.py collectstatic --no-input
       -
         name: Move files
         run: |
           mkdir dist
           mkdir dist/paperless-ngx
           mkdir dist/paperless-ngx/scripts
-          cp .dockerignore .env Dockerfile Pipfile Pipfile.lock LICENSE README.md requirements.txt dist/paperless-ngx/
+          cp .dockerignore .env Dockerfile Pipfile Pipfile.lock requirements.txt LICENSE README.md dist/paperless-ngx/
           cp paperless.conf.example dist/paperless-ngx/paperless.conf
           cp gunicorn.conf.py dist/paperless-ngx/gunicorn.conf.py
-          cp docker/ dist/paperless-ngx/docker -r
+          cp -r docker/ dist/paperless-ngx/docker
           cp scripts/*.service scripts/*.sh dist/paperless-ngx/scripts/
-          cp src/ dist/paperless-ngx/src -r
-          cp docs/_build/html/ dist/paperless-ngx/docs -r
-      -
-        name: Compile messages
-        run: |
-          cd dist/paperless-ngx/src
-          python3 manage.py compilemessages
-      -
-        name: Collect static files
-        run: |
-          cd dist/paperless-ngx/src
-          python3 manage.py collectstatic --no-input
+          cp -r src/ dist/paperless-ngx/src
+          cp -r docs/_build/html/ dist/paperless-ngx/docs
+          mv static dist/paperless-ngx
       -
         name: Make release package
         run: |
           cd dist
-          find . -name __pycache__ | xargs rm -r
           tar -cJf paperless-ngx.tar.xz paperless-ngx/
       -
         name: Upload release artifact
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
        with:
           name: release
           path: dist/paperless-ngx.tar.xz
 
   publish-release:
-    runs-on: ubuntu-latest
-    needs: build-release
-    if: contains(github.ref, 'refs/tags/ngx-') || contains(github.ref, 'refs/tags/beta-')
+    runs-on: ubuntu-20.04
+    outputs:
+      prerelease: ${{ steps.get_version.outputs.prerelease }}
+      changelog: ${{ steps.create-release.outputs.body }}
+      version: ${{ steps.get_version.outputs.version }}
+    needs:
+      - build-release
+    if: github.ref_type == 'tag' && (startsWith(github.ref_name, 'v') || contains(github.ref_name, '-beta.rc'))
     steps:
       -
         name: Download release artifact
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v3
         with:
           name: release
           path: ./
@@ -294,27 +443,24 @@ jobs:
         name: Get version
         id: get_version
         run: |
-          if [[ $GITHUB_REF == refs/tags/ngx-* ]]; then
-            echo ::set-output name=version::${GITHUB_REF#refs/tags/ngx-}
-            echo ::set-output name=prerelease::false
-            echo ::set-output name=body::"For a complete list of changes, see the changelog at https://paperless-ngx.readthedocs.io/en/latest/changelog.html"
-          elif [[ $GITHUB_REF == refs/tags/beta-* ]]; then
-            echo ::set-output name=version::${GITHUB_REF#refs/tags/beta-}
+          echo ::set-output name=version::${{ github.ref_name }}
+          if [[ ${{ contains(github.ref_name, '-beta.rc') }} == 'true' ]]; then
             echo ::set-output name=prerelease::true
-            echo ::set-output name=body::"For a complete list of changes, see the changelog at https://github.com/paperless-ngx/paperless-ngx/blob/beta/docs/changelog.rst"
+          else
+            echo ::set-output name=prerelease::false
           fi
       -
-        name: Create release
-        id: create_release
-        uses: actions/create-release@v1
+        name: Create Release and Changelog
+        id: create-release
+        uses: paperless-ngx/release-drafter@master
+        with:
+          name: Paperless-ngx ${{ steps.get_version.outputs.version }}
+          tag: ${{ steps.get_version.outputs.version }}
+          version: ${{ steps.get_version.outputs.version }}
+          prerelease: ${{ steps.get_version.outputs.prerelease }}
+          publish: true # ensures release is not marked as draft
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          tag_name: ngx-${{ steps.get_version.outputs.version }}
-          release_name: Paperless-ngx ${{ steps.get_version.outputs.version }}
-          draft: false
-          prerelease: ${{ steps.get_version.outputs.prerelease }}
-          body: ${{ steps.get_version.outputs.body }}
       -
         name: Upload release archive
         id: upload-release-asset
@@ -322,7 +468,56 @@ jobs:
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }} # This pulls from the CREATE RELEASE step above, referencing it's ID to get its outputs object, which include a `upload_url`. See this blog post for more info: https://jasonet.co/posts/new-features-of-github-actions/#passing-data-to-future-steps
+          upload_url: ${{ steps.create-release.outputs.upload_url }}
           asset_path: ./paperless-ngx.tar.xz
           asset_name: paperless-ngx-${{ steps.get_version.outputs.version }}.tar.xz
           asset_content_type: application/x-xz
+
+  append-changelog:
+    runs-on: ubuntu-20.04
+    needs:
+      - publish-release
+    if: needs.publish-release.outputs.prerelease == 'false'
+    steps:
+      -
+        name: Checkout
+        uses: actions/checkout@v3
+        with:
+          ref: main
+      -
+        name: Append Changelog to docs
+        id: append-Changelog
+        working-directory: docs
+        run: |
+          git branch ${{ needs.publish-release.outputs.version }}-changelog
+          git checkout ${{ needs.publish-release.outputs.version }}-changelog
+          echo -e "# Changelog\n\n${{ needs.publish-release.outputs.changelog }}\n" > changelog-new.md
+          echo "Manually linking usernames"
+          sed -i -r 's|@(.+?) \(\[#|[@\1](https://github.com/\1) ([#|ig' changelog-new.md
+          CURRENT_CHANGELOG=`tail --lines +2 changelog.md`
+          echo -e "$CURRENT_CHANGELOG" >> changelog-new.md
+          mv changelog-new.md changelog.md
+          git config --global user.name "github-actions"
+          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
+          git commit -am "Changelog ${{ steps.get_version.outputs.version }} - GHA"
+          git push origin ${{ needs.publish-release.outputs.version }}-changelog
+      -
+        name: Create Pull Request
+        uses: actions/github-script@v6
+        with:
+          script: |
+            const { repo, owner } = context.repo;
+            const result = await github.rest.pulls.create({
+              title: '[Documentation] Add ${{ needs.publish-release.outputs.version }} changelog',
+              owner,
+              repo,
+              head: '${{ needs.publish-release.outputs.version }}-changelog',
+              base: 'main',
+              body: 'This PR is auto-generated by CI.'
+            });
+            github.rest.issues.addLabels({
+              owner,
+              repo,
+              issue_number: result.data.number,
+              labels: ['documentation']
+            });
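The sed one-liner in the append-changelog job above rewrites plain @username mentions in the drafted changelog into markdown profile links. A rough Python sketch of the same substitution, applied to a hypothetical changelog line:

import re

# Hypothetical changelog line in the release-drafter format
line = "@someuser ([#1234](https://github.com/paperless-ngx/paperless-ngx/pull/1234))"

# Equivalent of the sed expression: wrap the "@username (" that precedes
# a "[#" PR link in a markdown profile link; the `i` flag maps to IGNORECASE
linked = re.sub(
    r"@(.+?) \(\[#",
    r"[@\1](https://github.com/\1) ([#",
    line,
    flags=re.IGNORECASE,
)
print(linked)
# -> [@someuser](https://github.com/someuser) ([#1234](...))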
58 .github/workflows/cleanup-tags.yml vendored Normal file
@@ -0,0 +1,58 @@
# This workflow runs on certain conditions to check for and potentially
# delete container images from the GHCR which no longer have an associated
# code branch.
# Requires a PAT with the correct scope set in the secrets

name: Cleanup Image Tags

on:
  schedule:
    - cron: '0 0 * * SAT'
  delete:
  pull_request:
    types:
      - closed
  push:
    paths:
      - ".github/workflows/cleanup-tags.yml"
      - ".github/scripts/cleanup-tags.py"
      - ".github/scripts/common.py"

jobs:
  cleanup:
    name: Cleanup Image Tags
    runs-on: ubuntu-20.04
    env:
      # Requires a personal access token with the OAuth scope delete:packages
      TOKEN: ${{ secrets.GHA_CONTAINER_DELETE_TOKEN }}
    steps:
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Login to Github Container Registry
        uses: docker/login-action@v1
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      -
        name: Set up Python
        uses: actions/setup-python@v3
        with:
          python-version: "3.10"
      -
        name: Install requests
        run: |
          python -m pip install requests
      -
        name: Cleanup feature tags
        # Only run if the token is not empty
        if: "${{ env.TOKEN != '' }}"
        run: |
          python ${GITHUB_WORKSPACE}/.github/scripts/cleanup-tags.py --loglevel info --untagged --delete
      -
        name: Check all tags still pull
        run: |
          ghcr_name=$(echo "${GITHUB_REPOSITORY}" | awk '{ print tolower($0) }')
          docker pull --quiet --all-tags ghcr.io/${ghcr_name}
4 .github/workflows/codeql-analysis.yml vendored
@@ -42,7 +42,7 @@ jobs:
 
     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
-      uses: github/codeql-action/init@v1
+      uses: github/codeql-action/init@v2
       with:
         languages: ${{ matrix.language }}
         # If you wish to specify custom queries, you can do so here or in a config file.
@@ -51,4 +51,4 @@ jobs:
         # queries: ./path/to/local/query, your-org/your-repo/queries@main
 
     - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@v1
+      uses: github/codeql-action/analyze@v2
147 .github/workflows/installer-library.yml vendored Normal file
@@ -0,0 +1,147 @@
# This workflow will run to update the installer library of
# Docker images. These are the images which provide updated wheels
# .deb installation packages or maybe just some compiled library

name: Build Image Library

on:
  push:
    # Must match one of these branches AND one of the paths
    # to be triggered
    branches:
      - "main"
      - "dev"
      - "library-*"
      - "feature-*"
    paths:
      # Trigger the workflow if a Dockerfile changed
      - "docker-builders/**"
      # Trigger if a package was updated
      - ".build-config.json"
      - "Pipfile.lock"
      # Also trigger on workflow changes related to the library
      - ".github/workflows/installer-library.yml"
      - ".github/workflows/reusable-workflow-builder.yml"
      - ".github/scripts/**"

# Set a workflow level concurrency group so primary workflow
# can wait for this to complete if needed
# DO NOT CHANGE without updating main workflow group
concurrency:
  group: build-installer-library
  cancel-in-progress: false

jobs:
  prepare-docker-build:
    name: Prepare Docker Image Version Data
    runs-on: ubuntu-20.04
    steps:
      -
        name: Set ghcr repository name
        id: set-ghcr-repository
        run: |
          ghcr_name=$(echo "${GITHUB_REPOSITORY}" | awk '{ print tolower($0) }')
          echo ::set-output name=repository::${ghcr_name}
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.9"
      -
        name: Setup qpdf image
        id: qpdf-setup
        run: |
          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py qpdf)

          echo ${build_json}

          echo ::set-output name=qpdf-json::${build_json}
      -
        name: Setup psycopg2 image
        id: psycopg2-setup
        run: |
          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py psycopg2)

          echo ${build_json}

          echo ::set-output name=psycopg2-json::${build_json}
      -
        name: Setup pikepdf image
        id: pikepdf-setup
        run: |
          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py pikepdf)

          echo ${build_json}

          echo ::set-output name=pikepdf-json::${build_json}
      -
        name: Setup jbig2enc image
        id: jbig2enc-setup
        run: |
          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py jbig2enc)

          echo ${build_json}

          echo ::set-output name=jbig2enc-json::${build_json}

    outputs:
      ghcr-repository: ${{ steps.set-ghcr-repository.outputs.repository }}
      qpdf-json: ${{ steps.qpdf-setup.outputs.qpdf-json }}
      pikepdf-json: ${{ steps.pikepdf-setup.outputs.pikepdf-json }}
      psycopg2-json: ${{ steps.psycopg2-setup.outputs.psycopg2-json }}
      jbig2enc-json: ${{ steps.jbig2enc-setup.outputs.jbig2enc-json}}

  build-qpdf-debs:
    name: qpdf
    needs:
      - prepare-docker-build
    uses: ./.github/workflows/reusable-workflow-builder.yml
    with:
      dockerfile: ./docker-builders/Dockerfile.qpdf
      build-json: ${{ needs.prepare-docker-build.outputs.qpdf-json }}
      build-args: |
        QPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.qpdf-json).version }}

  build-jbig2enc:
    name: jbig2enc
    needs:
      - prepare-docker-build
    uses: ./.github/workflows/reusable-workflow-builder.yml
    with:
      dockerfile: ./docker-builders/Dockerfile.jbig2enc
      build-json: ${{ needs.prepare-docker-build.outputs.jbig2enc-json }}
      build-args: |
        JBIG2ENC_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.jbig2enc-json).version }}

  build-psycopg2-wheel:
    name: psycopg2
    needs:
      - prepare-docker-build
    uses: ./.github/workflows/reusable-workflow-builder.yml
    with:
      dockerfile: ./docker-builders/Dockerfile.psycopg2
      build-json: ${{ needs.prepare-docker-build.outputs.psycopg2-json }}
      build-args: |
        PSYCOPG2_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.psycopg2-json).version }}

  build-pikepdf-wheel:
    name: pikepdf
    needs:
      - prepare-docker-build
      - build-qpdf-debs
    uses: ./.github/workflows/reusable-workflow-builder.yml
    with:
      dockerfile: ./docker-builders/Dockerfile.pikepdf
      build-json: ${{ needs.prepare-docker-build.outputs.pikepdf-json }}
      build-args: |
        REPO=${{ needs.prepare-docker-build.outputs.ghcr-repository }}
        QPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.qpdf-json).version }}
        PIKEPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.pikepdf-json).version }}
22 .github/workflows/project-actions.yml vendored
@@ -13,6 +13,9 @@ on:
       - main
       - dev
 
+permissions:
+  contents: read
+
 env:
   todo: Todo
   done: Done
@@ -24,8 +27,8 @@ jobs:
     runs-on: ubuntu-latest
     if: github.event_name == 'issues' && (github.event.action == 'opened' || github.event.action == 'reopened')
     steps:
-      - name: Set issue status to ${{ env.todo }}
-        uses: leonsteinhaeuser/project-beta-automations@v1.2.1
+      - name: Add issue to project and set status to ${{ env.todo }}
+        uses: leonsteinhaeuser/project-beta-automations@v1.3.0
         with:
           gh_token: ${{ secrets.GH_TOKEN }}
           organization: paperless-ngx
@@ -35,13 +38,20 @@ jobs:
   pr_opened_or_reopened:
     name: pr_opened_or_reopened
     runs-on: ubuntu-latest
-    if: github.event_name == 'pull_request_target' && (github.event.action == 'opened' || github.event.action == 'reopened')
+    permissions:
+      # write permission is required for autolabeler
+      pull-requests: write
+    if: github.event_name == 'pull_request_target' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request.user.login != 'dependabot'
     steps:
-      - name: Set PR status to ${{ env.in_progress }}
-        uses: leonsteinhaeuser/project-beta-automations@v1.2.1
+      - name: Add PR to project and set status to "Needs Review"
+        uses: leonsteinhaeuser/project-beta-automations@v1.3.0
         with:
           gh_token: ${{ secrets.GH_TOKEN }}
           organization: paperless-ngx
           project_id: 2
           resource_node_id: ${{ github.event.pull_request.node_id }}
-          status_value: ${{ env.in_progress }} # Target status
+          status_value: "Needs Review" # Target status
+      - name: Label PR with release-drafter
+        uses: release-drafter/release-drafter@v5
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
53 .github/workflows/reusable-workflow-builder.yml vendored Normal file
@@ -0,0 +1,53 @@
name: Reusable Image Builder

on:
  workflow_call:
    inputs:
      dockerfile:
        required: true
        type: string
      build-json:
        required: true
        type: string
      build-args:
        required: false
        default: ""
        type: string

concurrency:
  group: ${{ github.workflow }}-${{ fromJSON(inputs.build-json).name }}-${{ fromJSON(inputs.build-json).version }}
  cancel-in-progress: false

jobs:
  build-image:
    name: Build ${{ fromJSON(inputs.build-json).name }} @ ${{ fromJSON(inputs.build-json).version }}
    runs-on: ubuntu-latest
    steps:
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Login to Github Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      -
        name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      -
        name: Set up QEMU
        uses: docker/setup-qemu-action@v2
      -
        name: Build ${{ fromJSON(inputs.build-json).name }}
        uses: docker/build-push-action@v3
        with:
          context: .
          file: ${{ inputs.dockerfile }}
          tags: ${{ fromJSON(inputs.build-json).image_tag }}
          platforms: linux/amd64,linux/arm64,linux/arm/v7
          build-args: ${{ inputs.build-args }}
          push: true
          cache-from: type=registry,ref=${{ fromJSON(inputs.build-json).cache_tag }}
          cache-to: type=registry,mode=max,ref=${{ fromJSON(inputs.build-json).cache_tag }}
11 .gitignore vendored
@@ -61,10 +61,16 @@ target/
 # PyCharm
 .idea
 
+# VS Code
+.vscode
+/src-ui/.vscode
+/docs/.vscode
+
 # Other stuff that doesn't belong
 .virtualenv
 virtualenv
 /venv
+.venv/
 /docker-compose.env
 /docker-compose.yml
 
@@ -81,8 +87,9 @@ scripts/nuke
 /paperless.conf
 /consume/
 /export/
-/src-ui/.vscode
 
 # this is where the compiled frontend is moved to.
 /src/documents/static/frontend/
-/docs/.vscode/settings.json
+
+# mac os
+.DS_Store
8 .hadolint.yml Normal file
@@ -0,0 +1,8 @@
failure-threshold: warning
ignored:
  # https://github.com/hadolint/hadolint/wiki/DL3008
  - DL3008
  # https://github.com/hadolint/hadolint/wiki/DL3013
  - DL3013
  # https://github.com/hadolint/hadolint/wiki/DL3003
  - DL3003
.pre-commit-config.yaml
@@ -5,7 +5,7 @@
 repos:
   # General hooks
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.1.0
+    rev: v4.3.0
     hooks:
       - id: check-docstring-first
       - id: check-json
@@ -27,7 +27,7 @@ repos:
       - id: check-case-conflict
       - id: detect-private-key
   - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: "v2.6.1"
+    rev: "v2.7.1"
     hooks:
       - id: prettier
        types_or:
@@ -37,17 +37,17 @@ repos:
         exclude: "(^Pipfile\\.lock$)"
   # Python hooks
   - repo: https://github.com/asottile/reorder_python_imports
-    rev: v3.0.1
+    rev: v3.8.2
     hooks:
       - id: reorder-python-imports
         exclude: "(migrations)"
   - repo: https://github.com/asottile/yesqa
-    rev: "v1.3.0"
+    rev: "v1.4.0"
     hooks:
       - id: yesqa
         exclude: "(migrations)"
   - repo: https://github.com/asottile/add-trailing-comma
-    rev: "v2.2.1"
+    rev: "v2.2.3"
     hooks:
       - id: add-trailing-comma
         exclude: "(migrations)"
@@ -59,14 +59,21 @@ repos:
         args:
           - "--config=./src/setup.cfg"
   - repo: https://github.com/psf/black
-    rev: 22.3.0
+    rev: 22.6.0
     hooks:
       - id: black
-  # Dockerfile hooks
-  - repo: https://github.com/pryorda/dockerfilelint-precommit-hooks
-    rev: "v0.1.0"
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v2.37.3
     hooks:
-      - id: dockerfilelint
+      - id: pyupgrade
+        exclude: "(migrations)"
+        args:
+          - "--py38-plus"
+  # Dockerfile hooks
+  - repo: https://github.com/AleksaC/hadolint-py
+    rev: v2.10.0
+    hooks:
+      - id: hadolint
   # Shell script hooks
   - repo: https://github.com/lovesegfault/beautysh
     rev: v6.2.1
4 .prettierrc Normal file
@@ -0,0 +1,4 @@
# https://prettier.io/docs/en/options.html#semicolons
semi: false
# https://prettier.io/docs/en/options.html#quotes
singleQuote: true
9 CODEOWNERS Normal file
@@ -0,0 +1,9 @@
/.github/workflows/ @paperless-ngx/ci-cd
/docker/ @paperless-ngx/ci-cd
/scripts/ @paperless-ngx/ci-cd

/src-ui/ @paperless-ngx/frontend

/src/ @paperless-ngx/backend
Pipfile* @paperless-ngx/backend
*.py @paperless-ngx/backend
CODE_OF_CONDUCT.md
@@ -17,23 +17,23 @@ diverse, inclusive, and healthy community.
 Examples of behavior that contributes to a positive environment for our
 community include:
 
-* Demonstrating empathy and kindness toward other people
-* Being respectful of differing opinions, viewpoints, and experiences
-* Giving and gracefully accepting constructive feedback
-* Accepting responsibility and apologizing to those affected by our mistakes,
+- Demonstrating empathy and kindness toward other people
+- Being respectful of differing opinions, viewpoints, and experiences
+- Giving and gracefully accepting constructive feedback
+- Accepting responsibility and apologizing to those affected by our mistakes,
   and learning from the experience
-* Focusing on what is best not just for us as individuals, but for the
+- Focusing on what is best not just for us as individuals, but for the
   overall community
 
 Examples of unacceptable behavior include:
 
-* The use of sexualized language or imagery, and sexual attention or
+- The use of sexualized language or imagery, and sexual attention or
   advances of any kind
-* Trolling, insulting or derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or email
+- Trolling, insulting or derogatory comments, and personal or political attacks
+- Public or private harassment
+- Publishing others' private information, such as a physical or email
   address, without their explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
+- Other conduct which could reasonably be considered inappropriate in a
   professional setting
 
 ## Enforcement Responsibilities
@@ -106,7 +106,7 @@ Violating these terms may lead to a permanent ban.
 ### 4. Permanent Ban
 
 **Community Impact**: Demonstrating a pattern of violation of community
 standards, including sustained inappropriate behavior, harassment of an
 individual, or aggression toward or disparagement of classes of individuals.
 
 **Consequence**: A permanent ban from any sort of public interaction within
@@ -4,10 +4,10 @@ If you feel like contributing to the project, please do! Bug fixes and improveme

 If you want to implement something big:

-* Please start a discussion about that in the issues! Maybe something similar is already in development and we can make it happen together.
+- Please start a discussion about that in the issues! Maybe something similar is already in development and we can make it happen together.
-* When making additions to the project, consider if the majority of users will benefit from your change. If not, you're probably better off forking the project.
+- When making additions to the project, consider if the majority of users will benefit from your change. If not, you're probably better off forking the project.
-* Also consider if your change will get in the way of other users. A good change is a change that enhances the experience of some users who want that change and does not affect users who do not care about the change.
+- Also consider if your change will get in the way of other users. A good change is a change that enhances the experience of some users who want that change and does not affect users who do not care about the change.
-* Please see the [paperless-ngx merge process](#merging-prs) below.
+- Please see the [paperless-ngx merge process](#merging-prs) below.

 ## Python
@@ -27,6 +27,8 @@ Please format and test your code! I know it's a hassle, but it makes sure that y

 To test your code, execute `pytest` in the src/ directory. This also generates a html coverage report, which you can use to see if you missed anything important during testing.

+Before you can run `pytest`, ensure to [properly set up your local environment](https://paperless-ngx.readthedocs.io/en/latest/extending.html#initial-setup-and-first-start).
+
 ## More info:

 ... is available in the documentation. https://paperless-ngx.readthedocs.io/en/latest/extending.html
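Spelled out as commands, the workflow the hunk above describes might look like this (a sketch; it assumes the dev dependencies from the Pipfile are installed via pipenv):

```bash
# One-time: install runtime and dev dependencies (pytest and its plugins).
pipenv install --dev

# Run the suite from the backend source tree; the HTML coverage report
# described above is written according to the project's pytest configuration.
cd src
pipenv run pytest
```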
@@ -41,9 +43,9 @@ PRs deemed `non-trivial` will go through a stricter review process before being

 Examples of `non-trivial` PRs might include:

-* Additional features
+- Additional features
-* Large changes to many distinct files
+- Large changes to many distinct files
-* Breaking or depreciation of existing features
+- Breaking or deprecation of existing features

 Our community review process for `non-trivial` PRs is the following:
@@ -75,18 +77,18 @@ If a language has already been added, and you would like to contribute new trans
 If you would like the project to be translated to another language, first head over to https://crwd.in/paperless-ngx to check if that language has already been enabled for translation.
 If not, please request the language to be added by creating an issue on GitHub. The issue should contain:

-* English name of the language (the localized name can be added on Crowdin).
+- English name of the language (the localized name can be added on Crowdin).
-* ISO language code. A list of those can be found here: https://support.crowdin.com/enterprise/language-codes/
+- ISO language code. A list of those can be found here: https://support.crowdin.com/enterprise/language-codes/
-* Date format commonly used for the language, e.g. dd/mm/yyyy, mm/dd/yyyy, etc.
+- Date format commonly used for the language, e.g. dd/mm/yyyy, mm/dd/yyyy, etc.

 After the language has been added and some translations have been made on Crowdin, the language needs to be enabled in the code.
 Note that there is no need to manually add a .po or .xlf file as those will be automatically generated and imported from Crowdin.
 The following files need to be changed:

-* src-ui/angular.json (under the _projects/paperless-ui/i18n/locales_ JSON key)
+- src-ui/angular.json (under the _projects/paperless-ui/i18n/locales_ JSON key)
-* src/paperless/settings.py (in the _LANGUAGES_ array)
+- src/paperless/settings.py (in the _LANGUAGES_ array)
-* src-ui/src/app/services/settings.service.ts (inside the _getLanguageOptions_ method)
+- src-ui/src/app/services/settings.service.ts (inside the _getLanguageOptions_ method)
-* src-ui/src/app/app.module.ts (import locale from _angular/common/locales_ and call _registerLocaleData_)
+- src-ui/src/app/app.module.ts (import locale from _angular/common/locales_ and call _registerLocaleData_)

 Please add the language in the correct order, alphabetically by locale.
 Note that _en-us_ needs to stay on top of the list, as it is the default project language
@@ -102,26 +104,26 @@ Paperless-ngx is a community project. We do our best to delegate permission and

 As of writing, there are 21 members in paperless-ngx. 4 of these people have complete administrative privileges to the repo:

-* [@shamoon](https://github.com/shamoon)
+- [@shamoon](https://github.com/shamoon)
-* [@bauerj](https://github.com/bauerj)
+- [@bauerj](https://github.com/bauerj)
-* [@qcasey](https://github.com/qcasey)
+- [@qcasey](https://github.com/qcasey)
-* [@FrankStrieter](https://github.com/FrankStrieter)
+- [@FrankStrieter](https://github.com/FrankStrieter)

 There are 5 teams collaborating on specific tasks within paperless-ngx:

-* @paperless-ngx/backend (Python / django)
+- @paperless-ngx/backend (Python / django)
-* @paperless-ngx/frontend (JavaScript / Typescript)
+- @paperless-ngx/frontend (JavaScript / Typescript)
-* @paperless-ngx/ci-cd (GitHub Actions / Deployment)
+- @paperless-ngx/ci-cd (GitHub Actions / Deployment)
-* @paperless-ngx/issues (Issue triage)
+- @paperless-ngx/issues (Issue triage)
-* @paperless-ngx/test (General testing for larger PRs)
+- @paperless-ngx/test (General testing for larger PRs)

 ## Permissions

 All team members are notified when mentioned or assigned to a relevant issue or pull request. Additionally, each team has slightly different access to paperless-ngx:

-* The **test** team has no special permissions.
+- The **test** team has no special permissions.
-* The **issues** team has `triage` access. This means they can organize issues and pull requests.
+- The **issues** team has `triage` access. This means they can organize issues and pull requests.
-* The **backend**, **frontend**, and **ci-cd** teams have `write` access. This means they can approve PRs and push code, containers, releases, and more.
+- The **backend**, **frontend**, and **ci-cd** teams have `write` access. This means they can approve PRs and push code, containers, releases, and more.

 ## Joining
359  Dockerfile
@@ -1,134 +1,245 @@
-FROM node:16 AS compile-frontend
-
-COPY . /src
+# syntax=docker/dockerfile:1.4
+
+# Pull the installer images from the library
+# These are all built previously
+# They provide either a .deb or .whl
+
+ARG JBIG2ENC_VERSION
+ARG QPDF_VERSION
+ARG PIKEPDF_VERSION
+ARG PSYCOPG2_VERSION
+
+FROM ghcr.io/paperless-ngx/paperless-ngx/builder/jbig2enc:${JBIG2ENC_VERSION} as jbig2enc-builder
+FROM ghcr.io/paperless-ngx/paperless-ngx/builder/qpdf:${QPDF_VERSION} as qpdf-builder
+FROM ghcr.io/paperless-ngx/paperless-ngx/builder/pikepdf:${PIKEPDF_VERSION} as pikepdf-builder
+FROM ghcr.io/paperless-ngx/paperless-ngx/builder/psycopg2:${PSYCOPG2_VERSION} as psycopg2-builder
+
+FROM --platform=$BUILDPLATFORM node:16-bullseye-slim AS compile-frontend
+
+# This stage compiles the frontend
+# This stage runs once for the native platform, as the outputs are not
+# dependent on target arch
+# Inputs: None
+
+COPY ./src-ui /src/src-ui

 WORKDIR /src/src-ui
-RUN npm update npm -g && npm install
-RUN ./node_modules/.bin/ng build --configuration production
+RUN set -eux \
+  && npm update npm -g \
+  && npm ci --omit=optional
+RUN set -eux \
+  && ./node_modules/.bin/ng build --configuration production

-FROM ubuntu:20.04 AS jbig2enc
-
-WORKDIR /usr/src/jbig2enc
-
-RUN apt-get update && apt-get install -y --no-install-recommends build-essential automake libtool libleptonica-dev zlib1g-dev git ca-certificates
-
-RUN git clone https://github.com/agl/jbig2enc .
-RUN ./autogen.sh
-RUN ./configure && make
-
-FROM python:3.9-slim-bullseye
+FROM python:3.9-slim-bullseye as main-app

-# Binary dependencies
-RUN apt-get update \
-  && apt-get -y --no-install-recommends install \
-    # Basic dependencies
-    curl \
-    gnupg \
-    imagemagick \
-    gettext \
-    tzdata \
-    gosu \
-    # fonts for text file thumbnail generation
-    fonts-liberation \
-    # for Numpy
-    libatlas-base-dev \
-    libxslt1-dev \
-    # thumbnail size reduction
-    optipng \
-    libxml2 \
-    pngquant \
-    unpaper \
-    zlib1g \
-    ghostscript \
-    icc-profiles-free \
-    # Mime type detection
-    file \
-    libmagic-dev \
-    media-types \
-    # OCRmyPDF dependencies
-    liblept5 \
-    tesseract-ocr \
-    tesseract-ocr-eng \
-    tesseract-ocr-deu \
-    tesseract-ocr-fra \
-    tesseract-ocr-ita \
-    tesseract-ocr-spa \
-  && rm -rf /var/lib/apt/lists/*
-
-# copy jbig2enc
-COPY --from=jbig2enc /usr/src/jbig2enc/src/.libs/libjbig2enc* /usr/local/lib/
-COPY --from=jbig2enc /usr/src/jbig2enc/src/jbig2 /usr/local/bin/
-COPY --from=jbig2enc /usr/src/jbig2enc/src/*.h /usr/local/include/
-
-WORKDIR /usr/src/paperless/src/
-
-COPY requirements.txt ../
-
-# Python dependencies
-RUN apt-get update \
-  && apt-get -y --no-install-recommends install \
-    build-essential \
-    libpq-dev \
-    git \
-    zlib1g-dev \
-    libjpeg62-turbo-dev \
-  && if [ "$(uname -m)" = "armv7l" ] || [ "$(uname -m)" = "aarch64" ]; \
-    then echo "Building qpdf" \
-    && mkdir -p /usr/src/qpdf \
-    && cd /usr/src/qpdf \
-    && git clone https://github.com/qpdf/qpdf.git . \
-    && git checkout --quiet release-qpdf-10.6.2 \
-    && ./configure \
-    && make \
-    && make install \
-    && cd /usr/src/paperless/src/ \
-    && rm -rf /usr/src/qpdf; \
-  else \
-    echo "Skipping qpdf build because pikepdf binary wheels are available."; \
-  fi \
-  && python3 -m pip install --upgrade pip wheel \
-  && python3 -m pip install --default-timeout=1000 --upgrade --no-cache-dir supervisor \
-  && python3 -m pip install --default-timeout=1000 --no-cache-dir -r ../requirements.txt \
-  && apt-get -y purge build-essential git zlib1g-dev libjpeg62-turbo-dev \
-  && apt-get -y autoremove --purge \
-  && rm -rf /var/lib/apt/lists/*
-
-# setup docker-specific things
-COPY docker/ ./docker/
-
-RUN cd docker \
-  && cp imagemagick-policy.xml /etc/ImageMagick-6/policy.xml \
-  && mkdir /var/log/supervisord /var/run/supervisord \
-  && cp supervisord.conf /etc/supervisord.conf \
-  && cp docker-entrypoint.sh /sbin/docker-entrypoint.sh \
-  && cp docker-prepare.sh /sbin/docker-prepare.sh \
-  && chmod 755 /sbin/docker-entrypoint.sh \
-  && chmod +x install_management_commands.sh \
-  && ./install_management_commands.sh \
-  && cd .. \
-  && rm docker -rf
-
-COPY gunicorn.conf.py ../
-
-# copy app
-COPY --from=compile-frontend /src/src/ ./
-
-# add users, setup scripts
-RUN addgroup --gid 1000 paperless \
-  && useradd --uid 1000 --gid paperless --home-dir /usr/src/paperless paperless \
-  && chown -R paperless:paperless ../ \
-  && gosu paperless python3 manage.py collectstatic --clear --no-input \
-  && gosu paperless python3 manage.py compilemessages
-
-VOLUME ["/usr/src/paperless/data", "/usr/src/paperless/media", "/usr/src/paperless/consume", "/usr/src/paperless/export"]
-ENTRYPOINT ["/sbin/docker-entrypoint.sh"]
-EXPOSE 8000
-CMD ["/usr/local/bin/supervisord", "-c", "/etc/supervisord.conf"]

 LABEL org.opencontainers.image.authors="paperless-ngx team <hello@paperless-ngx.com>"
 LABEL org.opencontainers.image.documentation="https://paperless-ngx.readthedocs.io/en/latest/"
 LABEL org.opencontainers.image.source="https://github.com/paperless-ngx/paperless-ngx"
 LABEL org.opencontainers.image.url="https://github.com/paperless-ngx/paperless-ngx"
 LABEL org.opencontainers.image.licenses="GPL-3.0-only"

+ARG DEBIAN_FRONTEND=noninteractive
+
+#
+# Begin installation and configuration
+# Order the steps below from least often changed to most
+#
+
+# copy jbig2enc
+# Basically will never change again
+COPY --from=jbig2enc-builder /usr/src/jbig2enc/src/.libs/libjbig2enc* /usr/local/lib/
+COPY --from=jbig2enc-builder /usr/src/jbig2enc/src/jbig2 /usr/local/bin/
+COPY --from=jbig2enc-builder /usr/src/jbig2enc/src/*.h /usr/local/include/
+
+# Packages needed for running
+ARG RUNTIME_PACKAGES="\
+  curl \
+  file \
+  # fonts for text file thumbnail generation
+  fonts-liberation \
+  gettext \
+  ghostscript \
+  gnupg \
+  gosu \
+  icc-profiles-free \
+  imagemagick \
+  media-types \
+  liblept5 \
+  libpq5 \
+  libxml2 \
+  liblcms2-2 \
+  libtiff5 \
+  libxslt1.1 \
+  libfreetype6 \
+  libwebp6 \
+  libopenjp2-7 \
+  libimagequant0 \
+  libraqm0 \
+  libgnutls30 \
+  libjpeg62-turbo \
+  python3 \
+  python3-pip \
+  python3-setuptools \
+  postgresql-client \
+  mariadb-client \
+  # For Numpy
+  libatlas3-base \
+  # OCRmyPDF dependencies
+  tesseract-ocr \
+  tesseract-ocr-eng \
+  tesseract-ocr-deu \
+  tesseract-ocr-fra \
+  tesseract-ocr-ita \
+  tesseract-ocr-spa \
+  # Suggested for OCRmyPDF
+  pngquant \
+  # Suggested for pikepdf
+  jbig2dec \
+  tzdata \
+  unpaper \
+  # Mime type detection
+  zlib1g \
+  # Barcode splitter
+  libzbar0 \
+  poppler-utils"
+
+# Install basic runtime packages.
+# These change very infrequently
+RUN set -eux \
+  echo "Installing system packages" \
+  && apt-get update \
+  && apt-get install --yes --quiet --no-install-recommends ${RUNTIME_PACKAGES} \
+  && rm -rf /var/lib/apt/lists/* \
+  && echo "Installing supervisor" \
+  && python3 -m pip install --default-timeout=1000 --upgrade --no-cache-dir supervisor==4.2.4
+
+# Copy gunicorn config
+# Changes very infrequently
+WORKDIR /usr/src/paperless/
+
+COPY gunicorn.conf.py .
+
+# setup docker-specific things
+# Use mounts to avoid copying installer files into the image
+# These change sometimes, but rarely
+WORKDIR /usr/src/paperless/src/docker/
+
+COPY [ \
+  "docker/imagemagick-policy.xml", \
+  "docker/supervisord.conf", \
+  "docker/docker-entrypoint.sh", \
+  "docker/docker-prepare.sh", \
+  "docker/paperless_cmd.sh", \
+  "docker/wait-for-redis.py", \
+  "docker/management_script.sh", \
+  "docker/install_management_commands.sh", \
+  "/usr/src/paperless/src/docker/" \
+]
+
+RUN set -eux \
+  && echo "Configuring ImageMagick" \
+  && mv imagemagick-policy.xml /etc/ImageMagick-6/policy.xml \
+  && echo "Configuring supervisord" \
+  && mkdir /var/log/supervisord /var/run/supervisord \
+  && mv supervisord.conf /etc/supervisord.conf \
+  && echo "Setting up Docker scripts" \
+  && mv docker-entrypoint.sh /sbin/docker-entrypoint.sh \
+  && chmod 755 /sbin/docker-entrypoint.sh \
+  && mv docker-prepare.sh /sbin/docker-prepare.sh \
+  && chmod 755 /sbin/docker-prepare.sh \
+  && mv wait-for-redis.py /sbin/wait-for-redis.py \
+  && chmod 755 /sbin/wait-for-redis.py \
+  && mv paperless_cmd.sh /usr/local/bin/paperless_cmd.sh \
+  && chmod 755 /usr/local/bin/paperless_cmd.sh \
+  && echo "Installing management commands" \
+  && chmod +x install_management_commands.sh \
+  && ./install_management_commands.sh
+
+# Install the built packages from the installer library images
+# Use mounts to avoid copying installer files into the image
+# These change sometimes
+RUN --mount=type=bind,from=qpdf-builder,target=/qpdf \
+  --mount=type=bind,from=psycopg2-builder,target=/psycopg2 \
+  --mount=type=bind,from=pikepdf-builder,target=/pikepdf \
+  set -eux \
+  && echo "Installing qpdf" \
+  && apt-get install --yes --no-install-recommends /qpdf/usr/src/qpdf/libqpdf28_*.deb \
+  && apt-get install --yes --no-install-recommends /qpdf/usr/src/qpdf/qpdf_*.deb \
+  && echo "Installing pikepdf and dependencies" \
+  && python3 -m pip install --no-cache-dir /pikepdf/usr/src/wheels/pyparsing*.whl \
+  && python3 -m pip install --no-cache-dir /pikepdf/usr/src/wheels/packaging*.whl \
+  && python3 -m pip install --no-cache-dir /pikepdf/usr/src/wheels/lxml*.whl \
+  && python3 -m pip install --no-cache-dir /pikepdf/usr/src/wheels/Pillow*.whl \
+  && python3 -m pip install --no-cache-dir /pikepdf/usr/src/wheels/pikepdf*.whl \
+  && python3 -m pip list \
+  && echo "Installing psycopg2" \
+  && python3 -m pip install --no-cache-dir /psycopg2/usr/src/wheels/psycopg2*.whl \
+  && python3 -m pip list
+
+WORKDIR /usr/src/paperless/src/
+
+# Python dependencies
+# Change pretty frequently
+COPY Pipfile* ./
+
+# Packages needed only for building a few quick Python
+# dependencies
+ARG BUILD_PACKAGES="\
+  build-essential \
+  git \
+  default-libmysqlclient-dev \
+  python3-dev"
+
+RUN set -eux \
+  && echo "Installing build system packages" \
+  && apt-get update \
+  && apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
+  && python3 -m pip install --no-cache-dir --upgrade wheel \
+  && echo "Installing pipenv" \
+  && python3 -m pip install --no-cache-dir --upgrade pipenv \
+  && echo "Installing Python requirements" \
+  # pipenv tries to be too fancy and prints so much junk
+  && pipenv requirements > requirements.txt \
+  && python3 -m pip install --default-timeout=1000 --no-cache-dir --requirement requirements.txt \
+  && rm requirements.txt \
+  && echo "Cleaning up image" \
+  && apt-get -y purge ${BUILD_PACKAGES} \
+  && apt-get -y autoremove --purge \
+  && apt-get clean --yes \
+  # Remove pipenv and its unique packages
+  && python3 -m pip uninstall --yes \
+    pipenv \
+    distlib \
+    platformdirs \
+    virtualenv \
+    virtualenv-clone \
+  && rm -rf /var/lib/apt/lists/* \
+  && rm -rf /tmp/* \
+  && rm -rf /var/tmp/* \
+  && rm -rf /var/cache/apt/archives/* \
+  && truncate -s 0 /var/log/*log
+
+# copy backend
+COPY ./src ./
+
+# copy frontend
+COPY --from=compile-frontend /src/src/documents/static/frontend/ ./documents/static/frontend/
+
+# add users, setup scripts
+RUN set -eux \
+  && addgroup --gid 1000 paperless \
+  && useradd --uid 1000 --gid paperless --home-dir /usr/src/paperless paperless \
+  && chown -R paperless:paperless ../ \
+  && gosu paperless python3 manage.py collectstatic --clear --no-input \
+  && gosu paperless python3 manage.py compilemessages
+
+VOLUME ["/usr/src/paperless/data", \
+  "/usr/src/paperless/media", \
+  "/usr/src/paperless/consume", \
+  "/usr/src/paperless/export"]
+
+ENTRYPOINT ["/sbin/docker-entrypoint.sh"]
+
+EXPOSE 8000
+
+CMD ["/usr/local/bin/paperless_cmd.sh"]
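Because the first stages now pull prebuilt installer images, a manual build of this Dockerfile has to supply matching version build args. A minimal sketch; the version values below are placeholders, not pinned by this diff (the repo's `build-docker-image.sh`, added below, derives the real ones from `Pipfile.lock` and `.build-config.json`):

```bash
# BuildKit is required for the syntax directive and the RUN --mount steps.
export DOCKER_BUILDKIT=1

docker build \
    --build-arg JBIG2ENC_VERSION=0.29 \
    --build-arg QPDF_VERSION=10.6.3 \
    --build-arg PIKEPDF_VERSION=5.6.0 \
    --build-arg PSYCOPG2_VERSION=2.9.3 \
    --tag paperless-ngx:local .
```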
47  Pipfile
@@ -9,35 +9,35 @@ verify_ssl = true
 name = "piwheels"

 [packages]
-dateparser = "~=1.1.0"
-django = "~=3.2"
+dateparser = "~=1.1"
+django = "~=4.0"
 django-cors-headers = "*"
 django-extensions = "*"
-django-filter = "~=21.1"
-django-q = "~=1.3.4"
-djangorestframework = "~=3.13.1"
+django-filter = "~=22.1"
+django-q = {editable = true, ref = "paperless-main", git = "https://github.com/paperless-ngx/django-q.git"}
+djangorestframework = "~=3.13"
 filelock = "*"
 fuzzywuzzy = {extras = ["speedup"], version = "*"}
 gunicorn = "*"
 imap-tools = "*"
 langdetect = "*"
-numpy = "~=1.22.0"
 pathvalidate = "*"
-pillow = "~=9.0"
-pikepdf = "~=5.0"
+pillow = "~=9.2"
+pikepdf = "~=5.6"
 python-gnupg = "*"
 python-dotenv = "*"
 python-dateutil = "*"
 python-magic = "*"
-psycopg2-binary = "*"
+psycopg2 = "*"
 redis = "*"
-# Pinned because aarch64 wheels and updates cause warnings when loading the classifier model.
-scikit-learn="==0.24.0"
-whitenoise = "~=6.0.0"
-watchdog = "~=2.1.0"
-whoosh="~=2.7.4"
-inotifyrecursive = "~=0.3.4"
-ocrmypdf = "~=13.4.0"
+scikit-learn = "~=1.1"
+# Pin this until piwheels is building 1.9 (see https://www.piwheels.org/project/scipy/)
+scipy = "==1.8.1"
+whitenoise = "~=6.2"
+watchdog = "~=2.1"
+whoosh="~=2.7"
+inotifyrecursive = "~=0.3"
+ocrmypdf = "~=13.7"
 tqdm = "*"
 tika = "*"
 # TODO: This will sadly also install daphne+dependencies,

@@ -46,11 +46,13 @@ channels = "~=3.0"
 channels-redis = "*"
 uvicorn = {extras = ["standard"], version = "*"}
 concurrent-log-handler = "*"
-# uvloop 0.15+ incompatible with python 3.6
-uvloop = "~=0.16"
-cryptography = "~=36.0.1"
 "pdfminer.six" = "*"
-"backports.zoneinfo" = "*"
+"backports.zoneinfo" = {version = "*", markers = "python_version < '3.9'"}
+"importlib-resources" = {version = "*", markers = "python_version < '3.9'"}
+zipp = {version = "*", markers = "python_version < '3.9'"}
+pyzbar = "*"
+pdf2image = "*"
+mysqlclient = "*"

 [dev-packages]
 coveralls = "*"

@@ -62,7 +64,10 @@ pytest-django = "*"
 pytest-env = "*"
 pytest-sugar = "*"
 pytest-xdist = "*"
-sphinx = "~=3.4.2"
+sphinx = "~=5.1"
 sphinx_rtd_theme = "*"
 tox = "*"
 black = "*"
+pre-commit = "*"
+sphinx-autobuild = "*"
+myst-parser = "*"
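Since the image build now exports its requirements from the Pipfile at build time, the same flow can be reproduced locally after editing dependencies. A sketch, assuming a pipenv release recent enough to provide the `requirements` subcommand:

```bash
# Re-resolve the lock file after changing the Pipfile...
pipenv lock

# ...then export a plain requirements file, mirroring the Dockerfile's
# `pipenv requirements > requirements.txt` step.
pipenv requirements > requirements.txt
```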
2218  Pipfile.lock  (generated)
File diff suppressed because it is too large.
73  README.md
@@ -10,23 +10,23 @@
 </p>

 <!-- omit in toc -->

 # Paperless-ngx

-Paperless-ngx is a document management system that transforms your physical documents into a searchable online archive so you can keep, well, *less paper*.
+Paperless-ngx is a document management system that transforms your physical documents into a searchable online archive so you can keep, well, _less paper_.

 Paperless-ngx forked from [paperless-ng](https://github.com/jonaswinkler/paperless-ng) to continue the great work and distribute responsibility of supporting and advancing the project among a team of people. [Consider joining us!](#community-support) Discussion of this transition can be found in issues
 [#1599](https://github.com/jonaswinkler/paperless-ng/issues/1599) and [#1632](https://github.com/jonaswinkler/paperless-ng/issues/1632).

-A demo is available at [demo.paperless-ngx.com](https://demo.paperless-ngx.com) using login `demo` / `demo`. *Note: demo content is reset frequently and confidential information should not be uploaded.*
+A demo is available at [demo.paperless-ngx.com](https://demo.paperless-ngx.com) using login `demo` / `demo`. _Note: demo content is reset frequently and confidential information should not be uploaded._

 - [Features](#features)
 - [Getting started](#getting-started)
 - [Contributing](#contributing)
 - [Community Support](#community-support)
 - [Translation](#translation)
 - [Feature Requests](#feature-requests)
 - [Bugs](#bugs)
 - [Affiliated Projects](#affiliated-projects)
 - [Important Note](#important-note)

@@ -35,28 +35,28 @@ A demo is available at [demo.paperless-ngx.com](https://demo.paperless-ngx.com)
 
 

-* Organize and index your scanned documents with tags, correspondents, types, and more.
+- Organize and index your scanned documents with tags, correspondents, types, and more.
-* Performs OCR on your documents, adds selectable text to image only documents and adds tags, correspondents and document types to your documents.
+- Performs OCR on your documents, adds selectable text to image only documents and adds tags, correspondents and document types to your documents.
-* Supports PDF documents, images, plain text files, and Office documents (Word, Excel, Powerpoint, and LibreOffice equivalents).
+- Supports PDF documents, images, plain text files, and Office documents (Word, Excel, Powerpoint, and LibreOffice equivalents).
-* Office document support is optional and provided by Apache Tika (see [configuration](https://paperless-ngx.readthedocs.io/en/latest/configuration.html#tika-settings))
+- Office document support is optional and provided by Apache Tika (see [configuration](https://paperless-ngx.readthedocs.io/en/latest/configuration.html#tika-settings))
-* Paperless stores your documents plain on disk. Filenames and folders are managed by paperless and their format can be configured freely.
+- Paperless stores your documents plain on disk. Filenames and folders are managed by paperless and their format can be configured freely.
-* Single page application front end.
+- Single page application front end.
-* Includes a dashboard that shows basic statistics and has document upload.
+- Includes a dashboard that shows basic statistics and has document upload.
-* Filtering by tags, correspondents, types, and more.
+- Filtering by tags, correspondents, types, and more.
-* Customizable views can be saved and displayed on the dashboard.
+- Customizable views can be saved and displayed on the dashboard.
-* Full text search helps you find what you need.
+- Full text search helps you find what you need.
-* Auto completion suggests relevant words from your documents.
+- Auto completion suggests relevant words from your documents.
-* Results are sorted by relevance to your search query.
+- Results are sorted by relevance to your search query.
-* Highlighting shows you which parts of the document matched the query.
+- Highlighting shows you which parts of the document matched the query.
-* Searching for similar documents ("More like this")
+- Searching for similar documents ("More like this")
-* Email processing: Paperless adds documents from your email accounts.
+- Email processing: Paperless adds documents from your email accounts.
-* Configure multiple accounts and filters for each account.
+- Configure multiple accounts and filters for each account.
-* When adding documents from mail, paperless can move these mail to a new folder, mark them as read, flag them as important or delete them.
+- When adding documents from mail, paperless can move these mail to a new folder, mark them as read, flag them as important or delete them.
-* Machine learning powered document matching.
+- Machine learning powered document matching.
-* Paperless-ngx learns from your documents and will be able to automatically assign tags, correspondents and types to documents once you've stored a few documents in paperless.
+- Paperless-ngx learns from your documents and will be able to automatically assign tags, correspondents and types to documents once you've stored a few documents in paperless.
-* Optimized for multi core systems: Paperless-ngx consumes multiple documents in parallel.
+- Optimized for multi core systems: Paperless-ngx consumes multiple documents in parallel.
-* The integrated sanity checker makes sure that your document archive is in good health.
+- The integrated sanity checker makes sure that your document archive is in good health.
-* [More screenshots are available in the documentation](https://paperless-ngx.readthedocs.io/en/latest/screenshots.html).
+- [More screenshots are available in the documentation](https://paperless-ngx.readthedocs.io/en/latest/screenshots.html).

 # Getting started

@@ -65,7 +65,7 @@ The easiest way to deploy paperless is docker-compose. The files in the [`/docke
 If you'd like to jump right in, you can configure a docker-compose environment with our install script:

 ```bash
-bash -c "$(curl -L https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/master/install-paperless-ngx.sh)"
+bash -c "$(curl -L https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/install-paperless-ngx.sh)"
 ```

 Alternatively, you can install the dependencies and setup apache and a database server yourself. The [documentation](https://paperless-ngx.readthedocs.io/en/latest/setup.html#installation) has a step by step guide on how to do it.

@@ -73,6 +73,7 @@ Alternatively, you can install the dependencies and setup apache and a database
 Migrating from Paperless-ng is easy, just drop in the new docker image! See the [documentation on migrating](https://paperless-ngx.readthedocs.io/en/latest/setup.html#migrating-from-paperless-ng) for more details.

 <!-- omit in toc -->
+
 ### Documentation

 The documentation for Paperless-ngx is available on [ReadTheDocs](https://paperless-ngx.readthedocs.io/).

@@ -101,18 +102,18 @@ For bugs please [open an issue](https://github.com/paperless-ngx/paperless-ngx/i

 Paperless has been around a while now, and people are starting to build stuff on top of it. If you're one of those people, we can add your project to this list:

-* [Paperless App](https://github.com/bauerj/paperless_app): An Android/iOS app for Paperless-ngx. Also works with the original Paperless and Paperless-ng.
+- [Paperless App](https://github.com/bauerj/paperless_app): An Android/iOS app for Paperless-ngx. Also works with the original Paperless and Paperless-ng.
-* [Paperless Share](https://github.com/qcasey/paperless_share). Share any files from your Android application with paperless. Very simple, but works with all of the mobile scanning apps out there that allow you to share scanned documents.
+- [Paperless Share](https://github.com/qcasey/paperless_share). Share any files from your Android application with paperless. Very simple, but works with all of the mobile scanning apps out there that allow you to share scanned documents.
-* [Scan to Paperless](https://github.com/sbrunner/scan-to-paperless): Scan and prepare (crop, deskew, OCR, ...) your documents for Paperless.
+- [Scan to Paperless](https://github.com/sbrunner/scan-to-paperless): Scan and prepare (crop, deskew, OCR, ...) your documents for Paperless.

 These projects also exist, but their status and compatibility with paperless-ngx is unknown.

-* [paperless-cli](https://github.com/stgarf/paperless-cli): A golang command line binary to interact with a Paperless instance.
+- [paperless-cli](https://github.com/stgarf/paperless-cli): A golang command line binary to interact with a Paperless instance.

 This project also exists, but needs updates to be compatible with paperless-ngx.

-* [Paperless Desktop](https://github.com/thomasbrueggemann/paperless-desktop): A desktop UI for your Paperless installation. Runs on Mac, Linux, and Windows.
+- [Paperless Desktop](https://github.com/thomasbrueggemann/paperless-desktop): A desktop UI for your Paperless installation. Runs on Mac, Linux, and Windows.
 Known issues on Mac: (Could not load reminders and documents)

 # Important Note
43  build-docker-image.sh  Executable file
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+
+# Helper script for building the Docker image locally.
+# Parses and provides the necessary versions of other images to Docker
+# before passing in the rest of script args.
+
+# First Argument: The Dockerfile to build
+# Other Arguments: Additional arguments to docker build
+
+# Example Usage:
+#   ./build-docker-image.sh Dockerfile -t paperless-ngx:my-awesome-feature
+
+set -eux
+
+if ! command -v jq; then
+  echo "jq required"
+  exit 1
+elif [ ! -f "$1" ]; then
+  echo "$1 is not a file, please provide the Dockerfile"
+  exit 1
+fi
+
+# Parse what we can from Pipfile.lock
+pikepdf_version=$(jq ".default.pikepdf.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')
+psycopg2_version=$(jq ".default.psycopg2.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')
+# Read this from the other config file
+qpdf_version=$(jq ".qpdf.version" .build-config.json | sed 's/"//g')
+jbig2enc_version=$(jq ".jbig2enc.version" .build-config.json | sed 's/"//g')
+# Get the branch name (used for caching)
+branch_name=$(git rev-parse --abbrev-ref HEAD)
+
+# https://docs.docker.com/develop/develop-images/build_enhancements/
+# Required to use cache-from
+export DOCKER_BUILDKIT=1
+
+docker build --file "$1" \
+  --progress=plain \
+  --cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/app:"${branch_name}" \
+  --cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/app:dev \
+  --build-arg JBIG2ENC_VERSION="${jbig2enc_version}" \
+  --build-arg QPDF_VERSION="${qpdf_version}" \
+  --build-arg PIKEPDF_VERSION="${pikepdf_version}" \
+  --build-arg PSYCOPG2_VERSION="${psycopg2_version}" "${@:2}" .
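The jq parsing above can be checked in isolation; for example, for pikepdf (the printed version is illustrative):

```bash
# Pipfile.lock stores pins as strings like "==5.6.0" ...
jq ".default.pikepdf.version" Pipfile.lock

# ... and the sed calls strip the '=' signs and quotes, leaving e.g. 5.6.0
jq ".default.pikepdf.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g'
```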
14  docker-builders/Dockerfile.frontend  Normal file
@@ -0,0 +1,14 @@
+# This Dockerfile compiles the frontend
+# Inputs: None
+
+FROM node:16-bullseye-slim AS compile-frontend
+
+COPY ./src /src/src
+COPY ./src-ui /src/src-ui
+
+WORKDIR /src/src-ui
+RUN set -eux \
+  && npm update npm -g \
+  && npm ci --omit=optional
+RUN set -eux \
+  && ./node_modules/.bin/ng build --configuration production
35  docker-builders/Dockerfile.jbig2enc  Normal file
@@ -0,0 +1,35 @@
+# This Dockerfile compiles the jbig2enc library
+# Inputs:
+#    - JBIG2ENC_VERSION - the Git tag to checkout and build
+
+FROM debian:bullseye-slim as main
+
+LABEL org.opencontainers.image.description="An intermediate image with jbig2enc built"
+
+ARG DEBIAN_FRONTEND=noninteractive
+ARG JBIG2ENC_VERSION
+
+ARG BUILD_PACKAGES="\
+  build-essential \
+  automake \
+  libtool \
+  libleptonica-dev \
+  zlib1g-dev \
+  git \
+  ca-certificates"
+
+WORKDIR /usr/src/jbig2enc
+
+RUN set -eux \
+  && echo "Installing build tools" \
+  && apt-get update --quiet \
+  && apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
+  && echo "Building jbig2enc" \
+  && git clone --quiet --branch $JBIG2ENC_VERSION https://github.com/agl/jbig2enc . \
+  && ./autogen.sh \
+  && ./configure \
+  && make \
+  && echo "Cleaning up image" \
+  && apt-get -y purge ${BUILD_PACKAGES} \
+  && apt-get -y autoremove --purge \
+  && rm -rf /var/lib/apt/lists/*
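Each builder Dockerfile is built standalone with its version argument. For jbig2enc that might look like the following (the tag and the version value are illustrative; `0.29` is simply an upstream jbig2enc release tag):

```bash
docker build \
    --file docker-builders/Dockerfile.jbig2enc \
    --build-arg JBIG2ENC_VERSION=0.29 \
    --tag paperless-builder/jbig2enc:0.29 .
```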
88  docker-builders/Dockerfile.pikepdf  Normal file
@@ -0,0 +1,88 @@
+# This Dockerfile builds the pikepdf wheel
+# Inputs:
+#    - REPO - Docker repository to pull qpdf from
+#    - QPDF_VERSION - The image qpdf version to copy .deb files from
+#    - PIKEPDF_VERSION - Version of pikepdf to build wheel for
+
+# Default to pulling from the main repo registry when manually building
+ARG REPO="paperless-ngx/paperless-ngx"
+
+ARG QPDF_VERSION
+FROM ghcr.io/${REPO}/builder/qpdf:${QPDF_VERSION} as qpdf-builder
+
+# This does nothing, except provide a name for a copy below
+
+FROM python:3.9-slim-bullseye as main
+
+LABEL org.opencontainers.image.description="An intermediate image with the pikepdf wheel built"
+
+ARG DEBIAN_FRONTEND=noninteractive
+ARG PIKEPDF_VERSION
+
+ARG BUILD_PACKAGES="\
+  build-essential \
+  python3-dev \
+  python3-pip \
+  # qpdf requirement - https://github.com/qpdf/qpdf#crypto-providers
+  libgnutls28-dev \
+  # lxml requirements - https://lxml.de/installation.html
+  libxml2-dev \
+  libxslt1-dev \
+  # Pillow requirements - https://pillow.readthedocs.io/en/stable/installation.html#external-libraries
+  # JPEG functionality
+  libjpeg62-turbo-dev \
+  # compressed PNG
+  zlib1g-dev \
+  # compressed TIFF
+  libtiff-dev \
+  # type related services
+  libfreetype-dev \
+  # color management
+  liblcms2-dev \
+  # WebP format
+  libwebp-dev \
+  # JPEG 2000
+  libopenjp2-7-dev \
+  # improved color quantization
+  libimagequant-dev \
+  # complex text layout support
+  libraqm-dev"
+
+WORKDIR /usr/src
+
+COPY --from=qpdf-builder /usr/src/qpdf/*.deb ./
+
+# As this is a base image for a multi-stage final image
+# the added size of the install is basically irrelevant
+
+RUN set -eux \
+  && echo "Installing build tools" \
+  && apt-get update --quiet \
+  && apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
+  && echo "Installing qpdf" \
+  && dpkg --install libqpdf28_*.deb \
+  && dpkg --install libqpdf-dev_*.deb \
+  && echo "Installing Python tools" \
+  && python3 -m pip install --no-cache-dir --upgrade \
+    pip \
+    wheel \
+    # https://pikepdf.readthedocs.io/en/latest/installation.html#requirements
+    pybind11 \
+  && echo "Building pikepdf wheel ${PIKEPDF_VERSION}" \
+  && mkdir wheels \
+  && python3 -m pip wheel \
+    # Build the package at the required version
+    pikepdf==${PIKEPDF_VERSION} \
+    # Output the *.whl into this directory
+    --wheel-dir wheels \
+    # Do not use a binary package for the package being built
+    --no-binary=pikepdf \
+    # Do use binary packages for dependencies
+    --prefer-binary \
+    # Don't cache build files
+    --no-cache-dir \
+  && ls -ahl wheels \
+  && echo "Cleaning up image" \
+  && apt-get -y purge ${BUILD_PACKAGES} \
+  && apt-get -y autoremove --purge \
+  && rm -rf /var/lib/apt/lists/*
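Unlike the other builders, this one needs the qpdf builder image to exist first, since its leading FROM copies the freshly built .deb files out of it. A sketch with illustrative versions and tag:

```bash
# Requires ghcr.io/${REPO}/builder/qpdf:${QPDF_VERSION} to be available.
docker build \
    --file docker-builders/Dockerfile.pikepdf \
    --build-arg REPO=paperless-ngx/paperless-ngx \
    --build-arg QPDF_VERSION=10.6.3 \
    --build-arg PIKEPDF_VERSION=5.6.0 \
    --tag paperless-builder/pikepdf:5.6.0 .
```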
48  docker-builders/Dockerfile.psycopg2  Normal file
@@ -0,0 +1,48 @@
+# This Dockerfile builds the psycopg2 wheel
+# Inputs:
+#    - PSYCOPG2_VERSION - Version to build
+
+FROM python:3.9-slim-bullseye as main
+
+LABEL org.opencontainers.image.description="An intermediate image with the psycopg2 wheel built"
+
+ARG PSYCOPG2_VERSION
+ARG DEBIAN_FRONTEND=noninteractive
+
+ARG BUILD_PACKAGES="\
+  build-essential \
+  python3-dev \
+  python3-pip \
+  # https://www.psycopg.org/docs/install.html#prerequisites
+  libpq-dev"
+
+WORKDIR /usr/src
+
+# As this is a base image for a multi-stage final image
+# the added size of the install is basically irrelevant
+
+RUN set -eux \
+  && echo "Installing build tools" \
+  && apt-get update --quiet \
+  && apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
+  && echo "Installing Python tools" \
+  && python3 -m pip install --no-cache-dir --upgrade pip wheel \
+  && echo "Building psycopg2 wheel ${PSYCOPG2_VERSION}" \
+  && cd /usr/src \
+  && mkdir wheels \
+  && python3 -m pip wheel \
+    # Build the package at the required version
+    psycopg2==${PSYCOPG2_VERSION} \
+    # Output the *.whl into this directory
+    --wheel-dir wheels \
+    # Do not use a binary package for the package being built
+    --no-binary=psycopg2 \
+    # Do use binary packages for dependencies
+    --prefer-binary \
+    # Don't cache build files
+    --no-cache-dir \
+  && ls -ahl wheels/ \
+  && echo "Cleaning up image" \
+  && apt-get -y purge ${BUILD_PACKAGES} \
+  && apt-get -y autoremove --purge \
+  && rm -rf /var/lib/apt/lists/*
52  docker-builders/Dockerfile.qpdf  Normal file
@@ -0,0 +1,52 @@
+# This Dockerfile builds qpdf as a .deb
+# Inputs:
+#    - QPDF_VERSION - the version of qpdf to build a .deb.
+#      Must be preset as a deb-src
+
+FROM debian:bullseye-slim as main
+
+LABEL org.opencontainers.image.description="An intermediate image with qpdf built"
+
+ARG DEBIAN_FRONTEND=noninteractive
+# This must match to pikepdf's minimum at least
+ARG QPDF_VERSION
+
+ARG BUILD_PACKAGES="\
+  build-essential \
+  debhelper \
+  debian-keyring \
+  devscripts \
+  equivs \
+  libtool \
+  # https://qpdf.readthedocs.io/en/stable/installation.html#system-requirements
+  libjpeg62-turbo-dev \
+  libgnutls28-dev \
+  packaging-dev \
+  zlib1g-dev"
+
+WORKDIR /usr/src
+
+# As this is a base image for a multi-stage final image
+# the added size of the install is basically irrelevant
+
+RUN set -eux \
+  && echo "Installing build tools" \
+  && apt-get update --quiet \
+  && apt-get install --yes --quiet --no-install-recommends $BUILD_PACKAGES \
+  && echo "Building qpdf" \
+  && echo "deb-src http://deb.debian.org/debian/ bookworm main" > /etc/apt/sources.list.d/bookworm-src.list \
+  && apt-get update \
+  && mkdir qpdf \
+  && cd qpdf \
+  && apt-get source --yes --quiet qpdf=${QPDF_VERSION}-1/bookworm \
+  && cd qpdf-$QPDF_VERSION \
+  # We don't need to build the tests (also don't run them)
+  && rm -rf libtests \
+  && DEBEMAIL=hello@paperless-ngx.com debchange --bpo \
+  && export DEB_BUILD_OPTIONS="terse nocheck nodoc parallel=2" \
+  && dpkg-buildpackage --build=binary --unsigned-source --unsigned-changes --post-clean \
+  && ls -ahl ../*.deb \
+  && echo "Cleaning up image" \
+  && apt-get -y purge ${BUILD_PACKAGES} \
+  && apt-get -y autoremove --purge \
+  && rm -rf /var/lib/apt/lists/*
@@ -22,6 +22,10 @@
 # Docker setup does not use the configuration file.
 # A few commonly adjusted settings are provided below.

+# This is required if you will be exposing Paperless-ngx on a public domain
+# (if doing so please consider security measures such as reverse proxy)
+#PAPERLESS_URL=https://paperless.example.com
+
 # Adjust this key if you plan to make paperless available publicly. It should
 # be a very long sequence of random characters. You don't need to remember it.
 #PAPERLESS_SECRET_KEY=change-me

@@ -32,3 +36,7 @@
 # The default language to use for OCR. Set this to the language most of your
 # documents are written in.
 #PAPERLESS_OCR_LANGUAGE=eng
+
+# Set if accessing paperless via a domain subpath e.g. https://domain.com/PATHPREFIX and using a reverse-proxy like traefik or nginx
+#PAPERLESS_FORCE_SCRIPT_NAME=/PATHPREFIX
+#PAPERLESS_STATIC_URL=/PATHPREFIX/static/ # trailing slash required
101  docker/compose/docker-compose.mariadb-tika.yml  Normal file
@@ -0,0 +1,101 @@
+# docker-compose file for running paperless from the Docker Hub.
+# This file contains everything paperless needs to run.
+# Paperless supports amd64, arm and arm64 hardware.
+#
+# All compose files of paperless configure paperless in the following way:
+#
+# - Paperless is (re)started on system boot, if it was running before shutdown.
+# - Docker volumes for storing data are managed by Docker.
+# - Folders for importing and exporting files are created in the same directory
+#   as this file and mounted to the correct folders inside the container.
+# - Paperless listens on port 8000.
+#
+# In addition to that, this docker-compose file adds the following optional
+# configurations:
+#
+# - Instead of SQLite (default), MariaDB is used as the database server.
+# - Apache Tika and Gotenberg servers are started with paperless and paperless
+#   is configured to use these services. These provide support for consuming
+#   Office documents (Word, Excel, Power Point and their LibreOffice counter-
+#   parts).
+#
+# To install and update paperless with this file, do the following:
+#
+# - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
+#   and '.env' into a folder.
+# - Run 'docker-compose pull'.
+# - Run 'docker-compose run --rm webserver createsuperuser' to create a user.
+# - Run 'docker-compose up -d'.
+#
+# For more extensive installation and update instructions, refer to the
+# documentation.
+
+version: "3.4"
+services:
+  broker:
+    image: docker.io/library/redis:7
+    restart: unless-stopped
+    volumes:
+      - redisdata:/data
+
+  db:
+    image: docker.io/library/mariadb:10
+    restart: unless-stopped
+    volumes:
+      - dbdata:/var/lib/mysql
+    environment:
+      MARIADB_HOST: paperless
+      MARIADB_DATABASE: paperless
+      MARIADB_USER: paperless
+      MARIADB_PASSWORD: paperless
+      MARIADB_ROOT_PASSWORD: paperless
+    ports:
+      - "3306:3306"
+
+  webserver:
+    image: ghcr.io/paperless-ngx/paperless-ngx:latest
+    restart: unless-stopped
+    depends_on:
+      - db
+      - broker
+      - gotenberg
+      - tika
+    ports:
+      - 8000:8000
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:8000"]
+      interval: 30s
+      timeout: 10s
+      retries: 5
+    volumes:
+      - data:/usr/src/paperless/data
+      - media:/usr/src/paperless/media
+      - ./export:/usr/src/paperless/export
+      - ./consume:/usr/src/paperless/consume
+    env_file: docker-compose.env
+    environment:
+      PAPERLESS_REDIS: redis://broker:6379
+      PAPERLESS_DBENGINE: mariadb
+      PAPERLESS_DBHOST: db
+      PAPERLESS_DBUSER: paperless
+      PAPERLESS_DBPASSWORD: paperless
+      PAPERLESS_DBPORT: 3306
+      PAPERLESS_TIKA_ENABLED: 1
+      PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
+      PAPERLESS_TIKA_ENDPOINT: http://tika:9998
+
+  gotenberg:
+    image: docker.io/gotenberg/gotenberg:7.4
+    restart: unless-stopped
+    environment:
+      CHROMIUM_DISABLE_ROUTES: 1
+
+  tika:
+    image: ghcr.io/paperless-ngx/tika:latest
+    restart: unless-stopped
+
+volumes:
+  data:
+  media:
+  dbdata:
+  redisdata:
docker/compose/docker-compose.mariadb.yml (new file, 83 lines)
@@ -0,0 +1,83 @@
# docker-compose file for running paperless from the Docker Hub.
# This file contains everything paperless needs to run.
# Paperless supports amd64, arm and arm64 hardware.
#
# All compose files of paperless configure paperless in the following way:
#
# - Paperless is (re)started on system boot, if it was running before shutdown.
# - Docker volumes for storing data are managed by Docker.
# - Folders for importing and exporting files are created in the same directory
#   as this file and mounted to the correct folders inside the container.
# - Paperless listens on port 8000.
#
# In addition to that, this docker-compose file adds the following optional
# configurations:
#
# - Instead of SQLite (default), MariaDB is used as the database server.
#
# To install and update paperless with this file, do the following:
#
# - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
#   and '.env' into a folder.
# - Run 'docker-compose pull'.
# - Run 'docker-compose run --rm webserver createsuperuser' to create a user.
# - Run 'docker-compose up -d'.
#
# For more extensive installation and update instructions, refer to the
# documentation.

version: "3.4"
services:
  broker:
    image: docker.io/library/redis:7
    restart: unless-stopped
    volumes:
      - redisdata:/data

  db:
    image: docker.io/library/mariadb:10
    restart: unless-stopped
    volumes:
      - dbdata:/var/lib/mysql
    environment:
      MARIADB_HOST: paperless
      MARIADB_DATABASE: paperless
      MARIADB_USER: paperless
      MARIADB_PASSWORD: paperless
      MARIADB_ROOT_PASSWORD: paperless
    ports:
      - "3306:3306"

  webserver:
    image: ghcr.io/paperless-ngx/paperless-ngx:latest
    restart: unless-stopped
    depends_on:
      - db
      - broker
    ports:
      - 8000:8000
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000"]
      interval: 30s
      timeout: 10s
      retries: 5
    volumes:
      - data:/usr/src/paperless/data
      - media:/usr/src/paperless/media
      - ./export:/usr/src/paperless/export
      - ./consume:/usr/src/paperless/consume
    env_file: docker-compose.env
    environment:
      PAPERLESS_REDIS: redis://broker:6379
      PAPERLESS_DBENGINE: mariadb
      PAPERLESS_DBHOST: db
      PAPERLESS_DBUSER: paperless
      PAPERLESS_DBPASSWORD: paperless
      PAPERLESS_DBPORT: 3306

volumes:
  data:
  media:
  dbdata:
  redisdata:
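To make the steps from the file header concrete, a minimal sketch of the documented install flow for these compose files (run from the folder holding ``docker-compose.yml`` and ``docker-compose.env``):

.. code:: bash

   docker-compose pull                                # fetch the images
   docker-compose run --rm webserver createsuperuser  # create the first user
   docker-compose up -d                               # start paperless in the background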
@@ -31,13 +31,13 @@
 version: "3.4"
 services:
   broker:
-    image: redis:6.0
+    image: docker.io/library/redis:7
     restart: unless-stopped
     volumes:
       - redisdata:/data

   db:
-    image: postgres:13
+    image: docker.io/library/postgres:13
     restart: unless-stopped
     volumes:
       - pgdata:/var/lib/postgresql/data
@@ -55,7 +55,7 @@ services:
     ports:
       - 8010:8000
     healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:8000"]
+      test: ["CMD", "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000"]
       interval: 30s
       timeout: 10s
       retries: 5
@@ -1,4 +1,4 @@
-# docker-compose file for running paperless from the Docker Hub.
+# docker-compose file for running paperless from the docker container registry.
 # This file contains everything paperless needs to run.
 # Paperless supports amd64, arm and arm64 hardware.
 #
@@ -33,13 +33,13 @@
 version: "3.4"
 services:
   broker:
-    image: redis:6.0
+    image: docker.io/library/redis:7
     restart: unless-stopped
     volumes:
       - redisdata:/data

   db:
-    image: postgres:13
+    image: docker.io/library/postgres:13
     restart: unless-stopped
     volumes:
       - pgdata:/var/lib/postgresql/data
@@ -59,7 +59,7 @@ services:
     ports:
       - 8000:8000
     healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:8000"]
+      test: ["CMD", "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000"]
       interval: 30s
       timeout: 10s
       retries: 5
@@ -77,13 +77,14 @@ services:
       PAPERLESS_TIKA_ENDPOINT: http://tika:9998

   gotenberg:
-    image: gotenberg/gotenberg:7
+    image: docker.io/gotenberg/gotenberg:7.4
     restart: unless-stopped
-    environment:
-      CHROMIUM_DISABLE_ROUTES: 1
+    command:
+      - "gotenberg"
+      - "--chromium-disable-routes=true"

   tika:
-    image: apache/tika
+    image: ghcr.io/paperless-ngx/tika:latest
     restart: unless-stopped

 volumes:
@@ -29,13 +29,13 @@
 version: "3.4"
 services:
   broker:
-    image: redis:6.0
+    image: docker.io/library/redis:7
     restart: unless-stopped
     volumes:
       - redisdata:/data

   db:
-    image: postgres:13
+    image: docker.io/library/postgres:13
     restart: unless-stopped
     volumes:
       - pgdata:/var/lib/postgresql/data
@@ -53,7 +53,7 @@ services:
     ports:
       - 8000:8000
     healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:8000"]
+      test: ["CMD", "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000"]
       interval: 30s
       timeout: 10s
       retries: 5
@@ -1,7 +1,6 @@
-# docker-compose file for running paperless from the Docker Hub.
+# docker-compose file for running paperless from the docker container registry.
 # This file contains everything paperless needs to run.
 # Paperless supports amd64, arm and arm64 hardware.
-#
 # All compose files of paperless configure paperless in the following way:
 #
 # - Paperless is (re)started on system boot, if it was running before shutdown.
@@ -34,7 +33,7 @@
 version: "3.4"
 services:
   broker:
-    image: redis:6.0
+    image: docker.io/library/redis:7
     restart: unless-stopped
     volumes:
       - redisdata:/data
@@ -49,7 +48,7 @@ services:
     ports:
       - 8000:8000
     healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:8000"]
+      test: ["CMD", "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000"]
       interval: 30s
       timeout: 10s
       retries: 5
@@ -66,13 +65,14 @@ services:
       PAPERLESS_TIKA_ENDPOINT: http://tika:9998

   gotenberg:
-    image: gotenberg/gotenberg:7
+    image: docker.io/gotenberg/gotenberg:7.4
     restart: unless-stopped
-    environment:
-      CHROMIUM_DISABLE_ROUTES: 1
+    command:
+      - "gotenberg"
+      - "--chromium-disable-routes=true"

   tika:
-    image: apache/tika
+    image: ghcr.io/paperless-ngx/tika:latest
     restart: unless-stopped

 volumes:
@@ -26,7 +26,7 @@
 version: "3.4"
 services:
   broker:
-    image: redis:6.0
+    image: docker.io/library/redis:7
     restart: unless-stopped
     volumes:
       - redisdata:/data
@@ -39,7 +39,7 @@ services:
     ports:
       - 8000:8000
     healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:8000"]
+      test: ["CMD", "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000"]
       interval: 30s
       timeout: 10s
       retries: 5
@@ -1,7 +1,38 @@
-#!/bin/bash
+#!/usr/bin/env bash

 set -e

+# Adapted from:
+# https://github.com/docker-library/postgres/blob/master/docker-entrypoint.sh
+# usage: file_env VAR
+# ie: file_env 'XYZ_DB_PASSWORD' will allow for "$XYZ_DB_PASSWORD_FILE" to
+# fill in the value of "$XYZ_DB_PASSWORD" from a file, especially for Docker's
+# secrets feature
+file_env() {
+    local var="$1"
+    local fileVar="${var}_FILE"
+
+    # Basic validation
+    if [ "${!var:-}" ] && [ "${!fileVar:-}" ]; then
+        echo >&2 "error: both $var and $fileVar are set (but are exclusive)"
+        exit 1
+    fi
+
+    # Only export var if the _FILE exists
+    if [ "${!fileVar:-}" ]; then
+        # And the file exists
+        if [[ -f ${!fileVar} ]]; then
+            echo "Setting ${var} from file"
+            val="$(< "${!fileVar}")"
+            export "$var"="$val"
+        else
+            echo "File ${!fileVar} doesn't exist"
+            exit 1
+        fi
+    fi
+}
+
 # Source: https://github.com/sameersbn/docker-gitlab/
 map_uidgid() {
     USERMAP_ORIG_UID=$(id -u paperless)
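As a hedged illustration of the ``_FILE`` convention introduced above (the secret path below is an assumption, e.g. a Docker secret mounted under ``/run/secrets``):

.. code:: bash

   # With this in the container environment...
   export PAPERLESS_DBPASS_FILE=/run/secrets/db_pass
   # ...`file_env PAPERLESS_DBPASS` reads /run/secrets/db_pass and exports
   # PAPERLESS_DBPASS with the file's contents. Setting both PAPERLESS_DBPASS
   # and PAPERLESS_DBPASS_FILE is rejected as an error.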
@@ -10,31 +41,62 @@ map_uidgid() {
     USERMAP_NEW_GID=${USERMAP_GID:-${USERMAP_ORIG_GID:-$USERMAP_NEW_UID}}
     if [[ ${USERMAP_NEW_UID} != "${USERMAP_ORIG_UID}" || ${USERMAP_NEW_GID} != "${USERMAP_ORIG_GID}" ]]; then
         echo "Mapping UID and GID for paperless:paperless to $USERMAP_NEW_UID:$USERMAP_NEW_GID"
-        usermod -u "${USERMAP_NEW_UID}" paperless
+        usermod -o -u "${USERMAP_NEW_UID}" paperless
         groupmod -o -g "${USERMAP_NEW_GID}" paperless
     fi
 }

+map_folders() {
+    # Export these so they can be used in docker-prepare.sh
+    export DATA_DIR="${PAPERLESS_DATA_DIR:-/usr/src/paperless/data}"
+    export MEDIA_ROOT_DIR="${PAPERLESS_MEDIA_ROOT:-/usr/src/paperless/media}"
+}
+
 initialize() {
+
+    # Setup environment from secrets before anything else
+    for env_var in \
+        PAPERLESS_DBUSER \
+        PAPERLESS_DBPASS \
+        PAPERLESS_SECRET_KEY \
+        PAPERLESS_AUTO_LOGIN_USERNAME \
+        PAPERLESS_ADMIN_USER \
+        PAPERLESS_ADMIN_MAIL \
+        PAPERLESS_ADMIN_PASSWORD \
+        PAPERLESS_REDIS; do
+        # Check for a version of this var with _FILE appended
+        # and convert the contents to the env var value
+        file_env ${env_var}
+    done
+
+    # Change the user and group IDs if needed
     map_uidgid

-    for dir in export data data/index media media/documents media/documents/originals media/documents/thumbnails; do
-        if [[ ! -d "../$dir" ]]; then
-            echo "Creating directory ../$dir"
-            mkdir ../$dir
+    # Check for overrides of certain folders
+    map_folders
+
+    local export_dir="/usr/src/paperless/export"
+
+    for dir in "${export_dir}" "${DATA_DIR}" "${DATA_DIR}/index" "${MEDIA_ROOT_DIR}" "${MEDIA_ROOT_DIR}/documents" "${MEDIA_ROOT_DIR}/documents/originals" "${MEDIA_ROOT_DIR}/documents/thumbnails"; do
+        if [[ ! -d "${dir}" ]]; then
+            echo "Creating directory ${dir}"
+            mkdir "${dir}"
         fi
     done

-    echo "Creating directory /tmp/paperless"
-    mkdir -p /tmp/paperless
+    local tmp_dir="/tmp/paperless"
+    echo "Creating directory ${tmp_dir}"
+    mkdir -p "${tmp_dir}"

     set +e
     echo "Adjusting permissions of paperless files. This may take a while."
-    chown -R paperless:paperless /tmp/paperless
-    find .. -not \( -user paperless -and -group paperless \) -exec chown paperless:paperless {} +
+    chown -R paperless:paperless ${tmp_dir}
+    for dir in "${export_dir}" "${DATA_DIR}" "${MEDIA_ROOT_DIR}"; do
+        find "${dir}" -not \( -user paperless -and -group paperless \) -exec chown paperless:paperless {} +
+    done
     set -e

-    gosu paperless /sbin/docker-prepare.sh
+    "${gosu_cmd[@]}" /sbin/docker-prepare.sh
 }

 install_languages() {
@@ -56,12 +118,12 @@ install_languages() {
         #    continue
         #fi

-        if dpkg -s $pkg &>/dev/null; then
+        if dpkg -s "$pkg" &>/dev/null; then
             echo "Package $pkg already installed!"
             continue
         fi

-        if ! apt-cache show $pkg &>/dev/null; then
+        if ! apt-cache show "$pkg" &>/dev/null; then
             echo "Package $pkg not found! :("
             continue
         fi
@@ -76,8 +138,13 @@ install_languages() {

 echo "Paperless-ngx docker container starting..."

+gosu_cmd=(gosu paperless)
+if [ "$(id -u)" == "$(id -u paperless)" ]; then
+    gosu_cmd=()
+fi
+
 # Install additional languages if specified
-if [[ ! -z "$PAPERLESS_OCR_LANGUAGES" ]]; then
+if [[ -n "$PAPERLESS_OCR_LANGUAGES" ]]; then
     install_languages "$PAPERLESS_OCR_LANGUAGES"
 fi

@@ -85,7 +152,7 @@ initialize

 if [[ "$1" != "/"* ]]; then
     echo Executing management command "$@"
-    exec gosu paperless python3 manage.py "$@"
+    exec "${gosu_cmd[@]}" python3 manage.py "$@"
 else
     echo Executing "$@"
     exec "$@"
@@ -1,19 +1,19 @@
 #!/usr/bin/env bash

+set -e
+
 wait_for_postgres() {
-    attempt_num=1
-    max_attempts=5
+    local attempt_num=1
+    local max_attempts=5

     echo "Waiting for PostgreSQL to start..."

-    host="${PAPERLESS_DBHOST}"
-    port="${PAPERLESS_DBPORT}"
+    local host="${PAPERLESS_DBHOST:-localhost}"
+    local port="${PAPERLESS_DBPORT:-5432}"

-    if [[ -z $port ]]; then
-        port="5432"
-    fi
-
-    while ! </dev/tcp/$host/$port; do
+    # Disable warning, host and port can't have spaces
+    # shellcheck disable=SC2086
+    while [ ! "$(pg_isready -h ${host} -p ${port})" ]; do

         if [ $attempt_num -eq $max_attempts ]; then
             echo "Unable to connect to database."
@@ -23,11 +23,43 @@ wait_for_postgres() {
         fi

-        attempt_num=$(expr "$attempt_num" + 1)
+        attempt_num=$(("$attempt_num" + 1))
         sleep 5
     done
 }

+wait_for_mariadb() {
+    echo "Waiting for MariaDB to start..."
+
+    host="${PAPERLESS_DBHOST:=localhost}"
+    port="${PAPERLESS_DBPORT:=3306}"
+
+    attempt_num=1
+    max_attempts=5
+
+    while ! true > /dev/tcp/$host/$port; do
+
+        if [ $attempt_num -eq $max_attempts ]; then
+            echo "Unable to connect to database."
+            exit 1
+        else
+            echo "Attempt $attempt_num failed! Trying again in 5 seconds..."
+        fi
+
+        attempt_num=$(("$attempt_num" + 1))
+        sleep 5
+    done
+}
+
+wait_for_redis() {
+    # We use a Python script to send the Redis ping
+    # instead of installing redis-tools just for 1 thing
+    if ! python3 /sbin/wait-for-redis.py; then
+        exit 1
+    fi
+}
+
 migrations() {
     (
         # flock is in place to prevent multiple containers from doing migrations
@@ -36,17 +68,18 @@ migrations() {
         flock 200
         echo "Apply database migrations..."
         python3 manage.py migrate
-    ) 200>/usr/src/paperless/data/migration_lock
+    ) 200>"${DATA_DIR}/migration_lock"
 }

 search_index() {
-    index_version=1
-    index_version_file=/usr/src/paperless/data/.index_version
+    local index_version=1
+    local index_version_file=${DATA_DIR}/.index_version

-    if [[ (! -f "$index_version_file") || $(<$index_version_file) != "$index_version" ]]; then
+    if [[ (! -f "${index_version_file}") || $(<"${index_version_file}") != "$index_version" ]]; then
         echo "Search index out of date. Updating..."
-        python3 manage.py document_index reindex
-        echo $index_version | tee $index_version_file >/dev/null
+        python3 manage.py document_index reindex --no-progress-bar
+        echo ${index_version} | tee "${index_version_file}" >/dev/null
     fi
 }
@@ -57,10 +90,14 @@ superuser() {
 }

 do_work() {
-    if [[ -n "${PAPERLESS_DBHOST}" ]]; then
+    if [[ "${PAPERLESS_DBENGINE}" == "mariadb" ]]; then
+        wait_for_mariadb
+    elif [[ -n "${PAPERLESS_DBHOST}" ]]; then
         wait_for_postgres
     fi

+    wait_for_redis
+
     migrations

     search_index
@@ -1,4 +1,19 @@
-for command in document_archiver document_exporter document_importer mail_fetcher document_create_classifier document_index document_renamer document_retagger document_thumbnails document_sanity_checker manage_superuser;
+#!/usr/bin/env bash
+
+set -eu
+
+for command in decrypt_documents \
+    document_archiver \
+    document_exporter \
+    document_importer \
+    mail_fetcher \
+    document_create_classifier \
+    document_index \
+    document_renamer \
+    document_retagger \
+    document_thumbnails \
+    document_sanity_checker \
+    manage_superuser;
 do
     echo "installing $command..."
     sed "s/management_command/$command/g" management_script.sh > /usr/local/bin/$command
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash

 set -e

@@ -6,10 +6,10 @@ cd /usr/src/paperless/src/

 if [[ $(id -u) == 0 ]] ;
 then
     gosu paperless python3 manage.py management_command "$@"
 elif [[ $(id -un) == "paperless" ]] ;
 then
     python3 manage.py management_command "$@"
 else
     echo "Unknown user."
 fi
docker/paperless_cmd.sh (new executable file, 15 lines)
@@ -0,0 +1,15 @@
#!/usr/bin/env bash

rootless_args=()
if [ "$(id -u)" == "$(id -u paperless)" ]; then
    rootless_args=(
        --user
        paperless
        --logfile
        supervisord.log
        --pidfile
        supervisord.pid
    )
fi

/usr/local/bin/supervisord -c /etc/supervisord.conf "${rootless_args[@]}"
@@ -19,6 +19,7 @@ stderr_logfile_maxbytes=0
 [program:consumer]
 command=python3 manage.py document_consumer
 user=paperless
+stopsignal=INT

 stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0
@@ -28,6 +29,7 @@ stderr_logfile_maxbytes=0
 [program:scheduler]
 command=python3 manage.py qcluster
 user=paperless
+stopasgroup = true

 stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0
docker/wait-for-redis.py (new executable file, 44 lines)
@@ -0,0 +1,44 @@
#!/usr/bin/env python3
"""
Simple script which attempts to ping the Redis broker as set in the environment for
a certain number of times, waiting a little bit in between
"""
import os
import sys
import time
from typing import Final

from redis import Redis

if __name__ == "__main__":

    MAX_RETRY_COUNT: Final[int] = 5
    RETRY_SLEEP_SECONDS: Final[int] = 5

    REDIS_URL: Final[str] = os.getenv("PAPERLESS_REDIS", "redis://localhost:6379")

    print("Waiting for Redis...", flush=True)

    attempt = 0
    with Redis.from_url(url=REDIS_URL) as client:
        while attempt < MAX_RETRY_COUNT:
            try:
                client.ping()
                break
            except Exception as e:
                print(
                    f"Redis ping #{attempt} failed.\n"
                    f"Error: {str(e)}.\n"
                    f"Waiting {RETRY_SLEEP_SECONDS}s",
                    flush=True,
                )
                time.sleep(RETRY_SLEEP_SECONDS)
                attempt += 1

    if attempt >= MAX_RETRY_COUNT:
        print("Failed to connect to redis using environment variable PAPERLESS_REDIS.")
        sys.exit(os.EX_UNAVAILABLE)
    else:
        print("Connected to Redis broker.")
        sys.exit(os.EX_OK)
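For context, a hedged sketch of how this helper is exercised (mirroring the ``wait_for_redis`` call in ``docker-prepare.sh`` above; the broker URL is an example value):

.. code:: bash

   # Exits non-zero if the broker cannot be pinged after 5 attempts.
   PAPERLESS_REDIS=redis://broker:6379 python3 /sbin/wait-for-redis.py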
@@ -1,18 +0,0 @@
-FROM python:3.5.1
-MAINTAINER Pit Kleyersburg <pitkley@googlemail.com>
-
-# Install Sphinx and Pygments
-RUN pip install Sphinx Pygments
-
-# Setup directories, copy data
-RUN mkdir /build
-COPY . /build
-WORKDIR /build/docs
-
-# Build documentation
-RUN make html
-
-# Start webserver
-WORKDIR /build/docs/_build/html
-EXPOSE 8000/tcp
-CMD ["python3", "-m", "http.server"]
@@ -24,6 +24,7 @@ I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
 help:
 	@echo "Please use \`make <target>' where <target> is one of"
 	@echo "  html       to make standalone HTML files"
+	@echo "  livehtml   to preview changes with live reload in your browser"
 	@echo "  dirhtml    to make HTML files named index.html in directories"
 	@echo "  singlehtml to make a single large HTML file"
 	@echo "  pickle     to make pickle files"
@@ -54,6 +55,9 @@ html:
 	@echo
 	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

+livehtml:
+	sphinx-autobuild "./" "$(BUILDDIR)" $(O)
+
 dirhtml:
 	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
 	@echo
docs/_static/css/custom.css (vendored, 13 lines changed)
@@ -64,6 +64,10 @@ body {
   color: var(--color-text-body);
 }

+.rst-content p {
+  word-break: break-word;
+}
+
 h1, h2, h3, h4, h5, h6 {
   font-family: inherit;
 }
@@ -435,7 +439,8 @@ a.image-reference img {
 }

 .rst-content code.literal,
-.rst-content tt.literal {
+.rst-content tt.literal,
+html.writer-html5 .rst-content dl.footnote code {
   border-color: var(--color-border);
   background-color: var(--color-border);
   color: var(--color-text-code-inline)
@@ -580,9 +585,13 @@ a.image-reference img {
   right: 12px;
   height: 20px;
   width: 24px;
-  z-index: 1000;
+  z-index: 10;
   border: none;
   background-color: transparent;
   color: inherit;
   opacity: 0.7;
 }

+.wy-nav-content-wrap {
+  z-index: 20;
+}
docs/_static/js/darkmode.js (vendored, 52 lines changed)
@@ -1,47 +1,47 @@
-let toggleButton;
-let icon;
+let toggleButton
+let icon

 function load() {
-  "use strict";
+  'use strict'

-  toggleButton = document.createElement("button");
-  toggleButton.setAttribute("title", "Toggle dark mode");
-  toggleButton.classList.add("dark-mode-toggle");
-  icon = document.createElement("i");
-  icon.classList.add("fa", darkModeState ? "fa-sun-o" : "fa-moon-o");
-  toggleButton.appendChild(icon);
-  document.body.prepend(toggleButton);
+  toggleButton = document.createElement('button')
+  toggleButton.setAttribute('title', 'Toggle dark mode')
+  toggleButton.classList.add('dark-mode-toggle')
+  icon = document.createElement('i')
+  icon.classList.add('fa', darkModeState ? 'fa-sun-o' : 'fa-moon-o')
+  toggleButton.appendChild(icon)
+  document.body.prepend(toggleButton)

   // Listen for changes in the OS settings
   // addListener is used because older versions of Safari don't support addEventListener
   // prefersDarkQuery set in <head>
   if (prefersDarkQuery) {
     prefersDarkQuery.addListener(function (evt) {
-      toggleDarkMode(evt.matches);
-    });
+      toggleDarkMode(evt.matches)
+    })
   }

   // Initial setting depending on the prefers-color-mode or localstorage
   // darkModeState should be set in the document <head> to prevent flash
-  if (darkModeState == undefined) darkModeState = false;
-  toggleDarkMode(darkModeState);
+  if (darkModeState == undefined) darkModeState = false
+  toggleDarkMode(darkModeState)

   // Toggles the "dark-mode" class on click and sets localStorage state
-  toggleButton.addEventListener("click", () => {
-    darkModeState = !darkModeState;
+  toggleButton.addEventListener('click', () => {
+    darkModeState = !darkModeState

-    toggleDarkMode(darkModeState);
-    localStorage.setItem("dark-mode", darkModeState);
-  });
+    toggleDarkMode(darkModeState)
+    localStorage.setItem('dark-mode', darkModeState)
+  })
 }

 function toggleDarkMode(state) {
-  document.documentElement.classList.toggle("dark-mode", state);
-  document.documentElement.classList.toggle("light-mode", !state);
-  icon.classList.remove("fa-sun-o");
-  icon.classList.remove("fa-moon-o");
-  icon.classList.add(state ? "fa-sun-o" : "fa-moon-o");
-  darkModeState = state;
+  document.documentElement.classList.toggle('dark-mode', state)
+  document.documentElement.classList.toggle('light-mode', !state)
+  icon.classList.remove('fa-sun-o')
+  icon.classList.remove('fa-moon-o')
+  icon.classList.add(state ? 'fa-sun-o' : 'fa-moon-o')
+  darkModeState = state
 }

-document.addEventListener("DOMContentLoaded", load);
+document.addEventListener('DOMContentLoaded', load)
@@ -35,13 +35,15 @@ Options available to docker installations:
    ``/var/lib/docker/volumes`` on the host and you need to be root in order
    to access them.

-   Paperless uses 3 volumes:
+   Paperless uses 4 volumes:

    * ``paperless_media``: This is where your documents are stored.
    * ``paperless_data``: This is where auxiliary data is stored. This
      folder also contains the SQLite database, if you use it.
    * ``paperless_pgdata``: Exists only if you use PostgreSQL and contains
      the database.
+   * ``paperless_dbdata``: Exists only if you use MariaDB and contains
+     the database.

 Options available to bare-metal and non-docker installations:

@@ -49,7 +51,7 @@ Options available to bare-metal and non-docker installations:
    crashes at some point or your disk fails, you can simply copy the folder back
    into place and it works.

-   When using PostgreSQL, you'll also have to backup the database.
+   When using PostgreSQL or MariaDB, you'll also have to backup the database.

 .. _migrating-restoring:
@@ -117,6 +119,23 @@ Then you can start paperless-ngx with ``-d`` to have it run in the background.

       image: ghcr.io/paperless-ngx/paperless-ngx:latest

+.. note::
+
+  In version 1.7.1 and onwards, the Docker image can now be pinned to a release series.
+  This is often combined with automatic updaters such as Watchtower to allow safer
+  unattended upgrading to new bugfix releases only. It is still recommended to always
+  review release notes before upgrading. To pin your install to a release series, edit
+  the ``docker-compose.yml`` and find the line that says
+
+  .. code::
+
+    image: ghcr.io/paperless-ngx/paperless-ngx:latest
+
+  and replace the version with the series you want to track, for example:
+
+  .. code::
+
+    image: ghcr.io/paperless-ngx/paperless-ngx:1.7
+
 Bare Metal Route
 ================
@@ -270,6 +289,10 @@ When you use the provided docker compose script, put the export inside the
 ``export`` folder in your paperless source directory. Specify ``../export``
 as the ``source``.

+.. note::
+
+  Importing from a previous version of Paperless may work, but for best results
+  it is suggested to match the versions.
+
 .. _utilities-retagger:
@@ -289,6 +312,7 @@ there are tools for it.
    -c, --correspondent
    -T, --tags
    -t, --document_type
+   -s, --storage_path
    -i, --inbox-only
    --use-first
    -f, --overwrite
@@ -297,7 +321,7 @@ Run this after changing or adding matching rules. It'll loop over all
 of the documents in your database and attempt to match documents
 according to the new rules.

-Specify any combination of ``-c``, ``-T`` and ``-t`` to have the
+Specify any combination of ``-c``, ``-T``, ``-t`` and ``-s`` to have the
 retagger perform matching of the specified metadata type. If you don't
 specify any of these options, the document retagger won't do anything.
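A hedged example invocation for docker installs (service name ``webserver`` as in the compose files above):

.. code:: bash

   # Re-run matching for tags, document types and storage paths,
   # restricted to documents that carry an inbox tag.
   docker-compose run --rm webserver document_retagger -T -t -s -i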
@@ -369,8 +393,8 @@ the naming scheme.

 .. warning::

-  Since this command moves you documents around alot, it is advised to to
-  a backup before. The renaming logic is robust and will never overwrite
+  Since this command moves your documents, it is advised to do
+  a backup beforehand. The renaming logic is robust and will never overwrite
   or delete a file, but you can't ever be careful enough.

 .. code::
@@ -379,7 +403,7 @@ the naming scheme.

 The command takes no arguments and processes all your documents at once.

-Learn how to use :ref:`Management Utilities<Management utilities>`.
+Learn how to use :ref:`Management Utilities<utilities-management-commands>`.


 .. _utilities-sanity-checker:
@@ -7,12 +7,12 @@ easier.

 .. _advanced-matching:

-Matching tags, correspondents and document types
-################################################
+Matching tags, correspondents, document types, and storage paths
+################################################################

-Paperless will compare the matching algorithms defined by every tag and
-correspondent already set in your database to see if they apply to the text in
-a document. In other words, if you defined a tag called ``Home Utility``
+Paperless will compare the matching algorithms defined by every tag, correspondent,
+document type, and storage path in your database to see if they apply to the text
+in a document. In other words, if you define a tag called ``Home Utility``
 that had a ``match`` property of ``bc hydro`` and a ``matching_algorithm`` of
 ``literal``, Paperless will automatically tag your newly-consumed document with
 your ``Home Utility`` tag so long as the text ``bc hydro`` appears in the body
@@ -22,10 +22,10 @@ The matching logic is quite powerful. It supports searching the text of your
 document with different algorithms, and as such, some experimentation may be
 necessary to get things right.

-In order to have a tag, correspondent, or type assigned automatically to newly
-consumed documents, assign a match and matching algorithm using the web
-interface. These settings define when to assign correspondents, tags, and types
-to documents.
+In order to have a tag, correspondent, document type, or storage path assigned
+automatically to newly consumed documents, assign a match and matching algorithm
+using the web interface. These settings define when to assign tags, correspondents,
+document types, and storage paths to documents.

 The following algorithms are available:

@@ -37,7 +37,7 @@ The following algorithms are available:
 * **Literal:** Matches only if the match appears exactly as provided (i.e. preserve ordering) in the PDF.
 * **Regular expression:** Parses the match as a regular expression and tries to
   find a match within the document.
-* **Fuzzy match:** I dont know. Look at the source.
+* **Fuzzy match:** I don't know. Look at the source.
 * **Auto:** Tries to automatically match new documents. This does not require you
   to set a match. See the notes below.

@@ -47,9 +47,9 @@ defining a match text of ``"Bank of America" BofA`` using the *any* algorithm,
 will match documents that contain either "Bank of America" or "BofA", but will
 not match documents containing "Bank of South America".

-Then just save your tag/correspondent and run another document through the
-consumer. Once complete, you should see the newly-created document,
-automatically tagged with the appropriate data.
+Then just save your tag, correspondent, document type, or storage path and run
+another document through the consumer. Once complete, you should see the
+newly-created document, automatically tagged with the appropriate data.


 .. _advanced-automatic_matching:
@@ -58,9 +58,9 @@ Automatic matching
 ==================

 Paperless-ngx comes with a new matching algorithm called *Auto*. This matching
-algorithm tries to assign tags, correspondents, and document types to your
-documents based on how you have already assigned these on existing documents. It
-uses a neural network under the hood.
+algorithm tries to assign tags, correspondents, document types, and storage paths
+to your documents based on how you have already assigned these on existing documents.
+It uses a neural network under the hood.

 If, for example, all your bank statements of your account 123 at the Bank of
 America are tagged with the tag "bofa_123" and the matching algorithm of this
@@ -80,20 +80,21 @@ feature:
   that the neural network only learns from documents which you have correctly
   tagged before.
 * The matching algorithm can only work if there is a correlation between the
-  tag, correspondent, or document type and the document itself. Your bank
-  statements usually contain your bank account number and the name of the bank,
-  so this works reasonably well, However, tags such as "TODO" cannot be
-  automatically assigned.
+  tag, correspondent, document type, or storage path and the document itself.
+  Your bank statements usually contain your bank account number and the name
+  of the bank, so this works reasonably well. However, tags such as "TODO"
+  cannot be automatically assigned.
 * The matching algorithm needs a reasonable number of documents to identify when
-  to assign tags, correspondents, and types. If one out of a thousand documents
-  has the correspondent "Very obscure web shop I bought something five years
-  ago", it will probably not assign this correspondent automatically if you buy
-  something from them again. The more documents, the better.
+  to assign tags, correspondents, storage paths, and types. If one out of a
+  thousand documents has the correspondent "Very obscure web shop I bought
+  something five years ago", it will probably not assign this correspondent
+  automatically if you buy something from them again. The more documents, the better.
 * Paperless also needs a reasonable amount of negative examples to decide when
-  not to assign a certain tag, correspondent or type. This will usually be the
-  case as you start filling up paperless with documents. Example: If all your
-  documents are either from "Webshop" and "Bank", paperless will assign one of
-  these correspondents to ANY new document, if both are set to automatic matching.
+  not to assign a certain tag, correspondent, document type, or storage path. This will
+  usually be the case as you start filling up paperless with documents.
+  Example: If all your documents are either from "Webshop" and "Bank", paperless
+  will assign one of these correspondents to ANY new document, if both are set
+  to automatic matching.

 Hooking into the consumption process
 ####################################
@@ -120,10 +121,10 @@ Pre-consumption script
 ======================

 Executed after the consumer sees a new document in the consumption folder, but
-before any processing of the document is performed. This script receives exactly
-one argument:
+before any processing of the document is performed. This script can access the
+following relevant environment variables set:

-* Document file name
+* ``DOCUMENT_SOURCE_PATH``

 A simple but common example for this would be creating a simple script like
 this:
@@ -133,7 +134,7 @@ this:
 .. code:: bash

    #!/usr/bin/env bash
-   pdf2pdfocr.py -i ${1}
+   pdf2pdfocr.py -i ${DOCUMENT_SOURCE_PATH}

 ``/etc/paperless.conf``

@@ -156,16 +157,21 @@ Post-consumption script
 =======================

 Executed after the consumer has successfully processed a document and has moved it
-into paperless. It receives the following arguments:
+into paperless. It receives the following environment variables:

-* Document id
-* Generated file name
-* Source path
-* Thumbnail path
-* Download URL
-* Thumbnail URL
-* Correspondent
-* Tags
+* ``DOCUMENT_ID``
+* ``DOCUMENT_FILE_NAME``
+* ``DOCUMENT_CREATED``
+* ``DOCUMENT_MODIFIED``
+* ``DOCUMENT_ADDED``
+* ``DOCUMENT_SOURCE_PATH``
+* ``DOCUMENT_ARCHIVE_PATH``
+* ``DOCUMENT_THUMBNAIL_PATH``
+* ``DOCUMENT_DOWNLOAD_URL``
+* ``DOCUMENT_THUMBNAIL_URL``
+* ``DOCUMENT_CORRESPONDENT``
+* ``DOCUMENT_TAGS``
+* ``DOCUMENT_ORIGINAL_FILENAME``

 The script can be in any language, but for a simple shell script
 example, you can take a look at `post-consumption-example.sh`_ in this project.
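As a hedged minimal sketch of such a post-consumption script using the environment variables listed above (the log path is an arbitrary example):

.. code:: bash

   #!/usr/bin/env bash
   # Append one line per consumed document; every DOCUMENT_* variable is
   # provided by paperless when it invokes this script.
   echo "consumed ${DOCUMENT_ID}: ${DOCUMENT_FILE_NAME} (tags: ${DOCUMENT_TAGS})" \
       >> /usr/src/paperless/data/post-consume.log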
@@ -179,13 +185,14 @@ Assumed you have ``/home/foo/paperless-ngx/scripts/post-consumption-example.sh``
 You can pass that script into the consumer container via a host mount in your ``docker-compose.yml``.

 .. code:: bash
+
   ...
   consumer:
     ...
     volumes:
       ...
       - /home/paperless-ngx/scripts:/path/in/container/scripts/
       ...

 Example (docker-compose.yml): ``- /home/foo/paperless-ngx/scripts:/usr/src/paperless/scripts``
@@ -242,7 +249,7 @@ will create a directory structure as follows:
 last filename a document was stored as. If you do rename a file, paperless will
 report your files as missing and won't be able to find them.

-Paperless provides the following placeholders withing filenames:
+Paperless provides the following placeholders within filenames:

 * ``{asn}``: The archive serial number of the document, or "none".
 * ``{correspondent}``: The name of the correspondent, or "none".
@@ -267,6 +274,17 @@ If paperless detects that two documents share the same filename, paperless will
 append ``_01``, ``_02``, etc to the filename. This happens if all the placeholders in a filename
 evaluate to the same value.

+.. hint::
+
+  You can affect how empty placeholders are treated by changing the following setting to
+  `true`.
+
+  .. code::
+
+    PAPERLESS_FILENAME_FORMAT_REMOVE_NONE=True
+
+  Doing this results in all empty placeholders resolving to "" instead of "none" as stated above.
+  Spaces before empty placeholders are removed as well, empty directories are omitted.
+
 .. hint::

   Paperless checks the filename of a document whenever it is saved. Therefore,
@@ -289,3 +307,59 @@ evaluate to the same value.

 However, keep in mind that inside docker, if files get stored outside of the
 predefined volumes, they will be lost after a restart of paperless.
+
+
+Storage paths
+#############
+
+One of the best things in Paperless is that you can not only access the documents via the
+web interface, but also via the file system.
+
+When a single storage layout is not sufficient for your use case, storage paths come to
+the rescue. Storage paths allow you to configure more precisely where each document is stored
+in the file system.
+
+- Each storage path is a `PAPERLESS_FILENAME_FORMAT` and follows the rules described above
+- Each document is assigned a storage path using the matching algorithms described above, but
+  can be overwritten at any time
+
+For example, you could define the following two storage paths:
+
+1. Normal communications are put into a folder structure sorted by `year/correspondent`
+2. Communications with insurance companies are stored in a flat structure with longer file names,
+   but containing the full date of the correspondence.
+
+.. code::
+
+  By Year = {created_year}/{correspondent}/{title}
+  Insurances = Insurances/{correspondent}/{created_year}-{created_month}-{created_day} {title}
+
+
+If you then map these storage paths to the documents, you might get the following result.
+For simplicity, `By Year` defines the same structure as in the previous example above.
+
+.. code:: text
+
+  2019/                        # By Year
+    My bank/
+      Statement January.pdf
+      Statement February.pdf
+
+  Insurances/                  # Insurances
+    Healthcare 123/
+      2022-01-01 Statement January.pdf
+      2022-02-02 Letter.pdf
+      2022-02-03 Letter.pdf
+    Dental 456/
+      2021-12-01 New Conditions.pdf
+
+
+.. hint::
+
+  Defining a storage path is optional. If no storage path is defined for a document, the global
+  `PAPERLESS_FILENAME_FORMAT` is applied.
+
+.. caution::
+
+  If you adjust the format of an existing storage path, old documents don't get relocated automatically.
+  You need to run the :ref:`document renamer <utilities-renamer>` to adjust their paths.
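A hedged sketch of that relocation step for docker installs (service name ``webserver`` as in the compose files above):

.. code:: bash

   # Re-apply the current storage paths / filename format to existing documents.
   docker-compose run --rm webserver document_renamer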
@@ -31,7 +31,8 @@ The objects served by the document endpoint contain the following fields:
|
|||||||
* ``tags``: List of IDs of tags assigned to this document, or empty list.
|
* ``tags``: List of IDs of tags assigned to this document, or empty list.
|
||||||
* ``document_type``: Document type of this document, or null.
|
* ``document_type``: Document type of this document, or null.
|
||||||
* ``correspondent``: Correspondent of this document or null.
|
* ``correspondent``: Correspondent of this document or null.
|
||||||
* ``created``: The date at which this document was created.
|
* ``created``: The date time at which this document was created.
|
||||||
|
* ``created_date``: The date (YYYY-MM-DD) at which this document was created. Optional. If also passed with created, this is ignored.
|
||||||
* ``modified``: The date at which this document was last edited in paperless. Read-only.
|
* ``modified``: The date at which this document was last edited in paperless. Read-only.
|
||||||
* ``added``: The date at which this document was added to paperless. Read-only.
|
* ``added``: The date at which this document was added to paperless. Read-only.
|
||||||
* ``archive_serial_number``: The identifier of this document in a physical document archive.
|
* ``archive_serial_number``: The identifier of this document in a physical document archive.
|
||||||
@@ -240,11 +241,13 @@ be instructed to consume the document from there.
The endpoint supports the following optional form fields:

* ``title``: Specify a title that the consumer should use for the document.
* ``created``: Specify a DateTime where the document was created (e.g. "2016-04-19" or "2016-04-19 06:15:00+02:00").
* ``correspondent``: Specify the ID of a correspondent that the consumer should use for the document.
* ``document_type``: Similar to correspondent.
* ``tags``: Similar to correspondent. Specify this multiple times to have multiple tags added
  to the document.

The endpoint will immediately return "OK" if the document consumption process
was started successfully. No additional status information about the consumption
process itself is available, since that happens in a different process.
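
A minimal upload sketch using these form fields (host, token and file name are placeholders):

.. code:: shell-session

    $ curl -s -X POST http://localhost:8000/api/documents/post_document/ \
        -H "Authorization: Token <your-token>" \
        -F document=@invoice.pdf \
        -F title="Invoice" \
        -F tags=2 -F tags=7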
docs/changelog.md (new file, 2064 lines): diff suppressed because it is too large
docs/changelog.rst (1691 lines): diff suppressed because it is too large
docs/conf.py (12 lines changed)
@@ -2,6 +2,8 @@ import sphinx_rtd_theme

__version__ = None
__full_version_str__ = None
__major_minor_version_str__ = None
exec(open("../src/paperless/version.py").read())
@@ -12,13 +14,17 @@ extensions = [
    "sphinx.ext.imgmath",
    "sphinx.ext.viewcode",
    "sphinx_rtd_theme",
    "myst_parser",
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]

# The suffix of source filenames.
source_suffix = {
    ".rst": "restructuredtext",
    ".md": "markdown",
}

# The encoding of source files.
# source_encoding = 'utf-8-sig'
@@ -41,9 +47,9 @@ copyright = "2015-2022, Daniel Quinn, Jonas Winkler, and the paperless-ngx team"
#
# The short X.Y version.
version = __major_minor_version_str__
# The full version, including alpha/beta/rc tags.
release = __full_version_str__

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@@ -27,11 +27,23 @@ PAPERLESS_REDIS=<url>
    This is required for processing scheduled tasks such as email fetching, index
    optimization and for training the automatic document matcher.

    * If your Redis server needs login credentials PAPERLESS_REDIS = ``redis://<username>:<password>@<host>:<port>``

    * With the requirepass option PAPERLESS_REDIS = ``redis://:<password>@<host>:<port>``

    `More information on securing your Redis instance <https://redis.io/docs/getting-started/#securing-redis>`_.

    Defaults to redis://localhost:6379.

PAPERLESS_DBENGINE=<engine_name>
    Optional, gives the ability to choose Postgres or MariaDB for database engine.
    Available options are `postgresql` and `mariadb`.

    Default is `postgresql`.

PAPERLESS_DBHOST=<hostname>
    By default, sqlite is used as the database backend. This can be changed here.
    Set PAPERLESS_DBHOST and another database will be used instead of sqlite.

PAPERLESS_DBPORT=<port>
    Adjust port if necessary.

@@ -39,17 +51,17 @@ PAPERLESS_DBPORT=<port>
    Default is 5432.

PAPERLESS_DBNAME=<name>
    Database name in PostgreSQL or MariaDB.

    Defaults to "paperless".

PAPERLESS_DBUSER=<name>
    Database user in PostgreSQL or MariaDB.

    Defaults to "paperless".

PAPERLESS_DBPASS=<password>
    Database password for PostgreSQL or MariaDB.

    Defaults to "paperless".

@@ -60,6 +72,13 @@ PAPERLESS_DBSSLMODE=<mode>

    Default is ``prefer``.

PAPERLESS_DB_TIMEOUT=<float>
    Amount of time for a database connection to wait for the database to unlock.
    Mostly applicable for an sqlite based installation, consider changing to postgresql
    if you need to increase this.

    Defaults to unset, keeping the Django defaults.
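
As a sketch, a MariaDB setup combines these settings like so in ``docker-compose.env`` (host name and password are placeholders):

.. code::

    PAPERLESS_DBENGINE=mariadb
    PAPERLESS_DBHOST=mariadb
    PAPERLESS_DBPORT=3306
    PAPERLESS_DBNAME=paperless
    PAPERLESS_DBUSER=paperless
    PAPERLESS_DBPASS=<secure password>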

Paths and folders
#################

@@ -111,6 +130,14 @@ PAPERLESS_FILENAME_FORMAT=<format>

    Default is none, which disables this feature.

PAPERLESS_FILENAME_FORMAT_REMOVE_NONE=<bool>
    Tells paperless to omit placeholders in `PAPERLESS_FILENAME_FORMAT` that would resolve
    to 'none' from the resulting filename. This also holds true for directory names.
    See :ref:`advanced-file_name_handling` for details.

    Defaults to `false` which disables this feature.
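
A quick sketch of the effect (illustrative values, not from the original text):

.. code::

    PAPERLESS_FILENAME_FORMAT={correspondent}/{title}
    # document without a correspondent, option disabled: none/Letter.pdf
    # document without a correspondent, option enabled:  Letter.pdf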

PAPERLESS_LOGGING_DIR=<path>
    This is where paperless will store log files.

@@ -130,6 +157,8 @@ PAPERLESS_LOGROTATE_MAX_BACKUPS=<num>

    Defaults to 20.

.. _hosting-and-security:

Hosting & Security
##################

@@ -142,7 +171,24 @@ PAPERLESS_SECRET_KEY=<key>

    Default is listed in the file ``src/paperless/settings.py``.

PAPERLESS_URL=<url>
    This setting can be used to set the three options below (ALLOWED_HOSTS,
    CORS_ALLOWED_HOSTS and CSRF_TRUSTED_ORIGINS). If the other options are
    set, the values will be combined with this one. Do not include a trailing
    slash. E.g. https://paperless.domain.com

    Defaults to empty string, leaving the other settings unaffected.

PAPERLESS_CSRF_TRUSTED_ORIGINS=<comma-separated-list>
    A list of trusted origins for unsafe requests (e.g. POST). As of Django 4.0
    this is required to access the Django admin via the web.
    See https://docs.djangoproject.com/en/4.0/ref/settings/#csrf-trusted-origins

    Can also be set using PAPERLESS_URL (see above).

    Defaults to empty string, which does not add any origins to the trusted list.
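
As a sketch, a single line in ``docker-compose.env`` covers all three related settings (the domain is a placeholder):

.. code::

    PAPERLESS_URL=https://paperless.example.com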

PAPERLESS_ALLOWED_HOSTS=<comma-separated-list>
    If you're planning on putting Paperless on the open internet, then you
    really should set this value to the domain name you're using. Failing to do
    so leaves you open to HTTP host header attacks:

@@ -151,12 +197,19 @@ PAPERLESS_ALLOWED_HOSTS<comma-separated-list>
    Just remember that this is a comma-separated list, so "example.com" is fine,
    as is "example.com,www.example.com", but NOT " example.com" or "example.com,"

    Can also be set using PAPERLESS_URL (see above).

    If manually set, please remember to include "localhost". Otherwise the Docker
    healthcheck will fail.

    Defaults to "*", which is all hosts.

PAPERLESS_CORS_ALLOWED_HOSTS=<comma-separated-list>
    You need to add your servers to the list of allowed hosts that can do CORS
    calls. Set this to your public domain name.

    Can also be set using PAPERLESS_URL (see above).

    Defaults to "http://localhost:8000".

PAPERLESS_FORCE_SCRIPT_NAME=<path>

@@ -168,9 +221,16 @@ PAPERLESS_FORCE_SCRIPT_NAME=<path>
PAPERLESS_STATIC_URL=<path>
    Override the STATIC_URL here. Unless you're hosting Paperless off a
    subpath like /paperless/, you probably don't need to change this.
    If you do change it, be sure to include the trailing slash.

    Defaults to "/static/".

    .. note::

        When hosting paperless behind a reverse proxy like Traefik or Nginx at a subpath e.g.
        example.com/paperlessngx you will also need to set ``PAPERLESS_FORCE_SCRIPT_NAME``
        (see above).
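
A sketch of the settings pair for subpath hosting (the path is a placeholder):

.. code::

    PAPERLESS_FORCE_SCRIPT_NAME=/paperlessngx
    PAPERLESS_STATIC_URL=/paperlessngx/static/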

PAPERLESS_AUTO_LOGIN_USERNAME=<username>
    Specify a username here so that paperless will automatically perform login
    with the selected user.

@@ -185,7 +245,7 @@ PAPERLESS_AUTO_LOGIN_USERNAME=<username>
PAPERLESS_ADMIN_USER=<username>
    If this environment variable is specified, Paperless automatically creates
    a superuser with the provided username at start. This is useful in cases
    where you can not run the `createsuperuser` command separately, such as Kubernetes
    or AWS ECS.

    Requires `PAPERLESS_ADMIN_PASSWORD` to be set.

@@ -389,6 +449,24 @@ PAPERLESS_OCR_IMAGE_DPI=<num>
    Default is none, which will automatically calculate image DPI so that
    the produced PDF documents are A4 sized.

PAPERLESS_OCR_MAX_IMAGE_PIXELS=<num>
    Paperless will raise a warning when OCRing images which are over this limit and
    will not OCR images which are more than twice this limit. Note this does not
    prevent the document from being consumed, but could result in missing text content.

    If unset, will default to the value determined by
    `Pillow <https://pillow.readthedocs.io/en/stable/reference/Image.html#PIL.Image.MAX_IMAGE_PIXELS>`_.

    .. note::

        Increasing this limit could cause Paperless to consume additional resources
        when consuming a file. Be sure you have sufficient system resources.

    .. caution::

        The limit is intended to prevent malicious files from consuming system resources
        and causing crashes and other errors. Only increase this value if you are certain
        your documents are not malicious and you need the text which was not OCRed.

PAPERLESS_OCR_USER_ARGS=<json>
    OCRmyPDF offers many more options. Use this parameter to specify any

@@ -439,7 +517,7 @@ PAPERLESS_TIKA_GOTENBERG_ENDPOINT=<url>
    Defaults to "http://localhost:3000".

If you run paperless on docker, you can add those services to the docker-compose
file (see the provided ``docker-compose.sqlite-tika.yml`` file for reference). The changes
required are as follows:

.. code:: yaml

@@ -460,19 +538,22 @@ requires are as follows:
    # ...

    gotenberg:
        image: gotenberg/gotenberg:7.4
        restart: unless-stopped
        command:
            - "gotenberg"
            - "--chromium-disable-routes=true"

    tika:
        image: ghcr.io/paperless-ngx/tika:latest
        restart: unless-stopped

Add the configuration variables to the environment of the webserver (alternatively
put the configuration in the ``docker-compose.env`` file) and add the additional
services below the webserver service. Watch out for indentation.

Make sure to use the correct format `PAPERLESS_TIKA_ENABLED = 1` so python_dotenv can parse the statement correctly.
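
A sketch of the matching environment entries (the endpoints assume the compose service names used above):

.. code::

    PAPERLESS_TIKA_ENABLED=1
    PAPERLESS_TIKA_ENDPOINT=http://tika:9998
    PAPERLESS_TIKA_GOTENBERG_ENDPOINT=http://gotenberg:3000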

Software tweaks
###############

@@ -481,6 +562,8 @@ PAPERLESS_TASK_WORKERS=<num>
    maintain the automatic matching algorithm, check emails, consume documents,
    etc. This variable specifies how many things it will do in parallel.

    Defaults to 1.

PAPERLESS_THREADS_PER_WORKER=<num>
    Furthermore, paperless uses multiple threads when consuming documents to

@@ -528,6 +611,10 @@ PAPERLESS_WORKER_TIMEOUT=<num>
    large documents within the default 1800 seconds. So extending this timeout
    may prove to be useful on weak hardware setups.

PAPERLESS_WORKER_RETRY=<num>
    If PAPERLESS_WORKER_TIMEOUT has been configured, the retry time for a task can
    also be configured. By default, this value will be set to 10s more than the
    worker timeout. This value should never be set less than the worker timeout.

PAPERLESS_TIME_ZONE=<timezone>
    Set the time zone here.

@@ -548,6 +635,28 @@ PAPERLESS_CONSUMER_POLLING=<num>

    Defaults to 0, which disables polling and uses filesystem notifications.

PAPERLESS_CONSUMER_POLLING_RETRY_COUNT=<num>
    If consumer polling is enabled, sets the number of times paperless will check for a
    file to remain unmodified.

    Defaults to 5.

PAPERLESS_CONSUMER_POLLING_DELAY=<num>
    If consumer polling is enabled, sets the delay in seconds between each check (above) paperless
    will do while waiting for a file to remain unmodified.

    Defaults to 5.

.. _configuration-inotify:

PAPERLESS_CONSUMER_INOTIFY_DELAY=<num>
    Sets the time in seconds the consumer will wait for additional events
    from inotify before the consumer will consider a file ready and begin consumption.
    Certain scanners or network setups may generate multiple events for a single file,
    leading to multiple consumers working on the same file. Configure this to
    prevent that.

    Defaults to 0.5 seconds.

PAPERLESS_CONSUMER_DELETE_DUPLICATES=<bool>
    When the consumer detects a duplicate document, it will not touch the

@@ -576,6 +685,37 @@ PAPERLESS_CONSUMER_SUBDIRS_AS_TAGS=<bool>

    Defaults to false.

PAPERLESS_CONSUMER_ENABLE_BARCODES=<bool>
    Enables the scanning and page separation based on detected barcodes.
    This allows for scanning and adding multiple documents per uploaded
    file, which are separated by one or multiple barcode pages.

    For ease of use, it is suggested to use a standardized separation page,
    e.g. `here <https://www.alliancegroup.co.uk/patch-codes.htm>`_.

    If no barcodes are detected in the uploaded file, no page separation
    will happen.

    The original document will be removed and the separated pages will be
    saved as pdf.

    Defaults to false.

PAPERLESS_CONSUMER_BARCODE_TIFF_SUPPORT=<bool>
    Whether TIFF image files should be scanned for barcodes.
    This will automatically convert any TIFF image(s) to pdfs for later
    processing.
    This only has an effect if PAPERLESS_CONSUMER_ENABLE_BARCODES has been
    enabled.

    Defaults to false.

PAPERLESS_CONSUMER_BARCODE_STRING=PATCHT
    Defines the string to be detected as a separator barcode.
    If paperless is used with the PATCH-T separator pages, users
    shouldn't change this.

    Defaults to "PATCHT".
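
A sketch of enabling barcode-based separation in ``docker-compose.env`` (values are illustrative):

.. code::

    PAPERLESS_CONSUMER_ENABLE_BARCODES=true
    PAPERLESS_CONSUMER_BARCODE_TIFF_SUPPORT=true
    PAPERLESS_CONSUMER_BARCODE_STRING=PATCHT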

PAPERLESS_CONVERT_MEMORY_LIMIT=<num>
    On smaller systems, or even in the case of Very Large Documents, the consumer

@@ -600,13 +740,6 @@ PAPERLESS_CONVERT_TMPDIR=<path>

    Default is none, which disables the temporary directory.

PAPERLESS_POST_CONSUME_SCRIPT=<filename>
    After a document is consumed, Paperless can trigger an arbitrary script if
    you like. This script will be passed a number of arguments for you to work

@@ -622,8 +755,24 @@ PAPERLESS_FILENAME_DATE_ORDER=<format>
    The filename will be checked first, and if nothing is found, the document
    text will be checked as normal.

    A date in a filename must have some separators (`.`, `-`, `/`, etc)
    for it to be parsed.

    Defaults to none, which disables this feature.

PAPERLESS_NUMBER_OF_SUGGESTED_DATES=<num>
    Paperless searches an entire document for dates. The first date found will
    be used as the initial value for the created date. When this variable is
    greater than 0 (or left at its default value), paperless will also suggest
    other dates found in the document, up to a maximum of this setting. Note that
    duplicates will be removed, which can result in fewer dates displayed in the
    frontend than this setting value.

    The task to find all dates can be time-consuming and increases with a higher
    (maximum) number of suggested dates and slower hardware.

    Defaults to 3. Set to 0 to disable this feature.

PAPERLESS_THUMBNAIL_FONT_NAME=<filename>
    Paperless creates thumbnails for plain text files by rendering the content
    of the file on an image and uses a predefined font for that. This

@@ -639,10 +788,7 @@ PAPERLESS_IGNORE_DATES=<string>
    this process. This is useful for special dates (like date of birth) that appear
    in documents regularly but are very unlikely to be the document's creation date.

    The date is parsed using the order specified in PAPERLESS_DATE_ORDER.

    Defaults to an empty string to not ignore any dates.
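
An illustrative value (the dates themselves are placeholders):

.. code::

    PAPERLESS_IGNORE_DATES=1985-05-01,1970-01-01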

@@ -659,7 +805,7 @@ PAPERLESS_CONSUMER_IGNORE_PATTERNS=<json>

    This can be adjusted by configuring a custom json array with patterns to exclude.

    Defaults to ``[".DS_STORE/*", "._*", ".stfolder/*", ".stversions/*", ".localized/*", "desktop.ini"]``.

Binaries
########

@@ -672,13 +818,10 @@ the program doesn't automatically execute it (ie. the program isn't in your
$PATH), then you'll need to specify the literal path for that program.

PAPERLESS_CONVERT_BINARY=<path>
    Defaults to "convert".

PAPERLESS_GS_BINARY=<path>
    Defaults to "gs".

.. _configuration-docker:

@@ -695,9 +838,14 @@ PAPERLESS_WEBSERVER_WORKERS=<num>
    also loads the entire application into memory separately, so increasing this value
    will increase RAM usage.

    Defaults to 1.

PAPERLESS_BIND_ADDR=<ip address>
    The IP address the webserver will listen on inside the container. There are
    special setups where you may need to configure this value to restrict the
    IP address or interface the webserver listens on.

    Defaults to [::], meaning all interfaces, including IPv6.

PAPERLESS_PORT=<port>
    The port number the webserver will listen on inside the container. There are

@@ -752,3 +900,26 @@ PAPERLESS_OCR_LANGUAGES=<list>
    PAPERLESS_OCR_LANGUAGE=tur

    Defaults to none, which does not install any additional languages.

.. _configuration-update-checking:

Update Checking
###############

PAPERLESS_ENABLE_UPDATE_CHECK=<bool>
    Enable (or disable) the automatic check for available updates. This feature is disabled
    by default, but if it is not explicitly set Paperless-ngx will show a message about this.

    If enabled, the feature works by pinging the GitHub API for the latest release, e.g.
    https://api.github.com/repos/paperless-ngx/paperless-ngx/releases/latest,
    to determine whether a new version is available.

    Actual updating of the app must still be performed manually.

    Note that for users of third-party containers, e.g. linuxserver.io, this notification
    may be 'ahead' of a new release from the third-party maintainers.

    In either case, no tracking data is collected by the app in any way.

    Defaults to none, which disables the feature.

@@ -34,6 +34,8 @@ it fixed for everyone!
Before contributing please review our `code of conduct`_ and other important
information in the `contributing guidelines`_.

.. _code-formatting-with-pre-commit-hooks:

Code formatting with pre-commit Hooks
=====================================

@@ -77,7 +79,7 @@ To do the setup you need to perform the steps from the following chapters in a c
6. You can now either ...

   * install redis or
   * use the included scripts/start-services.sh to use docker to fire up a redis instance (and some other services such as tika, gotenberg and a database server) or
   * spin up a bare redis container

.. code:: shell-session

@@ -85,6 +87,7 @@ To do the setup you need to perform the steps from the following chapters in a c
    docker run -d -p 6379:6379 --restart unless-stopped redis:latest

7. Install the python dependencies by running the following in the src/ directory.

   .. code:: shell-session

       pipenv install --dev

@@ -139,8 +142,9 @@ Testing and code style:
* Run ``pytest`` in the src/ directory to execute all tests. This also generates a HTML coverage
  report. When running tests, paperless.conf is loaded as well. However: the tests rely on the default
  configuration. This is not ideal. But for now, make sure no settings except for DEBUG are overridden when testing.
* Coding style is enforced by the Git pre-commit hooks. These will ensure your code is formatted and do some
  linting when you do a `git commit` (see the sketch below for enabling the hooks).
* You can also run ``black`` manually to format your code.
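
A minimal sketch of enabling the hooks locally (assumes the dev dependencies, which include ``pre-commit``, are installed):

.. code:: shell-session

    $ pre-commit install
    $ pre-commit run --all-files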

.. note::

@@ -182,6 +186,31 @@ X-Frame-Options are in place so that the front end behaves exactly as in production
relies on you being logged into the back end. Without a valid session, the front end will simply
not work.

Testing and code style:

* The frontend code (.ts, .html, .scss) uses ``prettier`` for code formatting via the Git
  ``pre-commit`` hooks which run automatically on commit. See
  :ref:`above <code-formatting-with-pre-commit-hooks>` for installation. You can also run this
  via cli with a command such as

  .. code:: shell-session

      $ git ls-files -- '*.ts' | xargs pre-commit run prettier --files

* Frontend testing uses jest and cypress. There is currently a need for significantly more
  frontend tests. Unit tests and e2e tests, respectively, can be run non-interactively with:

  .. code:: shell-session

      $ ng test
      $ npm run e2e:ci

  Cypress also includes a UI which can be run from within the ``src-ui`` directory with

  .. code:: shell-session

      $ ./node_modules/.bin/cypress open

In order to build the front end and serve it as part of django, execute

.. code:: shell-session

@@ -305,11 +334,17 @@ directory.
Building the Docker image
=========================

The docker image is primarily built by the GitHub actions workflow, but it can be
faster when developing to build and tag an image locally.

To provide the build arguments automatically, build the image using the helper
script ``build-docker-image.sh``.

Building the docker image from source:

.. code:: shell-session

    ./build-docker-image.sh Dockerfile -t <your-tag>

Extending Paperless
===================

docs/faq.rst (13 lines changed)
@@ -5,11 +5,11 @@ Frequently asked questions

**Q:** *What's the general plan for Paperless-ngx?*

**A:** While Paperless-ngx is already considered largely "feature-complete" it is a community-driven
project and development will be guided in this way. New features can be submitted via
GitHub discussions and "up-voted" by the community but this is not a guarantee the feature
will be implemented. This project will always be open to collaboration in the form of PRs,
ideas etc.

**Q:** *I'm using docker. Where are my documents?*

@@ -81,11 +81,10 @@ python requirements do not have precompiled packages for ARM / ARM64. Installation
of these will require additional development libraries and compilation will take
a long time.

**Q:** *How do I run this on Unraid?*

**A:** Paperless-ngx is available as `community app <https://unraid.net/community/apps?q=paperless-ngx>`_
in Unraid. `Uli Fahrer <https://github.com/Tooa>`_ created a container template for that.

**Q:** *How do I run this on my toaster?*

@@ -44,7 +44,7 @@ resources in the documentation:
  learn about how paperless automates all tagging using machine learning.
* Paperless now comes with a :ref:`proper email consumer <usage-email>`
  that's fully tested and production ready.
* Paperless creates searchable PDF/A documents from whatever you put into
  the consumption directory. This means that you can select text in
  image-only documents coming from your scanner.
* See :ref:`this note <utilities-encyption>` about GnuPG encryption in
@@ -52,7 +52,7 @@ resources in the documentation:
* Paperless is now integrated with a
  :ref:`task processing queue <setup-task_processor>` that tells you
  at a glance when and why something is not working.
* The :doc:`changelog </changelog>` contains a detailed list of all changes
  in paperless-ngx.

Contents

@@ -0,0 +1 @@
myst-parser==0.17.2

@@ -1,134 +1,8 @@
.. _scanners:

*******************
Scanners & Software
*******************

Paperless-ngx is compatible with many different scanners and scanning tools. A user-maintained list of scanners and other software is available on `the wiki <https://github.com/paperless-ngx/paperless-ngx/wiki/Scanner-&-Software-Recommendations>`_.

@@ -73,7 +73,7 @@ Paperless consists of the following components:
  for getting the tasks from the webserver and the consumer to the task scheduler. These run in a different
  process (maybe even on different machines!), and therefore, this is necessary.

* Optional: A database server. Paperless supports PostgreSQL, MariaDB and SQLite for storing its data.

Installation

@@ -110,7 +110,7 @@ performs all the steps described in :ref:`setup-docker_hub` automatically.

.. code:: shell-session

    $ bash -c "$(curl -L https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/install-paperless-ngx.sh)"

.. _setup-docker_hub:

@@ -184,6 +184,25 @@ Install Paperless from Docker Hub
   port 8000. Modifying the part before the colon will map requests on another
   port to the webserver running on the default port.

   **Rootless**

   If you want to run Paperless as a rootless container, you will need to do the
   following in your ``docker-compose.yml``:

   - set the ``user`` running the container to map to the ``paperless`` user in the
     container.
     This value (``user_id`` below) should be the same id that ``USERMAP_UID`` and
     ``USERMAP_GID`` are set to in the next step.
     See ``USERMAP_UID`` and ``USERMAP_GID`` :ref:`here <configuration-docker>`.

   Your entry for Paperless should contain something like:

   .. code::

       webserver:
         image: ghcr.io/paperless-ngx/paperless-ngx:latest
         user: <user_id>

5. Modify ``docker-compose.env``, following the comments in the file. The
   most important change is to set ``USERMAP_UID`` and ``USERMAP_GID``
   to the uid and gid of your user on the host system. Use ``id -u`` and

@@ -200,6 +219,20 @@ Install Paperless from Docker Hub
   You can copy any setting from the file ``paperless.conf.example`` and paste it here.
   Have a look at :ref:`configuration` to see what's available.

   .. note::

      You can utilize Docker secrets for some configuration settings by
      appending `_FILE` to some configuration values. This is supported currently
      only by:

      * PAPERLESS_DBUSER
      * PAPERLESS_DBPASS
      * PAPERLESS_SECRET_KEY
      * PAPERLESS_AUTO_LOGIN_USERNAME
      * PAPERLESS_ADMIN_USER
      * PAPERLESS_ADMIN_MAIL
      * PAPERLESS_ADMIN_PASSWORD
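
   A sketch of a secret-backed value in ``docker-compose.yml`` (the secret name and file path are illustrative):

   .. code:: yaml

       services:
         webserver:
           environment:
             PAPERLESS_DBPASS_FILE: /run/secrets/paperless_dbpass
           secrets:
             - paperless_dbpass

       secrets:
         paperless_dbpass:
           file: ./paperless_dbpass.txt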

.. caution::

   Some file systems such as NFS network shares don't support file system

@@ -284,19 +317,22 @@ writing. Windows is not and will never be supported.
* ``python3-pip``
* ``python3-dev``
* ``default-libmysqlclient-dev`` for MariaDB
* ``fonts-liberation`` for generating thumbnails for plain text files
* ``imagemagick`` >= 6 for PDF conversion
* ``gnupg`` for handling encrypted documents
* ``libpq-dev`` for PostgreSQL
* ``libmagic-dev`` for mime type detection
* ``mariadb-client`` for MariaDB compile time
* ``mime-support`` for mime type detection
* ``libzbar0`` for barcode detection
* ``poppler-utils`` for barcode detection

Use this list for your preferred package management:

.. code::

    python3 python3-pip python3-dev imagemagick fonts-liberation gnupg libpq-dev libmagic-dev mime-support libzbar0 poppler-utils

These dependencies are required for OCRmyPDF, which is used for text recognition.

@@ -306,7 +342,7 @@ writing. Windows is not and will never be supported.
* ``qpdf``
* ``liblept5``
* ``libxml2``
* ``pngquant`` (suggested for certain PDF image optimizations)
* ``zlib1g``
* ``tesseract-ocr`` >= 4.0.0 for OCR
* ``tesseract-ocr`` language packs (``tesseract-ocr-eng``, ``tesseract-ocr-deu``, etc)

@@ -328,7 +364,13 @@ writing. Windows is not and will never be supported.
2. Install ``redis`` >= 5.0 and configure it to start automatically.

3. Optional. Install ``postgresql`` and configure a database, user and password for paperless. If you do not wish
   to use PostgreSQL, MariaDB and SQLite are available as well.

   .. note::

      On bare-metal installations using SQLite, ensure the
      `JSON1 extension <https://code.djangoproject.com/wiki/JSON1Extension>`_ is enabled. This is
      usually the case, but not always.
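
   A quick sanity check for the extension (a sketch; it raises an error if JSON1 is missing):

   .. code:: shell-session

       $ python3 -c "import sqlite3; sqlite3.connect(':memory:').execute(\"SELECT json('{}')\"); print('JSON1 OK')"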

4. Get the release archive from `<https://github.com/paperless-ngx/paperless-ngx/releases>`_.
   If you clone the git repo as it is, you also have to compile the front end by yourself.

@@ -338,6 +380,7 @@ writing. Windows is not and will never be supported.
    settings to your needs. Required settings for getting paperless running are:

    * ``PAPERLESS_REDIS`` should point to your redis server, such as redis://localhost:6379.
+   * ``PAPERLESS_DBENGINE`` optional, and should be one of `postgres, mariadb, or sqlite`
    * ``PAPERLESS_DBHOST`` should be the hostname on which your PostgreSQL server is running. Do not configure this
      to use SQLite instead. Also configure port, database name, user and password as necessary.
    * ``PAPERLESS_CONSUMPTION_DIR`` should point to a folder which paperless should watch for documents. You might

@@ -345,6 +388,8 @@ writing. Windows is not and will never be supported.
    paperless stores its data. If you like, you can point both to the same directory.
    * ``PAPERLESS_SECRET_KEY`` should be a random sequence of characters. It's used for authentication. Failure
      to do so allows third parties to forge authentication credentials.
+   * ``PAPERLESS_URL`` if you are behind a reverse proxy. This should point to your domain. Please see
+     :ref:`configuration` for more information.

    Many more adjustments can be made to paperless, especially the OCR part. The following options are recommended
    for everyone:
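
Pulling those required settings together, a minimal environment file for a first run might look like the following sketch; every value is a placeholder to adapt, not a shipped default:

    # Sketch of the required settings listed above (illustrative values).
    PAPERLESS_REDIS=redis://localhost:6379
    PAPERLESS_DBENGINE=postgres              # or: mariadb, sqlite
    PAPERLESS_DBHOST=localhost
    PAPERLESS_CONSUMPTION_DIR=/opt/paperless/consume
    PAPERLESS_SECRET_KEY=replace-with-a-long-random-string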
@@ -477,7 +522,7 @@ Migrating from Paperless-ng
 ===========================

 Paperless-ngx is meant to be a drop-in replacement for Paperless-ng and thus upgrading should be
 trivial for most users, especially when using docker. However, as with any major change, it is
 recommended to take a full backup first. Once you are ready, simply change the docker image to
 point to the new source. E.g. if using Docker Compose, edit ``docker-compose.yml`` and change:

@@ -490,12 +535,12 @@ to

 .. code::

    image: ghcr.io/paperless-ngx/paperless-ngx:latest

 and then run ``docker-compose up -d``, which will pull the new image and recreate the container.
 That's it!

 Users who installed with the bare-metal route should also update their Git clone to point to
 ``https://github.com/paperless-ngx/paperless-ngx``, e.g. using the command
 ``git remote set-url origin https://github.com/paperless-ngx/paperless-ngx`` and then pull the
 latest version.

@@ -509,7 +554,7 @@ how you installed paperless.
 This setup describes how to update an existing paperless Docker installation.
 The important things to keep in mind are as follows:

-* Read the :ref:`changelog <paperless_changelog>` and take note of breaking changes.
+* Read the :doc:`changelog </changelog>` and take note of breaking changes.
 * You should decide if you want to stick with SQLite or want to migrate your database
   to PostgreSQL. See :ref:`setup-sqlite_to_psql` for details on how to move your data from
   SQLite to PostgreSQL. Both work fine with paperless. However, if you already have a
@@ -720,8 +765,6 @@ configuring some options in paperless can help improve performance immensely:
 * If you want to perform OCR on the device, consider using ``PAPERLESS_OCR_CLEAN=none``.
   This will speed up OCR times and use less memory at the expense of slightly worse
   OCR results.
-* Set ``PAPERLESS_OPTIMIZE_THUMBNAILS`` to 'false' if you want faster consumption
-  times. Thumbnails will be about 20% larger.
 * If using docker, consider setting ``PAPERLESS_WEBSERVER_WORKERS`` to
   1. This will save some memory.

@@ -782,4 +825,6 @@ the following configuration is required for paperless to operate:
     }
   }

+The ``PAPERLESS_URL`` configuration variable is also required when using a reverse proxy. Please refer to the :ref:`hosting-and-security` docs.
+
 Also read `this <https://channels.readthedocs.io/en/stable/deploying.html#nginx-supervisor-ubuntu>`__, towards the end of the section.
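
As a sketch of the reverse-proxy case mentioned above (the domain is a placeholder):

    # Illustrative: with PAPERLESS_URL set, the CSRF and allowed-host settings
    # referenced in the docs can be derived from it.
    PAPERLESS_URL=https://paperless.example.com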

@@ -119,17 +119,18 @@ You may experience these errors when using the optional TIKA integration:
 Gotenberg is a server that converts Office documents into PDF documents and has a default timeout of 30 seconds.
 When conversion takes longer, Gotenberg raises this error.

-You can increase the timeout by configuring an environment variable for Gotenberg (see also `here <https://gotenberg.dev/docs/modules/api#properties>`__).
+You can increase the timeout by configuring a command flag for Gotenberg (see also `here <https://gotenberg.dev/docs/modules/api#properties>`__).
 If using docker-compose, this is achieved by the following configuration change in the ``docker-compose.yml`` file:

 .. code:: yaml

     gotenberg:
-        image: gotenberg/gotenberg:7
+        image: gotenberg/gotenberg:7.4
         restart: unless-stopped
-        environment:
-            CHROMIUM_DISABLE_ROUTES: 1
-            API_PROCESS_TIMEOUT: 60
+        command:
+            - "gotenberg"
+            - "--chromium-disable-routes=true"
+            - "--api-timeout=60"

 Permission denied errors in the consumption directory
 #####################################################
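
After changing the compose file it is worth confirming that the flags actually reached the container. A possible check, assuming the service is named ``gotenberg`` as above:

    # Recreate the service, then print the command line the container runs with.
    docker-compose up -d gotenberg
    docker inspect --format '{{json .Config.Cmd}}' "$(docker-compose ps -q gotenberg)"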
@@ -234,3 +235,85 @@ You might find messages like these in your log files:
 This indicates that paperless failed to read PDF metadata from one of your documents. This happens when you
 open the affected documents in paperless for editing. Paperless will continue to work, and will simply not
 show the invalid metadata.
+
+Consumer fails with a FileNotFoundError
+#######################################
+
+You might find messages like these in your log files:
+
+.. code::
+
+    [ERROR] [paperless.consumer] Error while consuming document SCN_0001.pdf: FileNotFoundError: [Errno 2] No such file or directory: '/tmp/ocrmypdf.io.yhk3zbv0/origin.pdf'
+    Traceback (most recent call last):
+      File "/app/paperless/src/paperless_tesseract/parsers.py", line 261, in parse
+        ocrmypdf.ocr(**args)
+      File "/usr/local/lib/python3.8/dist-packages/ocrmypdf/api.py", line 337, in ocr
+        return run_pipeline(options=options, plugin_manager=plugin_manager, api=True)
+      File "/usr/local/lib/python3.8/dist-packages/ocrmypdf/_sync.py", line 385, in run_pipeline
+        exec_concurrent(context, executor)
+      File "/usr/local/lib/python3.8/dist-packages/ocrmypdf/_sync.py", line 302, in exec_concurrent
+        pdf = post_process(pdf, context, executor)
+      File "/usr/local/lib/python3.8/dist-packages/ocrmypdf/_sync.py", line 235, in post_process
+        pdf_out = metadata_fixup(pdf_out, context)
+      File "/usr/local/lib/python3.8/dist-packages/ocrmypdf/_pipeline.py", line 798, in metadata_fixup
+        with pikepdf.open(context.origin) as original, pikepdf.open(working_file) as pdf:
+      File "/usr/local/lib/python3.8/dist-packages/pikepdf/_methods.py", line 923, in open
+        pdf = Pdf._open(
+    FileNotFoundError: [Errno 2] No such file or directory: '/tmp/ocrmypdf.io.yhk3zbv0/origin.pdf'
+
+This probably indicates paperless tried to consume the same file twice. This can happen for a number of reasons,
+depending on how documents are placed into the consume folder. If paperless is using inotify (the default) to
+check for documents, try adjusting the :ref:`inotify configuration <configuration-inotify>`. If polling is enabled,
+try adjusting the :ref:`polling configuration <configuration-polling>`.
+
+Consumer fails waiting for file to remain unmodified.
+#####################################################
+
+You might find messages like these in your log files:
+
+.. code::
+
+    [ERROR] [paperless.management.consumer] Timeout while waiting on file /usr/src/paperless/src/../consume/SCN_0001.pdf to remain unmodified.
+
+This indicates paperless timed out while waiting for the file to be completely written to the consume folder.
+Adjusting :ref:`polling configuration <configuration-polling>` values should resolve the issue.
+
+.. note::
+
+    The user will need to manually move the file out of the consume folder and
+    back in, for the initial failing file to be consumed.
+
+Consumer fails reporting "OS reports file as busy still".
+#########################################################
+
+You might find messages like these in your log files:
+
+.. code::
+
+    [WARNING] [paperless.management.consumer] Not consuming file /usr/src/paperless/src/../consume/SCN_0001.pdf: OS reports file as busy still
+
+This indicates paperless was unable to open the file, as the OS reported the file as still being in use. To prevent a
+crash, paperless did not try to consume the file. If paperless is using inotify (the default) to
+check for documents, try adjusting the :ref:`inotify configuration <configuration-inotify>`. If polling is enabled,
+try adjusting the :ref:`polling configuration <configuration-polling>`.
+
+.. note::
+
+    The user will need to manually move the file out of the consume folder and
+    back in, for the initial failing file to be consumed.
+
+Log reports "Creating PaperlessTask failed".
+#########################################################
+
+You might find messages like these in your log files:
+
+.. code::
+
+    [ERROR] [paperless.management.consumer] Creating PaperlessTask failed: db locked
+
+You are likely using an sqlite based installation, with an increased number of workers and are running into sqlite's concurrency limitations.
+Uploading or consuming multiple files at once results in many workers attempting to access the database simultaneously.
+
+Consider changing to the PostgreSQL database if you will be processing many documents at once often. Otherwise,
+try tweaking the ``PAPERLESS_DB_TIMEOUT`` setting to allow more time for the database to unlock. This may have
+minor performance implications.
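
As a sketch of that workaround, in the environment file (the value is illustrative):

    # Illustrative: give SQLite more time to acquire the lock before failing.
    PAPERLESS_DB_TIMEOUT=30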

@@ -62,7 +62,7 @@ your documents:

 1. OCR the document, if it has no text. Digital documents usually have text,
    and this step will be skipped for those documents.
-2. Paperless will create an archiveable PDF/A document from your document.
+2. Paperless will create an archivable PDF/A document from your document.
    If this document is coming from your scanner, it will have embedded selectable text.
 3. Paperless performs automatic matching of tags, correspondents and types on the
    document before storing it in the database.

@@ -161,6 +161,9 @@ These are as follows:
   will not consume flagged mails.
 * **Move to folder:** Moves consumed mails out of the way so that paperless won't
   consume them again.
+* **Add custom Tag:** Adds a custom tag to mails with consumed documents (the IMAP
+  standard calls these "keywords"). Paperless will not consume mails already tagged.
+  Not all mail servers support this feature!

 .. caution::

@@ -180,6 +183,15 @@ These are as follows:
   automatically or manually and tell paperless to move them to yet another folder
   after consumption. It's up to you.

+.. note::
+
+   When defining a mail rule with a folder, you may need to try different characters to
+   define how the sub-folders are separated. Common values include ".", "/" or "|", but
+   this varies by the mail server. Check the documentation for your mail server. In the
+   event of an error fetching mail from a certain folder, check the Paperless logs. When
+   a folder is not located, Paperless will attempt to list all folders found in the account
+   to the Paperless logs.
+
 .. note::

    Paperless will process the rules in the order defined in the admin page.

@@ -1,9 +1,17 @@
 import os

-bind = f'0.0.0.0:{os.getenv("PAPERLESS_PORT", 8000)}'
-workers = int(os.getenv("PAPERLESS_WEBSERVER_WORKERS", 2))
+# See https://docs.gunicorn.org/en/stable/settings.html for
+# explanations of settings
+
+bind = f'{os.getenv("PAPERLESS_BIND_ADDR", "[::]")}:{os.getenv("PAPERLESS_PORT", 8000)}'
+
+workers = int(os.getenv("PAPERLESS_WEBSERVER_WORKERS", 1))
 worker_class = "paperless.workers.ConfigurableWorker"
 timeout = 120
+preload_app = True
+
+# https://docs.gunicorn.org/en/stable/faq.html#blocking-os-fchmod
+worker_tmp_dir = "/dev/shm"


 def pre_fork(server, worker):

@@ -24,7 +32,7 @@ def worker_int(worker):
     ## get traceback info
     import threading, sys, traceback

-    id2name = dict([(th.ident, th.name) for th in threading.enumerate()])
+    id2name = {th.ident: th.name for th in threading.enumerate()}
     code = []
     for threadId, stack in sys._current_frames().items():
         code.append("\n# Thread: %s(%d)" % (id2name.get(threadId, ""), threadId))
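
A sketch of how these settings are driven from the environment; the ASGI module path below is an assumption for illustration, not taken from this diff:

    # Illustrative invocation; paperless.asgi:application is an assumed entry point.
    export PAPERLESS_BIND_ADDR=0.0.0.0
    export PAPERLESS_PORT=8000
    export PAPERLESS_WEBSERVER_WORKERS=2
    gunicorn -c gunicorn.conf.py paperless.asgi:application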

@@ -1,18 +1,18 @@
-#!/bin/bash
+#!/usr/bin/env bash

 ask() {
     while true ; do
         if [[ -z $3 ]] ; then
-            read -p "$1 [$2]: " result
+            read -r -p "$1 [$2]: " result
         else
-            read -p "$1 ($3) [$2]: " result
+            read -r -p "$1 ($3) [$2]: " result
         fi
         if [[ -z $result ]]; then
             ask_result=$2
             return
         fi
         array=$3
-        if [[ -z $3 || " ${array[@]} " =~ " ${result} " ]]; then
+        if [[ -z $3 || " ${array[*]} " =~ ${result} ]]; then
             ask_result=$result
             return
         else

@@ -24,7 +24,7 @@ ask() {
 ask_docker_folder() {
     while true ; do

-        read -p "$1 [$2]: " result
+        read -r -p "$1 [$2]: " result

         if [[ -z $result ]]; then
             ask_result=$2

@@ -47,25 +47,29 @@ if [[ $(id -u) == "0" ]] ; then
     exit 1
 fi

-if [[ -z $(which wget) ]] ; then
+if ! command -v wget &> /dev/null ; then
     echo "wget executable not found. Is wget installed?"
     exit 1
 fi

-if [[ -z $(which docker) ]] ; then
+if ! command -v docker &> /dev/null ; then
     echo "docker executable not found. Is docker installed?"
     exit 1
 fi

-if [[ -z $(which docker-compose) ]] ; then
-    echo "docker-compose executable not found. Is docker-compose installed?"
-    exit 1
+DOCKER_COMPOSE_CMD="docker-compose"
+if ! command -v ${DOCKER_COMPOSE_CMD} ; then
+    if docker compose version &> /dev/null ; then
+        DOCKER_COMPOSE_CMD="docker compose"
+    else
+        echo "docker-compose executable not found. Is docker-compose installed?"
+        exit 1
+    fi
 fi

 # Check if user has permissions to run Docker by trying to get the status of Docker (docker status).
 # If this fails, the user probably does not have permissions for Docker.
-docker stats --no-stream 2>/dev/null 1>&2
-if [ $? -ne 0 ] ; then
+if ! docker stats --no-stream &> /dev/null ; then
     echo ""
     echo "WARN: It looks like the current user does not have Docker permissions."
     echo "WARN: Use 'sudo usermod -aG docker $USER' to assign Docker permissions to the user."

@@ -88,6 +92,14 @@ echo ""
 echo "1. Application configuration"
 echo "============================"

+echo ""
+echo "The URL paperless will be available at. This is required if the"
+echo "installation will be accessible via the web, otherwise can be left blank."
+echo ""
+
+ask "URL" ""
+URL=$ask_result
+
 echo ""
 echo "The port on which the paperless webserver will listen for incoming"
 echo "connections."

@@ -106,12 +118,12 @@ ask "Current time zone" "$default_time_zone"
 TIME_ZONE=$ask_result

 echo ""
-echo "Database backend: PostgreSQL and SQLite are available. Use PostgreSQL"
+echo "Database backend: PostgreSQL, MariaDB, and SQLite are available. Use PostgreSQL"
 echo "if unsure. If you're running on a low-power device such as Raspberry"
 echo "Pi, use SQLite to save resources."
 echo ""

-ask "Database backend" "postgres" "postgres sqlite"
+ask "Database backend" "postgres" "postgres sqlite mariadb"
 DATABASE_BACKEND=$ask_result

 echo ""

@@ -162,7 +174,7 @@ ask "Target folder" "$(pwd)/paperless-ngx"
 TARGET_FOLDER=$ask_result

 echo ""
-echo "The consume folder is where paperles will search for new documents."
+echo "The consume folder is where paperless will search for new documents."
 echo "Point this to a folder where your scanner is able to put your scanned"
 echo "documents."
 echo ""

@@ -202,9 +214,9 @@ echo ""
 ask_docker_folder "Data folder" ""
 DATA_FOLDER=$ask_result

-if [[ "$DATABASE_BACKEND" == "postgres" ]] ; then
+if [[ "$DATABASE_BACKEND" == "postgres" || "$DATABASE_BACKEND" == "mariadb" ]] ; then
     echo ""
-    echo "The database folder, where postgres stores its data."
+    echo "The database folder, where your database stores its data."
     echo "Leave empty to have this managed by docker."
     echo ""
     echo "CAUTION: If specified, you must specify an absolute path starting with /"

@@ -212,7 +224,7 @@ if [[ "$DATABASE_BACKEND" == "postgres" ]] ; then
     echo ""

     ask_docker_folder "Database folder" ""
-    POSTGRES_FOLDER=$ask_result
+    DATABASE_FOLDER=$ask_result
 fi

 echo ""

@@ -228,7 +240,7 @@ ask "Paperless username" "$(whoami)"
 USERNAME=$ask_result

 while true; do
-    read -sp "Paperless password: " PASSWORD
+    read -r -sp "Paperless password: " PASSWORD
     echo ""

     if [[ -z $PASSWORD ]] ; then

@@ -236,7 +248,7 @@ while true; do
         continue
     fi

-    read -sp "Paperless password (again): " PASSWORD_REPEAT
+    read -r -sp "Paperless password (again): " PASSWORD_REPEAT
     echo ""

     if [[ ! "$PASSWORD" == "$PASSWORD_REPEAT" ]] ; then

@@ -266,14 +278,16 @@ if [[ -z $DATA_FOLDER ]] ; then
 else
     echo "Data folder: $DATA_FOLDER"
 fi
-if [[ "$DATABASE_BACKEND" == "postgres" ]] ; then
-    if [[ -z $POSTGRES_FOLDER ]] ; then
-        echo "Database (postgres) folder: Managed by docker"
+if [[ "$DATABASE_BACKEND" == "postgres" || "$DATABASE_BACKEND" == "mariadb" ]] ; then
+    if [[ -z $DATABASE_FOLDER ]] ; then
+        echo "Database folder: Managed by docker"
     else
-        echo "Database (postgres) folder: $POSTGRES_FOLDER"
+        echo "Database folder: $DATABASE_FOLDER"
     fi
 fi

 echo ""
+echo "URL: $URL"
 echo "Port: $PORT"
 echo "Database: $DATABASE_BACKEND"
 echo "Tika enabled: $TIKA_ENABLED"

@@ -285,7 +299,7 @@ echo "Paperless username: $USERNAME"
 echo "Paperless email: $EMAIL"

 echo ""
-read -p "Press any key to install."
+read -r -p "Press any key to install."

 echo ""
 echo "Installing paperless..."

@@ -301,14 +315,20 @@ if [[ $TIKA_ENABLED == "yes" ]] ; then
     DOCKER_COMPOSE_VERSION="$DOCKER_COMPOSE_VERSION-tika"
 fi

-wget "https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/master/docker/compose/docker-compose.$DOCKER_COMPOSE_VERSION.yml" -O docker-compose.yml
-wget "https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/master/docker/compose/.env" -O .env
+wget "https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/docker/compose/docker-compose.$DOCKER_COMPOSE_VERSION.yml" -O docker-compose.yml
+wget "https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/docker/compose/.env" -O .env

-SECRET_KEY=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 64 | head -n 1)
+SECRET_KEY=$(tr -dc 'a-zA-Z0-9' < /dev/urandom | fold -w 64 | head -n 1)

-DEFAULT_LANGUAGES="deu eng fra ita spa"
+DEFAULT_LANGUAGES=("deu eng fra ita spa")
+
+_split_langs="${OCR_LANGUAGE//+/ }"
+read -r -a OCR_LANGUAGES_ARRAY <<< "${_split_langs}"

 {
+    if [[ ! $URL == "" ]] ; then
+        echo "PAPERLESS_URL=$URL"
+    fi
     if [[ ! $USERMAP_UID == "1000" ]] ; then
         echo "USERMAP_UID=$USERMAP_UID"
     fi

@@ -318,8 +338,8 @@ DEFAULT_LANGUAGES="deu eng fra ita spa"
     echo "PAPERLESS_TIME_ZONE=$TIME_ZONE"
     echo "PAPERLESS_OCR_LANGUAGE=$OCR_LANGUAGE"
     echo "PAPERLESS_SECRET_KEY=$SECRET_KEY"
-    if [[ ! " ${DEFAULT_LANGUAGES[@]} " =~ " ${OCR_LANGUAGE} " ]] ; then
-        echo "PAPERLESS_OCR_LANGUAGES=$OCR_LANGUAGE"
+    if [[ ! ${DEFAULT_LANGUAGES[*]} =~ ${OCR_LANGUAGES_ARRAY[*]} ]] ; then
+        echo "PAPERLESS_OCR_LANGUAGES=${OCR_LANGUAGES_ARRAY[*]}"
     fi
 } > docker-compose.env

@@ -329,18 +349,38 @@ sed -i "s#- \./consume:/usr/src/paperless/consume#- $CONSUME_FOLDER:/usr/src/pap

 if [[ -n $MEDIA_FOLDER ]] ; then
     sed -i "s#- media:/usr/src/paperless/media#- $MEDIA_FOLDER:/usr/src/paperless/media#g" docker-compose.yml
+    sed -i "/^\s*media:/d" docker-compose.yml
 fi

 if [[ -n $DATA_FOLDER ]] ; then
     sed -i "s#- data:/usr/src/paperless/data#- $DATA_FOLDER:/usr/src/paperless/data#g" docker-compose.yml
+    sed -i "/^\s*data:/d" docker-compose.yml
 fi

-if [[ -n $POSTGRES_FOLDER ]] ; then
-    sed -i "s#- pgdata:/var/lib/postgresql/data#- $POSTGRES_FOLDER:/var/lib/postgresql/data#g" docker-compose.yml
+# If the database folder was provided (not blank), replace the pgdata/dbdata volume with a bind mount
+# of the provided folder
+if [[ -n $DATABASE_FOLDER ]] ; then
+    if [[ "$DATABASE_BACKEND" == "postgres" ]] ; then
+        sed -i "s#- pgdata:/var/lib/postgresql/data#- $DATABASE_FOLDER:/var/lib/postgresql/data#g" docker-compose.yml
+        sed -i "/^\s*pgdata:/d" docker-compose.yml
+    elif [[ "$DATABASE_BACKEND" == "mariadb" ]]; then
+        sed -i "s#- dbdata:/var/lib/mysql#- $DATABASE_FOLDER:/var/lib/mysql#g" docker-compose.yml
+        sed -i "/^\s*dbdata:/d" docker-compose.yml
+    fi
 fi

-docker-compose pull
-
-docker-compose run --rm -e DJANGO_SUPERUSER_PASSWORD="$PASSWORD" webserver createsuperuser --noinput --username "$USERNAME" --email "$EMAIL"
-
-docker-compose up -d
+# remove trailing blank lines from end of file
+sed -i -e :a -e '/^\n*$/{$d;N;};/\n$/ba' docker-compose.yml
+# if last line in file contains "volumes:", remove that line since no more named volumes are left
+l1=$(grep -n '^volumes:' docker-compose.yml | cut -d : -f 1) # get line number containing volume: at begin of line
+l2=$(wc -l < docker-compose.yml) # get total number of lines
+if [ "$l1" -eq "$l2" ] ; then
+    sed -i "/^volumes:/d" docker-compose.yml
+fi
+
+${DOCKER_COMPOSE_CMD} pull
+
+${DOCKER_COMPOSE_CMD} run --rm -e DJANGO_SUPERUSER_PASSWORD="$PASSWORD" webserver createsuperuser --noinput --username "$USERNAME" --email "$EMAIL"
+
+${DOCKER_COMPOSE_CMD} up --detach
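
For orientation, the ``docker-compose.env`` written by the block above might come out like this sketch; every value is illustrative output of the prompts, not a default:

    # Illustrative result of the installer prompts; your values will differ.
    PAPERLESS_URL=https://paperless.example.com
    PAPERLESS_TIME_ZONE=Europe/Berlin
    PAPERLESS_OCR_LANGUAGE=deu
    PAPERLESS_SECRET_KEY=<64 random alphanumeric characters>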

@@ -23,12 +23,15 @@
 #PAPERLESS_MEDIA_ROOT=../media
 #PAPERLESS_STATICDIR=../static
 #PAPERLESS_FILENAME_FORMAT=
+#PAPERLESS_FILENAME_FORMAT_REMOVE_NONE=

 # Security and hosting

 #PAPERLESS_SECRET_KEY=change-me
-#PAPERLESS_ALLOWED_HOSTS=example.com,www.example.com
-#PAPERLESS_CORS_ALLOWED_HOSTS=http://example.com,http://localhost:8000
+#PAPERLESS_URL=https://example.com
+#PAPERLESS_CSRF_TRUSTED_ORIGINS=https://example.com # can be set using PAPERLESS_URL
+#PAPERLESS_ALLOWED_HOSTS=example.com,www.example.com # can be set using PAPERLESS_URL
+#PAPERLESS_CORS_ALLOWED_HOSTS=https://localhost:8080,https://example.com # can be set using PAPERLESS_URL
 #PAPERLESS_FORCE_SCRIPT_NAME=
 #PAPERLESS_STATIC_URL=/static/
 #PAPERLESS_AUTO_LOGIN_USERNAME=

@@ -58,15 +61,18 @@
 #PAPERLESS_CONSUMER_POLLING=10
 #PAPERLESS_CONSUMER_DELETE_DUPLICATES=false
 #PAPERLESS_CONSUMER_RECURSIVE=false
-#PAPERLESS_CONSUMER_IGNORE_PATTERNS=[".DS_STORE/*", "._*", ".stfolder/*"]
+#PAPERLESS_CONSUMER_IGNORE_PATTERNS=[".DS_STORE/*", "._*", ".stfolder/*", ".stversions/*", ".localized/*", "desktop.ini"]
 #PAPERLESS_CONSUMER_SUBDIRS_AS_TAGS=false
-#PAPERLESS_OPTIMIZE_THUMBNAILS=true
+#PAPERLESS_CONSUMER_ENABLE_BARCODES=false
+#PAPERLESS_CONSUMER_BARCODE_STRING=PATCHT
 #PAPERLESS_PRE_CONSUME_SCRIPT=/path/to/an/arbitrary/script.sh
 #PAPERLESS_POST_CONSUME_SCRIPT=/path/to/an/arbitrary/script.sh
 #PAPERLESS_FILENAME_DATE_ORDER=YMD
 #PAPERLESS_FILENAME_PARSE_TRANSFORMS=[]
+#PAPERLESS_NUMBER_OF_SUGGESTED_DATES=5
 #PAPERLESS_THUMBNAIL_FONT_NAME=
 #PAPERLESS_IGNORE_DATES=
+#PAPERLESS_ENABLE_UPDATE_CHECK=

 # Tika settings

@@ -78,4 +84,3 @@
 #PAPERLESS_CONVERT_BINARY=/usr/bin/convert
 #PAPERLESS_GS_BINARY=/usr/bin/gs
-#PAPERLESS_OPTIPNG_BINARY=/usr/bin/optipng
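
The two new barcode options work together: the first enables splitting incoming scans on separator pages, the second sets the barcode value recognized as a separator. An illustrative fragment:

    # Illustrative: split scans on pages carrying the PATCHT separator barcode.
    PAPERLESS_CONSUMER_ENABLE_BARCODES=true
    PAPERLESS_CONSUMER_BARCODE_STRING=PATCHT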

requirements.txt (deleted file, 109 lines)
@@ -1,109 +0,0 @@
#
# These requirements were autogenerated by pipenv
# To regenerate from the project's Pipfile, run:
#
#    pipenv lock --requirements
#

-i https://pypi.python.org/simple
--extra-index-url https://www.piwheels.org/simple
aioredis==1.3.1
arrow==1.2.2; python_version >= '3.6'
asgiref==3.5.0; python_version >= '3.7'
async-timeout==4.0.2; python_version >= '3.6'
attrs==21.4.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
autobahn==22.2.2; python_version >= '3.7'
automat==20.2.0
backports.zoneinfo==0.2.1
blessed==1.19.1; python_version >= '2.7'
certifi==2021.10.8
cffi==1.15.0
channels-redis==3.3.1
channels==3.0.4
chardet==4.0.0; python_version >= '3.1'
charset-normalizer==2.0.12; python_version >= '3'
click==8.0.4; python_version >= '3.6'
coloredlogs==15.0.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
concurrent-log-handler==0.9.20
constantly==15.1.0
cryptography==36.0.1
daphne==3.0.2; python_version >= '3.6'
dateparser==1.1.0
django-cors-headers==3.11.0
django-extensions==3.1.5
django-filter==21.1
django-picklefield==3.0.1; python_version >= '3'
django-q==1.3.9
django==3.2.12
djangorestframework==3.13.1
filelock==3.6.0
fuzzywuzzy[speedup]==0.18.0
gunicorn==20.1.0
h11==0.13.0; python_version >= '3.6'
hiredis==2.0.0; python_version >= '3.6'
httptools==0.3.0
humanfriendly==10.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
hyperlink==21.0.0
idna==3.3; python_version >= '3.5'
imap-tools==0.51.1
img2pdf==0.4.3
importlib-resources==5.4.0; python_version < '3.9'
incremental==21.3.0
inotify-simple==1.3.5; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
inotifyrecursive==0.3.5
joblib==1.1.0; python_version >= '3.6'
langdetect==1.0.9
lxml==4.8.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
msgpack==1.0.3
numpy==1.22.2
ocrmypdf==13.4.0
packaging==21.3; python_version >= '3.6'
pathvalidate==2.5.0
pdfminer.six==20211012
pikepdf==5.0.1
pillow==9.0.1
pluggy==1.0.0; python_version >= '3.6'
portalocker==2.4.0; python_version >= '3'
psycopg2-binary==2.9.3
pyasn1-modules==0.2.8
pyasn1==0.4.8
pycparser==2.21; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
pyopenssl==22.0.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
pyparsing==3.0.7; python_version >= '3.6'
python-dateutil==2.8.2
python-dotenv==0.19.2
python-gnupg==0.4.8
python-levenshtein==0.12.2
python-magic==0.4.25
pytz-deprecation-shim==0.1.0.post0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'
pytz==2021.3
pyyaml==6.0
redis==3.5.3
regex==2022.1.18
reportlab==3.6.7; python_version >= '3.6' and python_version < '4'
requests==2.27.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'
scikit-learn==0.24.0
scipy==1.8.0; python_version < '3.11' and python_version >= '3.8'
service-identity==21.1.0
setuptools==60.9.3; python_version >= '3.7'
six==1.16.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
sqlparse==0.4.2; python_version >= '3.5'
threadpoolctl==3.1.0; python_version >= '3.6'
tika==1.24
tqdm==4.62.3
twisted[tls]==22.1.0; python_full_version >= '3.6.7'
txaio==22.2.1; python_version >= '3.6'
typing-extensions==4.1.1; python_version >= '3.6'
tzdata==2021.5; python_version >= '3.6'
tzlocal==4.1; python_version >= '3.6'
urllib3==1.26.8; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'
uvicorn[standard]==0.17.5
uvloop==0.16.0
watchdog==2.1.6
watchgod==0.7
wcwidth==0.2.5
websockets==10.2
whitenoise==6.0.0
whoosh==2.7.4
zipp==3.7.0; python_version < '3.10'
zope.interface==5.4.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
@@ -1,21 +1,16 @@
 #!/usr/bin/env bash

-DOCUMENT_ID=${1}
-DOCUMENT_FILE_NAME=${2}
-DOCUMENT_SOURCE_PATH=${3}
-DOCUMENT_THUMBNAIL_PATH=${4}
-DOCUMENT_DOWNLOAD_URL=${5}
-DOCUMENT_THUMBNAIL_URL=${6}
-DOCUMENT_CORRESPONDENT=${7}
-DOCUMENT_TAGS=${8}
-
 echo "

 A document with an id of ${DOCUMENT_ID} was just consumed. I know the
 following additional information about it:

 * Generated File Name: ${DOCUMENT_FILE_NAME}
+* Archive Path: ${DOCUMENT_ARCHIVE_PATH}
 * Source Path: ${DOCUMENT_SOURCE_PATH}
+* Created: ${DOCUMENT_CREATED}
+* Added: ${DOCUMENT_ADDED}
+* Modified: ${DOCUMENT_MODIFIED}
 * Thumbnail Path: ${DOCUMENT_THUMBNAIL_PATH}
 * Download URL: ${DOCUMENT_DOWNLOAD_URL}
 * Thumbnail URL: ${DOCUMENT_THUMBNAIL_URL}
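
To wire a script like this up, the example configuration above already carries the relevant knob; the path below is a placeholder:

    # Illustrative: run this script after every successfully consumed document.
    PAPERLESS_POST_CONSUME_SCRIPT=/path/to/post-consumption-example.sh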

@@ -1,4 +1,6 @@
+#!/usr/bin/env bash
+
 docker run -p 5432:5432 -e POSTGRES_PASSWORD=password -v paperless_pgdata:/var/lib/postgresql/data -d postgres:13
 docker run -d -p 6379:6379 redis:latest
-docker run -p 3000:3000 -d gotenberg/gotenberg:7
+docker run -p 3000:3000 -d gotenberg/gotenberg:7.4
-docker run -p 9998:9998 -d apache/tika
+docker run -p 9998:9998 -d ghcr.io/paperless-ngx/tika:latest

src-ui/.gitignore (vendored, 4 lines added)
@@ -45,3 +45,7 @@ testem.log
 # System Files
 .DS_Store
 Thumbs.db
+
+# Cypress
+cypress/videos/**/*
+cypress/screenshots/**/*

@@ -16,6 +16,7 @@
   "i18n": {
     "sourceLocale": "en-US",
     "locales": {
+      "be-BY": "src/locale/messages.be_BY.xlf",
       "cs-CZ": "src/locale/messages.cs_CZ.xlf",
       "da-DK": "src/locale/messages.da_DK.xlf",
       "de-DE": "src/locale/messages.de_DE.xlf",

@@ -30,8 +31,12 @@
       "pt-PT": "src/locale/messages.pt_PT.xlf",
       "ro-RO": "src/locale/messages.ro_RO.xlf",
       "ru-RU": "src/locale/messages.ru_RU.xlf",
-      "sv-SE": "src/locale/messages.sv_SE.xlf"
-    }
+      "sl-SI": "src/locale/messages.sl_SI.xlf",
+      "sr-CS": "src/locale/messages.sr_CS.xlf",
+      "sv-SE": "src/locale/messages.sv_SE.xlf",
+      "tr-TR": "src/locale/messages.tr_TR.xlf",
+      "zh-CN": "src/locale/messages.zh_CN.xlf"
+    }
   },
   "architect": {
     "build": {

@@ -121,12 +126,9 @@
         }
       },
       "test": {
-        "builder": "@angular-devkit/build-angular:karma",
+        "builder": "@angular-builders/jest:run",
         "options": {
-          "main": "src/test.ts",
-          "polyfills": "src/polyfills.ts",
           "tsConfig": "tsconfig.spec.json",
-          "karmaConfig": "karma.conf.js",
           "assets": [
             "src/favicon.ico",
             "src/apple-touch-icon.png",

@@ -140,9 +142,21 @@
         }
       },
       "e2e": {
-        "builder": "@angular-devkit/build-angular:protractor",
+        "builder": "@cypress/schematic:cypress",
+        "options": {
+          "devServerTarget": "paperless-ui:serve",
+          "watch": true,
+          "headless": false
+        },
+        "configurations": {
+          "production": {
+            "devServerTarget": "paperless-ui:serve:production"
+          }
+        }
+      },
+      "cypress-run": {
+        "builder": "@cypress/schematic:cypress",
         "options": {
-          "protractorConfig": "e2e/protractor.conf.js",
           "devServerTarget": "paperless-ui:serve"
         },
         "configurations": {

@@ -150,6 +164,13 @@
           "devServerTarget": "paperless-ui:serve:production"
         }
       }
+      },
+      "cypress-open": {
+        "builder": "@cypress/schematic:cypress",
+        "options": {
+          "watch": true,
+          "headless": false
+        }
       }
     }
   }

src-ui/cypress.config.ts (new file, 13 lines)
@@ -0,0 +1,13 @@
import { defineConfig } from 'cypress'

export default defineConfig({
  videosFolder: 'cypress/videos',
  screenshotsFolder: 'cypress/screenshots',
  fixturesFolder: 'cypress/fixtures',
  e2e: {
    setupNodeEvents(on, config) {
      return require('./cypress/plugins/index.ts')(on, config)
    },
    baseUrl: 'http://localhost:4200',
  },
})
src-ui/cypress/e2e/documents/document-detail.cy.ts (new file, 94 lines)
@@ -0,0 +1,94 @@
describe('document-detail', () => {
  beforeEach(() => {
    // also uses global fixtures from cypress/support/e2e.ts

    this.modifiedDocuments = []

    cy.fixture('documents/documents.json').then((documentsJson) => {
      cy.intercept('GET', 'http://localhost:8000/api/documents/1/', (req) => {
        let response = { ...documentsJson }
        response = response.results.find((d) => d.id == 1)
        req.reply(response)
      })
    })

    cy.intercept('PUT', 'http://localhost:8000/api/documents/1/', (req) => {
      this.modifiedDocuments.push(req.body) // store this for later
      req.reply({ result: 'OK' })
    }).as('saveDoc')

    cy.fixture('documents/1/comments.json').then((commentsJson) => {
      cy.intercept(
        'GET',
        'http://localhost:8000/api/documents/1/comments/',
        (req) => {
          req.reply(commentsJson.filter((c) => c.id != 10)) // 3
        }
      )

      cy.intercept(
        'DELETE',
        'http://localhost:8000/api/documents/1/comments/?id=9',
        (req) => {
          req.reply(commentsJson.filter((c) => c.id != 9 && c.id != 10)) // 2
        }
      )

      cy.intercept(
        'POST',
        'http://localhost:8000/api/documents/1/comments/',
        (req) => {
          req.reply(commentsJson) // 4
        }
      )
    })

    cy.viewport(1024, 1024)
    cy.visit('/documents/1/')
  })

  it('should activate / deactivate save button when changes are saved', () => {
    cy.contains('button', 'Save').should('be.disabled')
    cy.get('app-input-text[formcontrolname="title"]')
      .type(' additional')
      .wait(1500) // this delay is for frontend debounce
    cy.contains('button', 'Save').should('not.be.disabled')
  })

  it('should warn on unsaved changes', () => {
    cy.get('app-input-text[formcontrolname="title"]')
      .type(' additional')
      .wait(1500) // this delay is for frontend debounce
    cy.get('button[title="Close"]').click()
    cy.contains('You have unsaved changes')
    cy.contains('button', 'Cancel').click().wait(150)
    cy.contains('button', 'Save').click().wait('@saveDoc').wait(2000) // navigates away after saving
    cy.contains('You have unsaved changes').should('not.exist')
  })

  it('should show a list of comments', () => {
    cy.wait(1000).get('a').contains('Comments').click().wait(1000)
    cy.get('app-document-comments').find('.card').its('length').should('eq', 3)
  })

  it('should support comment deletion', () => {
    cy.wait(1000).get('a').contains('Comments').click().wait(1000)
    cy.get('app-document-comments')
      .find('.card')
      .first()
      .find('button')
      .click({ force: true })
      .wait(500)
    cy.get('app-document-comments').find('.card').its('length').should('eq', 2)
  })

  it('should support comment insertion', () => {
    cy.wait(1000).get('a').contains('Comments').click().wait(1000)
    cy.get('app-document-comments')
      .find('form textarea')
      .type('Testing new comment')
      .wait(500)
    cy.get('app-document-comments').find('form button').click().wait(1500)
    cy.get('app-document-comments').find('.card').its('length').should('eq', 4)
  })
})
src-ui/cypress/e2e/documents/documents-list.cy.ts (new file, 129 lines)
@@ -0,0 +1,129 @@
describe('documents-list', () => {
  beforeEach(() => {
    // also uses global fixtures from cypress/support/e2e.ts

    this.bulkEdits = {}

    cy.fixture('documents/documents.json').then((documentsJson) => {
      // bulk edit
      cy.intercept(
        'POST',
        'http://localhost:8000/api/documents/bulk_edit/',
        (req) => {
          this.bulkEdits = req.body // store this for later
          req.reply({ result: 'OK' })
        }
      )

      cy.intercept('GET', 'http://localhost:8000/api/documents/*', (req) => {
        let response = { ...documentsJson }

        // bulkEdits was set earlier by bulk_edit intercept
        if (this.bulkEdits.hasOwnProperty('documents')) {
          response.results = response.results.map((d) => {
            if ((this.bulkEdits['documents'] as Array<number>).includes(d.id)) {
              switch (this.bulkEdits['method']) {
                case 'modify_tags':
                  d.tags = (d.tags as Array<number>).concat([
                    this.bulkEdits['parameters']['add_tags'],
                  ])
                  break
                case 'set_correspondent':
                  d.correspondent =
                    this.bulkEdits['parameters']['correspondent']
                  break
                case 'set_document_type':
                  d.document_type =
                    this.bulkEdits['parameters']['document_type']
                  break
              }
            }

            return d
          })
        } else if (req.query.hasOwnProperty('tags__id__all')) {
          // filtering e.g. http://localhost:8000/api/documents/?page=1&page_size=50&ordering=-created&tags__id__all=2
          const tag_id = +req.query['tags__id__all']
          response.results = (documentsJson.results as Array<any>).filter((d) =>
            (d.tags as Array<number>).includes(tag_id)
          )
          response.count = response.results.length
        }

        req.reply(response)
      })
    })

    cy.viewport(1280, 1024)
    cy.visit('/documents')
  })

  it('should show a list of documents rendered as cards with thumbnails', () => {
    cy.contains('3 documents')
    cy.contains('lorem ipsum')
    cy.get('app-document-card-small:first-of-type img')
      .invoke('attr', 'src')
      .should('eq', 'http://localhost:8000/api/documents/1/thumb/')
  })

  it('should change to table "details" view', () => {
    cy.get('div.btn-group input[value="details"]').next().click()
    cy.get('table')
  })

  it('should change to large cards view', () => {
    cy.get('div.btn-group input[value="largeCards"]').next().click()
    cy.get('app-document-card-large')
  })

  it('should filter tags', () => {
    cy.get('app-filter-editor app-filterable-dropdown[title="Tags"]').within(
      () => {
        cy.contains('button', 'Tags').click()
        cy.contains('button', 'Tag 2').click()
      }
    )
    cy.contains('One document')
  })

  it('should apply tags', () => {
    cy.get('app-document-card-small:first-of-type').click()
    cy.get('app-bulk-editor app-filterable-dropdown[title="Tags"]').within(
      () => {
        cy.contains('button', 'Tags').click()
        cy.contains('button', 'Test Tag').click()
        cy.contains('button', 'Apply').click()
      }
    )
    cy.contains('button', 'Confirm').click()
    cy.get('app-document-card-small:first-of-type').contains('Test Tag')
  })

  it('should apply correspondent', () => {
    cy.get('app-document-card-small:first-of-type').click()
    cy.get(
      'app-bulk-editor app-filterable-dropdown[title="Correspondent"]'
    ).within(() => {
      cy.contains('button', 'Correspondent').click()
      cy.contains('button', 'ABC Test Correspondent').click()
      cy.contains('button', 'Apply').click()
    })
    cy.contains('button', 'Confirm').click()
    cy.get('app-document-card-small:first-of-type').contains(
      'ABC Test Correspondent'
    )
  })

  it('should apply document type', () => {
    cy.get('app-document-card-small:first-of-type').click()
    cy.get(
      'app-bulk-editor app-filterable-dropdown[title="Document type"]'
    ).within(() => {
      cy.contains('button', 'Document type').click()
      cy.contains('button', 'Test Doc Type').click()
      cy.contains('button', 'Apply').click()
    })
    cy.contains('button', 'Confirm').click()
    cy.get('app-document-card-small:first-of-type').contains('Test Doc Type')
  })
})
331
src-ui/cypress/e2e/documents/query-params.cy.ts
Normal file
331
src-ui/cypress/e2e/documents/query-params.cy.ts
Normal file
@@ -0,0 +1,331 @@
|
|||||||
|
import { PaperlessDocument } from 'src/app/data/paperless-document'
|
||||||
|
|
||||||
|
describe('documents query params', () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
// also uses global fixtures from cypress/support/e2e.ts
|
||||||
|
|
||||||
|
cy.fixture('documents/documents.json').then((documentsJson) => {
|
||||||
|
// mock api filtering
|
||||||
|
cy.intercept('GET', 'http://localhost:8000/api/documents/*', (req) => {
|
||||||
|
let response = { ...documentsJson }
|
||||||
|
|
||||||
|
if (req.query.hasOwnProperty('ordering')) {
|
||||||
|
const sort_field = req.query['ordering'].toString().replace('-', '')
|
||||||
|
const reverse = req.query['ordering'].toString().indexOf('-') !== -1
|
||||||
|
response.results = (
|
||||||
|
documentsJson.results as Array<PaperlessDocument>
|
||||||
|
).sort((docA, docB) => {
  let result = 0
  switch (sort_field) {
    case 'created':
    case 'added':
      result =
        new Date(docA[sort_field]) < new Date(docB[sort_field]) ? -1 : 1
      break
    case 'archive_serial_number':
      result = docA[sort_field] < docB[sort_field] ? -1 : 1
      break
  }
  if (reverse) result = -result
  return result
})
}

if (req.query.hasOwnProperty('tags__id__in')) {
  const tag_ids: Array<number> = req.query['tags__id__in']
    .toString()
    .split(',')
    .map((v) => +v)
  response.results = (
    documentsJson.results as Array<PaperlessDocument>
  ).filter(
    (d) =>
      d.tags.length > 0 &&
      d.tags.filter((t) => tag_ids.includes(t)).length > 0
  )
  response.count = response.results.length
} else if (req.query.hasOwnProperty('tags__id__none')) {
  const tag_ids: Array<number> = req.query['tags__id__none']
    .toString()
    .split(',')
    .map((v) => +v)
  response.results = (
    documentsJson.results as Array<PaperlessDocument>
  ).filter((d) => d.tags.filter((t) => tag_ids.includes(t)).length == 0)
  response.count = response.results.length
} else if (
  req.query.hasOwnProperty('is_tagged') &&
  req.query['is_tagged'] == '0'
) {
  response.results = (
    documentsJson.results as Array<PaperlessDocument>
  ).filter((d) => d.tags.length == 0)
  response.count = response.results.length
}

if (req.query.hasOwnProperty('document_type__id')) {
  const doctype_id = +req.query['document_type__id']
  response.results = (
    documentsJson.results as Array<PaperlessDocument>
  ).filter((d) => d.document_type == doctype_id)
  response.count = response.results.length
} else if (
  req.query.hasOwnProperty('document_type__isnull') &&
  req.query['document_type__isnull'] == '1'
) {
  response.results = (
    documentsJson.results as Array<PaperlessDocument>
  ).filter((d) => d.document_type == undefined)
  response.count = response.results.length
}

if (req.query.hasOwnProperty('correspondent__id')) {
  const correspondent_id = +req.query['correspondent__id']
  response.results = (
    documentsJson.results as Array<PaperlessDocument>
  ).filter((d) => d.correspondent == correspondent_id)
  response.count = response.results.length
} else if (
  req.query.hasOwnProperty('correspondent__isnull') &&
  req.query['correspondent__isnull'] == '1'
) {
  response.results = (
    documentsJson.results as Array<PaperlessDocument>
  ).filter((d) => d.correspondent == undefined)
  response.count = response.results.length
}

if (req.query.hasOwnProperty('storage_path__id')) {
  const storage_path_id = +req.query['storage_path__id']
  response.results = (
    documentsJson.results as Array<PaperlessDocument>
  ).filter((d) => d.storage_path == storage_path_id)
  response.count = response.results.length
} else if (
  req.query.hasOwnProperty('storage_path__isnull') &&
  req.query['storage_path__isnull'] == '1'
) {
  response.results = (
    documentsJson.results as Array<PaperlessDocument>
  ).filter((d) => d.storage_path == undefined)
  response.count = response.results.length
}

if (req.query.hasOwnProperty('created__date__gt')) {
  const date = new Date(req.query['created__date__gt'])
  response.results = (
    documentsJson.results as Array<PaperlessDocument>
  ).filter((d) => new Date(d.created) > date)
  response.count = response.results.length
} else if (req.query.hasOwnProperty('created__date__lt')) {
  const date = new Date(req.query['created__date__lt'])
  response.results = (
    documentsJson.results as Array<PaperlessDocument>
  ).filter((d) => new Date(d.created) < date)
  response.count = response.results.length
}

if (req.query.hasOwnProperty('added__date__gt')) {
  const date = new Date(req.query['added__date__gt'])
  response.results = (
    documentsJson.results as Array<PaperlessDocument>
  ).filter((d) => new Date(d.added) > date)
  response.count = response.results.length
} else if (req.query.hasOwnProperty('added__date__lt')) {
  const date = new Date(req.query['added__date__lt'])
  response.results = (
    documentsJson.results as Array<PaperlessDocument>
  ).filter((d) => new Date(d.added) < date)
  response.count = response.results.length
}

if (req.query.hasOwnProperty('title_content')) {
  const title_content_regexp = new RegExp(
    req.query['title_content'].toString(),
    'i'
  )
  response.results = (
    documentsJson.results as Array<PaperlessDocument>
  ).filter(
    (d) =>
      title_content_regexp.test(d.title) ||
      title_content_regexp.test(d.content)
  )
  response.count = response.results.length
}

if (req.query.hasOwnProperty('archive_serial_number')) {
  const asn = +req.query['archive_serial_number']
  response.results = (
    documentsJson.results as Array<PaperlessDocument>
  ).filter((d) => d.archive_serial_number == asn)
  response.count = response.results.length
} else if (req.query.hasOwnProperty('archive_serial_number__isnull')) {
  // '1' selects documents without an ASN, anything else selects documents with one
  const isnull = req.query['archive_serial_number__isnull'] == '1'
  response.results = (
    documentsJson.results as Array<PaperlessDocument>
  ).filter((d) =>
    isnull
      ? d.archive_serial_number == undefined
      : d.archive_serial_number != undefined
  )
  response.count = response.results.length
} else if (req.query.hasOwnProperty('archive_serial_number__gt')) {
  const asn = +req.query['archive_serial_number__gt']
  response.results = (
    documentsJson.results as Array<PaperlessDocument>
  ).filter(
    (d) => d.archive_serial_number > 0 && d.archive_serial_number > asn
  )
  response.count = response.results.length
} else if (req.query.hasOwnProperty('archive_serial_number__lt')) {
  const asn = +req.query['archive_serial_number__lt']
  response.results = (
    documentsJson.results as Array<PaperlessDocument>
  ).filter(
    (d) => d.archive_serial_number > 0 && d.archive_serial_number < asn
  )
  response.count = response.results.length
}

req.reply(response)
})
})
})

it('should show a list of documents sorted by created', () => {
  cy.visit('/documents?sort=created')
  cy.get('app-document-card-small').first().contains('No latin title')
})

it('should show a list of documents reverse sorted by created', () => {
  cy.visit('/documents?sort=created&reverse=true')
  cy.get('app-document-card-small').first().contains('sit amet')
})

it('should show a list of documents sorted by added', () => {
  cy.visit('/documents?sort=added')
  cy.get('app-document-card-small').first().contains('No latin title')
})

it('should show a list of documents reverse sorted by added', () => {
  cy.visit('/documents?sort=added&reverse=true')
  cy.get('app-document-card-small').first().contains('sit amet')
})

it('should show a list of documents filtered by any tags', () => {
  cy.visit('/documents?sort=created&reverse=true&tags__id__in=2,4,5')
  cy.contains('3 documents')
})

it('should show a list of documents filtered by excluded tags', () => {
  cy.visit('/documents?sort=created&reverse=true&tags__id__none=2,4')
  cy.contains('One document')
})

it('should show a list of documents filtered by no tags', () => {
  cy.visit('/documents?sort=created&reverse=true&is_tagged=0')
  cy.contains('One document')
})

it('should show a list of documents filtered by document type', () => {
  cy.visit('/documents?sort=created&reverse=true&document_type__id=1')
  cy.contains('3 documents')
})

it('should show a list of documents filtered by no document type', () => {
  cy.visit('/documents?sort=created&reverse=true&document_type__isnull=1')
  cy.contains('One document')
})

it('should show a list of documents filtered by correspondent', () => {
  cy.visit('/documents?sort=created&reverse=true&correspondent__id=9')
  cy.contains('2 documents')
})

it('should show a list of documents filtered by no correspondent', () => {
  cy.visit('/documents?sort=created&reverse=true&correspondent__isnull=1')
  cy.contains('2 documents')
})

it('should show a list of documents filtered by storage path', () => {
  cy.visit('/documents?sort=created&reverse=true&storage_path__id=2')
  cy.contains('One document')
})

it('should show a list of documents filtered by no storage path', () => {
  cy.visit('/documents?sort=created&reverse=true&storage_path__isnull=1')
  cy.contains('3 documents')
})

it('should show a list of documents filtered by title or content', () => {
  cy.visit('/documents?sort=created&reverse=true&title_content=lorem')
  cy.contains('2 documents')
})

it('should show a list of documents filtered by asn', () => {
  cy.visit('/documents?sort=created&reverse=true&archive_serial_number=12345')
  cy.contains('One document')
})

it('should show a list of documents filtered by empty asn', () => {
  cy.visit(
    '/documents?sort=created&reverse=true&archive_serial_number__isnull=1'
  )
  cy.contains('2 documents')
})

it('should show a list of documents filtered by non-empty asn', () => {
  cy.visit(
    '/documents?sort=created&reverse=true&archive_serial_number__isnull=0'
  )
  cy.contains('2 documents')
})

it('should show a list of documents filtered by asn greater than', () => {
  cy.visit(
    '/documents?sort=created&reverse=true&archive_serial_number__gt=12346'
  )
  cy.contains('One document')
})

it('should show a list of documents filtered by asn less than', () => {
  cy.visit(
    '/documents?sort=created&reverse=true&archive_serial_number__lt=12346'
  )
  cy.contains('One document')
})

it('should show a list of documents filtered by created date greater than', () => {
  cy.visit(
    '/documents?sort=created&reverse=true&created__date__gt=2022-03-23'
  )
  cy.contains('3 documents')
})

it('should show a list of documents filtered by created date less than', () => {
  cy.visit(
    '/documents?sort=created&reverse=true&created__date__lt=2022-03-23'
  )
  cy.contains('One document')
})

it('should show a list of documents filtered by added date greater than', () => {
  cy.visit('/documents?sort=created&reverse=true&added__date__gt=2022-03-24')
  cy.contains('2 documents')
})

it('should show a list of documents filtered by added date less than', () => {
  cy.visit('/documents?sort=created&reverse=true&added__date__lt=2022-03-24')
  cy.contains('2 documents')
})

it('should show a list of documents filtered by multiple filters', () => {
  cy.visit(
    '/documents?sort=created&reverse=true&document_type__id=1&correspondent__id=9&tags__id__in=4,5'
  )
  cy.contains('2 documents')
})
})
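Every filter branch in the intercept above ends with the same filter-then-recount pair. A minimal sketch of a helper that would collapse that repetition (the `applyFilter` name is hypothetical, not part of this diff):

function applyFilter<T>(
  response: { count: number; results: T[] },
  predicate: (d: T) => boolean
): void {
  // keep count in sync with results, as each branch above does by hand
  response.results = response.results.filter(predicate)
  response.count = response.results.length
}

// e.g. the tags__id__in branch would reduce to:
// applyFilter(response, (d) => d.tags.some((t) => tag_ids.includes(t)))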
src-ui/cypress/e2e/manage/manage.cy.ts  (new file, 25 lines)
@@ -0,0 +1,25 @@
describe('manage', () => {
  // also uses global fixtures from cypress/support/e2e.ts

  it('should show a list of correspondents with bottom pagination as well', () => {
    cy.visit('/correspondents')
    cy.get('tbody').find('tr').its('length').should('eq', 25)
    cy.get('ngb-pagination').its('length').should('eq', 2)
  })

  it('should show a list of tags without bottom pagination', () => {
    cy.visit('/tags')
    cy.get('tbody').find('tr').its('length').should('eq', 8)
    cy.get('ngb-pagination').its('length').should('eq', 1)
  })

  it('should show a list of documents filtered by tag', () => {
    cy.intercept('http://localhost:8000/api/documents/*', (req) => {
      // indexOf returns -1 (truthy) when the substring is absent, so a bare
      // indexOf check would match every request; compare explicitly instead
      if (req.url.indexOf('tags__id__all=4') !== -1)
        req.reply({ count: 3, next: null, previous: null, results: [] })
    })
    cy.visit('/tags')
    cy.get('tbody').find('button:visible').contains('Documents').first().click() // id = 4
    cy.contains('3 documents')
  })
})
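Substring checks against req.url, as corrected above, are fragile. cy.intercept also accepts a route-matcher object with a query field, which makes the tag filter explicit; a sketch of the same stub under that assumption:

cy.intercept(
  { pathname: '/api/documents/', query: { tags__id__all: '4' } },
  { count: 3, next: null, previous: null, results: [] } // same canned reply as above
)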
src-ui/cypress/e2e/settings/settings.cy.ts  (new file, 93 lines)
@@ -0,0 +1,93 @@
describe('settings', () => {
  // a regular function (not an arrow) so `this` is the shared Mocha context
  // that the intercept callbacks below store state on
  beforeEach(function () {
    // also uses global fixtures from cypress/support/e2e.ts

    this.modifiedViews = []

    // mock API methods
    cy.intercept('http://localhost:8000/api/ui_settings/', {
      fixture: 'ui_settings/settings.json',
    }).then(() => {
      cy.fixture('saved_views/savedviews.json').then((savedViewsJson) => {
        // saved views PATCH
        cy.intercept(
          'PATCH',
          'http://localhost:8000/api/saved_views/*',
          (req) => {
            this.modifiedViews.push(req.body) // store this for later
            req.reply({ result: 'OK' })
          }
        )

        cy.intercept(
          'GET',
          'http://localhost:8000/api/saved_views/*',
          (req) => {
            let response = { ...savedViewsJson }
            if (this.modifiedViews.length) {
              response.results = response.results.map((v) => {
                if (this.modifiedViews.find((mv) => mv.id == v.id))
                  v = this.modifiedViews.find((mv) => mv.id == v.id)
                return v
              })
            }

            req.reply(response)
          }
        ).as('savedViews')
      })

      cy.fixture('documents/documents.json').then((documentsJson) => {
        cy.intercept('GET', 'http://localhost:8000/api/documents/1/', (req) => {
          let response = { ...documentsJson }
          response = response.results.find((d) => d.id == 1)
          req.reply(response)
        })
      })
    })

    cy.viewport(1024, 1024)
    cy.visit('/settings')
    cy.wait('@savedViews')
  })

  it('should activate / deactivate save button when settings change and are saved', () => {
    cy.contains('button', 'Save').should('be.disabled')
    cy.contains('Use system settings').click()
    cy.contains('button', 'Save').should('not.be.disabled')
    cy.contains('button', 'Save').click()
    cy.contains('button', 'Save').should('be.disabled')
  })

  it('should warn on unsaved changes', () => {
    cy.contains('Use system settings').click()
    cy.contains('a', 'Dashboard').click()
    cy.contains('You have unsaved changes')
    cy.contains('button', 'Cancel').click()
    cy.contains('button', 'Save').click().wait('@savedViews')
    cy.contains('a', 'Dashboard').click()
    cy.contains('You have unsaved changes').should('not.exist')
  })

  it('should apply appearance changes when set', () => {
    cy.contains('Use system settings').click()
    cy.get('body').should('not.have.class', 'color-scheme-system')
    cy.contains('Enable dark mode').click()
    cy.get('body').should('have.class', 'color-scheme-dark')
  })

  it('should remove saved view from sidebar when unset', () => {
    cy.contains('a', 'Saved views').click()
    cy.get('#show_in_sidebar_1').click()
    cy.contains('button', 'Save').click().wait('@savedViews')
    cy.contains('li', 'Inbox').should('not.exist')
  })

  it('should remove saved view from dashboard when unset', () => {
    cy.contains('a', 'Saved views').click()
    cy.get('#show_on_dashboard_1').click()
    cy.contains('button', 'Save').click().wait('@savedViews')
    cy.visit('/dashboard')
    cy.get('app-saved-view-widget').contains('Inbox').should('not.exist')
  })
})
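The spec above keeps its mutable stub state on the Mocha context (this.modifiedViews), which is why beforeEach must be a regular function. An equivalent sketch that keeps the state in a closure instead, so arrow callbacks stay safe:

beforeEach(() => {
  const modifiedViews: any[] = [] // closure state replaces this.modifiedViews

  cy.intercept('PATCH', 'http://localhost:8000/api/saved_views/*', (req) => {
    modifiedViews.push(req.body)
    req.reply({ result: 'OK' })
  })

  cy.fixture('saved_views/savedviews.json').then((savedViewsJson) => {
    cy.intercept('GET', 'http://localhost:8000/api/saved_views/*', (req) => {
      const response = { ...savedViewsJson }
      // replace each view with its recorded edit, if one exists
      response.results = response.results.map(
        (v: any) => modifiedViews.find((mv) => mv.id == v.id) ?? v
      )
      req.reply(response)
    })
  })
})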
src-ui/cypress/e2e/tasks/tasks.cy.ts  (new file, 60 lines)
@@ -0,0 +1,60 @@
describe('tasks', () => {
  // a regular function (not an arrow) so `this` is the shared Mocha context
  // that dismissedTasks is stored on
  beforeEach(function () {
    this.dismissedTasks = new Set<number>()

    cy.fixture('tasks/tasks.json').then((tasksViewsJson) => {
      // acknowledge tasks POST
      cy.intercept(
        'POST',
        'http://localhost:8000/api/acknowledge_tasks/',
        (req) => {
          req.body['tasks'].forEach((t) => this.dismissedTasks.add(t)) // store this for later
          req.reply({ result: 'OK' })
        }
      )

      cy.intercept('GET', 'http://localhost:8000/api/tasks/', (req) => {
        let response = [...tasksViewsJson]
        if (this.dismissedTasks.size) {
          response = response.filter((t) => {
            return !this.dismissedTasks.has(t.id)
          })
        }

        req.reply(response)
      }).as('tasks')
    })

    cy.visit('/tasks')
    cy.wait('@tasks')
  })

  it('should show a list of dismissable tasks in tabs', () => {
    cy.get('tbody').find('tr:visible').its('length').should('eq', 10) // double because collapsible result tr
    cy.wait(500) // stabilizes the test, for some reason...
    cy.get('tbody')
      .find('button:visible')
      .contains('Dismiss')
      .first()
      .click()
      .wait('@tasks')
      .wait(2000)
      .then(() => {
        cy.get('tbody').find('tr:visible').its('length').should('eq', 8) // double because collapsible result tr
      })
  })

  it('should allow toggling all tasks in list and warn on dismiss', () => {
    cy.get('thead').find('input[type="checkbox"]').first().click()
    cy.get('body').find('button').contains('Dismiss selected').first().click()
    cy.contains('Confirm')
    cy.get('.modal')
      .contains('button', 'Dismiss')
      .click()
      .wait('@tasks')
      .wait(2000)
      .then(() => {
        cy.get('tbody').find('tr:visible').should('not.exist')
      })
  })
})
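The fixed cy.wait(500) / cy.wait(2000) sleeps above trade speed for stability. Since Cypress assertions retry until their timeout, the same checks can usually be written without sleeps; a sketch of the first test under that assumption:

it('should show a list of dismissable tasks in tabs', () => {
  // should('have.length', n) retries until the rows settle, so no fixed sleep is needed
  cy.get('tbody').find('tr:visible').should('have.length', 10)
  cy.get('tbody').find('button:visible').contains('Dismiss').first().click()
  cy.wait('@tasks') // still wait for the stubbed round-trip itself
  cy.get('tbody').find('tr:visible').should('have.length', 8)
})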
src-ui/cypress/fixtures/correspondents/correspondents.json  (new file, 1 line)
@@ -0,0 +1 @@
{"count":27,"next":"http://localhost:8000/api/correspondents/?page=2","previous":null,"results":[{"id":9,"slug":"abc-test-correspondent","name":"ABC Test Correspondent","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":13,"slug":"corresp-10","name":"Corresp 10","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":14,"slug":"corresp-11","name":"Corresp 11","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":15,"slug":"corresp-12","name":"Corresp 12","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":16,"slug":"corresp-13","name":"Corresp 13","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":18,"slug":"corresp-15","name":"Corresp 15","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":19,"slug":"corresp-16","name":"Corresp 16","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":20,"slug":"corresp-17","name":"Corresp 17","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":21,"slug":"corresp-18","name":"Corresp 18","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":22,"slug":"corresp-19","name":"Corresp 19","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":23,"slug":"corresp-20","name":"Corresp 20","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":24,"slug":"corresp-21","name":"Corresp 21","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":25,"slug":"corresp-22","name":"Corresp 22","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":26,"slug":"corresp-23","name":"Corresp 23","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":5,"slug":"corresp-3","name":"Corresp 3","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":6,"slug":"corresp-4","name":"Corresp 4","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":7,"slug":"corresp-5","name":"Corresp 5","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":8,"slug":"corresp-6","name":"Corresp 6","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":10,"slug":"corresp-7","name":"Corresp 7","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":11,"slug":"corresp-8","name":"Corresp 8","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":12,"slug":"corresp-9","name":"Corresp 9","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":17,"slug":"correspondent-14","name":"Correspondent 14","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":2,"slug":"correspondent-2","name":"Correspondent 
2","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":7,"last_correspondence":"2021-01-20T23:37:58.204614Z"},{"id":27,"slug":"michael-shamoon","name":"Michael Shamoon","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":1,"last_correspondence":"2022-03-16T03:48:50.089624Z"},{"id":4,"slug":"newest-correspondent","name":"Newest Correspondent","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":1,"last_correspondence":"2021-02-07T08:00:00Z"}]}
|
src-ui/cypress/fixtures/document_types/doctypes.json  (new file, 1 line)
@@ -0,0 +1 @@
{"count":1,"next":null,"previous":null,"results":[{"id":1,"slug":"test","name":"Test Doc Type","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0}]}
src-ui/cypress/fixtures/documents/1/comments.json  (new file, 46 lines)
@@ -0,0 +1,46 @@
[
  {
    "id": 10,
    "comment": "Testing new comment",
    "created": "2022-08-08T04:24:55.176008Z",
    "user": {
      "id": 1,
      "username": "user2",
      "firstname": "",
      "lastname": ""
    }
  },
  {
    "id": 9,
    "comment": "Testing one more time",
    "created": "2022-02-18T04:24:55.176008Z",
    "user": {
      "id": 2,
      "username": "user1",
      "firstname": "",
      "lastname": ""
    }
  },
  {
    "id": 8,
    "comment": "Another comment",
    "created": "2021-11-08T04:24:47.925042Z",
    "user": {
      "id": 2,
      "username": "user33",
      "firstname": "",
      "lastname": ""
    }
  },
  {
    "id": 7,
    "comment": "Cupcake ipsum dolor sit amet cheesecake candy cookie tiramisu. Donut chocolate chupa chups macaroon brownie halvah pie cheesecake gummies. Sweet chocolate bar candy donut gummi bears bear claw liquorice bonbon shortbread.\n\nDonut chocolate bar candy wafer wafer tiramisu. Gummies chocolate cake muffin toffee carrot cake macaroon. Toffee toffee jelly beans danish lollipop cake.",
    "created": "2021-02-08T02:37:49.724132Z",
    "user": {
      "id": 3,
      "username": "admin",
      "firstname": "",
      "lastname": ""
    }
  }
]
src-ui/cypress/fixtures/documents/1/metadata.json  (new file, 1 line)
@@ -0,0 +1 @@
{"original_checksum":"e959bc7d593245d92685213264e962ba","original_size":963754,"original_mime_type":"application/pdf","media_filename":"2022/lorem-ipsum.pdf","has_archive_version":true,"original_metadata":[],"archive_checksum":"5a1f46a9150bcade978c764b039ce4d0","archive_media_filename":"2022/lorem-ipsum.pdf","archive_size":351160,"archive_metadata":[{"namespace":"http://ns.adobe.com/pdf/1.3/","prefix":"pdf","key":"Producer","value":"pikepdf5.0.1"},{"namespace":"http://ns.adobe.com/xap/1.0/","prefix":"xmp","key":"ModifyDate","value":"2022-03-22T04:53:18+00:00"},{"namespace":"http://ns.adobe.com/xap/1.0/","prefix":"xmp","key":"CreateDate","value":"2022-03-22T18:05:43+00:00"},{"namespace":"http://ns.adobe.com/xap/1.0/","prefix":"xmp","key":"CreatorTool","value":"ocrmypdf13.4.0/TesseractOCR-PDF4.1.1"},{"namespace":"http://ns.adobe.com/xap/1.0/mm/","prefix":"xmpMM","key":"DocumentID","value":"uuid:df27edcf-e34a-11f7-0000-8fa6067a3c04"},{"namespace":"http://purl.org/dc/elements/1.1/","prefix":"dc","key":"format","value":"application/pdf"},{"namespace":"http://purl.org/dc/elements/1.1/","prefix":"dc","key":"title","value":"ScannedDocument"},{"namespace":"http://www.aiim.org/pdfa/ns/id/","prefix":"pdfaid","key":"part","value":"2"},{"namespace":"http://www.aiim.org/pdfa/ns/id/","prefix":"pdfaid","key":"conformance","value":"B"},{"namespace":"http://purl.org/dc/elements/1.1/","prefix":"dc","key":"creator","value":"None"},{"namespace":"http://ns.adobe.com/xap/1.0/","prefix":"xmp","key":"MetadataDate","value":"2022-03-22T21:53:18.882551-07:00"}]}
src-ui/cypress/fixtures/documents/1/suggestions.json  (new file, 1 line)
@@ -0,0 +1 @@
{"correspondents":[],"tags":[3],"document_types":[1]}
src-ui/cypress/fixtures/documents/documents.json  (new file, 73 lines)
@@ -0,0 +1,73 @@
{
  "count": 3,
  "next": null,
  "previous": null,
  "results": [
    {
      "id": 1,
      "correspondent": 9,
      "document_type": 1,
      "storage_path": null,
      "title": "No latin title",
      "content": "Test document PDF \n\nLorem ipsum dolor sit amet, consectetur adipiscing elit. Nulla est purus, ultrices in porttitor \nin, accumsan non quam. Nam consectetur porttitor rhoncus. Curabitur eu est et leo feugiat \nauctor vel quis lorem. Ut et ligula dolor, sit amet consequat lorem. Aliquam porta eros sed \nvelit imperdiet egestas. Maecenas tempus eros ut diam ullamcorper id dictum libero \ntempor. Donec quis augue quis magna condimentum lobortis. Quisque imperdiet ipsum vel \nmagna viverra rutrum. Cras viverra molestie urna, vitae vestibulum turpis varius id. \nVestibulum mollis, arcu iaculis bibendum varius, velit sapien blandit metus, ac posuere lorem \nnulla ac dolor. Maecenas urna elit, tincidunt in dapibus nec, vehicula eu dui. Duis lacinia \nfringilla massa. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur \nridiculus mus. Ut consequat ultricies est, non rhoncus mauris congue porta. Vivamus viverra \nsuscipit felis eget condimentum. Cum sociis natoque penatibus et magnis dis parturient \nmontes, nascetur ridiculus mus. Integer bibendum sagittis ligula, non faucibus nulla volutpat \nvitae. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. \nIn aliquet quam et velit bibendum accumsan. Cum sociis natoque penatibus et magnis dis \nparturient montes, nascetur ridiculus mus. Vestibulum vitae ipsum nec arcu semper \nadipiscing at ac lacus. Praesent id pellentesque orci. Morbi congue viverra nisl nec rhoncus. \nInteger mattis, ipsum a tincidunt commodo, lacus arcu elementum elit, at mollis eros ante ac \nrisus. In volutpat, ante at pretium ultricies, velit magna suscipit enim, aliquet blandit massa \norci nec lorem. Nulla facilisi. Duis eu vehicula arcu. Nulla facilisi. Maecenas pellentesque \nvolutpat felis, quis tristique ligula luctus vel. Sed nec mi eros. Integer augue enim, sollicitudin \nullamcorper mattis eget, aliquam in est. Morbi sollicitudin libero nec augue dignissim ut \nconsectetur dui volutpat. Nulla facilisi. Mauris egestas vestibulum neque cursus tincidunt. \nDonec sit amet pulvinar orci. \nQuisque volutpat pharetra tincidunt. Fusce sapien arcu, molestie eget varius egestas, \nfaucibus ac urna. Sed at nisi in velit egestas aliquam ut a felis. Aenean malesuada iaculis nisl, \nut tempor lacus egestas consequat. Nam nibh lectus, gravida sed egestas ut, feugiat quis \ndolor. Donec eu leo enim, non laoreet ante. Morbi dictum tempor vulputate. Phasellus \nultricies risus vel augue sagittis euismod. Vivamus tincidunt placerat nisi in aliquam. Cras \nquis mi ac nunc pretium aliquam. Aenean elementum erat ac metus commodo rhoncus. \nAliquam nulla augue, porta non sagittis quis, accumsan vitae sem. Phasellus id lectus tortor, \neget pulvinar augue. Etiam eget velit ac purus fringilla blandit. Donec odio odio, sagittis sed \niaculis sed, consectetur eget sem. Lorem ipsum dolor sit amet, consectetur adipiscing elit. \nMaecenas accumsan velit vel turpis rutrum in sodales diam placerat. \nQuisque luctus ullamcorper velit sit amet lobortis. Etiam ligula felis, vulputate quis rhoncus \nnec, fermentum eget odio. Vivamus vel ipsum ac augue sodales mollis euismod nec tellus. \nFusce et augue rutrum nunc semper vehicula vel semper nisl. Nam laoreet euismod quam at \nvarius. Sed aliquet auctor nibh. Curabitur malesuada fermentum lacus vel accumsan. Duis \nornare scelerisque nulla, ac pulvinar ligula tempus sit amet. In placerat nulla ac ante \nscelerisque posuere. Phasellus at ante felis. Sed hendrerit risus a metus posuere rutrum. \nPhasellus eu augue dui. Proin in vestibulum ipsum. Aenean accumsan mollis sapien, ut \neleifend sem blandit at. Vivamus luctus mi eget lorem lobortis pharetra. Phasellus at tortor \nquam, a volutpat purus. Etiam sollicitudin arcu vel elit bibendum et imperdiet risus tincidunt. \nEtiam elit velit, posuere ut pulvinar ac, condimentum eget justo. Fusce a erat velit. Vivamus \nimperdiet ultrices orci in hendrerit.",
      "tags": [
        4
      ],
      "created": "2022-03-22T07:24:18Z",
      "modified": "2022-03-22T07:24:23.264859Z",
      "added": "2022-03-22T07:24:22.922631Z",
      "archive_serial_number": null,
      "original_file_name": "2022-03-22 no latin title.pdf",
      "archived_file_name": "2022-03-22 no latin title.pdf"
    },
    {
      "id": 2,
      "correspondent": null,
      "document_type": null,
      "storage_path": 2,
      "title": "lorem ipsum dolor sit amet",
      "content": "Test document PDF",
      "tags": [],
      "created": "2022-03-23T07:24:18Z",
      "modified": "2022-03-23T07:24:23.264859Z",
      "added": "2022-03-23T07:24:22.922631Z",
      "archive_serial_number": 12345,
      "original_file_name": "2022-03-23 lorem ipsum dolor sit amet.pdf",
      "archived_file_name": "2022-03-23 llorem ipsum dolor sit amet.pdf"
    },
    {
      "id": 3,
      "correspondent": null,
      "document_type": 1,
      "storage_path": null,
      "title": "dolor",
      "content": "Test document PDF",
      "tags": [
        2
      ],
      "created": "2022-03-24T07:24:18Z",
      "modified": "2022-03-24T07:24:23.264859Z",
      "added": "2022-03-24T07:24:22.922631Z",
      "archive_serial_number": null,
      "original_file_name": "2022-03-24 dolor.pdf",
      "archived_file_name": "2022-03-24 dolor.pdf"
    },
    {
      "id": 4,
      "correspondent": 9,
      "document_type": 1,
      "storage_path": null,
      "title": "sit amet",
      "content": "Test document PDF",
      "tags": [
        4, 5
      ],
      "created": "2022-06-01T07:24:18Z",
      "modified": "2022-06-01T07:24:23.264859Z",
      "added": "2022-06-01T07:24:22.922631Z",
      "archive_serial_number": 12347,
      "original_file_name": "2022-06-01 sit amet.pdf",
      "archived_file_name": "2022-06-01 sit amet.pdf"
    }
  ]
}
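The specs above cast this fixture's results to Array<PaperlessDocument>. The shape can be read off the objects here; a hedged reconstruction (nullability is inferred from the fixture, not taken from the actual frontend model):

interface PaperlessDocument {
  id: number
  correspondent: number | null
  document_type: number | null
  storage_path: number | null
  title: string
  content: string
  tags: number[]
  created: string // ISO 8601 timestamps
  modified: string
  added: string
  archive_serial_number: number | null
  original_file_name: string
  archived_file_name: string
}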
src-ui/cypress/fixtures/documents/lorem-ipsum.png  (new binary file, 156 KiB; not shown)
src-ui/cypress/fixtures/remote_version/remote_version.json  (new file, 1 line)
@@ -0,0 +1 @@
{"version":"v1.7.1","update_available":false,"feature_is_set":true}
src-ui/cypress/fixtures/saved_views/savedviews.json  (new file, 1 line)
@@ -0,0 +1 @@
{"count":3,"next":null,"previous":null,"results":[{"id":1,"name":"Inbox","show_on_dashboard":true,"show_in_sidebar":true,"sort_field":"created","sort_reverse":true,"filter_rules":[{"rule_type":6,"value":"18"}]},{"id":2,"name":"Recently Added","show_on_dashboard":true,"show_in_sidebar":false,"sort_field":"created","sort_reverse":true,"filter_rules":[]},{"id":11,"name":"Taxes","show_on_dashboard":false,"show_in_sidebar":true,"sort_field":"created","sort_reverse":true,"filter_rules":[{"rule_type":6,"value":"39"}]}]}
src-ui/cypress/fixtures/storage_paths/storage_paths.json  (new file, 17 lines)
@@ -0,0 +1,17 @@
{
  "count": 1,
  "next": null,
  "previous": null,
  "results": [
    {
      "id": 2,
      "slug": "year-title",
      "name": "Year - Title",
      "path": "{created_year}/{title}",
      "match": "",
      "matching_algorithm": 6,
      "is_insensitive": true,
      "document_count": 1
    }
  ]
}
src-ui/cypress/fixtures/tags/tags.json  (new file, 103 lines)
@@ -0,0 +1,103 @@
{
  "count": 8,
  "next": null,
  "previous": null,
  "results": [
    {
      "id": 4,
      "slug": "another-sample-tag",
      "name": "Another Sample Tag",
      "color": "#a6cee3",
      "text_color": "#000000",
      "match": "",
      "matching_algorithm": 6,
      "is_insensitive": true,
      "is_inbox_tag": false,
      "document_count": 3
    },
    {
      "id": 7,
      "slug": "newone",
      "name": "NewOne",
      "color": "#9e4ad1",
      "text_color": "#ffffff",
      "match": "",
      "matching_algorithm": 1,
      "is_insensitive": true,
      "is_inbox_tag": false,
      "document_count": 2
    },
    {
      "id": 6,
      "slug": "partial-tag",
      "name": "Partial Tag",
      "color": "#72dba7",
      "text_color": "#000000",
      "match": "",
      "matching_algorithm": 1,
      "is_insensitive": true,
      "is_inbox_tag": false,
      "document_count": 1
    },
    {
      "id": 2,
      "slug": "tag-2",
      "name": "Tag 2",
      "color": "#612db7",
      "text_color": "#ffffff",
      "match": "",
      "matching_algorithm": 1,
      "is_insensitive": true,
      "is_inbox_tag": false,
      "document_count": 3
    },
    {
      "id": 3,
      "slug": "tag-3",
      "name": "Tag 3",
      "color": "#b2df8a",
      "text_color": "#000000",
      "match": "",
      "matching_algorithm": 1,
      "is_insensitive": true,
      "is_inbox_tag": false,
      "document_count": 4
    },
    {
      "id": 5,
      "slug": "tagwithpartial",
      "name": "TagWithPartial",
      "color": "#3b2db4",
      "text_color": "#ffffff",
      "match": "",
      "matching_algorithm": 6,
      "is_insensitive": true,
      "is_inbox_tag": false,
      "document_count": 2
    },
    {
      "id": 8,
      "slug": "test-another",
      "name": "Test Another",
      "color": "#3ccea5",
      "text_color": "#000000",
      "match": "",
      "matching_algorithm": 4,
      "is_insensitive": true,
      "is_inbox_tag": false,
      "document_count": 0
    },
    {
      "id": 1,
      "slug": "test-tag",
      "name": "Test Tag",
      "color": "#fb9a99",
      "text_color": "#000000",
      "match": "",
      "matching_algorithm": 1,
      "is_insensitive": true,
      "is_inbox_tag": false,
      "document_count": 4
    }
  ]
}
src-ui/cypress/fixtures/tasks/tasks.json  (new file, 1 line; diff suppressed because one or more lines are too long)
src-ui/cypress/fixtures/ui_settings/settings.json  (new file, 34 lines)
@@ -0,0 +1,34 @@
{
  "user_id": 1,
  "username": "admin",
  "display_name": "Admin",
  "settings": {
    "language": "",
    "bulk_edit": {
      "confirmation_dialogs": true,
      "apply_on_close": false
    },
    "documentListSize": 50,
    "dark_mode": {
      "use_system": true,
      "enabled": "false",
      "thumb_inverted": "true"
    },
    "theme": {
      "color": "#b198e5"
    },
    "document_details": {
      "native_pdf_viewer": false
    },
    "date_display": {
      "date_locale": "",
      "date_format": "mediumDate"
    },
    "notifications": {
      "consumer_new_documents": true,
      "consumer_success": true,
      "consumer_failed": true,
      "consumer_suppress_on_dashboard": true
    }
  }
}
src-ui/cypress/plugins/index.ts  (new file, 3 lines)
@@ -0,0 +1,3 @@
// Plugins enable you to tap into, modify, or extend the internal behavior of Cypress
// For more info, visit https://on.cypress.io/plugins-api
module.exports = (on, config) => {}
src-ui/cypress/support/commands.ts  (new file, 43 lines)
@@ -0,0 +1,43 @@
// ***********************************************
// This example namespace declaration will help
// with Intellisense and code completion in your
// IDE or Text Editor.
// ***********************************************
// declare namespace Cypress {
//   interface Chainable<Subject = any> {
//     customCommand(param: any): typeof customCommand;
//   }
// }
//
// function customCommand(param: any): void {
//   console.warn(param);
// }
//
// NOTE: You can use it like so:
// Cypress.Commands.add('customCommand', customCommand);
//
// ***********************************************
// This example commands.js shows you how to
// create various custom commands and overwrite
// existing commands.
//
// For more comprehensive examples of custom
// commands please read more here:
// https://on.cypress.io/custom-commands
// ***********************************************
//
//
// -- This is a parent command --
// Cypress.Commands.add("login", (email, password) => { ... })
//
//
// -- This is a child command --
// Cypress.Commands.add("drag", { prevSubject: 'element'}, (subject, options) => { ... })
//
//
// -- This is a dual command --
// Cypress.Commands.add("dismiss", { prevSubject: 'optional'}, (subject, options) => { ... })
//
//
// -- This will overwrite an existing command --
// Cypress.Commands.overwrite("visit", (originalFn, url, options) => { ... })
src-ui/cypress/support/e2e.ts  (new file, 43 lines)
@@ -0,0 +1,43 @@
// mock API methods

beforeEach(() => {
  cy.intercept('http://localhost:8000/api/ui_settings/', {
    fixture: 'ui_settings/settings.json',
  })

  cy.intercept('http://localhost:8000/api/remote_version/', {
    fixture: 'remote_version/remote_version.json',
  })

  cy.intercept('http://localhost:8000/api/saved_views/*', {
    fixture: 'saved_views/savedviews.json',
  })

  cy.intercept('http://localhost:8000/api/tags/*', {
    fixture: 'tags/tags.json',
  })

  cy.intercept('http://localhost:8000/api/correspondents/*', {
    fixture: 'correspondents/correspondents.json',
  })

  cy.intercept('http://localhost:8000/api/document_types/*', {
    fixture: 'document_types/doctypes.json',
  })

  cy.intercept('http://localhost:8000/api/storage_paths/*', {
    fixture: 'storage_paths/storage_paths.json',
  })

  cy.intercept('http://localhost:8000/api/documents/1/metadata/', {
    fixture: 'documents/1/metadata.json',
  })

  cy.intercept('http://localhost:8000/api/documents/1/suggestions/', {
    fixture: 'documents/1/suggestions.json',
  })

  cy.intercept('http://localhost:8000/api/documents/1/thumb/', {
    fixture: 'documents/lorem-ipsum.png',
  })
})
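Since Cypress 7, requests are matched against intercepts in reverse order of definition, so a spec's own beforeEach can shadow any of these global stubs by registering a later intercept for the same URL, which is how the settings and tasks specs above override saved_views and tasks. A minimal sketch:

// in a spec file: defined after the global stub above, so it takes precedence
beforeEach(() => {
  cy.intercept('http://localhost:8000/api/tags/*', {
    body: { count: 0, next: null, previous: null, results: [] },
  })
})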
src-ui/cypress/tsconfig.json  (new file, 8 lines)
@@ -0,0 +1,8 @@
{
  "extends": "../tsconfig.json",
  "include": ["**/*.ts"],
  "compilerOptions": {
    "sourceMap": false,
    "types": ["cypress"]
  }
}
Some files were not shown because too many files have changed in this diff.