Compare commits
805 Commits
@@ -1,9 +0,0 @@
-{
-  "qpdf": {
-    "version": "11.3.0"
-  },
-  "jbig2enc": {
-    "version": "0.29",
-    "git_tag": "0.29"
-  }
-}
15
.codecov.yml
@@ -1,3 +1,15 @@
+codecov:
+  require_ci_to_pass: true
+# https://docs.codecov.com/docs/flags#recommended-automatic-flag-management
+# Require each flag to have 1 upload before notification
+flag_management:
+  individual_flags:
+    - name: backend
+      paths:
+        - src/
+    - name: frontend
+      paths:
+        - src-ui/
 # https://docs.codecov.com/docs/pull-request-comments
 # codecov will only comment if coverage changes
 comment:
@@ -8,12 +20,9 @@ coverage:
       default:
         # https://docs.codecov.com/docs/commit-status#threshold
         threshold: 1%
-        # https://docs.codecov.com/docs/commit-status#only_pulls
-        only_pulls: true
     patch:
       default:
         # For the changed lines only, target 75% covered, but
         # allow as low as 50%
         target: 75%
         threshold: 25%
-        only_pulls: true
@@ -1,21 +1,28 @@
+# Tool caches
 **/__pycache__
-/src-ui/.vscode
-/src-ui/node_modules
-/src-ui/dist
+**/.ruff_cache/
+**/.mypy_cache/
+# Virtual environment & similar
+.venv/
+./src-ui/node_modules
+./src-ui/dist
+# IDE folders
+.idea/
+.vscode/
+./src-ui/.vscode
+# VCS
 .git
-/export
-/consume
-/media
-/data
-/docs
-.pytest_cache
-/dist
-/scripts
-/resources
+# Test related
+**/.pytest_cache
 **/tests
 **/*.spec.ts
 **/htmlcov
-/src/.pytest_cache
-.idea
-.venv/
-.vscode/
+# Local folders
+./export
+./consume
+./media
+./data
+./docs
+./dist
+./scripts
+./resources
13
.github/ISSUE_TEMPLATE/bug-report.yml
@@ -6,14 +6,21 @@ body:
   - type: markdown
     attributes:
       value: |
-        Have a question? 👉 [Start a new discussion](https://github.com/paperless-ngx/paperless-ngx/discussions/new) or [ask in chat](https://matrix.to/#/#paperlessngx:matrix.org).
+        ### ⚠️ Please remember: issues are for *bugs*
+        That is, something you believe affects every single user of Paperless-ngx, not just you. If you're not sure, start with one of the other options below.
 
-        Before opening an issue, please double check:
+        Also, note that **Paperless-ngx does not perform OCR itself**, that is handled by other tools. Problems with OCR of specific files should likely be raised 'upstream', see https://github.com/ocrmypdf/OCRmyPDF/issues or https://github.com/tesseract-ocr/tesseract/issues
+  - type: markdown
+    attributes:
+      value: |
+        #### Have a question? 👉 [Start a new discussion](https://github.com/paperless-ngx/paperless-ngx/discussions/new) or [ask in chat](https://matrix.to/#/#paperlessngx:matrix.org).
+
+        #### Before opening an issue, please double check:
 
         - [The troubleshooting documentation](https://docs.paperless-ngx.com/troubleshooting/).
         - [The installation instructions](https://docs.paperless-ngx.com/setup/#installation).
         - [Existing issues and discussions](https://github.com/paperless-ngx/paperless-ngx/search?q=&type=issues).
-        - Disable any customer container initialization scripts, if using any
+        - Disable any customer container initialization scripts, if using
 
         If you encounter issues while installing or configuring Paperless-ngx, please post in the ["Support" section of the discussions](https://github.com/paperless-ngx/paperless-ngx/discussions/new?category=support).
   - type: textarea
7
.github/PULL_REQUEST_TEMPLATE.md
@@ -20,11 +20,16 @@ NOTE: Please check only one box!
 - [ ] Bug fix (non-breaking change which fixes an issue)
 - [ ] New feature (non-breaking change which adds functionality)
 - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
-- [ ] Other (please explain)
+- [ ] Other (please explain):
 
 ## Checklist:
 
+<!--
+NOTE: PRs that do not address the following will not be merged, please do not skip any relevant items.
+-->
+
 - [ ] I have read & agree with the [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/main/CONTRIBUTING.md).
+- [ ] If applicable, I have included testing coverage for new code in this PR, for [backend](https://docs.paperless-ngx.com/development/#testing) and / or [front-end](https://docs.paperless-ngx.com/development/#testing-and-code-style) changes.
 - [ ] If applicable, I have tested my code for new features & regressions on both mobile & desktop devices, using the latest version of major browsers.
 - [ ] If applicable, I have checked that all tests pass, see [documentation](https://docs.paperless-ngx.com/development/#back-end-development).
 - [ ] I have run all `pre-commit` hooks, see [documentation](https://docs.paperless-ngx.com/development/#code-formatting-with-pre-commit-hooks).
42
.github/dependabot.yml
@@ -8,7 +8,7 @@ updates:
     target-branch: "dev"
     # Look for `package.json` and `lock` files in the `/src-ui` directory
     directory: "/src-ui"
-    # Check the npm registry for updates every month
+    open-pull-requests-limit: 10
     schedule:
       interval: "monthly"
     labels:
@@ -17,6 +17,21 @@ updates:
     # Add reviewers
     reviewers:
       - "paperless-ngx/frontend"
+    groups:
+      frontend-angular-dependencies:
+        patterns:
+          - "@angular*"
+          - "@ng-*"
+          - "ngx-*"
+          - "ng2-pdf-viewer"
+      frontend-jest-dependencies:
+        patterns:
+          - "@types/jest"
+          - "jest*"
+      frontend-eslint-dependencies:
+        patterns:
+          - "@typescript-eslint*"
+          - "eslint"
 
   # Enable version updates for Python
   - package-ecosystem: "pip"
@@ -32,8 +47,25 @@ updates:
     # Add reviewers
     reviewers:
       - "paperless-ngx/backend"
+    groups:
+      development:
+        patterns:
+          - "*pytest*"
+          - "black"
+          - "ruff"
+          - "mkdocs-material"
+      django:
+        patterns:
+          - "*django*"
+      major-versions:
+        update-types:
+          - "major"
+      small-changes:
+        update-types:
+          - "minor"
+          - "patch"
 
-  # Enable updates for Github Actions
+  # Enable updates for GitHub Actions
   - package-ecosystem: "github-actions"
     target-branch: "dev"
     directory: "/"
@@ -46,3 +78,9 @@ updates:
     # Add reviewers
     reviewers:
       - "paperless-ngx/ci-cd"
+    groups:
+      actions:
+        update-types:
+          - "major"
+          - "minor"
+          - "patch"
485
.github/scripts/cleanup-tags.py
vendored
@@ -1,485 +0,0 @@
|
|||||||
import json
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
from argparse import ArgumentParser
|
|
||||||
from typing import Dict
|
|
||||||
from typing import Final
|
|
||||||
from typing import Iterator
|
|
||||||
from typing import List
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from common import get_log_level
|
|
||||||
from github import ContainerPackage
|
|
||||||
from github import GithubBranchApi
|
|
||||||
from github import GithubContainerRegistryApi
|
|
||||||
|
|
||||||
logger = logging.getLogger("cleanup-tags")
|
|
||||||
|
|
||||||
|
|
||||||
class ImageProperties:
|
|
||||||
"""
|
|
||||||
Data class wrapping the properties of an entry in the image index
|
|
||||||
manifests list. It is NOT an actual image with layers, etc
|
|
||||||
|
|
||||||
https://docs.docker.com/registry/spec/manifest-v2-2/
|
|
||||||
https://github.com/opencontainers/image-spec/blob/main/manifest.md
|
|
||||||
https://github.com/opencontainers/image-spec/blob/main/descriptor.md
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, data: Dict) -> None:
|
|
||||||
self._data = data
|
|
||||||
# This is the sha256: digest string. Corresponds to GitHub API name
|
|
||||||
# if the package is an untagged package
|
|
||||||
self.digest = self._data["digest"]
|
|
||||||
platform_data_os = self._data["platform"]["os"]
|
|
||||||
platform_arch = self._data["platform"]["architecture"]
|
|
||||||
platform_variant = self._data["platform"].get(
|
|
||||||
"variant",
|
|
||||||
"",
|
|
||||||
)
|
|
||||||
self.platform = f"{platform_data_os}/{platform_arch}{platform_variant}"
|
|
||||||
|
|
||||||
|
|
||||||
class ImageIndex:
|
|
||||||
"""
|
|
||||||
Data class wrapping up logic for an OCI Image Index
|
|
||||||
JSON data. Primary use is to access the manifests listing
|
|
||||||
|
|
||||||
See https://github.com/opencontainers/image-spec/blob/main/image-index.md
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, package_url: str, tag: str) -> None:
|
|
||||||
self.qualified_name = f"{package_url}:{tag}"
|
|
||||||
logger.info(f"Getting image index for {self.qualified_name}")
|
|
||||||
try:
|
|
||||||
proc = subprocess.run(
|
|
||||||
[
|
|
||||||
shutil.which("docker"),
|
|
||||||
"buildx",
|
|
||||||
"imagetools",
|
|
||||||
"inspect",
|
|
||||||
"--raw",
|
|
||||||
self.qualified_name,
|
|
||||||
],
|
|
||||||
capture_output=True,
|
|
||||||
check=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
self._data = json.loads(proc.stdout)
|
|
||||||
|
|
||||||
except subprocess.CalledProcessError as e:
|
|
||||||
logger.error(
|
|
||||||
f"Failed to get image index for {self.qualified_name}: {e.stderr}",
|
|
||||||
)
|
|
||||||
raise e
|
|
||||||
|
|
||||||
@property
|
|
||||||
def image_pointers(self) -> Iterator[ImageProperties]:
|
|
||||||
for manifest_data in self._data["manifests"]:
|
|
||||||
yield ImageProperties(manifest_data)
|
|
||||||
|
|
||||||
|
|
||||||
class RegistryTagsCleaner:
|
|
||||||
"""
|
|
||||||
This is the base class for the image registry cleaning. Given a package
|
|
||||||
name, it will keep all images which are tagged and all untagged images
|
|
||||||
referred to by a manifest. This results in only images which have been untagged
|
|
||||||
and cannot be referenced except by their SHA in being removed. None of these
|
|
||||||
images should be referenced, so it is fine to delete them.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
package_name: str,
|
|
||||||
repo_owner: str,
|
|
||||||
repo_name: str,
|
|
||||||
package_api: GithubContainerRegistryApi,
|
|
||||||
branch_api: Optional[GithubBranchApi],
|
|
||||||
):
|
|
||||||
self.actually_delete = False
|
|
||||||
self.package_api = package_api
|
|
||||||
self.branch_api = branch_api
|
|
||||||
self.package_name = package_name
|
|
||||||
self.repo_owner = repo_owner
|
|
||||||
self.repo_name = repo_name
|
|
||||||
self.tags_to_delete: List[str] = []
|
|
||||||
self.tags_to_keep: List[str] = []
|
|
||||||
|
|
||||||
# Get the information about all versions of the given package
|
|
||||||
# These are active, not deleted, the default returned from the API
|
|
||||||
self.all_package_versions = self.package_api.get_active_package_versions(
|
|
||||||
self.package_name,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Get a mapping from a tag like "1.7.0" or "feature-xyz" to the ContainerPackage
|
|
||||||
# tagged with it. It makes certain lookups easy
|
|
||||||
self.all_pkgs_tags_to_version: Dict[str, ContainerPackage] = {}
|
|
||||||
for pkg in self.all_package_versions:
|
|
||||||
for tag in pkg.tags:
|
|
||||||
self.all_pkgs_tags_to_version[tag] = pkg
|
|
||||||
logger.info(
|
|
||||||
f"Located {len(self.all_package_versions)} versions of package {self.package_name}",
|
|
||||||
)
|
|
||||||
|
|
||||||
self.decide_what_tags_to_keep()
|
|
||||||
|
|
||||||
def clean(self):
|
|
||||||
"""
|
|
||||||
This method will delete image versions, based on the selected tags to delete.
|
|
||||||
It behaves more like an unlinking than actual deletion. Removing the tag
|
|
||||||
simply removes a pointer to an image, but the actual image data remains accessible
|
|
||||||
if one has the sha256 digest of it.
|
|
||||||
"""
|
|
||||||
for tag_to_delete in self.tags_to_delete:
|
|
||||||
package_version_info = self.all_pkgs_tags_to_version[tag_to_delete]
|
|
||||||
|
|
||||||
if self.actually_delete:
|
|
||||||
logger.info(
|
|
||||||
f"Deleting {tag_to_delete} (id {package_version_info.id})",
|
|
||||||
)
|
|
||||||
self.package_api.delete_package_version(
|
|
||||||
package_version_info,
|
|
||||||
)
|
|
||||||
|
|
||||||
else:
|
|
||||||
logger.info(
|
|
||||||
f"Would delete {tag_to_delete} (id {package_version_info.id})",
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
logger.info("No tags to delete")
|
|
||||||
|
|
||||||
def clean_untagged(self, is_manifest_image: bool):
|
|
||||||
"""
|
|
||||||
This method will delete untagged images, that is those which are not named. It
|
|
||||||
handles if the image tag is actually a manifest, which points to images that look otherwise
|
|
||||||
untagged.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def _clean_untagged_manifest():
|
|
||||||
"""
|
|
||||||
|
|
||||||
Handles the deletion of untagged images, but where the package is a manifest, ie a multi
|
|
||||||
arch image, which means some "untagged" images need to exist still.
|
|
||||||
|
|
||||||
Ok, bear with me, these are annoying.
|
|
||||||
|
|
||||||
Our images are multi-arch, so the manifest is more like a pointer to a sha256 digest.
|
|
||||||
These images are untagged, but pointed to, and so should not be removed (or every pull fails).
|
|
||||||
|
|
||||||
So for each image getting kept, parse the manifest to find the digest(s) it points to. Then
|
|
||||||
remove those from the list of untagged images. The final result is the untagged, not pointed to
|
|
||||||
version which should be safe to remove.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
Tag: ghcr.io/paperless-ngx/paperless-ngx:1.7.1 refers to
|
|
||||||
amd64: sha256:b9ed4f8753bbf5146547671052d7e91f68cdfc9ef049d06690b2bc866fec2690
|
|
||||||
armv7: sha256:81605222df4ba4605a2ba4893276e5d08c511231ead1d5da061410e1bbec05c3
|
|
||||||
arm64: sha256:374cd68db40734b844705bfc38faae84cc4182371de4bebd533a9a365d5e8f3b
|
|
||||||
each of which appears as untagged image, but isn't really.
|
|
||||||
|
|
||||||
So from the list of untagged packages, remove those digests. Once all tags which
|
|
||||||
are being kept are checked, the remaining untagged packages are actually untagged
|
|
||||||
with no referrals in a manifest to them.
|
|
||||||
"""
|
|
||||||
# Simplify the untagged data, mapping name (which is a digest) to the version
|
|
||||||
# At the moment, these are the images which APPEAR untagged.
|
|
||||||
untagged_versions = {}
|
|
||||||
for x in self.all_package_versions:
|
|
||||||
if x.untagged:
|
|
||||||
untagged_versions[x.name] = x
|
|
||||||
|
|
||||||
skips = 0
|
|
||||||
|
|
||||||
# Parse manifests to locate digests pointed to
|
|
||||||
for tag in sorted(self.tags_to_keep):
|
|
||||||
try:
|
|
||||||
image_index = ImageIndex(
|
|
||||||
f"ghcr.io/{self.repo_owner}/{self.package_name}",
|
|
||||||
tag,
|
|
||||||
)
|
|
||||||
for manifest in image_index.image_pointers:
|
|
||||||
if manifest.digest in untagged_versions:
|
|
||||||
logger.info(
|
|
||||||
f"Skipping deletion of {manifest.digest},"
|
|
||||||
f" referred to by {image_index.qualified_name}"
|
|
||||||
f" for {manifest.platform}",
|
|
||||||
)
|
|
||||||
del untagged_versions[manifest.digest]
|
|
||||||
skips += 1
|
|
||||||
|
|
||||||
except Exception as err:
|
|
||||||
self.actually_delete = False
|
|
||||||
logger.exception(err)
|
|
||||||
return
|
|
||||||
|
|
||||||
logger.info(
|
|
||||||
f"Skipping deletion of {skips} packages referred to by a manifest",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Delete the untagged and not pointed at packages
|
|
||||||
logger.info(f"Deleting untagged packages of {self.package_name}")
|
|
||||||
for to_delete_name in untagged_versions:
|
|
||||||
to_delete_version = untagged_versions[to_delete_name]
|
|
||||||
|
|
||||||
if self.actually_delete:
|
|
||||||
logger.info(
|
|
||||||
f"Deleting id {to_delete_version.id} named {to_delete_version.name}",
|
|
||||||
)
|
|
||||||
self.package_api.delete_package_version(
|
|
||||||
to_delete_version,
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
logger.info(
|
|
||||||
f"Would delete {to_delete_name} (id {to_delete_version.id})",
|
|
||||||
)
|
|
||||||
|
|
||||||
def _clean_untagged_non_manifest():
|
|
||||||
"""
|
|
||||||
If the package is not a multi-arch manifest, images without tags are safe to delete.
|
|
||||||
"""
|
|
||||||
|
|
||||||
for package in self.all_package_versions:
|
|
||||||
if package.untagged:
|
|
||||||
if self.actually_delete:
|
|
||||||
logger.info(
|
|
||||||
f"Deleting id {package.id} named {package.name}",
|
|
||||||
)
|
|
||||||
self.package_api.delete_package_version(
|
|
||||||
package,
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
logger.info(
|
|
||||||
f"Would delete {package.name} (id {package.id})",
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
logger.info(
|
|
||||||
f"Not deleting tag {package.tags[0]} of package {self.package_name}",
|
|
||||||
)
|
|
||||||
|
|
||||||
logger.info("Beginning untagged image cleaning")
|
|
||||||
|
|
||||||
if is_manifest_image:
|
|
||||||
_clean_untagged_manifest()
|
|
||||||
else:
|
|
||||||
_clean_untagged_non_manifest()
|
|
||||||
|
|
||||||
def decide_what_tags_to_keep(self):
|
|
||||||
"""
|
|
||||||
This method holds the logic to delete what tags to keep and there fore
|
|
||||||
what tags to delete.
|
|
||||||
|
|
||||||
By default, any image with at least 1 tag will be kept
|
|
||||||
"""
|
|
||||||
# By default, keep anything which is tagged
|
|
||||||
self.tags_to_keep = list(set(self.all_pkgs_tags_to_version.keys()))
|
|
||||||
|
|
||||||
def check_remaining_tags_valid(self):
|
|
||||||
"""
|
|
||||||
Checks the non-deleted tags are still valid. The assumption is if the
|
|
||||||
manifest is can be inspected and each image manifest if points to can be
|
|
||||||
inspected, the image will still pull.
|
|
||||||
|
|
||||||
https://github.com/opencontainers/image-spec/blob/main/image-index.md
|
|
||||||
"""
|
|
||||||
logger.info("Beginning confirmation step")
|
|
||||||
a_tag_failed = False
|
|
||||||
for tag in sorted(self.tags_to_keep):
|
|
||||||
try:
|
|
||||||
image_index = ImageIndex(
|
|
||||||
f"ghcr.io/{self.repo_owner}/{self.package_name}",
|
|
||||||
tag,
|
|
||||||
)
|
|
||||||
for manifest in image_index.image_pointers:
|
|
||||||
logger.info(f"Checking {manifest.digest} for {manifest.platform}")
|
|
||||||
|
|
||||||
# This follows the pointer from the index to an actual image, layers and all
|
|
||||||
# Note the format is @
|
|
||||||
digest_name = f"ghcr.io/{self.repo_owner}/{self.package_name}@{manifest.digest}"
|
|
||||||
|
|
||||||
try:
|
|
||||||
subprocess.run(
|
|
||||||
[
|
|
||||||
shutil.which("docker"),
|
|
||||||
"buildx",
|
|
||||||
"imagetools",
|
|
||||||
"inspect",
|
|
||||||
"--raw",
|
|
||||||
digest_name,
|
|
||||||
],
|
|
||||||
capture_output=True,
|
|
||||||
check=True,
|
|
||||||
)
|
|
||||||
except subprocess.CalledProcessError as e:
|
|
||||||
logger.error(f"Failed to inspect digest: {e.stderr}")
|
|
||||||
a_tag_failed = True
|
|
||||||
except subprocess.CalledProcessError as e:
|
|
||||||
a_tag_failed = True
|
|
||||||
logger.error(f"Failed to inspect: {e.stderr}")
|
|
||||||
continue
|
|
||||||
|
|
||||||
if a_tag_failed:
|
|
||||||
raise Exception("At least one image tag failed to inspect")
|
|
||||||
|
|
||||||
|
|
||||||
class MainImageTagsCleaner(RegistryTagsCleaner):
|
|
||||||
def decide_what_tags_to_keep(self):
|
|
||||||
"""
|
|
||||||
Overrides the default logic for deciding what images to keep. Images tagged as "feature-"
|
|
||||||
will be removed, if the corresponding branch no longer exists.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Default to everything gets kept still
|
|
||||||
super().decide_what_tags_to_keep()
|
|
||||||
|
|
||||||
# Locate the feature branches
|
|
||||||
feature_branches = {}
|
|
||||||
for branch in self.branch_api.get_branches(
|
|
||||||
repo=self.repo_name,
|
|
||||||
):
|
|
||||||
if branch.name.startswith("feature-"):
|
|
||||||
logger.debug(f"Found feature branch {branch.name}")
|
|
||||||
feature_branches[branch.name] = branch
|
|
||||||
|
|
||||||
logger.info(f"Located {len(feature_branches)} feature branches")
|
|
||||||
|
|
||||||
if not len(feature_branches):
|
|
||||||
# Our work here is done, delete nothing
|
|
||||||
return
|
|
||||||
|
|
||||||
# Filter to packages which are tagged with feature-*
|
|
||||||
packages_tagged_feature: List[ContainerPackage] = []
|
|
||||||
for package in self.all_package_versions:
|
|
||||||
if package.tag_matches("feature-"):
|
|
||||||
packages_tagged_feature.append(package)
|
|
||||||
|
|
||||||
# Map tags like "feature-xyz" to a ContainerPackage
|
|
||||||
feature_pkgs_tags_to_versions: Dict[str, ContainerPackage] = {}
|
|
||||||
for pkg in packages_tagged_feature:
|
|
||||||
for tag in pkg.tags:
|
|
||||||
feature_pkgs_tags_to_versions[tag] = pkg
|
|
||||||
|
|
||||||
logger.info(
|
|
||||||
f'Located {len(feature_pkgs_tags_to_versions)} versions of package {self.package_name} tagged "feature-"',
|
|
||||||
)
|
|
||||||
|
|
||||||
# All the feature tags minus all the feature branches leaves us feature tags
|
|
||||||
# with no corresponding branch
|
|
||||||
self.tags_to_delete = list(
|
|
||||||
set(feature_pkgs_tags_to_versions.keys()) - set(feature_branches.keys()),
|
|
||||||
)
|
|
||||||
|
|
||||||
# All the tags minus the set of going to be deleted tags leaves us the
|
|
||||||
# tags which will be kept around
|
|
||||||
self.tags_to_keep = list(
|
|
||||||
set(self.all_pkgs_tags_to_version.keys()) - set(self.tags_to_delete),
|
|
||||||
)
|
|
||||||
logger.info(
|
|
||||||
f"Located {len(self.tags_to_delete)} versions of package {self.package_name} to delete",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class LibraryTagsCleaner(RegistryTagsCleaner):
|
|
||||||
"""
|
|
||||||
Exists for the off chance that someday, the installer library images
|
|
||||||
will need their own logic
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
def _main():
|
|
||||||
parser = ArgumentParser(
|
|
||||||
description="Using the GitHub API locate and optionally delete container"
|
|
||||||
" tags which no longer have an associated feature branch",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Requires an affirmative command to actually do a delete
|
|
||||||
parser.add_argument(
|
|
||||||
"--delete",
|
|
||||||
action="store_true",
|
|
||||||
default=False,
|
|
||||||
help="If provided, actually delete the container tags",
|
|
||||||
)
|
|
||||||
|
|
||||||
# When a tagged image is updated, the previous version remains, but it no longer tagged
|
|
||||||
# Add this option to remove them as well
|
|
||||||
parser.add_argument(
|
|
||||||
"--untagged",
|
|
||||||
action="store_true",
|
|
||||||
default=False,
|
|
||||||
help="If provided, delete untagged containers as well",
|
|
||||||
)
|
|
||||||
|
|
||||||
# If given, the package is assumed to be a multi-arch manifest. Cache packages are
|
|
||||||
# not multi-arch, all other types are
|
|
||||||
parser.add_argument(
|
|
||||||
"--is-manifest",
|
|
||||||
action="store_true",
|
|
||||||
default=False,
|
|
||||||
help="If provided, the package is assumed to be a multi-arch manifest following schema v2",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Allows configuration of log level for debugging
|
|
||||||
parser.add_argument(
|
|
||||||
"--loglevel",
|
|
||||||
default="info",
|
|
||||||
help="Configures the logging level",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Get the name of the package being processed this round
|
|
||||||
parser.add_argument(
|
|
||||||
"package",
|
|
||||||
help="The package to process",
|
|
||||||
)
|
|
||||||
|
|
||||||
args = parser.parse_args()
|
|
||||||
|
|
||||||
logging.basicConfig(
|
|
||||||
level=get_log_level(args),
|
|
||||||
datefmt="%Y-%m-%d %H:%M:%S",
|
|
||||||
format="%(asctime)s %(levelname)-8s %(message)s",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Must be provided in the environment
|
|
||||||
repo_owner: Final[str] = os.environ["GITHUB_REPOSITORY_OWNER"]
|
|
||||||
repo: Final[str] = os.environ["GITHUB_REPOSITORY"]
|
|
||||||
gh_token: Final[str] = os.environ["TOKEN"]
|
|
||||||
|
|
||||||
# Find all branches named feature-*
|
|
||||||
# Note: Only relevant to the main application, but simpler to
|
|
||||||
# leave in for all packages
|
|
||||||
with GithubBranchApi(gh_token) as branch_api:
|
|
||||||
with GithubContainerRegistryApi(gh_token, repo_owner) as container_api:
|
|
||||||
if args.package in {"paperless-ngx", "paperless-ngx/builder/cache/app"}:
|
|
||||||
cleaner = MainImageTagsCleaner(
|
|
||||||
args.package,
|
|
||||||
repo_owner,
|
|
||||||
repo,
|
|
||||||
container_api,
|
|
||||||
branch_api,
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
cleaner = LibraryTagsCleaner(
|
|
||||||
args.package,
|
|
||||||
repo_owner,
|
|
||||||
repo,
|
|
||||||
container_api,
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Set if actually doing a delete vs dry run
|
|
||||||
cleaner.actually_delete = args.delete
|
|
||||||
|
|
||||||
# Clean images with tags
|
|
||||||
cleaner.clean()
|
|
||||||
|
|
||||||
# Clean images which are untagged
|
|
||||||
cleaner.clean_untagged(args.is_manifest)
|
|
||||||
|
|
||||||
# Verify remaining tags still pull
|
|
||||||
if args.is_manifest:
|
|
||||||
cleaner.check_remaining_tags_valid()
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
_main()
|
|
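
The deleted `cleanup-tags.py` above keeps every tagged image plus every untagged digest that a kept tag's manifest list still points to, and only deletes the digests nothing references. A minimal sketch of that filtering step follows, with hypothetical in-memory inputs (the real script gathers them from the GitHub packages API and `docker buildx imagetools inspect --raw`):

from typing import Dict, List, Set


def digests_safe_to_delete(
    untagged_digests: Set[str],
    kept_tag_manifests: Dict[str, List[str]],
) -> Set[str]:
    """Drop every digest that a kept tag's manifest list still references."""
    candidates = set(untagged_digests)
    for referenced in kept_tag_manifests.values():
        for digest in referenced:
            # Mirrors the script's "Skipping deletion of ... referred to by ..." branch
            candidates.discard(digest)
    return candidates


# Hypothetical example: one multi-arch tag still references two of the three digests
untagged = {"sha256:aaa", "sha256:bbb", "sha256:ccc"}
kept = {"1.7.1": ["sha256:aaa", "sha256:bbb"]}
print(digests_safe_to_delete(untagged, kept))  # {'sha256:ccc'}
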
47
.github/scripts/common.py
@@ -1,47 +0,0 @@
-import logging
-
-
-def get_image_tag(
-    repo_name: str,
-    pkg_name: str,
-    pkg_version: str,
-) -> str:
-    """
-    Returns a string representing the normal image for a given package
-    """
-    return f"ghcr.io/{repo_name.lower()}/builder/{pkg_name}:{pkg_version}"
-
-
-def get_cache_image_tag(
-    repo_name: str,
-    pkg_name: str,
-    pkg_version: str,
-    branch_name: str,
-) -> str:
-    """
-    Returns a string representing the expected image cache tag for a given package
-
-    Registry type caching is utilized for the builder images, to allow fast
-    rebuilds, generally almost instant for the same version
-    """
-    return f"ghcr.io/{repo_name.lower()}/builder/cache/{pkg_name}:{pkg_version}"
-
-
-def get_log_level(args) -> int:
-    """
-    Returns a logging level, based
-    :param args:
-    :return:
-    """
-    levels = {
-        "critical": logging.CRITICAL,
-        "error": logging.ERROR,
-        "warn": logging.WARNING,
-        "warning": logging.WARNING,
-        "info": logging.INFO,
-        "debug": logging.DEBUG,
-    }
-    level = levels.get(args.loglevel.lower())
-    if level is None:
-        level = logging.INFO
-    return level
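
For reference, the two tag helpers in the deleted `common.py` composed image names as shown below; this is an illustrative snippet with example values (package `qpdf` at the 11.3.0 pin from the removed `.build-config.json`) that reproduces the helpers' f-strings rather than importing the deleted module:

# Illustrative only: the tag formats produced by the deleted helpers above
repo_name = "paperless-ngx/paperless-ngx"  # value of GITHUB_REPOSITORY in CI
pkg_name, pkg_version = "qpdf", "11.3.0"

image_tag = f"ghcr.io/{repo_name.lower()}/builder/{pkg_name}:{pkg_version}"
cache_tag = f"ghcr.io/{repo_name.lower()}/builder/cache/{pkg_name}:{pkg_version}"

print(image_tag)  # ghcr.io/paperless-ngx/paperless-ngx/builder/qpdf:11.3.0
print(cache_tag)  # ghcr.io/paperless-ngx/paperless-ngx/builder/cache/qpdf:11.3.0
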
91
.github/scripts/get-build-json.py
@@ -1,91 +0,0 @@
-"""
-This is a helper script for the mutli-stage Docker image builder.
-It provides a single point of configuration for package version control.
-The output JSON object is used by the CI workflow to determine what versions
-to build and pull into the final Docker image.
-
-Python package information is obtained from the Pipfile.lock. As this is
-kept updated by dependabot, it usually will need no further configuration.
-The sole exception currently is pikepdf, which has a dependency on qpdf,
-and is configured here to use the latest version of qpdf built by the workflow.
-
-Other package version information is configured directly below, generally by
-setting the version and Git information, if any.
-
-"""
-import argparse
-import json
-import os
-from pathlib import Path
-from typing import Final
-
-from common import get_cache_image_tag
-from common import get_image_tag
-
-
-def _main():
-    parser = argparse.ArgumentParser(
-        description="Generate a JSON object of information required to build the given package, based on the Pipfile.lock",
-    )
-    parser.add_argument(
-        "package",
-        help="The name of the package to generate JSON for",
-    )
-
-    PIPFILE_LOCK_PATH: Final[Path] = Path("Pipfile.lock")
-    BUILD_CONFIG_PATH: Final[Path] = Path(".build-config.json")
-
-    # Read the main config file
-    build_json: Final = json.loads(BUILD_CONFIG_PATH.read_text())
-
-    # Read Pipfile.lock file
-    pipfile_data: Final = json.loads(PIPFILE_LOCK_PATH.read_text())
-
-    args: Final = parser.parse_args()
-
-    # Read from environment variables set by GitHub Actions
-    repo_name: Final[str] = os.environ["GITHUB_REPOSITORY"]
-    branch_name: Final[str] = os.environ["GITHUB_REF_NAME"]
-
-    # Default output values
-    version = None
-    extra_config = {}
-
-    if args.package in pipfile_data["default"]:
-        # Read the version from Pipfile.lock
-        pkg_data = pipfile_data["default"][args.package]
-        pkg_version = pkg_data["version"].split("==")[-1]
-        version = pkg_version
-
-        # Any extra/special values needed
-        if args.package == "pikepdf":
-            extra_config["qpdf_version"] = build_json["qpdf"]["version"]
-
-    elif args.package in build_json:
-        version = build_json[args.package]["version"]
-
-    else:
-        raise NotImplementedError(args.package)
-
-    # The JSON object we'll output
-    output = {
-        "name": args.package,
-        "version": version,
-        "image_tag": get_image_tag(repo_name, args.package, version),
-        "cache_tag": get_cache_image_tag(
-            repo_name,
-            args.package,
-            version,
-            branch_name,
-        ),
-    }
-
-    # Add anything special a package may need
-    output.update(extra_config)
-
-    # Output the JSON info to stdout
-    print(json.dumps(output))
-
-
-if __name__ == "__main__":
-    _main()
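
The script above printed a single JSON object to stdout for the CI workflow. A sketch of the shape for `pikepdf`, the one package that also gets a `qpdf_version` key, is below; the pikepdf version shown is a hypothetical Pipfile.lock pin, while the qpdf version comes from the removed `.build-config.json`:

import json

# Hypothetical output shape only; the real values came from Pipfile.lock,
# .build-config.json and the GITHUB_REPOSITORY / GITHUB_REF_NAME environment.
example_output = {
    "name": "pikepdf",
    "version": "7.1.1",  # hypothetical Pipfile.lock pin
    "image_tag": "ghcr.io/paperless-ngx/paperless-ngx/builder/pikepdf:7.1.1",
    "cache_tag": "ghcr.io/paperless-ngx/paperless-ngx/builder/cache/pikepdf:7.1.1",
    "qpdf_version": "11.3.0",  # from the deleted .build-config.json
}
print(json.dumps(example_output))
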
270
.github/scripts/github.py
vendored
@@ -1,270 +0,0 @@
|
|||||||
"""
|
|
||||||
This module contains some useful classes for interacting with the Github API.
|
|
||||||
The full documentation for the API can be found here: https://docs.github.com/en/rest

Mostly, this focusses on two areas, repo branches and repo packages, as the use case
is cleaning up container images which are no longer referred to.

"""
import functools
import logging
import re
import urllib.parse
from typing import Dict
from typing import List
from typing import Optional

import httpx

logger = logging.getLogger("github-api")


class _GithubApiBase:
    """
    A base class for interacting with the Github API. It
    will handle the session and setting authorization headers.
    """

    def __init__(self, token: str) -> None:
        self._token = token
        self._client: Optional[httpx.Client] = None

    def __enter__(self) -> "_GithubApiBase":
        """
        Sets up the required headers for auth and response
        type from the API
        """
        self._client = httpx.Client()
        self._client.headers.update(
            {
                "Accept": "application/vnd.github.v3+json",
                "Authorization": f"token {self._token}",
            },
        )
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """
        Ensures the authorization token is cleaned up no matter
        the reason for the exit
        """
        if "Accept" in self._client.headers:
            del self._client.headers["Accept"]
        if "Authorization" in self._client.headers:
            del self._client.headers["Authorization"]

        # Close the session as well
        self._client.close()
        self._client = None

    def _read_all_pages(self, endpoint):
        """
        Helper function to read all pages of an endpoint, utilizing the
        next.url until exhausted. Assumes the endpoint returns a list
        """
        internal_data = []

        while True:
            resp = self._client.get(endpoint)
            if resp.status_code == 200:
                internal_data += resp.json()
                if "next" in resp.links:
                    endpoint = resp.links["next"]["url"]
                else:
                    logger.debug("Exiting pagination loop")
                    break
            else:
                logger.warning(f"Request to {endpoint} returned HTTP {resp.status_code}")
                resp.raise_for_status()

        return internal_data


class _EndpointResponse:
    """
    For all endpoint JSON responses, store the full
    response data, for ease of extending later, if need be.
    """

    def __init__(self, data: Dict) -> None:
        self._data = data


class GithubBranch(_EndpointResponse):
    """
    Simple wrapper for a repository branch, only extracts name information
    for now.
    """

    def __init__(self, data: Dict) -> None:
        super().__init__(data)
        self.name = self._data["name"]


class GithubBranchApi(_GithubApiBase):
    """
    Wrapper around branch API.

    See https://docs.github.com/en/rest/branches/branches
    """

    def __init__(self, token: str) -> None:
        super().__init__(token)

        self._ENDPOINT = "https://api.github.com/repos/{REPO}/branches"

    def get_branches(self, repo: str) -> List[GithubBranch]:
        """
        Returns all current branches of the given repository owned by the given
        owner or organization.
        """
        # The environment GITHUB_REPOSITORY already contains the owner in the correct location
        endpoint = self._ENDPOINT.format(REPO=repo)
        internal_data = self._read_all_pages(endpoint)
        return [GithubBranch(branch) for branch in internal_data]


class ContainerPackage(_EndpointResponse):
    """
    Data class wrapping the JSON response from the package related
    endpoints
    """

    def __init__(self, data: Dict):
        super().__init__(data)
        # This is a numerical ID, required for interactions with this
        # specific package, including deletion of it or restoration
        self.id: int = self._data["id"]

        # A string name. This might be an actual name or it could be a
        # digest string like "sha256:"
        self.name: str = self._data["name"]

        # URL to the package, including its ID, can be used for deletion
        # or restoration without needing to build up a URL ourselves
        self.url: str = self._data["url"]

        # The list of tags applied to this image. May be an empty list
        self.tags: List[str] = self._data["metadata"]["container"]["tags"]

    @functools.cached_property
    def untagged(self) -> bool:
        """
        Returns True if the image has no tags applied to it, False otherwise
        """
        return len(self.tags) == 0

    @functools.cache
    def tag_matches(self, pattern: str) -> bool:
        """
        Returns True if the image has at least one tag which matches the given regex,
        False otherwise
        """
        return any(re.match(pattern, tag) is not None for tag in self.tags)

    def __repr__(self):
        return f"Package {self.name}"


class GithubContainerRegistryApi(_GithubApiBase):
    """
    Class wrapper to deal with the Github packages API. This class only deals with
    container type packages, the only type published by paperless-ngx.
    """

    def __init__(self, token: str, owner_or_org: str) -> None:
        super().__init__(token)
        self._owner_or_org = owner_or_org
        if self._owner_or_org == "paperless-ngx":
            # https://docs.github.com/en/rest/packages#get-all-package-versions-for-a-package-owned-by-an-organization
            self._PACKAGES_VERSIONS_ENDPOINT = "https://api.github.com/orgs/{ORG}/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions"
            # https://docs.github.com/en/rest/packages#delete-package-version-for-an-organization
            self._PACKAGE_VERSION_DELETE_ENDPOINT = "https://api.github.com/orgs/{ORG}/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions/{PACKAGE_VERSION_ID}"
        else:
            # https://docs.github.com/en/rest/packages#get-all-package-versions-for-a-package-owned-by-the-authenticated-user
            self._PACKAGES_VERSIONS_ENDPOINT = "https://api.github.com/user/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions"
            # https://docs.github.com/en/rest/packages#delete-a-package-version-for-the-authenticated-user
            self._PACKAGE_VERSION_DELETE_ENDPOINT = "https://api.github.com/user/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions/{PACKAGE_VERSION_ID}"
        self._PACKAGE_VERSION_RESTORE_ENDPOINT = (
            f"{self._PACKAGE_VERSION_DELETE_ENDPOINT}/restore"
        )

    def get_active_package_versions(
        self,
        package_name: str,
    ) -> List[ContainerPackage]:
        """
        Returns all the versions of a given package (container images) from
        the API
        """

        package_type: str = "container"
        # Need to quote this for slashes in the name
        package_name = urllib.parse.quote(package_name, safe="")

        endpoint = self._PACKAGES_VERSIONS_ENDPOINT.format(
            ORG=self._owner_or_org,
            PACKAGE_TYPE=package_type,
            PACKAGE_NAME=package_name,
        )

        pkgs = []

        for data in self._read_all_pages(endpoint):
            pkgs.append(ContainerPackage(data))

        return pkgs

    def get_deleted_package_versions(
        self,
        package_name: str,
    ) -> List[ContainerPackage]:
        package_type: str = "container"
        # Need to quote this for slashes in the name
        package_name = urllib.parse.quote(package_name, safe="")

        endpoint = (
            self._PACKAGES_VERSIONS_ENDPOINT.format(
                ORG=self._owner_or_org,
                PACKAGE_TYPE=package_type,
                PACKAGE_NAME=package_name,
            )
            + "?state=deleted"
        )

        pkgs = []

        for data in self._read_all_pages(endpoint):
            pkgs.append(ContainerPackage(data))

        return pkgs

    def delete_package_version(self, package_data: ContainerPackage):
        """
        Deletes the given package version from the GHCR
        """
        resp = self._client.delete(package_data.url)
        if resp.status_code != 204:
            logger.warning(
                f"Request to delete {package_data.url} returned HTTP {resp.status_code}",
            )

    def restore_package_version(
        self,
        package_name: str,
        package_data: ContainerPackage,
    ):
        package_type: str = "container"
        endpoint = self._PACKAGE_VERSION_RESTORE_ENDPOINT.format(
            ORG=self._owner_or_org,
            PACKAGE_TYPE=package_type,
            PACKAGE_NAME=package_name,
            PACKAGE_VERSION_ID=package_data.id,
        )

        resp = self._client.post(endpoint)
        if resp.status_code != 204:
            logger.warning(
                f"Request to restore {endpoint} returned HTTP {resp.status_code}",
            )
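As a quick orientation, this is a minimal usage sketch of how the wrappers above fit together for the image-cleanup use case. It is hypothetical and not taken from the repository: the TOKEN environment variable and the package name are placeholder assumptions.

import os

# Hypothetical example; token and package name are placeholders.
# The API classes are context managers, so the httpx client and the
# auth headers are created on entry and torn down on exit.
with GithubContainerRegistryApi(os.environ["TOKEN"], "paperless-ngx") as api:
    for pkg in api.get_active_package_versions("paperless-ngx"):
        # Versions carrying no tags are the usual cleanup candidates
        if pkg.untagged:
            api.delete_package_version(pkg)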
23  .github/stale.yml  (vendored)

@@ -1,23 +0,0 @@  (file removed; previous contents below)
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 30

# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7

# Only issues or pull requests with all of these labels are check if stale. Defaults to `[]` (disabled)
onlyLabels: [cant-reproduce]

# Label to use when marking an issue as stale
staleLabel: stale

# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity. It will be closed if no further activity occurs. Thank you
  for your contributions.

# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false

# See https://github.com/marketplace/stale for more info on the app
# and https://github.com/probot/stale for the configuration docs
329  .github/workflows/ci.yml  (vendored)

@@ -16,19 +16,25 @@ on:
 env:
   # This is the version of pipenv all the steps will use
   # If changing this, change Dockerfile
-  DEFAULT_PIP_ENV_VERSION: "2023.3.20"
-  # This is the default version of Python to use in most steps
-  # If changing this, change Dockerfile
-  DEFAULT_PYTHON_VERSION: "3.9"
+  DEFAULT_PIP_ENV_VERSION: "2023.10.24"
+  # This is the default version of Python to use in most steps which aren't specific
+  DEFAULT_PYTHON_VERSION: "3.10"

 jobs:
   pre-commit:
+    # We want to run on external PRs, but not on our own internal PRs as they'll be run
+    # by the push to the branch. Without this if check, checks are duplicated since
+    # internal PRs match both the push and pull_request events.
+    if:
+      github.event_name == 'push' || github.event.pull_request.head.repo.full_name !=
+      github.repository
     name: Linting Checks
     runs-on: ubuntu-22.04
     steps:
       -
         name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       -
         name: Install python
         uses: actions/setup-python@v4

@@ -46,7 +52,7 @@ jobs:
     steps:
       -
         name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       -
         name: Set up Python
         id: setup-python

@@ -58,7 +64,7 @@ jobs:
       -
         name: Install pipenv
         run: |
-          pip install --user pipenv==${DEFAULT_PIP_ENV_VERSION}
+          pip install --user pipenv==${{ env.DEFAULT_PIP_ENV_VERSION }}
       -
         name: Install dependencies
         run: |

@@ -77,6 +83,7 @@ jobs:
         with:
           name: documentation
           path: site/
+          retention-days: 7

   documentation-deploy:
     name: "Deploy Documentation"

@@ -87,7 +94,7 @@ jobs:
     steps:
       -
         name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       -
         name: Deploy docs
         uses: mhausenblas/mkdocs-deploy-gh-pages@master

@@ -96,29 +103,21 @@ jobs:
           CUSTOM_DOMAIN: docs.paperless-ngx.com
           CONFIG_FILE: mkdocs.yml
           EXTRA_PACKAGES: build-base
+          REQUIREMENTS: docs/requirements.txt

   tests-backend:
-    name: "Tests (${{ matrix.python-version }})"
+    name: "Backend Tests (Python ${{ matrix.python-version }})"
     runs-on: ubuntu-22.04
     needs:
       - pre-commit
     strategy:
       matrix:
-        python-version: ['3.8', '3.9', '3.10']
+        python-version: ['3.9', '3.10', '3.11']
       fail-fast: false
-    env:
-      # Enable Tika end to end testing
-      TIKA_LIVE: 1
-      # Enable paperless_mail testing against real server
-      PAPERLESS_MAIL_TEST_HOST: ${{ secrets.TEST_MAIL_HOST }}
-      PAPERLESS_MAIL_TEST_USER: ${{ secrets.TEST_MAIL_USER }}
-      PAPERLESS_MAIL_TEST_PASSWD: ${{ secrets.TEST_MAIL_PASSWD }}
-      # Enable Gotenberg end to end testing
-      GOTENBERG_LIVE: 1
     steps:
       -
         name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       -
         name: Start containers
         run: |

@@ -135,7 +134,7 @@ jobs:
       -
         name: Install pipenv
         run: |
-          pip install --user pipenv==${DEFAULT_PIP_ENV_VERSION}
+          pip install --user pipenv==${{ env.DEFAULT_PIP_ENV_VERSION }}
       -
         name: Install system dependencies
         run: |

@@ -156,18 +155,24 @@ jobs:
           pipenv --python ${{ steps.setup-python.outputs.python-version }} run pip list
       -
         name: Tests
+        env:
+          PAPERLESS_CI_TEST: 1
+          # Enable paperless_mail testing against real server
+          PAPERLESS_MAIL_TEST_HOST: ${{ secrets.TEST_MAIL_HOST }}
+          PAPERLESS_MAIL_TEST_USER: ${{ secrets.TEST_MAIL_USER }}
+          PAPERLESS_MAIL_TEST_PASSWD: ${{ secrets.TEST_MAIL_PASSWD }}
         run: |
           cd src/
           pipenv --python ${{ steps.setup-python.outputs.python-version }} run pytest -ra
       -
-        name: Upload coverage to Codecov
+        name: Upload coverage
         if: ${{ matrix.python-version == env.DEFAULT_PYTHON_VERSION }}
-        uses: codecov/codecov-action@v3
+        uses: actions/upload-artifact@v3
         with:
-          # not required for public repos, but intermittently fails otherwise
-          token: ${{ secrets.CODECOV_TOKEN }}
-          # future expansion
-          flags: backend
+          name: backend-coverage-report
+          path: src/coverage.xml
+          retention-days: 7
+          if-no-files-found: warn
       -
         name: Stop containers
         if: always()

@@ -175,112 +180,151 @@ jobs:
           docker compose --file ${GITHUB_WORKSPACE}/docker/compose/docker-compose.ci-test.yml logs
           docker compose --file ${GITHUB_WORKSPACE}/docker/compose/docker-compose.ci-test.yml down

-  tests-frontend:
-    name: "Tests Frontend"
+  install-frontend-depedendencies:
+    name: "Install Frontend Dependendencies"
     runs-on: ubuntu-22.04
     needs:
       - pre-commit
-    strategy:
-      matrix:
-        node-version: [16.x]
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       -
-        name: Use Node.js ${{ matrix.node-version }}
-        uses: actions/setup-node@v3
+        name: Use Node.js 20
+        uses: actions/setup-node@v4
         with:
-          node-version: ${{ matrix.node-version }}
+          node-version: 20.x
           cache: 'npm'
           cache-dependency-path: 'src-ui/package-lock.json'
-      - run: cd src-ui && npm ci
-      - run: cd src-ui && npm run lint
-      - run: cd src-ui && npm run test
-      - run: cd src-ui && npm run e2e:ci
+      - name: Cache frontend depdendencies
+        id: cache-frontend-deps
+        uses: actions/cache@v3
+        with:
+          path: |
+            ~/.npm
+            ~/.cache
+          key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/package-lock.json') }}
+      -
+        name: Install dependencies
+        if: steps.cache-frontend-deps.outputs.cache-hit != 'true'
+        run: cd src-ui && npm ci
+      -
+        name: Install Playwright
+        if: steps.cache-frontend-deps.outputs.cache-hit != 'true'
+        run: cd src-ui && npx playwright install --with-deps

-  prepare-docker-build:
-    name: Prepare Docker Pipeline Data
-    if: github.event_name == 'push' && (startsWith(github.ref, 'refs/heads/feature-') || github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/beta' || contains(github.ref, 'beta.rc') || startsWith(github.ref, 'refs/tags/v'))
+  tests-frontend:
+    name: "Frontend Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
+    runs-on: ubuntu-22.04
+    needs:
+      - install-frontend-depedendencies
+    strategy:
+      fail-fast: false
+      matrix:
+        node-version: [20.x]
+        shard-index: [1, 2, 3, 4]
+        shard-count: [4]
+    steps:
+      - uses: actions/checkout@v4
+      -
+        name: Use Node.js 20
+        uses: actions/setup-node@v4
+        with:
+          node-version: 20.x
+          cache: 'npm'
+          cache-dependency-path: 'src-ui/package-lock.json'
+      - name: Cache frontend depdendencies
+        id: cache-frontend-deps
+        uses: actions/cache@v3
+        with:
+          path: |
+            ~/.npm
+            ~/.cache
+          key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/package-lock.json') }}
+      - name: Re-link Angular cli
+        run: cd src-ui && npm link @angular/cli
+      -
+        name: Linting checks
+        run: cd src-ui && npm run lint
+      -
+        name: Run Jest unit tests
+        run: cd src-ui && npm run test -- --max-workers=2 --shard=${{ matrix.shard-index }}/${{ matrix.shard-count }}
+      -
+        name: Upload Jest coverage
+        if: always()
+        uses: actions/upload-artifact@v3
+        with:
+          name: jest-coverage-report-${{ matrix.shard-index }}
+          path: |
+            src-ui/coverage/coverage-final.json
+            src-ui/coverage/lcov.info
+            src-ui/coverage/clover.xml
+          retention-days: 7
+          if-no-files-found: warn
+      -
+        name: Run Playwright e2e tests
+        run: cd src-ui && npx playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}
+      -
+        name: Upload Playwright test results
+        if: always()
+        uses: actions/upload-artifact@v3
+        with:
+          name: playwright-report
+          path: src-ui/playwright-report
+          retention-days: 7

+  tests-coverage-upload:
+    name: "Upload Coverage"
     runs-on: ubuntu-22.04
     needs:
-      - documentation
       - tests-backend
       - tests-frontend
     steps:
       -
-        name: Set ghcr repository name
-        id: set-ghcr-repository
-        run: |
-          ghcr_name=$(echo "${GITHUB_REPOSITORY}" | awk '{ print tolower($0) }')
-          echo "repository=${ghcr_name}" >> $GITHUB_OUTPUT
-      -
-        name: Checkout
-        uses: actions/checkout@v3
-      -
-        name: Set up Python
-        uses: actions/setup-python@v4
-        with:
-          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
-      -
-        name: Setup qpdf image
-        id: qpdf-setup
-        run: |
-          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py qpdf)
-          echo ${build_json}
-          echo "qpdf-json=${build_json}" >> $GITHUB_OUTPUT
-      -
-        name: Setup psycopg2 image
-        id: psycopg2-setup
-        run: |
-          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py psycopg2)
-          echo ${build_json}
-          echo "psycopg2-json=${build_json}" >> $GITHUB_OUTPUT
-      -
-        name: Setup pikepdf image
-        id: pikepdf-setup
-        run: |
-          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py pikepdf)
-          echo ${build_json}
-          echo "pikepdf-json=${build_json}" >> $GITHUB_OUTPUT
-      -
-        name: Setup jbig2enc image
-        id: jbig2enc-setup
-        run: |
-          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py jbig2enc)
-          echo ${build_json}
-          echo "jbig2enc-json=${build_json}" >> $GITHUB_OUTPUT
-    outputs:
-      ghcr-repository: ${{ steps.set-ghcr-repository.outputs.repository }}
-      qpdf-json: ${{ steps.qpdf-setup.outputs.qpdf-json }}
-      pikepdf-json: ${{ steps.pikepdf-setup.outputs.pikepdf-json }}
-      psycopg2-json: ${{ steps.psycopg2-setup.outputs.psycopg2-json }}
-      jbig2enc-json: ${{ steps.jbig2enc-setup.outputs.jbig2enc-json}}
-
-  # build and push image to docker hub.
+        uses: actions/checkout@v4
+      -
+        name: Download frontend coverage
+        uses: actions/download-artifact@v3
+        with:
+          path: src-ui/coverage/
+      -
+        name: Upload frontend coverage to Codecov
+        uses: codecov/codecov-action@v3
+        with:
+          # not required for public repos, but intermittently fails otherwise
+          token: ${{ secrets.CODECOV_TOKEN }}
+          flags: frontend
+          directory: src-ui/coverage/
+          # dont include backend coverage files here
+          files: '!coverage.xml'
+      -
+        name: Download backend coverage
+        uses: actions/download-artifact@v3
+        with:
+          name: backend-coverage-report
+          path: src/
+      -
+        name: Upload coverage to Codecov
+        uses: codecov/codecov-action@v3
+        with:
+          # not required for public repos, but intermittently fails otherwise
+          token: ${{ secrets.CODECOV_TOKEN }}
+          # future expansion
+          flags: backend
+          directory: src/

   build-docker-image:
+    name: Build Docker image for ${{ github.ref_name }}
     runs-on: ubuntu-22.04
+    if: github.event_name == 'push' && (startsWith(github.ref, 'refs/heads/feature-') || github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/beta' || contains(github.ref, 'beta.rc') || startsWith(github.ref, 'refs/tags/v'))
     concurrency:
       group: ${{ github.workflow }}-build-docker-image-${{ github.ref_name }}
       cancel-in-progress: true
     needs:
-      - prepare-docker-build
+      - tests-backend
+      - tests-frontend
     steps:
       -
         name: Check pushing to Docker Hub
-        id: docker-hub
+        id: push-other-places
         # Only push to Dockerhub from the main repo AND the ref is either:
         # main
         # dev

@@ -288,22 +332,29 @@ jobs:
         # a tag
         # Otherwise forks would require a Docker Hub account and secrets setup
         run: |
-          if [[ ${{ needs.prepare-docker-build.outputs.ghcr-repository }} == "paperless-ngx/paperless-ngx" && ( ${{ github.ref_name }} == "main" || ${{ github.ref_name }} == "dev" || ${{ github.ref_name }} == "beta" || ${{ startsWith(github.ref, 'refs/tags/v') }} == "true" ) ]] ; then
+          if [[ ${{ github.repository_owner }} == "paperless-ngx" && ( ${{ github.ref_name }} == "dev" || ${{ github.ref_name }} == "beta" || ${{ startsWith(github.ref, 'refs/tags/v') }} == "true" ) ]] ; then
            echo "Enabling DockerHub image push"
            echo "enable=true" >> $GITHUB_OUTPUT
          else
            echo "Not pushing to DockerHub"
            echo "enable=false" >> $GITHUB_OUTPUT
          fi
+      -
+        name: Set ghcr repository name
+        id: set-ghcr-repository
+        run: |
+          ghcr_name=$(echo "${{ github.repository }}" | awk '{ print tolower($0) }')
+          echo "Name is ${ghcr_name}"
+          echo "ghcr-repository=${ghcr_name}" >> $GITHUB_OUTPUT
       -
         name: Gather Docker metadata
         id: docker-meta
-        uses: docker/metadata-action@v4
+        uses: docker/metadata-action@v5
         with:
           images: |
-            ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}
-            name=paperlessngx/paperless-ngx,enable=${{ steps.docker-hub.outputs.enable }}
-            name=quay.io/paperlessngx/paperless-ngx,enable=${{ steps.docker-hub.outputs.enable }}
+            ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}
+            name=paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
+            name=quay.io/paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
           tags: |
             # Tag branches with branch name
             type=ref,event=branch

@@ -313,60 +364,59 @@ jobs:
             type=semver,pattern={{major}}.{{minor}}
       -
         name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
+      # If https://github.com/docker/buildx/issues/1044 is resolved,
+      # the append input with a native arm64 arch could be used to
+      # significantly speed up building
       -
         name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
       -
         name: Set up QEMU
-        uses: docker/setup-qemu-action@v2
+        uses: docker/setup-qemu-action@v3
+        with:
+          platforms: arm64
       -
-        name: Login to Github Container Registry
-        uses: docker/login-action@v2
+        name: Login to GitHub Container Registry
+        uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
       -
         name: Login to Docker Hub
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
         # Don't attempt to login is not pushing to Docker Hub
-        if: steps.docker-hub.outputs.enable == 'true'
+        if: steps.push-other-places.outputs.enable == 'true'
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
       -
         name: Login to Quay.io
-        uses: docker/login-action@v2
-        # Don't attempt to login is not pushing to Docker Hub
-        if: steps.docker-hub.outputs.enable == 'true'
+        uses: docker/login-action@v3
+        # Don't attempt to login is not pushing to Quay.io
+        if: steps.push-other-places.outputs.enable == 'true'
         with:
           registry: quay.io
           username: ${{ secrets.QUAY_USERNAME }}
           password: ${{ secrets.QUAY_ROBOT_TOKEN }}
       -
         name: Build and push
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
         with:
           context: .
           file: ./Dockerfile
-          platforms: linux/amd64,linux/arm/v7,linux/arm64
+          platforms: linux/amd64,linux/arm64
           push: ${{ github.event_name != 'pull_request' }}
           tags: ${{ steps.docker-meta.outputs.tags }}
           labels: ${{ steps.docker-meta.outputs.labels }}
-          build-args: |
-            JBIG2ENC_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.jbig2enc-json).version }}
-            QPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.qpdf-json).version }}
-            PIKEPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.pikepdf-json).version }}
-            PSYCOPG2_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.psycopg2-json).version }}
-          # Get cache layers from this branch, then dev, then main
+          # Get cache layers from this branch, then dev
           # This allows new branches to get at least some cache benefits, generally from dev
           cache-from: |
-            type=registry,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
-            type=registry,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:dev
-            type=registry,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:main
+            type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
+            type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:dev
           cache-to: |
-            type=registry,mode=max,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
+            type=registry,mode=max,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
       -
         name: Inspect image
         run: |

@@ -382,15 +432,17 @@ jobs:
         with:
           name: frontend-compiled
           path: src/documents/static/frontend/
+          retention-days: 7

   build-release:
+    name: "Build Release"
     needs:
       - build-docker-image
     runs-on: ubuntu-22.04
     steps:
       -
         name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       -
         name: Set up Python
         id: setup-python

@@ -402,11 +454,17 @@ jobs:
       -
         name: Install pipenv + tools
         run: |
-          pip install --upgrade --user pipenv==${DEFAULT_PIP_ENV_VERSION} setuptools wheel
+          pip install --upgrade --user pipenv==${{ env.DEFAULT_PIP_ENV_VERSION }} setuptools wheel
       -
         name: Install Python dependencies
         run: |
           pipenv --python ${{ steps.setup-python.outputs.python-version }} sync --dev
+      -
+        name: Patch whitenoise
+        run: |
+          curl --fail --silent --show-error --location --output 484.patch https://github.com/evansd/whitenoise/pull/484.patch
+          patch -d $(pipenv --venv)/lib/python3.10/site-packages --verbose -p2 < 484.patch
+          rm 484.patch
       -
         name: Install system dependencies
         run: |

@@ -490,8 +548,10 @@ jobs:
         with:
           name: release
           path: dist/paperless-ngx.tar.xz
+          retention-days: 7

   publish-release:
+    name: "Publish Release"
     runs-on: ubuntu-22.04
     outputs:
       prerelease: ${{ steps.get_version.outputs.prerelease }}

@@ -541,6 +601,7 @@ jobs:
           asset_content_type: application/x-xz

   append-changelog:
+    name: "Append Changelog"
     runs-on: ubuntu-22.04
     needs:
       - publish-release

@@ -548,7 +609,7 @@ jobs:
     steps:
       -
         name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
           ref: main
       -

@@ -561,7 +622,7 @@ jobs:
       -
         name: Install pipenv + tools
         run: |
-          pip install --upgrade --user pipenv==${DEFAULT_PIP_ENV_VERSION} setuptools wheel
+          pip install --upgrade --user pipenv==${{ env.DEFAULT_PIP_ENV_VERSION }} setuptools wheel
       -
         name: Append Changelog to docs
         id: append-Changelog
98  .github/workflows/cleanup-tags.yml  (vendored)

@@ -12,9 +12,6 @@ on:
   push:
     paths:
       - ".github/workflows/cleanup-tags.yml"
-      - ".github/scripts/cleanup-tags.py"
-      - ".github/scripts/github.py"
-      - ".github/scripts/common.py"

 concurrency:
   group: registry-tags-cleanup

@@ -22,62 +19,59 @@ concurrency:

 jobs:
   cleanup-images:
-    name: Cleanup Image Tags for ${{ matrix.primary-name }}
+    name: Cleanup Image Tags for paperless-ngx
     if: github.repository_owner == 'paperless-ngx'
     runs-on: ubuntu-22.04
-    strategy:
-      matrix:
-        include:
-          - primary-name: "paperless-ngx"
-            cache-name: "paperless-ngx/builder/cache/app"
-          - primary-name: "paperless-ngx/builder/qpdf"
-            cache-name: "paperless-ngx/builder/cache/qpdf"
-          - primary-name: "paperless-ngx/builder/pikepdf"
-            cache-name: "paperless-ngx/builder/cache/pikepdf"
-          - primary-name: "paperless-ngx/builder/jbig2enc"
-            cache-name: "paperless-ngx/builder/cache/jbig2enc"
-          - primary-name: "paperless-ngx/builder/psycopg2"
-            cache-name: "paperless-ngx/builder/cache/psycopg2"
     env:
       # Requires a personal access token with the OAuth scope delete:packages
       TOKEN: ${{ secrets.GHA_CONTAINER_DELETE_TOKEN }}
     steps:
       -
-        name: Checkout
-        uses: actions/checkout@v3
-      -
-        name: Login to Github Container Registry
-        uses: docker/login-action@v2
-        with:
-          registry: ghcr.io
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-      -
-        name: Set up Python
-        uses: actions/setup-python@v4
-        with:
-          python-version: "3.10"
-      -
-        name: Install Python libraries
-        run: |
-          python -m pip install httpx docker
-      #
-      # Clean up primary package
-      #
-      -
-        name: Cleanup for package "${{ matrix.primary-name }}"
+        name: Clean temporary images
         if: "${{ env.TOKEN != '' }}"
-        run: |
-          python ${GITHUB_WORKSPACE}/.github/scripts/cleanup-tags.py --untagged --is-manifest --delete "${{ matrix.primary-name }}"
-      #
-      # Clean up registry cache package
-      #
+        uses: stumpylog/image-cleaner-action/ephemeral@v0.3.0
+        with:
+          token: "${{ env.TOKEN }}"
+          owner: "${{ github.repository_owner }}"
+          is_org: "true"
+          package_name: "paperless-ngx"
+          scheme: "branch"
+          repo_name: "paperless-ngx"
+          match_regex: "feature-"
+          do_delete: "true"
+
+  cleanup-untagged-images:
+    name: Cleanup Untagged Images Tags for ${{ matrix.primary-name }}
+    if: github.repository_owner == 'paperless-ngx'
+    runs-on: ubuntu-22.04
+    needs:
+      - cleanup-images
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - primary-name: "paperless-ngx"
+          - primary-name: "paperless-ngx/builder/cache/app"
+          # TODO: Remove the above and replace with the below
+          # - primary-name: "builder/qpdf"
+          # - primary-name: "builder/cache/qpdf"
+          # - primary-name: "builder/pikepdf"
+          # - primary-name: "builder/cache/pikepdf"
+          # - primary-name: "builder/jbig2enc"
+          # - primary-name: "builder/cache/jbig2enc"
+          # - primary-name: "builder/psycopg2"
+          # - primary-name: "builder/cache/psycopg2"
+    env:
+      # Requires a personal access token with the OAuth scope delete:packages
+      TOKEN: ${{ secrets.GHA_CONTAINER_DELETE_TOKEN }}
+    steps:
       -
-        name: Cleanup for package "${{ matrix.cache-name }}"
+        name: Clean untagged images
         if: "${{ env.TOKEN != '' }}"
-        run: |
-          python ${GITHUB_WORKSPACE}/.github/scripts/cleanup-tags.py --untagged --delete "${{ matrix.cache-name }}"
+        uses: stumpylog/image-cleaner-action/untagged@v0.3.0
+        with:
+          token: "${{ env.TOKEN }}"
+          owner: "${{ github.repository_owner }}"
+          is_org: "true"
+          package_name: "${{ matrix.primary-name }}"
+          do_delete: "true"
2  .github/workflows/codeql-analysis.yml  (vendored)

@@ -38,7 +38,7 @@ jobs:

     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
310  .github/workflows/installer-library.yml  (vendored)

@@ -1,310 +0,0 @@  (file removed; previous contents below)
# This workflow will run to update the installer library of
# Docker images. These are the images which provide updated wheels
# .deb installation packages or maybe just some compiled library

name: Build Image Library

on:
  push:
    # Must match one of these branches AND one of the paths
    # to be triggered
    branches:
      - "main"
      - "dev"
      - "library-*"
      - "feature-*"
    paths:
      # Trigger the workflow if a Dockerfile changed
      - "docker-builders/**"
      # Trigger if a package was updated
      - ".build-config.json"
      - "Pipfile.lock"
      # Also trigger on workflow changes related to the library
      - ".github/workflows/installer-library.yml"
      - ".github/workflows/reusable-workflow-builder.yml"
      - ".github/scripts/**"

# Set a workflow level concurrency group so primary workflow
# can wait for this to complete if needed
# DO NOT CHANGE without updating main workflow group
concurrency:
  group: build-installer-library
  cancel-in-progress: false

jobs:
  prepare-docker-build:
    name: Prepare Docker Image Version Data
    runs-on: ubuntu-22.04
    steps:
      -
        name: Set ghcr repository name
        id: set-ghcr-repository
        run: |
          ghcr_name=$(echo "${GITHUB_REPOSITORY}" | awk '{ print tolower($0) }')
          echo "repository=${ghcr_name}" >> $GITHUB_OUTPUT
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.9"
      -
        name: Install jq
        run: |
          sudo apt-get update
          sudo apt-get install jq
      -
        name: Setup qpdf image
        id: qpdf-setup
        run: |
          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py qpdf)
          echo ${build_json}
          echo "qpdf-json=${build_json}" >> $GITHUB_OUTPUT
      -
        name: Setup psycopg2 image
        id: psycopg2-setup
        run: |
          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py psycopg2)
          echo ${build_json}
          echo "psycopg2-json=${build_json}" >> $GITHUB_OUTPUT
      -
        name: Setup pikepdf image
        id: pikepdf-setup
        run: |
          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py pikepdf)
          echo ${build_json}
          echo "pikepdf-json=${build_json}" >> $GITHUB_OUTPUT
      -
        name: Setup jbig2enc image
        id: jbig2enc-setup
        run: |
          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py jbig2enc)
          echo ${build_json}
          echo "jbig2enc-json=${build_json}" >> $GITHUB_OUTPUT
      -
        name: Setup other versions
        id: cache-bust-setup
        run: |
          pillow_version=$(jq -r '.default.pillow.version | gsub("=";"")' Pipfile.lock)
          lxml_version=$(jq -r '.default.lxml.version | gsub("=";"")' Pipfile.lock)

          echo "Pillow is ${pillow_version}"
          echo "lxml is ${lxml_version}"

          echo "pillow-version=${pillow_version}" >> $GITHUB_OUTPUT
          echo "lxml-version=${lxml_version}" >> $GITHUB_OUTPUT
    outputs:
      ghcr-repository: ${{ steps.set-ghcr-repository.outputs.repository }}
      qpdf-json: ${{ steps.qpdf-setup.outputs.qpdf-json }}
      pikepdf-json: ${{ steps.pikepdf-setup.outputs.pikepdf-json }}
      psycopg2-json: ${{ steps.psycopg2-setup.outputs.psycopg2-json }}
      jbig2enc-json: ${{ steps.jbig2enc-setup.outputs.jbig2enc-json }}
      pillow-version: ${{ steps.cache-bust-setup.outputs.pillow-version }}
      lxml-version: ${{ steps.cache-bust-setup.outputs.lxml-version }}

  build-qpdf-debs:
    name: qpdf
    needs:
      - prepare-docker-build
    uses: ./.github/workflows/reusable-workflow-builder.yml
    with:
      dockerfile: ./docker-builders/Dockerfile.qpdf
      build-platforms: linux/amd64
      build-json: ${{ needs.prepare-docker-build.outputs.qpdf-json }}
      build-args: |
        QPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.qpdf-json).version }}

  build-jbig2enc:
    name: jbig2enc
    needs:
      - prepare-docker-build
    uses: ./.github/workflows/reusable-workflow-builder.yml
    with:
      dockerfile: ./docker-builders/Dockerfile.jbig2enc
      build-json: ${{ needs.prepare-docker-build.outputs.jbig2enc-json }}
      build-args: |
        JBIG2ENC_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.jbig2enc-json).version }}

  build-psycopg2-wheel:
    name: psycopg2
    needs:
      - prepare-docker-build
    uses: ./.github/workflows/reusable-workflow-builder.yml
    with:
      dockerfile: ./docker-builders/Dockerfile.psycopg2
      build-json: ${{ needs.prepare-docker-build.outputs.psycopg2-json }}
      build-args: |
        PSYCOPG2_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.psycopg2-json).version }}

  build-pikepdf-wheel:
    name: pikepdf
    needs:
      - prepare-docker-build
      - build-qpdf-debs
    uses: ./.github/workflows/reusable-workflow-builder.yml
    with:
      dockerfile: ./docker-builders/Dockerfile.pikepdf
      build-json: ${{ needs.prepare-docker-build.outputs.pikepdf-json }}
      build-args: |
        REPO=${{ needs.prepare-docker-build.outputs.ghcr-repository }}
        QPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.qpdf-json).version }}
        PIKEPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.pikepdf-json).version }}
        PILLOW_VERSION=${{ needs.prepare-docker-build.outputs.pillow-version }}
        LXML_VERSION=${{ needs.prepare-docker-build.outputs.lxml-version }}

  commit-binary-files:
    name: Store installers
    needs:
      - prepare-docker-build
      - build-qpdf-debs
      - build-jbig2enc
      - build-psycopg2-wheel
      - build-pikepdf-wheel
    runs-on: ubuntu-22.04
    steps:
      -
        name: Checkout
        uses: actions/checkout@v3
        with:
          ref: binary-library
      -
        name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.9"
      -
        name: Install system dependencies
        run: |
          sudo apt-get update -qq
          sudo apt-get install -qq --no-install-recommends tree
      -
        name: Extract qpdf files
        run: |
          version=${{ fromJSON(needs.prepare-docker-build.outputs.qpdf-json).version }}
          tag=${{ fromJSON(needs.prepare-docker-build.outputs.qpdf-json).image_tag }}

          docker pull --quiet ${tag}
          docker create --name qpdf-extract ${tag}

          mkdir --parents qpdf/${version}/amd64
          docker cp qpdf-extract:/usr/src/qpdf/${version}/amd64 qpdf/${version}

          mkdir --parents qpdf/${version}/arm64
          docker cp qpdf-extract:/usr/src/qpdf/${version}/arm64 qpdf/${version}

          mkdir --parents qpdf/${version}/armv7
          docker cp qpdf-extract:/usr/src/qpdf/${version}/armv7 qpdf/${version}
      -
        name: Extract psycopg2 files
        run: |
          version=${{ fromJSON(needs.prepare-docker-build.outputs.psycopg2-json).version }}
          tag=${{ fromJSON(needs.prepare-docker-build.outputs.psycopg2-json).image_tag }}

          docker pull --quiet --platform linux/amd64 ${tag}
          docker create --platform linux/amd64 --name psycopg2-extract ${tag}
          mkdir --parents psycopg2/${version}/amd64
          docker cp psycopg2-extract:/usr/src/wheels/ psycopg2/${version}/amd64
          mv psycopg2/${version}/amd64/wheels/* psycopg2/${version}/amd64
          rm -r psycopg2/${version}/amd64/wheels/
          docker rm psycopg2-extract

          docker pull --quiet --platform linux/arm64 ${tag}
          docker create --platform linux/arm64 --name psycopg2-extract ${tag}
          mkdir --parents psycopg2/${version}/arm64
          docker cp psycopg2-extract:/usr/src/wheels/ psycopg2/${version}/arm64
          mv psycopg2/${version}/arm64/wheels/* psycopg2/${version}/arm64
          rm -r psycopg2/${version}/arm64/wheels/
          docker rm psycopg2-extract

          docker pull --quiet --platform linux/arm/v7 ${tag}
          docker create --platform linux/arm/v7 --name psycopg2-extract ${tag}
          mkdir --parents psycopg2/${version}/armv7
          docker cp psycopg2-extract:/usr/src/wheels/ psycopg2/${version}/armv7
          mv psycopg2/${version}/armv7/wheels/* psycopg2/${version}/armv7
          rm -r psycopg2/${version}/armv7/wheels/
          docker rm psycopg2-extract
      -
        name: Extract pikepdf files
        run: |
          version=${{ fromJSON(needs.prepare-docker-build.outputs.pikepdf-json).version }}
          tag=${{ fromJSON(needs.prepare-docker-build.outputs.pikepdf-json).image_tag }}

          docker pull --quiet --platform linux/amd64 ${tag}
          docker create --platform linux/amd64 --name pikepdf-extract ${tag}
          mkdir --parents pikepdf/${version}/amd64
          docker cp pikepdf-extract:/usr/src/wheels/ pikepdf/${version}/amd64
          mv pikepdf/${version}/amd64/wheels/* pikepdf/${version}/amd64
          rm -r pikepdf/${version}/amd64/wheels/
          docker rm pikepdf-extract

          docker pull --quiet --platform linux/arm64 ${tag}
          docker create --platform linux/arm64 --name pikepdf-extract ${tag}
          mkdir --parents pikepdf/${version}/arm64
          docker cp pikepdf-extract:/usr/src/wheels/ pikepdf/${version}/arm64
          mv pikepdf/${version}/arm64/wheels/* pikepdf/${version}/arm64
          rm -r pikepdf/${version}/arm64/wheels/
          docker rm pikepdf-extract

          docker pull --quiet --platform linux/arm/v7 ${tag}
          docker create --platform linux/arm/v7 --name pikepdf-extract ${tag}
          mkdir --parents pikepdf/${version}/armv7
          docker cp pikepdf-extract:/usr/src/wheels/ pikepdf/${version}/armv7
          mv pikepdf/${version}/armv7/wheels/* pikepdf/${version}/armv7
          rm -r pikepdf/${version}/armv7/wheels/
          docker rm pikepdf-extract
      -
        name: Extract jbig2enc files
        run: |
          version=${{ fromJSON(needs.prepare-docker-build.outputs.jbig2enc-json).version }}
          tag=${{ fromJSON(needs.prepare-docker-build.outputs.jbig2enc-json).image_tag }}

          docker pull --quiet --platform linux/amd64 ${tag}
          docker create --platform linux/amd64 --name jbig2enc-extract ${tag}
          mkdir --parents jbig2enc/${version}/amd64
          docker cp jbig2enc-extract:/usr/src/jbig2enc/build jbig2enc/${version}/amd64/
          mv jbig2enc/${version}/amd64/build/* jbig2enc/${version}/amd64/
          docker rm jbig2enc-extract

          docker pull --quiet --platform linux/arm64 ${tag}
          docker create --platform linux/arm64 --name jbig2enc-extract ${tag}
          mkdir --parents jbig2enc/${version}/arm64
          docker cp jbig2enc-extract:/usr/src/jbig2enc/build jbig2enc/${version}/arm64
          mv jbig2enc/${version}/arm64/build/* jbig2enc/${version}/arm64/
          docker rm jbig2enc-extract

          docker pull --quiet --platform linux/arm/v7 ${tag}
          docker create --platform linux/arm/v7 --name jbig2enc-extract ${tag}
          mkdir --parents jbig2enc/${version}/armv7
          docker cp jbig2enc-extract:/usr/src/jbig2enc/build jbig2enc/${version}/armv7
          mv jbig2enc/${version}/armv7/build/* jbig2enc/${version}/armv7/
          docker rm jbig2enc-extract
      -
        name: Show file structure
        run: |
          tree .
      -
        name: Commit files
        run: |
          git config --global user.name "github-actions"
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git add pikepdf/ qpdf/ psycopg2/ jbig2enc/
          git commit -m "Updating installer packages" || true
          git push origin || true
4  .github/workflows/project-actions.yml  (vendored)

@@ -28,7 +28,7 @@ jobs:
     if: github.event_name == 'issues' && (github.event.action == 'opened' || github.event.action == 'reopened')
     steps:
       - name: Add issue to project and set status to ${{ env.todo }}
-        uses: leonsteinhaeuser/project-beta-automations@v2.1.0
+        uses: leonsteinhaeuser/project-beta-automations@v2.2.1
         with:
           gh_token: ${{ secrets.GH_TOKEN }}
           organization: paperless-ngx

@@ -44,7 +44,7 @@ jobs:
     if: github.event_name == 'pull_request_target' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request.user.login != 'dependabot'
     steps:
       - name: Add PR to project and set status to "Needs Review"
-        uses: leonsteinhaeuser/project-beta-automations@v2.1.0
+        uses: leonsteinhaeuser/project-beta-automations@v2.2.1
         with:
           gh_token: ${{ secrets.GH_TOKEN }}
           organization: paperless-ngx
70
.github/workflows/repo-maintenance.yml
vendored
@@ -8,6 +8,7 @@ on:
 permissions:
   issues: write
   pull-requests: write
+  discussions: write

 concurrency:
   group: lock
@@ -19,9 +20,9 @@ jobs:
     steps:
       - uses: actions/stale@v8
         with:
-          days-before-stale: 30
-          days-before-close: 7
-          only-labels: 'cant-reproduce'
+          days-before-stale: 7
+          days-before-close: 14
+          any-of-labels: 'cant-reproduce,not a bug'
           stale-issue-label: stale
           stale-pr-label: stale
           stale-issue-message: >
@@ -32,10 +33,11 @@ jobs:
     name: 'Lock Old Threads'
     runs-on: ubuntu-latest
     steps:
-      - uses: dessant/lock-threads@v4
+      - uses: dessant/lock-threads@v5
         with:
           issue-inactive-days: '30'
           pr-inactive-days: '30'
+          discussion-inactive-days: '30'
           log-output: true
           issue-comment: >
             This issue has been automatically locked since there
@@ -45,3 +47,63 @@ jobs:
             This pull request has been automatically locked since there
             has not been any recent activity after it was closed.
             Please open a new discussion or issue for related concerns.
+          discussion-comment: >
+            This discussion has been automatically locked since there
+            has not been any recent activity after it was closed.
+            Please open a new discussion for related concerns.
+  close-answered-discussions:
+    name: 'Close Answered Discussions'
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/github-script@v7
+        with:
+          script: |
+            function sleep(ms) {
+              return new Promise(resolve => setTimeout(resolve, ms));
+            }
+
+            const query = `query($owner:String!, $name:String!) {
+              repository(owner:$owner, name:$name){
+                discussions(first:100, answered:true, states:[OPEN]) {
+                  nodes {
+                    id,
+                    number
+                  }
+                }
+              }
+            }`;
+            const variables = {
+              owner: context.repo.owner,
+              name: context.repo.repo,
+            }
+            const result = await github.graphql(query, variables)
+
+            console.log(`Found ${result.repository.discussions.nodes.length} open answered discussions`)
+
+            for (const discussion of result.repository.discussions.nodes) {
+              console.log(`Closing dicussion #${discussion.number} (${discussion.id})`)
+
+              const addCommentMutation = `mutation($discussion:ID!, $body:String!) {
+                addDiscussionComment(input:{discussionId:$discussion, body:$body}) {
+                  clientMutationId
+                }
+              }`;
+              const commentVariables = {
+                discussion: discussion.id,
+                body: 'This discussion has been automatically closed because it was marked as answered.',
+              }
+              await github.graphql(addCommentMutation, commentVariables)
+
+              const closeDiscussionMutation = `mutation($discussion:ID!, $reason:DiscussionCloseReason!) {
+                closeDiscussion(input:{discussionId:$discussion, reason:$reason}) {
+                  clientMutationId
+                }
+              }`;
+              const closeVariables = {
+                discussion: discussion.id,
+                reason: "RESOLVED",
+              }
+              await github.graphql(closeDiscussionMutation, closeVariables)
+
+              await sleep(1000)
+            }
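For anyone reviewing the new `close-answered-discussions` job, the GraphQL query it runs can be exercised by hand before the workflow ever fires. This is a rough sketch using the GitHub CLI; it assumes `gh` is installed and authenticated, and the owner/name values are only examples.

```bash
# Sketch only: reproduce the workflow's discussion query with the GitHub CLI.
# Assumes `gh` is installed and authenticated; owner/name are example values.
gh api graphql \
  -f owner='paperless-ngx' \
  -f name='paperless-ngx' \
  -f query='
    query($owner: String!, $name: String!) {
      repository(owner: $owner, name: $name) {
        discussions(first: 100, answered: true, states: [OPEN]) {
          nodes { id, number }
        }
      }
    }'
```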
.github/workflows/reusable-workflow-builder.yml (vendored, 57 lines changed)
@@ -1,57 +0,0 @@ (entire file removed)
name: Reusable Image Builder

on:
  workflow_call:
    inputs:
      dockerfile:
        required: true
        type: string
      build-json:
        required: true
        type: string
      build-args:
        required: false
        default: ""
        type: string
      build-platforms:
        required: false
        default: linux/amd64,linux/arm64,linux/arm/v7
        type: string

concurrency:
  group: ${{ github.workflow }}-${{ fromJSON(inputs.build-json).name }}-${{ fromJSON(inputs.build-json).version }}
  cancel-in-progress: false

jobs:
  build-image:
    name: Build ${{ fromJSON(inputs.build-json).name }} @ ${{ fromJSON(inputs.build-json).version }}
    runs-on: ubuntu-22.04
    steps:
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Login to Github Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      -
        name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      -
        name: Set up QEMU
        uses: docker/setup-qemu-action@v2
      -
        name: Build ${{ fromJSON(inputs.build-json).name }}
        uses: docker/build-push-action@v4
        with:
          context: .
          file: ${{ inputs.dockerfile }}
          tags: ${{ fromJSON(inputs.build-json).image_tag }}
          platforms: ${{ inputs.build-platforms }}
          build-args: ${{ inputs.build-args }}
          push: true
          cache-from: type=registry,ref=${{ fromJSON(inputs.build-json).cache_tag }}
          cache-to: type=registry,mode=max,ref=${{ fromJSON(inputs.build-json).cache_tag }}
@@ -5,7 +5,7 @@
 repos:
   # General hooks
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0
+    rev: v4.5.0
     hooks:
       - id: check-docstring-first
       - id: check-json
@@ -27,7 +27,7 @@ repos:
       - id: check-case-conflict
       - id: detect-private-key
   - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: "v2.7.1"
+    rev: 'v3.1.0'
     hooks:
       - id: prettier
        types_or:
@@ -36,17 +36,17 @@ repos:
           - markdown
         exclude: "(^Pipfile\\.lock$)"
   # Python hooks
-  - repo: https://github.com/charliermarsh/ruff-pre-commit
-    rev: 'v0.0.263'
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: 'v0.1.5'
     hooks:
       - id: ruff
-  - repo: https://github.com/psf/black
-    rev: 23.3.0
+  - repo: https://github.com/psf/black-pre-commit-mirror
+    rev: 23.11.0
     hooks:
       - id: black
   # Dockerfile hooks
   - repo: https://github.com/AleksaC/hadolint-py
-    rev: v2.12.0.2
+    rev: v2.12.0.3
     hooks:
       - id: hadolint
   # Shell script hooks
@@ -57,6 +57,6 @@ repos:
         args:
           - "--tab"
   - repo: https://github.com/shellcheck-py/shellcheck-py
-    rev: "v0.9.0.2"
+    rev: "v0.9.0.6"
     hooks:
       - id: shellcheck
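Since this change only bumps hook revisions, the usual follow-up is to rebuild the hook environments and run everything once over the whole tree; a short sketch using standard pre-commit commands, nothing project-specific:

```bash
# Rebuild cached hook environments so the new revisions are actually used,
# then run every hook (ruff, black, prettier, hadolint, shellcheck, ...) once.
pre-commit clean
pre-commit run --all-files
```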
.prettierrc (20 lines changed)
@@ -1,4 +1,16 @@
-# https://prettier.io/docs/en/options.html#semicolons
-semi: false
-# https://prettier.io/docs/en/options.html#quotes
-singleQuote: true
+{
+  # https://prettier.io/docs/en/options.html#semicolons
+  "semi": false,
+  # https://prettier.io/docs/en/options.html#quotes
+  "singleQuote": true,
+  # https://prettier.io/docs/en/options.html#trailing-commas
+  "trailingComma": "es5",
+  "overrides": [
+    {
+      "files": ["index.md", "administration.md"],
+      "options": {
+        "tabWidth": 4
+      }
+    }
+  ]
+}
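The new `overrides` entry only affects files named `index.md` and `administration.md`; a quick way to see the effect without writing anything is Prettier's check mode. This is a sketch, and the glob is only an example path:

```bash
# Dry-run Prettier against the docs; files matching the override pick up
# tabWidth 4 while everything else keeps the defaults above.
npx prettier --check "docs/**/*.md"
```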
@@ -1 +1 @@
-3.8.16
+3.9.18

@@ -2,13 +2,13 @@
 # https://beta.ruff.rs/docs/rules/
 extend-select = ["I", "W", "UP", "COM", "DJ", "EXE", "ISC", "ICN", "G201", "INP", "PIE", "RSE", "SIM", "TID", "PLC", "PLE", "RUF"]
 # TODO PTH
-ignore = ["DJ001", "SIM105"]
+ignore = ["DJ001", "SIM105", "RUF012"]
 fix = true
 line-length = 88
 respect-gitignore = true
 src = ["src"]
-target-version = "py38"
-format = "grouped"
+target-version = "py39"
+output-format = "grouped"
 show-fixes = true

 [per-file-ignores]
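The renamed `output-format` key and the bumped `target-version` change how findings are reported, not which files are linted, so a plain run over the configured `src` tree is enough to see the difference. A sketch, assuming the ruff 0.1.5 pinned in the pre-commit bump above is on PATH:

```bash
# Lint the configured sources; findings are printed in the "grouped" format.
ruff check src/
```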
@@ -11,7 +11,7 @@ If you want to implement something big:

 ## Python

-Paperless supports python 3.8 and 3.9. We format Python code with [Black](https://github.com/psf/black).
+Paperless supports python 3.9 - 3.11. We format Python code with [Black](https://github.com/psf/black).

 ## Branches

@@ -45,7 +45,7 @@ Examples of `non-trivial` PRs might include:

 - Additional features
 - Large changes to many distinct files
-- Breaking or depreciation of existing features
+- Breaking or deprecation of existing features

 Our community review process for `non-trivial` PRs is the following:

@@ -58,6 +58,13 @@ Our community review process for `non-trivial` PRs is the following:

 This process might be slow as community members have different schedules and time to dedicate to the Paperless project. However it ensures community code reviews are as brilliantly thorough as they once were with @jonaswinkler.

+# AI-Generated Code
+
+This project does not specifically prohibit the use of AI-generated code _during the process_ of creating a PR, however:
+
+1. Any code present in the final PR that was generated using AI sources should be clearly attributed as such and must not violate copyright protections.
+2. We will not accept PRs that are entirely or mostly AI-derived.
+
 # Translating Paperless-ngx

 Some notes about translation:
Dockerfile (159 lines changed)
@@ -1,11 +1,11 @@
-# syntax=docker/dockerfile:1.4
+# syntax=docker/dockerfile:1
 # https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/reference.md

 # Stage: compile-frontend
 # Purpose: Compiles the frontend
 # Notes:
 #  - Does NPM stuff with Typescript and such
-FROM --platform=$BUILDPLATFORM node:16-bullseye-slim AS compile-frontend
+FROM --platform=$BUILDPLATFORM docker.io/node:20-bookworm-slim AS compile-frontend

 COPY ./src-ui /src/src-ui

@@ -21,7 +21,7 @@ RUN set -eux \
 # Comments:
 #  - pipenv dependencies are not left in the final image
 #  - pipenv can't touch the final image somehow
-FROM --platform=$BUILDPLATFORM python:3.9-slim-bullseye as pipenv-base
+FROM --platform=$BUILDPLATFORM docker.io/python:3.11-alpine as pipenv-base

 WORKDIR /usr/src/pipenv

@@ -29,7 +29,7 @@ COPY Pipfile* ./

 RUN set -eux \
   && echo "Installing pipenv" \
-  && python3 -m pip install --no-cache-dir --upgrade pipenv==2023.3.20 \
+  && python3 -m pip install --no-cache-dir --upgrade pipenv==2023.10.24 \
   && echo "Generating requirement.txt" \
   && pipenv requirements > requirements.txt

@@ -37,7 +37,7 @@ RUN set -eux \
 # Purpose: The final image
 # Comments:
 #  - Don't leave anything extra in here
-FROM python:3.9-slim-bullseye as main-app
+FROM docker.io/python:3.11-slim-bookworm as main-app

 LABEL org.opencontainers.image.authors="paperless-ngx team <hello@paperless-ngx.com>"
 LABEL org.opencontainers.image.documentation="https://docs.paperless-ngx.com/"
@@ -47,6 +47,14 @@ LABEL org.opencontainers.image.licenses="GPL-3.0-only"

 ARG DEBIAN_FRONTEND=noninteractive

+# Buildx provided, must be defined to use though
+ARG TARGETARCH
+
+# Can be workflow provided, defaults set for manual building
+ARG JBIG2ENC_VERSION=0.29
+ARG QPDF_VERSION=11.6.3
+ARG GS_VERSION=10.02.0
+
 #
 # Begin installation and configuration
 # Order the steps below from least often changed to most
@@ -67,23 +75,11 @@ ARG RUNTIME_PACKAGES="\
   gnupg \
   icc-profiles-free \
   imagemagick \
-  # Image processing
-  liblept5 \
-  liblcms2-2 \
-  libtiff5 \
-  libfreetype6 \
-  libwebp6 \
-  libopenjp2-7 \
-  libimagequant0 \
-  libraqm0 \
-  libjpeg62-turbo \
   # PostgreSQL
   libpq5 \
   postgresql-client \
   # MySQL / MariaDB
   mariadb-client \
-  # For Numpy
-  libatlas3-base \
   # OCRmyPDF dependencies
   tesseract-ocr \
   tesseract-ocr-eng \
@@ -93,11 +89,12 @@ ARG RUNTIME_PACKAGES="\
   tesseract-ocr-spa \
   unpaper \
   pngquant \
-  # pikepdf / qpdf
   jbig2dec \
+  # lxml
   libxml2 \
   libxslt1.1 \
-  libgnutls30 \
+  # itself
+  qpdf \
   # Mime type detection
   file \
   libmagic1 \
@@ -105,9 +102,7 @@ ARG RUNTIME_PACKAGES="\
   zlib1g \
   # Barcode splitter
   libzbar0 \
-  poppler-utils \
-  # RapidFuzz on armv7
-  libatomic1"
+  poppler-utils"

 # Install basic runtime packages.
 # These change very infrequently
@@ -115,7 +110,37 @@ RUN set -eux \
   echo "Installing system packages" \
   && apt-get update \
   && apt-get install --yes --quiet --no-install-recommends ${RUNTIME_PACKAGES} \
-  && rm -rf /var/lib/apt/lists/* \
+  && echo "Installing pre-built updates" \
+  && echo "Installing qpdf ${QPDF_VERSION}" \
+  && curl --fail --silent --show-error --location \
+    --output libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
+    https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
+  && curl --fail --silent --show-error --location \
+    --output qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
+    https://github.com/paperless-ngx/builder/releases/download/qpdf-${QPDF_VERSION}/qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
+  && dpkg --install ./libqpdf29_${QPDF_VERSION}-1_${TARGETARCH}.deb \
+  && dpkg --install ./qpdf_${QPDF_VERSION}-1_${TARGETARCH}.deb \
+  && echo "Installing Ghostscript ${GS_VERSION}" \
+  && curl --fail --silent --show-error --location \
+    --output libgs10_${GS_VERSION}.dfsg-2_${TARGETARCH}.deb \
+    https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10_${GS_VERSION}.dfsg-2_${TARGETARCH}.deb \
+  && curl --fail --silent --show-error --location \
+    --output ghostscript_${GS_VERSION}.dfsg-2_${TARGETARCH}.deb \
+    https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/ghostscript_${GS_VERSION}.dfsg-2_${TARGETARCH}.deb \
+  && curl --fail --silent --show-error --location \
+    --output libgs10-common_${GS_VERSION}.dfsg-2_all.deb \
+    https://github.com/paperless-ngx/builder/releases/download/ghostscript-${GS_VERSION}/libgs10-common_${GS_VERSION}.dfsg-2_all.deb \
+  && dpkg --install ./libgs10-common_${GS_VERSION}.dfsg-2_all.deb \
+  && dpkg --install ./libgs10_${GS_VERSION}.dfsg-2_${TARGETARCH}.deb \
+  && dpkg --install ./ghostscript_${GS_VERSION}.dfsg-2_${TARGETARCH}.deb \
+  && echo "Installing jbig2enc" \
+  && curl --fail --silent --show-error --location \
+    --output jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
+    https://github.com/paperless-ngx/builder/releases/download/jbig2enc-${JBIG2ENC_VERSION}/jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
+  && dpkg --install ./jbig2enc_${JBIG2ENC_VERSION}-1_${TARGETARCH}.deb \
+  && echo "Cleaning up image layer" \
+  && rm --force --verbose *.deb \
+  && rm --recursive --force --verbose /var/lib/apt/lists/* \
   && echo "Installing supervisor" \
   && python3 -m pip install --default-timeout=1000 --upgrade --no-cache-dir supervisor==4.2.5

@@ -166,43 +191,6 @@ RUN set -eux \
   && chmod +x install_management_commands.sh \
   && ./install_management_commands.sh

-# Buildx provided, must be defined to use though
-ARG TARGETARCH
-ARG TARGETVARIANT
-
-# Workflow provided, defaults set for manual building
-ARG JBIG2ENC_VERSION=0.29
-ARG QPDF_VERSION=11.3.0
-ARG PIKEPDF_VERSION=7.1.1
-ARG PSYCOPG2_VERSION=2.9.5
-
-# Install the built packages from the installer library images
-# These change sometimes
-RUN set -eux \
-  && echo "Getting binaries" \
-  && mkdir paperless-ngx \
-  && curl --fail --silent --show-error --output paperless-ngx.tar.gz --location https://github.com/paperless-ngx/paperless-ngx/archive/ba28a1e16c27d121b644b4f6bdb78855a2850561.tar.gz \
-  && tar -xf paperless-ngx.tar.gz --directory paperless-ngx --strip-components=1 \
-  && cd paperless-ngx \
-  # Setting a specific revision ensures we know what this installed
-  # and ensures cache breaking on changes
-  && echo "Installing jbig2enc" \
-  && cp ./jbig2enc/${JBIG2ENC_VERSION}/${TARGETARCH}${TARGETVARIANT}/jbig2 /usr/local/bin/ \
-  && cp ./jbig2enc/${JBIG2ENC_VERSION}/${TARGETARCH}${TARGETVARIANT}/libjbig2enc* /usr/local/lib/ \
-  && echo "Installing qpdf" \
-  && apt-get install --yes --no-install-recommends ./qpdf/${QPDF_VERSION}/${TARGETARCH}${TARGETVARIANT}/libqpdf29_*.deb \
-  && apt-get install --yes --no-install-recommends ./qpdf/${QPDF_VERSION}/${TARGETARCH}${TARGETVARIANT}/qpdf_*.deb \
-  && echo "Installing pikepdf and dependencies" \
-  && python3 -m pip install --no-cache-dir ./pikepdf/${PIKEPDF_VERSION}/${TARGETARCH}${TARGETVARIANT}/*.whl \
-  && python3 -m pip list \
-  && echo "Installing psycopg2" \
-  && python3 -m pip install --no-cache-dir ./psycopg2/${PSYCOPG2_VERSION}/${TARGETARCH}${TARGETVARIANT}/psycopg2*.whl \
-  && python3 -m pip list \
-  && echo "Cleaning up image layer" \
-  && cd ../ \
-  && rm -rf paperless-ngx \
-  && rm paperless-ngx.tar.gz
-
 WORKDIR /usr/src/paperless/src/

 # Python dependencies
@@ -214,44 +202,61 @@ COPY --from=pipenv-base /usr/src/pipenv/requirements.txt ./
 ARG BUILD_PACKAGES="\
   build-essential \
   git \
+  # https://www.psycopg.org/docs/install.html#prerequisites
+  libpq-dev \
+  # https://github.com/PyMySQL/mysqlclient#linux
   default-libmysqlclient-dev \
-  python3-dev"
+  pkg-config"

-RUN set -eux \
+# hadolint ignore=DL3042
+RUN --mount=type=cache,target=/root/.cache/pip/,id=pip-cache \
+  set -eux \
   && echo "Installing build system packages" \
   && apt-get update \
   && apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
   && python3 -m pip install --no-cache-dir --upgrade wheel \
   && echo "Installing Python requirements" \
-  && python3 -m pip install --default-timeout=1000 --no-cache-dir --requirement requirements.txt \
+  && python3 -m pip install --default-timeout=1000 --requirement requirements.txt \
+  && echo "Patching whitenoise for compression speedup" \
+  && curl --fail --silent --show-error --location --output 484.patch https://github.com/evansd/whitenoise/pull/484.patch \
+  && patch -d /usr/local/lib/python3.11/site-packages --verbose -p2 < 484.patch \
+  && rm 484.patch \
   && echo "Installing NLTK data" \
   && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" snowball_data \
   && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" stopwords \
   && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" punkt \
   && echo "Cleaning up image" \
-  && apt-get -y purge ${BUILD_PACKAGES} \
-  && apt-get -y autoremove --purge \
+  && apt-get --yes purge ${BUILD_PACKAGES} \
+  && apt-get --yes autoremove --purge \
   && apt-get clean --yes \
-  && rm -rf /var/lib/apt/lists/* \
-  && rm -rf /tmp/* \
-  && rm -rf /var/tmp/* \
-  && rm -rf /var/cache/apt/archives/* \
-  && truncate -s 0 /var/log/*log
+  && rm --recursive --force --verbose /var/lib/apt/lists/* \
+  && rm --recursive --force --verbose /tmp/* \
+  && rm --recursive --force --verbose /var/tmp/* \
+  && rm --recursive --force --verbose /var/cache/apt/archives/* \
+  && truncate --size 0 /var/log/*log

 # copy backend
-COPY ./src ./
+COPY --chown=1000:1000 ./src ./

 # copy frontend
-COPY --from=compile-frontend /src/src/documents/static/frontend/ ./documents/static/frontend/
+COPY --from=compile-frontend --chown=1000:1000 /src/src/documents/static/frontend/ ./documents/static/frontend/

 # add users, setup scripts
 # Mount the compiled frontend to expected location
 RUN set -eux \
-  && addgroup --gid 1000 paperless \
-  && useradd --uid 1000 --gid paperless --home-dir /usr/src/paperless paperless \
-  && chown -R paperless:paperless /usr/src/paperless \
-  && gosu paperless python3 manage.py collectstatic --clear --no-input --link \
-  && gosu paperless python3 manage.py compilemessages
+  && echo "Setting up user/group" \
+  && addgroup --gid 1000 paperless \
+  && useradd --uid 1000 --gid paperless --home-dir /usr/src/paperless paperless \
+  && echo "Creating volume directories" \
+  && mkdir --parents --verbose /usr/src/paperless/data \
+  && mkdir --parents --verbose /usr/src/paperless/media \
+  && mkdir --parents --verbose /usr/src/paperless/consume \
+  && mkdir --parents --verbose /usr/src/paperless/export \
+  && echo "Adjusting all permissions" \
+  && chown --from root:root --changes --recursive paperless:paperless /usr/src/paperless \
+  && echo "Collecting static files" \
+  && gosu paperless python3 manage.py collectstatic --clear --no-input --link \
+  && gosu paperless python3 manage.py compilemessages

 VOLUME ["/usr/src/paperless/data", \
         "/usr/src/paperless/media", \
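Because qpdf, Ghostscript, and jbig2enc now come from pre-built `.deb` releases rather than Debian's own packages, a quick sanity check after building is to ask the tools for their versions inside the image. A sketch, assuming a locally built tag named `paperless-ngx:local`:

```bash
# Spot-check that the pinned tool versions from the builder releases are the
# ones that actually landed in the final image. The tag is an example.
docker run --rm --entrypoint qpdf paperless-ngx:local --version
docker run --rm --entrypoint gs paperless-ngx:local --version
docker run --rm --entrypoint python3 paperless-ngx:local --version
```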
Pipfile (94 lines changed)
@@ -3,82 +3,78 @@ url = "https://pypi.python.org/simple"
 verify_ssl = true
 name = "pypi"

-[[source]]
-url = "https://www.piwheels.org/simple"
-verify_ssl = true
-name = "piwheels"
-
 [packages]
 dateparser = "~=1.1"
-django = "~=4.1"
-django-cors-headers = "*"
+# WARNING: django does not use semver.
+# Only patch versions are guaranteed to not introduce breaking changes.
+django = "~=4.2.7"
+django-auditlog = "*"
 django-celery-results = "*"
 django-compression-middleware = "*"
-django-guardian = "*"
+django-cors-headers = "*"
 django-extensions = "*"
-django-filter = "~=22.1"
+django-filter = "~=23.3"
+django-guardian = "*"
+django-multiselectfield = "*"
 djangorestframework = "~=3.14"
 djangorestframework-guardian = "*"
-django-ipware = "*"
+drf-writable-nested = "*"
+bleach = "*"
+celery = {extras = ["redis"], version = "*"}
+channels = "~=4.0"
+channels-redis = "*"
+concurrent-log-handler = "*"
 filelock = "*"
+flower = "*"
+gotenberg-client = "*"
 gunicorn = "*"
 imap-tools = "*"
+inotifyrecursive = "~=0.3"
 langdetect = "*"
+mysqlclient = "*"
+nltk = "*"
+ocrmypdf = "~=15.0"
 pathvalidate = "*"
-pillow = "~=9.4"
-pikepdf = "*"
-python-gnupg = "*"
-python-dotenv = "*"
-python-dateutil = "*"
-python-magic = "*"
+pdf2image = "*"
 psycopg2 = "*"
+python-dateutil = "*"
+python-dotenv = "*"
+python-gnupg = "*"
+python-ipware = "*"
+python-magic = "*"
+pyzbar = "*"
 rapidfuzz = "*"
 redis = {extras = ["hiredis"], version = "*"}
-scikit-learn = "~=1.2"
-numpy = "*"
-whitenoise = "~=6.3"
-watchdog = "~=2.2"
-whoosh="~=2.7"
-inotifyrecursive = "~=0.3"
-ocrmypdf = "~=14.0"
-tqdm = "*"
-tika = "*"
-# TODO: This will sadly also install daphne+dependencies,
-# which an ASGI server we don't need. Adds about 15MB image size.
-channels = "~=3.0"
-channels-redis = "*"
-uvicorn = {extras = ["standard"], version = "*"}
-concurrent-log-handler = "*"
-"pdfminer.six" = "*"
-pyzbar = "*"
-mysqlclient = "*"
-celery = {extras = ["redis"], version = "*"}
+scikit-learn = "~=1.3"
 setproctitle = "*"
-nltk = "*"
-pdf2image = "*"
-flower = "*"
-bleach = "*"
+tika-client = "*"
+tqdm = "*"
+uvicorn = {extras = ["standard"], version = "*"}
+watchdog = "~=3.0"
+whitenoise = "~=6.6"
+whoosh="~=2.7"
 zxing-cpp = {version = "*", platform_machine = "== 'x86_64'"}
-#
-# Packages locked due to issues (try to check if these are fixed in a release every so often)
-#
-# Pin this until piwheels is building 1.9 (see https://www.piwheels.org/project/scipy/)
-scipy = "==1.8.1"

 [dev-packages]
-coveralls = "*"
+# Linting
+black = "*"
+pre-commit = "*"
+ruff = "*"
+# Testing
 factory-boy = "*"
 pytest = "*"
 pytest-cov = "*"
 pytest-django = "*"
+pytest-httpx = "*"
 pytest-env = "*"
 pytest-sugar = "*"
 pytest-xdist = "*"
-black = "*"
-pre-commit = "*"
+pytest-rerunfailures = "*"
 imagehash = "*"
+daphne = "*"
+# Documentation
 mkdocs-material = "*"
-ruff = "*"
+mkdocs-glightbox = "*"

 [typing-dev]
 mypy = "*"
@@ -90,12 +86,10 @@ celery-types = "*"
 django-stubs = {extras= ["compatible-mypy"], version="*"}
 types-dateparser = "*"
 types-bleach = "*"
-types-humanfriendly = "*"
 types-redis = "*"
 types-tqdm = "*"
 types-Markdown = "*"
 types-Pygments = "*"
-types-backports = "*"
 types-colorama = "*"
 types-psycopg2 = "*"
 types-setuptools = "*"
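After a Pipfile reshuffle like this one, the lock file has to be regenerated with pipenv; the Dockerfile above then derives `requirements.txt` from it with the same tool. A minimal sketch:

```bash
# Re-resolve and pin all dependencies, then export them the same way the
# pipenv-base stage of the Dockerfile does.
pipenv lock
pipenv requirements > requirements.txt
```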
Pipfile.lock (generated, 5659 lines changed)

README.md (55 lines changed)
@@ -6,8 +6,11 @@
 [](https://demo.paperless-ngx.com)

 <p align="center">
-  <img src="https://github.com/paperless-ngx/paperless-ngx/raw/main/resources/logo/web/png/Black%20logo%20-%20no%20background.png#gh-light-mode-only" width="50%" />
-  <img src="https://github.com/paperless-ngx/paperless-ngx/raw/main/resources/logo/web/png/White%20logo%20-%20no%20background.png#gh-dark-mode-only" width="50%" />
+  <picture>
+    <source media="(prefers-color-scheme: dark)" srcset="https://github.com/paperless-ngx/paperless-ngx/blob/main/resources/logo/web/png/White%20logo%20-%20no%20background.png" width="50%">
+    <source media="(prefers-color-scheme: light)" srcset="https://github.com/paperless-ngx/paperless-ngx/raw/main/resources/logo/web/png/Black%20logo%20-%20no%20background.png" width="50%">
+    <img src="https://github.com/paperless-ngx/paperless-ngx/raw/main/resources/logo/web/png/Black%20logo%20-%20no%20background.png" width="50%">
+  </picture>
 </p>

 <!-- omit in toc -->
@@ -16,8 +19,7 @@

 Paperless-ngx is a document management system that transforms your physical documents into a searchable online archive so you can keep, well, _less paper_.

-Paperless-ngx forked from [paperless-ng](https://github.com/jonaswinkler/paperless-ng) to continue the great work and distribute responsibility of supporting and advancing the project among a team of people. [Consider joining us!](#community-support) Discussion of this transition can be found in issues
-[#1599](https://github.com/jonaswinkler/paperless-ng/issues/1599) and [#1632](https://github.com/jonaswinkler/paperless-ng/issues/1632).
+Paperless-ngx is the official successor to the original [Paperless](https://github.com/the-paperless-project/paperless) & [Paperless-ng](https://github.com/jonaswinkler/paperless-ng) projects and is designed to distribute the responsibility of advancing and supporting the project among a team of people. [Consider joining us!](#community-support)

 A demo is available at [demo.paperless-ngx.com](https://demo.paperless-ngx.com) using login `demo` / `demo`. _Note: demo content is reset frequently and confidential information should not be uploaded._

@@ -33,37 +35,19 @@ A demo is available at [demo.paperless-ngx.com](https://demo.paperless-ngx.com)

 # Features

-
-
+<picture>
+  <source media="(prefers-color-scheme: dark)" srcset="https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/docs/assets/screenshots/documents-smallcards-dark.png">
+  <source media="(prefers-color-scheme: light)" srcset="https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/docs/assets/screenshots/documents-smallcards.png">
+  <img src="https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/docs/assets/screenshots/documents-smallcards.png">
+</picture>

-- Organize and index your scanned documents with tags, correspondents, types, and more.
-- Performs OCR on your documents, adds selectable text to image only documents and adds tags, correspondents and document types to your documents.
-- Supports PDF documents, images, plain text files, and Office documents (Word, Excel, Powerpoint, and LibreOffice equivalents).
-- Office document support is optional and provided by Apache Tika (see [configuration](https://docs.paperless-ngx.com/configuration/#tika))
-- Paperless stores your documents plain on disk. Filenames and folders are managed by paperless and their format can be configured freely.
-- Single page application front end.
-- Includes a dashboard that shows basic statistics and has document upload.
-- Filtering by tags, correspondents, types, and more.
-- Customizable views can be saved and displayed on the dashboard.
-- Full text search helps you find what you need.
-- Auto completion suggests relevant words from your documents.
-- Results are sorted by relevance to your search query.
-- Highlighting shows you which parts of the document matched the query.
-- Searching for similar documents ("More like this")
-- Email processing: Paperless adds documents from your email accounts.
-- Configure multiple accounts and filters for each account.
-- When adding documents from mail, paperless can move these mail to a new folder, mark them as read, flag them as important or delete them.
-- Machine learning powered document matching.
-- Paperless-ngx learns from your documents and will be able to automatically assign tags, correspondents and types to documents once you've stored a few documents in paperless.
-- Optimized for multi core systems: Paperless-ngx consumes multiple documents in parallel.
-- The integrated sanity checker makes sure that your document archive is in good health.
-- [More screenshots are available in the documentation](https://docs.paperless-ngx.com/#screenshots).
+A full list of [features](https://docs.paperless-ngx.com/#features) and [screenshots](https://docs.paperless-ngx.com/#screenshots) are available in the [documentation](https://docs.paperless-ngx.com/).

 # Getting started

-The easiest way to deploy paperless is docker-compose. The files in the [`/docker/compose` directory](https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose) are configured to pull the image from Github Packages.
+The easiest way to deploy paperless is `docker compose`. The files in the [`/docker/compose` directory](https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose) are configured to pull the image from GitHub Packages.

-If you'd like to jump right in, you can configure a docker-compose environment with our install script:
+If you'd like to jump right in, you can configure a `docker compose` environment with our install script:

 ```bash
 bash -c "$(curl -L https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/install-paperless-ngx.sh)"
@@ -85,7 +69,7 @@ If you feel like contributing to the project, please do! Bug fixes, enhancements

 ## Community Support

-People interested in continuing the work on paperless-ngx are encouraged to reach out here on github and in the [Matrix Room](https://matrix.to/#/#paperless:adnidor.de). If you would like to contribute to the project on an ongoing basis there are multiple [teams](https://github.com/orgs/paperless-ngx/people) (frontend, ci/cd, etc) that could use your help so please reach out!
+People interested in continuing the work on paperless-ngx are encouraged to reach out here on github and in the [Matrix Room](https://matrix.to/#/#paperless:matrix.org). If you would like to contribute to the project on an ongoing basis there are multiple [teams](https://github.com/orgs/paperless-ngx/people) (frontend, ci/cd, etc) that could use your help so please reach out!

 ## Translation

@@ -101,14 +85,7 @@ For bugs please [open an issue](https://github.com/paperless-ngx/paperless-ngx/i

 # Affiliated Projects

-Paperless has been around for a while now, and people have built tools that interact with it. If you're one of them, please reach out and we can add your project to the list. Current projects include:
-
-- **Mobile**
-- [Paperless App](https://github.com/bauerj/paperless_app): An Android/iOS application for Paperless-ngx.
-- [Paperless Mobile](https://github.com/astubenbord/paperless-mobile): A modern, feature rich Android app for Paperless-ngx.
-- [Paperless Share](https://github.com/qcasey/paperless_share): Share any files from your Android application with Paperless-ngx. Very simple, but works with all mobile scanning apps that allow you to share scanned documents.
-- **Desktop**
-- [Scan to Paperless](https://github.com/sbrunner/scan-to-paperless): Scan and prepare (crop, deskew, OCR, ...) your documents for use in Paperless-ngx.
+Please see [the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Affiliated-Projects) for a user-maintained list of affiliated projects and software that is compatible with Paperless-ngx.

 # Important Note

@@ -1,81 +0,0 @@ (entire file removed)
#!/usr/bin/env bash

# Helper script for building the Docker image locally.
# Parses and provides the nessecary versions of other images to Docker
# before passing in the rest of script args.

# First Argument: The Dockerfile to build
# Other Arguments: Additional arguments to docker build

# Example Usage:
#   ./build-docker-image.sh Dockerfile -t paperless-ngx:my-awesome-feature

set -eu

if ! command -v jq &> /dev/null ; then
    echo "jq required"
    exit 1
elif [ ! -f "$1" ]; then
    echo "$1 is not a file, please provide the Dockerfile"
    exit 1
fi

# Get the branch name (used for caching)
branch_name=$(git rev-parse --abbrev-ref HEAD)

# Parse eithe Pipfile.lock or the .build-config.json
jbig2enc_version=$(jq -r '.jbig2enc.version' .build-config.json)
qpdf_version=$(jq -r '.qpdf.version' .build-config.json)
psycopg2_version=$(jq -r '.default.psycopg2.version | gsub("=";"")' Pipfile.lock)
pikepdf_version=$(jq -r '.default.pikepdf.version | gsub("=";"")' Pipfile.lock)
pillow_version=$(jq -r '.default.pillow.version | gsub("=";"")' Pipfile.lock)
lxml_version=$(jq -r '.default.lxml.version | gsub("=";"")' Pipfile.lock)

base_filename="$(basename -- "${1}")"
build_args_str=""
cache_from_str=""

case "${base_filename}" in

    *.jbig2enc)
        build_args_str="--build-arg JBIG2ENC_VERSION=${jbig2enc_version}"
        cache_from_str="--cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/jbig2enc:${jbig2enc_version}"
        ;;

    *.psycopg2)
        build_args_str="--build-arg PSYCOPG2_VERSION=${psycopg2_version}"
        cache_from_str="--cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/psycopg2:${psycopg2_version}"
        ;;

    *.qpdf)
        build_args_str="--build-arg QPDF_VERSION=${qpdf_version}"
        cache_from_str="--cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/qpdf:${qpdf_version}"
        ;;

    *.pikepdf)
        build_args_str="--build-arg QPDF_VERSION=${qpdf_version} --build-arg PIKEPDF_VERSION=${pikepdf_version} --build-arg PILLOW_VERSION=${pillow_version} --build-arg LXML_VERSION=${lxml_version}"
        cache_from_str="--cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/pikepdf:${pikepdf_version}"
        ;;

    Dockerfile)
        build_args_str="--build-arg QPDF_VERSION=${qpdf_version} --build-arg PIKEPDF_VERSION=${pikepdf_version} --build-arg PSYCOPG2_VERSION=${psycopg2_version} --build-arg JBIG2ENC_VERSION=${jbig2enc_version}"
        cache_from_str="--cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/app:${branch_name} --cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/app:dev"
        ;;

    *)
        echo "Unable to match ${base_filename}"
        exit 1
        ;;
esac

read -r -a build_args_arr <<< "${build_args_str}"
read -r -a cache_from_arr <<< "${cache_from_str}"

set -eux

docker buildx build --file "${1}" \
    --progress=plain \
    --output=type=docker \
    "${cache_from_arr[@]}" \
    "${build_args_arr[@]}" \
    "${@:2}" .
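With this helper script removed and the third-party versions now defaulted as `ARG`s in the main Dockerfile, a local image can be produced with a plain BuildKit build and no extra arguments; a sketch (the tag is only an example):

```bash
# BuildKit is required for the `# syntax=` directive and the pip cache mount.
docker build --file Dockerfile --tag paperless-ngx:my-awesome-feature .
```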
@@ -1,4 +1,8 @@
 commit_message: '[ci skip]'
+pull_request_labels: [
+  "skip-changelog",
+  "translation"
+]
 files:
   - source: /src/locale/en_US/LC_MESSAGES/django.po
     translation: /src/locale/%locale_with_underscore%/LC_MESSAGES/django.po
@@ -1,48 +0,0 @@ (entire file removed)
# This Dockerfile compiles the jbig2enc library
# Inputs:
#  - JBIG2ENC_VERSION - the Git tag to checkout and build

FROM debian:bullseye-slim as main

LABEL org.opencontainers.image.description="A intermediate image with jbig2enc built"

ARG DEBIAN_FRONTEND=noninteractive
ARG JBIG2ENC_VERSION

ARG BUILD_PACKAGES="\
  build-essential \
  automake \
  libtool \
  libleptonica-dev \
  zlib1g-dev \
  git \
  ca-certificates"

WORKDIR /usr/src/jbig2enc

RUN set -eux \
  && echo "Installing build tools" \
  && apt-get update --quiet \
  && apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
  && echo "Building jbig2enc" \
  && git clone --quiet --branch $JBIG2ENC_VERSION https://github.com/agl/jbig2enc . \
  && ./autogen.sh \
  && ./configure \
  && make \
  && echo "Gathering package data" \
  && dpkg-query -f '${Package;-40}${Version}\n' -W > ./pkg-list.txt \
  && echo "Cleaning up image" \
  && apt-get -y purge ${BUILD_PACKAGES} \
  && apt-get -y autoremove --purge \
  && rm -rf /var/lib/apt/lists/* \
  && echo "Moving files around" \
  && mkdir build \
  # Unlink a symlink that causes problems
  && unlink ./src/.libs/libjbig2enc.la \
  # Move what the link pointed to
  && mv ./src/libjbig2enc.la ./build/ \
  # Move the shared library .so files
  && mv ./src/.libs/libjbig2enc* ./build/ \
  # And move the cli binary
  && mv ./src/jbig2 ./build/ \
  && mv ./pkg-list.txt ./build/
@@ -1,118 +0,0 @@ (entire file removed)
# This Dockerfile builds the pikepdf wheel
# Inputs:
#  - REPO - Docker repository to pull qpdf from
#  - QPDF_VERSION - The image qpdf version to copy .deb files from
#  - PIKEPDF_VERSION - Version of pikepdf to build wheel for

# Default to pulling from the main repo registry when manually building
ARG REPO="paperless-ngx/paperless-ngx"

# This does nothing, except provide a name for a copy below
ARG QPDF_VERSION
FROM --platform=$BUILDPLATFORM ghcr.io/${REPO}/builder/qpdf:${QPDF_VERSION} as qpdf-builder

#
# Stage: builder
# Purpose:
#  - Build the pikepdf wheel
#  - Build any dependent wheels which can't be found
#
FROM python:3.9-slim-bullseye as builder

LABEL org.opencontainers.image.description="A intermediate image with pikepdf wheel built"

# Buildx provided
ARG TARGETARCH
ARG TARGETVARIANT

ARG DEBIAN_FRONTEND=noninteractive
# Workflow provided
ARG QPDF_VERSION
ARG PIKEPDF_VERSION
# These are not used, but will still bust the cache if one changes
# Otherwise, the main image will try to build thing (and fail)
ARG PILLOW_VERSION
ARG LXML_VERSION

ARG BUILD_PACKAGES="\
  build-essential \
  python3-dev \
  python3-pip \
  # qpdf requirement - https://github.com/qpdf/qpdf#crypto-providers
  libgnutls28-dev \
  # lxml requrements - https://lxml.de/installation.html
  libxml2-dev \
  libxslt1-dev \
  # Pillow requirements - https://pillow.readthedocs.io/en/stable/installation.html#external-libraries
  # JPEG functionality
  libjpeg62-turbo-dev \
  # conpressed PNG
  zlib1g-dev \
  # compressed TIFF
  libtiff-dev \
  # type related services
  libfreetype-dev \
  # color management
  liblcms2-dev \
  # WebP format
  libwebp-dev \
  # JPEG 2000
  libopenjp2-7-dev \
  # improved color quantization
  libimagequant-dev \
  # complex text layout support
  libraqm-dev"

WORKDIR /usr/src

COPY --from=qpdf-builder /usr/src/qpdf/${QPDF_VERSION}/${TARGETARCH}${TARGETVARIANT}/*.deb ./

# As this is an base image for a multi-stage final image
# the added size of the install is basically irrelevant

RUN set -eux \
  && echo "Installing build tools" \
  && apt-get update --quiet \
  && apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
  && echo "Installing qpdf" \
  && dpkg --install libqpdf29_*.deb \
  && dpkg --install libqpdf-dev_*.deb \
  && echo "Installing Python tools" \
  && python3 -m pip install --no-cache-dir --upgrade \
    pip \
    wheel \
    # https://pikepdf.readthedocs.io/en/latest/installation.html#requirements
    pybind11 \
  && echo "Building pikepdf wheel ${PIKEPDF_VERSION}" \
  && mkdir wheels \
  && python3 -m pip wheel \
    # Build the package at the required version
    pikepdf==${PIKEPDF_VERSION} \
    # Look to piwheels for additional pre-built wheels
    --extra-index-url https://www.piwheels.org/simple \
    # Output the *.whl into this directory
    --wheel-dir wheels \
    # Do not use a binary packge for the package being built
    --no-binary=pikepdf \
    # Do use binary packages for dependencies
    --prefer-binary \
    # Don't cache build files
    --no-cache-dir \
  && ls -ahl wheels \
  && echo "Gathering package data" \
  && dpkg-query -f '${Package;-40}${Version}\n' -W > ./wheels/pkg-list.txt \
  && echo "Cleaning up image" \
  && apt-get -y purge ${BUILD_PACKAGES} \
  && apt-get -y autoremove --purge \
  && rm -rf /var/lib/apt/lists/*

#
# Stage: package
# Purpose: Holds the compiled .whl files in a tiny image to pull
#
FROM alpine:3.17 as package

WORKDIR /usr/src/wheels/

COPY --from=builder /usr/src/wheels/*.whl ./
COPY --from=builder /usr/src/wheels/pkg-list.txt ./
@@ -1,66 +0,0 @@ (entire file removed)
# This Dockerfile builds the psycopg2 wheel
# Inputs:
#  - PSYCOPG2_VERSION - Version to build

#
# Stage: builder
# Purpose:
#  - Build the psycopg2 wheel
#
FROM python:3.9-slim-bullseye as builder

LABEL org.opencontainers.image.description="A intermediate image with psycopg2 wheel built"

ARG PSYCOPG2_VERSION
ARG DEBIAN_FRONTEND=noninteractive

ARG BUILD_PACKAGES="\
  build-essential \
  python3-dev \
  python3-pip \
  # https://www.psycopg.org/docs/install.html#prerequisites
  libpq-dev"

WORKDIR /usr/src

# As this is an base image for a multi-stage final image
# the added size of the install is basically irrelevant

RUN set -eux \
  && echo "Installing build tools" \
  && apt-get update --quiet \
  && apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
  && echo "Installing Python tools" \
  && python3 -m pip install --no-cache-dir --upgrade pip wheel \
  && echo "Building psycopg2 wheel ${PSYCOPG2_VERSION}" \
  && cd /usr/src \
  && mkdir wheels \
  && python3 -m pip wheel \
    # Build the package at the required version
    psycopg2==${PSYCOPG2_VERSION} \
    # Output the *.whl into this directory
    --wheel-dir wheels \
    # Do not use a binary packge for the package being built
    --no-binary=psycopg2 \
    # Do use binary packages for dependencies
    --prefer-binary \
    # Don't cache build files
    --no-cache-dir \
  && ls -ahl wheels/ \
  && echo "Gathering package data" \
  && dpkg-query -f '${Package;-40}${Version}\n' -W > ./wheels/pkg-list.txt \
  && echo "Cleaning up image" \
  && apt-get -y purge ${BUILD_PACKAGES} \
  && apt-get -y autoremove --purge \
  && rm -rf /var/lib/apt/lists/*

#
# Stage: package
# Purpose: Holds the compiled .whl files in a tiny image to pull
#
FROM alpine:3.17 as package

WORKDIR /usr/src/wheels/

COPY --from=builder /usr/src/wheels/*.whl ./
COPY --from=builder /usr/src/wheels/pkg-list.txt ./
@@ -1,156 +0,0 @@
|
|||||||
#
|
|
||||||
# Stage: pre-build
|
|
||||||
# Purpose:
|
|
||||||
# - Installs common packages
|
|
||||||
# - Sets common environment variables related to dpkg
|
|
||||||
# - Aquires the qpdf source from bookwork
|
|
||||||
# Useful Links:
|
|
||||||
# - https://qpdf.readthedocs.io/en/stable/installation.html#system-requirements
|
|
||||||
# - https://wiki.debian.org/Multiarch/HOWTO
|
|
||||||
# - https://wiki.debian.org/CrossCompiling
|
|
||||||
#
|
|
||||||
|
|
||||||
FROM debian:bullseye-slim as pre-build
|
|
||||||
|
|
||||||
ARG QPDF_VERSION
|
|
||||||
|
|
||||||
ARG COMMON_BUILD_PACKAGES="\
|
|
||||||
cmake \
|
|
||||||
debhelper\
|
|
||||||
debian-keyring \
|
|
||||||
devscripts \
|
|
||||||
dpkg-dev \
|
|
||||||
equivs \
|
|
||||||
packaging-dev \
|
|
||||||
libtool"
|
|
||||||
|
|
||||||
ENV DEB_BUILD_OPTIONS="terse nocheck nodoc parallel=2"

WORKDIR /usr/src

RUN set -eux \
  && echo "Installing common packages" \
  && apt-get update --quiet \
  && apt-get install --yes --quiet --no-install-recommends ${COMMON_BUILD_PACKAGES} \
  && echo "Getting qpdf source" \
  && echo "deb-src http://deb.debian.org/debian/ bookworm main" > /etc/apt/sources.list.d/bookworm-src.list \
  && apt-get update --quiet \
  && apt-get source --yes --quiet qpdf=${QPDF_VERSION}-1/bookworm

#
# Stage: amd64-builder
# Purpose: Builds qpdf for x86_64 (native build)
#
FROM pre-build as amd64-builder

ARG AMD64_BUILD_PACKAGES="\
  build-essential \
  libjpeg62-turbo-dev:amd64 \
  libgnutls28-dev:amd64 \
  zlib1g-dev:amd64"

WORKDIR /usr/src/qpdf-${QPDF_VERSION}

RUN set -eux \
  && echo "Beginning amd64" \
  && echo "Install amd64 packages" \
  && apt-get update --quiet \
  && apt-get install --yes --quiet --no-install-recommends ${AMD64_BUILD_PACKAGES} \
  && echo "Building amd64" \
  && dpkg-buildpackage --build=binary --unsigned-source --unsigned-changes --post-clean \
  && echo "Removing debug files" \
  && rm -f ../libqpdf29-dbgsym* \
  && rm -f ../qpdf-dbgsym* \
  && echo "Gathering package data" \
  && dpkg-query -f '${Package;-40}${Version}\n' -W > ../pkg-list.txt

#
# Stage: armhf-builder
# Purpose:
#  - Sets armhf specific environment
#  - Builds qpdf for armhf (cross compile)
#
FROM pre-build as armhf-builder

ARG ARMHF_PACKAGES="\
  crossbuild-essential-armhf \
  libjpeg62-turbo-dev:armhf \
  libgnutls28-dev:armhf \
  zlib1g-dev:armhf"

WORKDIR /usr/src/qpdf-${QPDF_VERSION}

ENV CXX="/usr/bin/arm-linux-gnueabihf-g++" \
    CC="/usr/bin/arm-linux-gnueabihf-gcc"

RUN set -eux \
  && echo "Beginning armhf" \
  && echo "Install armhf packages" \
  && dpkg --add-architecture armhf \
  && apt-get update --quiet \
  && apt-get install --yes --quiet --no-install-recommends ${ARMHF_PACKAGES} \
  && echo "Building armhf" \
  && dpkg-buildpackage --build=binary --unsigned-source --unsigned-changes --post-clean --host-arch armhf \
  && echo "Removing debug files" \
  && rm -f ../libqpdf29-dbgsym* \
  && rm -f ../qpdf-dbgsym* \
  && echo "Gathering package data" \
  && dpkg-query -f '${Package;-40}${Version}\n' -W > ../pkg-list.txt

#
# Stage: aarch64-builder
# Purpose:
#  - Sets aarch64 specific environment
#  - Builds qpdf for aarch64 (cross compile)
#
FROM pre-build as aarch64-builder

ARG ARM64_PACKAGES="\
  crossbuild-essential-arm64 \
  libjpeg62-turbo-dev:arm64 \
  libgnutls28-dev:arm64 \
  zlib1g-dev:arm64"

ENV CXX="/usr/bin/aarch64-linux-gnu-g++" \
    CC="/usr/bin/aarch64-linux-gnu-gcc"

WORKDIR /usr/src/qpdf-${QPDF_VERSION}

RUN set -eux \
  && echo "Beginning arm64" \
  && echo "Install arm64 packages" \
  && dpkg --add-architecture arm64 \
  && apt-get update --quiet \
  && apt-get install --yes --quiet --no-install-recommends ${ARM64_PACKAGES} \
  && echo "Building arm64" \
  && dpkg-buildpackage --build=binary --unsigned-source --unsigned-changes --post-clean --host-arch arm64 \
  && echo "Removing debug files" \
  && rm -f ../libqpdf29-dbgsym* \
  && rm -f ../qpdf-dbgsym* \
  && echo "Gathering package data" \
  && dpkg-query -f '${Package;-40}${Version}\n' -W > ../pkg-list.txt

#
# Stage: package
# Purpose: Holds the compiled .deb files in arch/variant specific folders
#
FROM alpine:3.17 as package

LABEL org.opencontainers.image.description="An image with qpdf installers stored in architecture & version specific folders"

ARG QPDF_VERSION

WORKDIR /usr/src/qpdf/${QPDF_VERSION}/amd64

COPY --from=amd64-builder /usr/src/*.deb ./
COPY --from=amd64-builder /usr/src/pkg-list.txt ./

# Note this is ${TARGETARCH}${TARGETVARIANT} for armv7
WORKDIR /usr/src/qpdf/${QPDF_VERSION}/armv7

COPY --from=armhf-builder /usr/src/*.deb ./
COPY --from=armhf-builder /usr/src/pkg-list.txt ./

WORKDIR /usr/src/qpdf/${QPDF_VERSION}/arm64

COPY --from=aarch64-builder /usr/src/*.deb ./
COPY --from=aarch64-builder /usr/src/pkg-list.txt ./
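The package stage above only collects the .deb files produced by the three builder stages. As a rough sketch of how it could be exercised locally — assuming the file is named `Dockerfile.qpdf` (as the installer README below suggests) and using a placeholder qpdf version of `11.3.0` — the target stage can be built and inspected like this:

```shell-session
$ docker build --file Dockerfile.qpdf --build-arg QPDF_VERSION=11.3.0 --target package --tag qpdf-debs .
$ docker run --rm qpdf-debs ls -R /usr/src/qpdf
```

The second command simply lists the per-architecture folders the stage creates; a real build may need further build arguments defined earlier in the file.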
@@ -1,57 +0,0 @@
# Installer Library

This folder contains the Dockerfiles for building certain installers or libraries, which are then pulled into the main image.

## [jbig2enc](https://github.com/agl/jbig2enc)

### Why

JBIG is an image coding format which can achieve better compression of images for PDFs.

### What

The Docker image builds a shared library file and utility, which is copied into the correct location in the final image.

### Updating

1. Ensure the given qpdf version is present in [Debian bookworm](https://packages.debian.org/bookworm/qpdf)
2. Update `.build-config.json` to the given version
3. If the Debian specific version has incremented, update `Dockerfile.qpdf`

See Also:

- [OCRMyPDF Documentation](https://ocrmypdf.readthedocs.io/en/latest/jbig2.html)

## [psycopg2](https://www.psycopg.org/)

### Why

The pre-built wheels of psycopg2 are built on Debian 9, which provides a quite old version of libpq-dev. This causes issues with authentication methods.

### What

The image builds psycopg2 wheels on Debian 10 and places the produced wheels into `/usr/src/wheels/`.

See Also:

- [Issue 266](https://github.com/paperless-ngx/paperless-ngx/issues/266)

## [qpdf](https://qpdf.readthedocs.io/en/stable/index.html)

### Why

qpdf and its library provide tools to read, manipulate and fix up PDFs. Version 11 is also required by `pikepdf` 6+, and Debian 9 does not provide anything above version 10.

### What

The Docker image cross compiles .deb installers for each supported architecture of the main image. The installers are placed in `/usr/src/qpdf/${QPDF_VERSION}/${TARGETARCH}${TARGETVARIANT}/`

## [pikepdf](https://pikepdf.readthedocs.io/en/latest/)

### Why

Required by OCRMyPDF, this is a general purpose library for PDF manipulation in Python via the qpdf libraries.

### What

The built wheels are placed into `/usr/src/wheels/`
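For step 1 of the updating checklist above, one quick way to confirm which qpdf version Debian bookworm currently ships — a sketch, not part of the repository tooling — is to query apt inside a throwaway container:

```shell-session
$ docker run --rm debian:bookworm-slim bash -c "apt-get update -qq && apt-cache policy qpdf"
```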
@@ -1,4 +1,4 @@
-# docker-compose file for running paperless testing with actual gotenberg
+# Docker Compose file for running paperless testing with actual gotenberg
 # and Tika containers for a more end to end test of the Tika related functionality
 # Can be used locally or by the CI to start the necessary containers with the
 # correct networking for the tests

@@ -1,4 +1,4 @@
-# docker-compose file for running paperless from the Docker Hub.
+# docker compose file for running paperless from the Docker Hub.
 # This file contains everything paperless needs to run.
 # Paperless supports amd64, arm and arm64 hardware.
 #
@@ -10,7 +10,7 @@
 # as this file and mounted to the correct folders inside the container.
 # - Paperless listens on port 8000.
 #
-# In addition to that, this docker-compose file adds the following optional
+# In addition to that, this Docker Compose file adds the following optional
 # configurations:
 #
 # - Instead of SQLite (default), MariaDB is used as the database server.
@@ -23,9 +23,9 @@
 #
 # - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
 #   and '.env' into a folder.
-# - Run 'docker-compose pull'.
-# - Run 'docker-compose run --rm webserver createsuperuser' to create a user.
-# - Run 'docker-compose up -d'.
+# - Run 'docker compose pull'.
+# - Run 'docker compose run --rm webserver createsuperuser' to create a user.
+# - Run 'docker compose up -d'.
 #
 # For more extensive installation and update instructions, refer to the
 # documentation.

@@ -1,4 +1,4 @@
-# docker-compose file for running paperless from the Docker Hub.
+# Docker Compose file for running paperless from the Docker Hub.
 # This file contains everything paperless needs to run.
 # Paperless supports amd64, arm and arm64 hardware.
 #
@@ -10,7 +10,7 @@
 # as this file and mounted to the correct folders inside the container.
 # - Paperless listens on port 8000.
 #
-# In addition to that, this docker-compose file adds the following optional
+# In addition to that, this Docker Compose file adds the following optional
 # configurations:
 #
 # - Instead of SQLite (default), MariaDB is used as the database server.
@@ -19,9 +19,9 @@
 #
 # - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
 #   and '.env' into a folder.
-# - Run 'docker-compose pull'.
-# - Run 'docker-compose run --rm webserver createsuperuser' to create a user.
-# - Run 'docker-compose up -d'.
+# - Run 'docker compose pull'.
+# - Run 'docker compose run --rm webserver createsuperuser' to create a user.
+# - Run 'docker compose up -d'.
 #
 # For more extensive installation and update instructions, refer to the
 # documentation.

@@ -1,4 +1,4 @@
-# docker-compose file for running paperless from the Docker Hub.
+# Docker Compose file for running paperless from the Docker Hub.
 # This file contains everything paperless needs to run.
 # Paperless supports amd64, arm and arm64 hardware.
 #
@@ -10,7 +10,7 @@
 # as this file and mounted to the correct folders inside the container.
 # - Paperless listens on port 8010.
 #
-# In addition to that, this docker-compose file adds the following optional
+# In addition to that, this Docker Compose file adds the following optional
 # configurations:
 #
 # - Instead of SQLite (default), PostgreSQL is used as the database server.
@@ -37,7 +37,7 @@ services:
       - redisdata:/data

   db:
-    image: docker.io/library/postgres:13
+    image: docker.io/library/postgres:15
     restart: unless-stopped
     volumes:
       - pgdata:/var/lib/postgresql/data

@@ -1,4 +1,4 @@
-# docker-compose file for running paperless from the docker container registry.
+# Docker Compose file for running paperless from the docker container registry.
 # This file contains everything paperless needs to run.
 # Paperless supports amd64, arm and arm64 hardware.
 #
@@ -10,7 +10,7 @@
 # as this file and mounted to the correct folders inside the container.
 # - Paperless listens on port 8000.
 #
-# In addition to that, this docker-compose file adds the following optional
+# In addition to that, this Docker Compose file adds the following optional
 # configurations:
 #
 # - Instead of SQLite (default), PostgreSQL is used as the database server.
@@ -23,9 +23,9 @@
 #
 # - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
 #   and '.env' into a folder.
-# - Run 'docker-compose pull'.
-# - Run 'docker-compose run --rm webserver createsuperuser' to create a user.
-# - Run 'docker-compose up -d'.
+# - Run 'docker compose pull'.
+# - Run 'docker compose run --rm webserver createsuperuser' to create a user.
+# - Run 'docker compose up -d'.
 #
 # For more extensive installation and update instructions, refer to the
 # documentation.
@@ -39,7 +39,7 @@ services:
       - redisdata:/data

   db:
-    image: docker.io/library/postgres:13
+    image: docker.io/library/postgres:15
     restart: unless-stopped
     volumes:
       - pgdata:/var/lib/postgresql/data

@@ -1,4 +1,4 @@
-# docker-compose file for running paperless from the Docker Hub.
+# Docker Compose file for running paperless from the Docker Hub.
 # This file contains everything paperless needs to run.
 # Paperless supports amd64, arm and arm64 hardware.
 #
@@ -10,7 +10,7 @@
 # as this file and mounted to the correct folders inside the container.
 # - Paperless listens on port 8000.
 #
-# In addition to that, this docker-compose file adds the following optional
+# In addition to that, this Docker Compose file adds the following optional
 # configurations:
 #
 # - Instead of SQLite (default), PostgreSQL is used as the database server.
@@ -19,9 +19,9 @@
 #
 # - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
 #   and '.env' into a folder.
-# - Run 'docker-compose pull'.
-# - Run 'docker-compose run --rm webserver createsuperuser' to create a user.
-# - Run 'docker-compose up -d'.
+# - Run 'docker compose pull'.
+# - Run 'docker compose run --rm webserver createsuperuser' to create a user.
+# - Run 'docker compose up -d'.
 #
 # For more extensive installation and update instructions, refer to the
 # documentation.
@@ -35,7 +35,7 @@ services:
       - redisdata:/data

   db:
-    image: docker.io/library/postgres:13
+    image: docker.io/library/postgres:15
     restart: unless-stopped
     volumes:
       - pgdata:/var/lib/postgresql/data

@@ -1,4 +1,4 @@
-# docker-compose file for running paperless from the docker container registry.
+# Docker Compose file for running paperless from the docker container registry.
 # This file contains everything paperless needs to run.
 # Paperless supports amd64, arm and arm64 hardware.
 # All compose files of paperless configure paperless in the following way:
@@ -11,7 +11,7 @@
 #
 # SQLite is used as the database. The SQLite file is stored in the data volume.
 #
-# In addition to that, this docker-compose file adds the following optional
+# In addition to that, this Docker Compose file adds the following optional
 # configurations:
 #
 # - Apache Tika and Gotenberg servers are started with paperless and paperless
@@ -23,9 +23,9 @@
 #
 # - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
 #   and '.env' into a folder.
-# - Run 'docker-compose pull'.
-# - Run 'docker-compose run --rm webserver createsuperuser' to create a user.
-# - Run 'docker-compose up -d'.
+# - Run 'docker compose pull'.
+# - Run 'docker compose run --rm webserver createsuperuser' to create a user.
+# - Run 'docker compose up -d'.
 #
 # For more extensive installation and update instructions, refer to the
 # documentation.

@@ -1,4 +1,4 @@
-# docker-compose file for running paperless from the Docker Hub.
+# Docker Compose file for running paperless from the Docker Hub.
 # This file contains everything paperless needs to run.
 # Paperless supports amd64, arm and arm64 hardware.
 #
@@ -16,9 +16,9 @@
 #
 # - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
 #   and '.env' into a folder.
-# - Run 'docker-compose pull'.
-# - Run 'docker-compose run --rm webserver createsuperuser' to create a user.
-# - Run 'docker-compose up -d'.
+# - Run 'docker compose pull'.
+# - Run 'docker compose run --rm webserver createsuperuser' to create a user.
+# - Run 'docker compose up -d'.
 #
 # For more extensive installation and update instructions, refer to the
 # documentation.
@@ -80,7 +80,7 @@ django_checks() {

 search_index() {

-    local -r index_version=5
+    local -r index_version=7
     local -r index_version_file=${DATA_DIR}/.index_version

     if [[ (! -f "${index_version_file}") || $(<"${index_version_file}") != "$index_version" ]]; then
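Bumping `index_version` makes the entrypoint rebuild the search index on the next container start. If the index ever needs rebuilding by hand, the same effect can likely be had with the management command — a sketch, using the Docker Compose invocation style used elsewhere in these docs:

```shell-session
$ docker compose exec webserver document_index reindex
```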
@@ -13,6 +13,7 @@ for command in decrypt_documents \
   document_retagger \
   document_thumbnails \
   document_sanity_checker \
+  document_fuzzy_match \
   manage_superuser;
 do
   echo "installing $command..."

@@ -15,6 +15,7 @@ stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0
 stderr_logfile=/dev/stderr
 stderr_logfile_maxbytes=0
+environment = HOME="/usr/src/paperless",USER="paperless"

 [program:consumer]
 command=python3 manage.py document_consumer
@@ -25,6 +26,7 @@ stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0
 stderr_logfile=/dev/stderr
 stderr_logfile_maxbytes=0
+environment = HOME="/usr/src/paperless",USER="paperless"

 [program:celery]

@@ -37,6 +39,7 @@ stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0
 stderr_logfile=/dev/stderr
 stderr_logfile_maxbytes=0
+environment = HOME="/usr/src/paperless",USER="paperless"

 [program:celery-beat]

@@ -48,6 +51,7 @@ stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0
 stderr_logfile=/dev/stderr
 stderr_logfile_maxbytes=0
+environment = HOME="/usr/src/paperless",USER="paperless"

 [program:celery-flower]
 command = /usr/local/bin/flower-conditional.sh
@@ -58,3 +62,4 @@ stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0
 stderr_logfile=/dev/stderr
 stderr_logfile_maxbytes=0
+environment = HOME="/usr/src/paperless",USER="paperless"

@@ -28,7 +28,7 @@ if __name__ == "__main__":
     except Exception as e:
         print(
             f"Redis ping #{attempt} failed.\n"
-            f"Error: {str(e)}.\n"
+            f"Error: {e!s}.\n"
             f"Waiting {RETRY_SLEEP_SECONDS}s",
             flush=True,
         )
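The change from `{str(e)}` to `{e!s}` is purely stylistic: the `!s` conversion applies the same `str()` call inside the f-string. A quick way to convince yourself (illustrative only):

```shell-session
$ python3 -c "e = ValueError('connection refused'); print(f'Error: {e!s}.')"
Error: connection refused.
```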
@@ -5,17 +5,19 @@
 Multiple options exist for making backups of your paperless instance,
 depending on how you installed paperless.

-Before making backups, make sure that paperless is not running.
+Before making a backup, it's probably best to make sure that paperless is not actively
+consuming documents at that time.

 Options available to any installation of paperless:

 - Use the [document exporter](#exporter). The document exporter exports all your documents,
-  thumbnails and metadata to a specific folder. You may import your
-  documents into a fresh instance of paperless again or store your
+  thumbnails, metadata, and database contents to a specific folder. You may import your
+  documents and settings into a fresh instance of paperless again or store your
   documents in another DMS with this export.
-- The document exporter is also able to update an already existing
-  export. Therefore, incremental backups with `rsync` are entirely
-  possible.
+
+  The document exporter is also able to update an already existing
+  export. Therefore, incremental backups with `rsync` are entirely
+  possible.

 !!! caution

@@ -25,31 +27,37 @@ Options available to any installation of paperless:

 Options available to docker installations:

 - Backup the docker volumes. These usually reside within
   `/var/lib/docker/volumes` on the host and you need to be root in
   order to access them (a sketch of one approach follows the volume list below).

   Paperless uses 4 volumes:

   - `paperless_media`: This is where your documents are stored.
   - `paperless_data`: This is where auxiliary data is stored. This
     folder also contains the SQLite database, if you use it.
   - `paperless_pgdata`: Exists only if you use PostgreSQL and
     contains the database.
   - `paperless_dbdata`: Exists only if you use MariaDB and contains
     the database.
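A hedged sketch of the volume backup mentioned above, using a throwaway Alpine container to tar one volume into the current directory. The volume name `paperless_media` is taken from the list above, but Docker Compose may prefix it with your project name, so check `docker volume ls` first:

```shell-session
$ docker volume ls
$ docker run --rm --volume paperless_media:/data --volume "$(pwd)":/backup alpine \
    tar czf /backup/paperless_media.tar.gz -C /data .
```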
 Options available to bare-metal and non-docker installations:

 - Backup the entire paperless folder. This ensures that if your
   paperless instance crashes at some point or your disk fails, you can
   simply copy the folder back into place and it works.

   When using PostgreSQL or MariaDB, you'll also have to backup the
   database.

 ### Restoring {#migrating-restoring}

+If you've backed-up Paperless-ngx using the [document exporter](#exporter),
+restoring can simply be done with the [document importer](#importer).
+
+Of course, other backup strategies require restoring any volumes, folders and database
+copies you created in the steps above.
+
 ## Updating Paperless {#updating}

 ### Docker Route {#docker-updating}
@@ -63,30 +71,30 @@ First of all, ensure that paperless is stopped.

 ```shell-session
 $ cd /path/to/paperless
-$ docker-compose down
+$ docker compose down
 ```

 After that, [make a backup](#backup).

 1. If you pull the image from the docker hub, all you need to do is:

    ```shell-session
-   $ docker-compose pull
-   $ docker-compose up
+   $ docker compose pull
+   $ docker compose up
    ```

-   The docker-compose files refer to the `latest` version, which is
+   The Docker Compose files refer to the `latest` version, which is
    always the latest stable release.

-2. If you built the image yourself, do the following:
+1. If you built the image yourself, do the following:

    ```shell-session
    $ git pull
-   $ docker-compose build
-   $ docker-compose up
+   $ docker compose build
+   $ docker compose up
    ```

-Running `docker-compose up` will also apply any new database migrations.
+Running `docker compose up` will also apply any new database migrations.
 If you see everything working, press CTRL+C once to gracefully stop
 paperless. Then you can start paperless-ngx with `-d` to have it run in
 the background.
@@ -94,7 +102,7 @@ the background.
 !!! note

     In version 0.9.14, the update process was changed. In 0.9.13 and
-    earlier, the docker-compose files specified exact versions and pull
+    earlier, the Docker Compose files specified exact versions and pull
     won't automatically update to newer versions. In order to enable
     updates as described above, either get the new `docker-compose.yml`
     file from
@@ -139,7 +147,7 @@ following:
 1. Update dependencies. New paperless version may require additional
    dependencies. The dependencies required are listed in the section
    about
-   [bare metal installations](/setup#bare_metal).
+   [bare metal installations](setup.md#bare_metal).

 2. Update python requirements. Keep in mind to activate your virtual
    environment before that, if you use one.
@@ -148,6 +156,13 @@ following:
    $ pip install -r requirements.txt
    ```

+   !!! note
+
+       At times, some dependencies will be removed from requirements.txt.
+       Comparing the versions and removing no longer needed dependencies
+       will keep your system or virtual environment clean and prevent
+       possible conflicts.
+
 3. Migrate the database.

    ```shell-session
@@ -160,6 +175,16 @@ following:
    This might not actually do anything. Not every new paperless version
    comes with new database migrations.

+### Database Upgrades
+
+In general, paperless does not require a specific version of PostgreSQL or MariaDB and it is
+safe to update them to newer versions. However, you should always take a backup and follow
+the instructions from your database's documentation for how to upgrade between major versions.
+
+For PostgreSQL, refer to [Upgrading a PostgreSQL Cluster](https://www.postgresql.org/docs/current/upgrading.html).
+
+For MariaDB, refer to [Upgrading MariaDB](https://mariadb.com/kb/en/upgrading/)
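A dump-and-restore is often the simplest way to cross a PostgreSQL major version in a Docker setup. The following is only a sketch under assumptions that match the default compose files (a service named `db` and a database user `paperless`); adapt it to your configuration and keep the dump until the new cluster is verified:

```shell-session
$ docker compose exec -T db pg_dumpall --username paperless > paperless_pg_dump.sql
$ docker compose down
$ # point the db service at the newer postgres image and move the old data volume aside, then:
$ docker compose up -d db
$ docker compose exec -T db psql --username paperless --dbname postgres < paperless_pg_dump.sql
```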
 ## Downgrading Paperless {#downgrade-paperless}

 Downgrades are possible. However, some updates also contain database
@@ -195,11 +220,11 @@ Paperless comes with some management commands that perform various
 maintenance tasks on your paperless instance. You can invoke these
 commands in the following way:

-With docker-compose, while paperless is running:
+With Docker Compose, while paperless is running:

 ```shell-session
 $ cd /path/to/paperless
-$ docker-compose exec webserver <command> <arguments>
+$ docker compose exec webserver <command> <arguments>
 ```

 With docker, while paperless is running:
@@ -222,26 +247,28 @@ with the argument `--help`.

 ### Document exporter {#exporter}

-The document exporter exports all your data from paperless into a folder
-for backup or migration to another DMS.
+The document exporter exports all your data (including your settings
+and database contents) from paperless into a folder for backup or
+migration to another DMS.

 If you use the document exporter within a cronjob to backup your data
 you might use the `-T` flag behind exec to suppress "The input device
 is not a TTY" errors. For example:
-`docker-compose exec -T webserver document_exporter ../export`
+`docker compose exec -T webserver document_exporter ../export`

 ```
 document_exporter target [-c] [-d] [-f] [-na] [-nt] [-p] [-sm] [-z]

 optional arguments:
 -c, --compare-checksums
 -d, --delete
 -f, --use-filename-format
 -na, --no-archive
 -nt, --no-thumbnail
 -p, --use-folder-prefix
 -sm, --split-manifest
--z --zip
+-z, --zip
+-zn, --zip-name
 ```

 `target` is a folder to which the data gets written. This includes
@@ -269,7 +296,7 @@ other files.

 The filenames generated by this command follow the format
 `[date created] [correspondent] [title].[extension]`. If you want
-paperless to use `PAPERLESS_FILENAME_FORMAT` for exported filenames
+paperless to use [`PAPERLESS_FILENAME_FORMAT`](configuration.md#PAPERLESS_FILENAME_FORMAT) for exported filenames
 instead, specify `-f` or `--use-filename-format`.

 If `-na` or `--no-archive` is provided, no archive files will be exported,
@@ -296,8 +323,9 @@ will be placed in individual json files, instead of a single JSON file. The main
 manifest.json will still contain application wide information (e.g. tags, correspondent,
 documenttype, etc)

-If `-z` or `--zip` is provided, the export will be a zipfile
-in the target directory, named according to the current date.
+If `-z` or `--zip` is provided, the export will be a zip file
+in the target directory, named according to the current local date or the
+value set in `-zn` or `--zip-name`.

 !!! warning
@@ -334,7 +362,7 @@ currently-imported docs. This problem is common enough that there are
 tools for it.

 ```
-document_retagger [-h] [-c] [-T] [-t] [-i] [--use-first] [-f]
+document_retagger [-h] [-c] [-T] [-t] [-i] [--id-range] [--use-first] [-f]

 optional arguments:
 -c, --correspondent
@@ -342,6 +370,7 @@ optional arguments:
 -t, --document_type
 -s, --storage_path
 -i, --inbox-only
+--id-range
 --use-first
 -f, --overwrite
 ```
@@ -358,6 +387,11 @@ Specify `-i` to have the document retagger work on documents tagged with
 inbox tags only. This is useful when you don't want to mess with your
 already processed documents.

+Specify `--id-range 1 100` to have the document retagger work only on a
+specific range of document ids. This can be useful if you have a lot of
+documents and want to test the matching rules only on a subset of
+documents.
+
 When multiple document types or correspondents match a single document,
 the retagger won't assign these to the document. Specify `--use-first`
 to override this behavior and just use the first correspondent or type
@@ -389,6 +423,9 @@ This command takes no arguments.

 Use this command to re-create document thumbnails. Optionally include the ` --document {id}` option to generate thumbnails for a specific document only.

+You may also specify `--processes` to control the number of processes used to generate new thumbnails. The default is to utilize
+a quarter of the available processors.
+
 ```
 document_thumbnails
 ```
@@ -416,7 +453,7 @@ task scheduler.
 ### Managing filenames {#renamer}

 If you use paperless' feature to
-[assign custom filenames to your documents](/advanced_usage#file-name-handling), you can use this command to move all your files after
+[assign custom filenames to your documents](advanced_usage.md#file-name-handling), you can use this command to move all your files after
 changing the naming scheme.

 !!! warning
@@ -441,19 +478,19 @@ collection for issues.

 The issues detected by the sanity checker are as follows:

 - Missing original files.
 - Missing archive files.
 - Inaccessible original files due to improper permissions.
 - Inaccessible archive files due to improper permissions.
 - Corrupted original documents by comparing their checksum against
   what is stored in the database.
 - Corrupted archive documents by comparing their checksum against what
   is stored in the database.
 - Missing thumbnails.
 - Inaccessible thumbnails due to improper permissions.
 - Documents without any content (warning).
 - Orphaned files in the media directory (warning). These are files
-  that are not referenced by any document im paperless.
+  that are not referenced by any document in paperless.

 ```
 document_sanity_checker
@@ -522,7 +559,7 @@ Documents can be stored in Paperless using GnuPG encryption.

 !!! warning

-    Encryption is deprecated since [paperless-ng 0.9](/changelog#paperless-ng-090) and doesn't really
+    Encryption is deprecated since [paperless-ng 0.9](changelog.md#paperless-ng-090) and doesn't really
     provide any additional security, since you have to store the passphrase
     in a configuration file on the same system as the encrypted documents
     for paperless to work. Furthermore, the entire text content of the
@@ -543,9 +580,30 @@ Enabling encryption is no longer supported.

 Basic usage to disable encryption of your document store:

-(Note: If `PAPERLESS_PASSPHRASE` isn't set already, you need to specify
+(Note: If [`PAPERLESS_PASSPHRASE`](configuration.md#PAPERLESS_PASSPHRASE) isn't set already, you need to specify
 it here)

 ```
 decrypt_documents [--passphrase SECR3TP4SSPHRA$E]
 ```

+### Detecting duplicates {#fuzzy_duplicate}
+
+Paperless already catches and prevents upload of exactly matching documents,
+however a new scan of an existing document may not produce an exact bit for bit
+duplicate. But the content should be exact or close, allowing detection.
+
+This tool does a fuzzy match over document content, looking for
+those which look close according to a given ratio.
+
+At this time, other metadata (such as correspondent or type) is not
+taken into account by the detection.
+
+```
+document_fuzzy_match [--ratio] [--processes N]
+```
+
+| Option | Required | Default | Description |
+| ----------- | -------- | ------------------- | ------------------------------------------------------------------------------------------------------------------------------ |
+| --ratio | No | 85.0 | a number between 0 and 100, setting how similar a document must be for it to be reported. Higher numbers mean more similarity. |
+| --processes | No | 1/4 of system cores | Number of processes to use for matching. Setting 1 disables multiple processes |
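For example, invoked the same way as the other management commands described above (a sketch using the Docker Compose form):

```shell-session
$ docker compose exec webserver document_fuzzy_match --ratio 90
```

A higher `--ratio` reports only near-identical documents; the match is content based, so correspondents and types are ignored, as noted above.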
@@ -1,6 +1,6 @@
 # Advanced Topics

-Paperless offers a couple features that automate certain tasks and make
+Paperless offers a couple of features that automate certain tasks and make
 your life easier.

 ## Matching tags, correspondents, document types, and storage paths {#matching}
@@ -35,9 +35,10 @@ The following algorithms are available:
   (i.e. preserve ordering) in the PDF.
 - **Regular expression:** Parses the match as a regular expression and
   tries to find a match within the document.
-- **Fuzzy match:** I don't know. Look at the source.
+- **Fuzzy match:** Uses a partial matching based on locating the tag text
+  inside the document, using a [partial ratio](https://maxbachmann.github.io/RapidFuzz/Usage/fuzz.html#partial-ratio)
 - **Auto:** Tries to automatically match new documents. This does not
-  require you to set a match. See the notes below.
+  require you to set a match. See the [notes below](#automatic-matching).

 When using the _any_ or _all_ matching algorithms, you can search for
 terms that consist of multiple words by enclosing them in double quotes.
@@ -92,7 +93,7 @@ when using this feature:
   decide when not to assign a certain tag, correspondent, document
   type, or storage path. This will usually be the case as you start
   filling up paperless with documents. Example: If all your documents
-  are either from "Webshop" and "Bank", paperless will assign one
+  are either from "Webshop" or "Bank", paperless will assign one
   of these correspondents to ANY new document, if both are set to
   automatic matching.
@@ -101,12 +102,12 @@ when using this feature:
 Sometimes you may want to do something arbitrary whenever a document is
 consumed. Rather than try to predict what you may want to do, Paperless
 lets you execute scripts of your own choosing just before or after a
-document is consumed using a couple simple hooks.
+document is consumed using a couple of simple hooks.

 Just write a script, put it somewhere that Paperless can read & execute,
 and then put the path to that script in `paperless.conf` or
 `docker-compose.env` with the variable name of either
-`PAPERLESS_PRE_CONSUME_SCRIPT` or `PAPERLESS_POST_CONSUME_SCRIPT`.
+[`PAPERLESS_PRE_CONSUME_SCRIPT`](configuration.md#PAPERLESS_PRE_CONSUME_SCRIPT) or [`PAPERLESS_POST_CONSUME_SCRIPT`](configuration.md#PAPERLESS_POST_CONSUME_SCRIPT).

 !!! info

@@ -126,6 +127,7 @@ script can access the following relevant environment variables set:
 | ----------------------- | ------------------------------------------------------------ |
 | `DOCUMENT_SOURCE_PATH` | Original path of the consumed document |
 | `DOCUMENT_WORKING_PATH` | Path to a copy of the original that consumption will work on |
+| `TASK_ID` | UUID of the task used to process the new document (if any) |

 !!! note

@@ -168,21 +170,22 @@ Executed after the consumer has successfully processed a document and
 has moved it into paperless. It receives the following environment
 variables:

 | Environment Variable | Description |
-| ---------------------------- | --------------------------------------------- |
+| ---------------------------- | ---------------------------------------------- |
 | `DOCUMENT_ID` | Database primary key of the document |
 | `DOCUMENT_FILE_NAME` | Formatted filename, not including paths |
 | `DOCUMENT_CREATED` | Date & time when document created |
 | `DOCUMENT_MODIFIED` | Date & time when document was last modified |
 | `DOCUMENT_ADDED` | Date & time when document was added |
 | `DOCUMENT_SOURCE_PATH` | Path to the original document file |
 | `DOCUMENT_ARCHIVE_PATH` | Path to the generated archive file (if any) |
 | `DOCUMENT_THUMBNAIL_PATH` | Path to the generated thumbnail |
 | `DOCUMENT_DOWNLOAD_URL` | URL for document download |
 | `DOCUMENT_THUMBNAIL_URL` | URL for the document thumbnail |
 | `DOCUMENT_CORRESPONDENT` | Assigned correspondent (if any) |
 | `DOCUMENT_TAGS` | Comma separated list of tags applied (if any) |
 | `DOCUMENT_ORIGINAL_FILENAME` | Filename of original document |
+| `TASK_ID` | Task UUID used to import the document (if any) |

 The script can be in any language; a simple shell script example:
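The example script itself falls outside the lines shown in this hunk; purely as an illustrative sketch (the log path is hypothetical), a minimal post-consume script could simply record the environment variables from the table above:

```bash
#!/usr/bin/env bash
# Hypothetical post-consume script: append the consumed document's id and
# filename to a log file. DOCUMENT_ID and DOCUMENT_FILE_NAME are provided
# by paperless as described in the table above.
echo "Consumed document ${DOCUMENT_ID} (${DOCUMENT_FILE_NAME})" >> /tmp/post-consume.log
```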
@@ -197,7 +200,7 @@ The script can be in any language, A simple shell script example:
 !!! warning

     The post consumption script should not modify the document files
-    directly
+    directly.

 The script's stdout and stderr will be logged line by line to the
 webserver log, along with the exit code of the script.
@@ -233,8 +236,8 @@ webserver:

 Troubleshooting:

-- Monitor the docker-compose log
-  `cd ~/paperless-ngx; docker-compose logs -f`
+- Monitor the Docker Compose log
+  `cd ~/paperless-ngx; docker compose logs -f`
- Check your script's permission e.g. in case of permission error
  `sudo chmod 755 post-consumption-example.sh`
- Pipe your script's output to a log file e.g.
@@ -248,7 +251,7 @@ document. You will end up getting files like `0000123.pdf` in your media
 directory. This isn't necessarily a bad thing, because you normally
 don't have to access these files manually. However, if you wish to name
 your files differently, you can do that by adjusting the
-`PAPERLESS_FILENAME_FORMAT` configuration option. Paperless adds the
+[`PAPERLESS_FILENAME_FORMAT`](configuration.md#PAPERLESS_FILENAME_FORMAT) configuration option. Paperless adds the
 correct file extension e.g. `.pdf`, `.jpg` automatically.

 This variable allows you to configure the filename (folders are allowed)
@@ -311,6 +314,7 @@ Paperless provides the following placeholders within filenames:
 - `{added_day}`: Day added only (number 01-31).
 - `{owner_username}`: Username of document owner, if any, or "none"
 - `{original_name}`: Document original filename, minus the extension, if any, or "none"
+- `{doc_pk}`: The paperless identifier (primary key) for the document.

 Paperless will try to conserve the information from your database as
 much as possible. However, some characters that you can use in document
@@ -340,7 +344,7 @@ value.
 Paperless checks the filename of a document whenever it is saved.
 Therefore, you need to update the filenames of your documents and move
 them after altering this setting by invoking the
-[`document renamer`](/administration#renamer).
+[`document renamer`](administration.md#renamer).

 !!! warning

@@ -375,7 +379,7 @@ When a single storage layout is not sufficient for your use case,
 storage paths come to the rescue. Storage paths allow you to configure
 more precisely where each document is stored in the file system.

-- Each storage path is a `PAPERLESS_FILENAME_FORMAT` and
+- Each storage path is a [`PAPERLESS_FILENAME_FORMAT`](configuration.md#PAPERLESS_FILENAME_FORMAT) and
   follows the rules described above
 - Each document is assigned a storage path using the matching
   algorithms described above, but can be overwritten at any time
@@ -415,7 +419,7 @@ Insurances/ # Insurances
 !!! tip

     Defining a storage path is optional. If no storage path is defined for a
-    document, the global `PAPERLESS_FILENAME_FORMAT` is applied.
+    document, the global [`PAPERLESS_FILENAME_FORMAT`](configuration.md#PAPERLESS_FILENAME_FORMAT) is applied.

 ## Celery Monitoring {#celery-monitoring}

@@ -488,7 +492,7 @@ database to be case sensitive. This would prevent a user from creating a
 tag `Name` and `NAME` as they are considered the same.

 Per Django documentation, to enable this requires manual intervention.
-To enable case sensetive tables, you can execute the following command
+To enable case sensitive tables, you can execute the following command
 against each table:

 `ALTER TABLE <table_name> CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;`
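In a Docker Compose setup, that statement can be run through the MariaDB client inside the database container. This is only a sketch; the service name `db`, the database name `paperless` and the table name used here are assumptions and must be adjusted to your deployment:

```shell-session
$ docker compose exec db mysql --user=root --password paperless \
    --execute="ALTER TABLE documents_tag CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;"
```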
@@ -508,7 +512,7 @@ existing tables) with:
|
|||||||
|
|
||||||
Paperless is able to utilize barcodes for automatically preforming some tasks.
|
Paperless is able to utilize barcodes for automatically preforming some tasks.
|
||||||
|
|
||||||
At this time, the library utilized for detection of bacodes supports the following types:
|
At this time, the library utilized for detection of barcodes supports the following types:
|
||||||
|
|
||||||
- AN-13/UPC-A
|
- AN-13/UPC-A
|
||||||
- UPC-E
|
- UPC-E
|
||||||
@@ -524,11 +528,11 @@ At this time, the library utilized for detection of bacodes supports the followi
|
|||||||
You may check for updates on the [zbar library homepage](https://github.com/mchehab/zbar).
|
You may check for updates on the [zbar library homepage](https://github.com/mchehab/zbar).
|
||||||
For usage in Paperless, the type of barcode does not matter, only the contents of it.
|
For usage in Paperless, the type of barcode does not matter, only the contents of it.
|
||||||
|
|
||||||
For how to enable barcode usage, see [the configuration](/configuration#barcodes).
|
For how to enable barcode usage, see [the configuration](configuration.md#barcodes).
|
||||||
The two settings may be enabled independently, but do have interactions as explained
|
The two settings may be enabled independently, but do have interactions as explained
|
||||||
below.
|
below.
|
||||||
|
|
||||||
### Document Splitting {#document-splitting}

When enabled, Paperless will look for a barcode with the configured value and create a new document
starting from the next page. The page with the barcode on it will _not_ be retained. It
@@ -543,3 +547,75 @@ If document splitting via barcode is also enabled, documents will be split when
barcode is located. However, differing from the splitting, the page with the
barcode _will_ be retained. This allows application of a barcode to any page, including
one which holds data to keep in the document.

## Automatic collation of double-sided documents {#collate}

!!! note

    If your scanner supports double-sided scanning natively, you do not need this feature.

This feature is turned off by default, see [configuration](configuration.md#collate) on how to turn it on.

### Summary

If you have a scanner with an automatic document feeder (ADF) that only scans a single side,
this feature makes scanning double-sided documents much more convenient by automatically
collating two separate scans into one document, reordering the pages as necessary.

### Usage example

Suppose you have a double-sided document with 6 pages (3 sheets of paper). First,
put the stack into your ADF as normal, ensuring that page 1 is scanned first. Your ADF
will now scan pages 1, 3, and 5. Then you (or your scanner, if it supports it) upload
the scan into the correct sub-directory of the consume folder (`double-sided` by default;
keep in mind that Paperless will _not_ automatically create the directory for you.)
Paperless will then process the scan and move it into an internal staging area.

The next step is to turn your stack upside down (without reordering the sheets of paper)
and scan it once again. Your ADF will now scan pages 6, 4, and 2, in that order. Once this
scan is copied into the sub-directory, Paperless will collate the previous scan with the
new one, reversing the order of the pages on the second, "even numbered" scan. The
resulting document will have the pages 1-6 in the correct order, and this new file will
then be processed as normal.
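
As a concrete sketch (the consume path is only an example; `double-sided` is the default
name of the collation sub-directory and has to be created by you):

```bash
mkdir -p /path/to/consume/double-sided
# 1. scan the odd-numbered pages, then:
cp odd-pages.pdf /path/to/consume/double-sided/
# 2. flip the stack, scan the even-numbered pages, then:
cp even-pages.pdf /path/to/consume/double-sided/
```
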

!!! tip

    When scanning the even numbered pages, you can omit the last empty pages, if there are
    any. For example, if page 6 is empty, you only need to scan pages 2 and 4. _Do not_ omit
    empty pages in the middle of the document.

### Things that could go wrong

Paperless will notice when the first, "odd numbered" scan has fewer pages than the second
scan (this can happen when e.g. the ADF skipped a few pages in the first pass). In that
case, Paperless will remove the staging copy as well as the scan, and give you an error
message asking you to restart the process from scratch, by scanning the odd pages again,
followed by the even pages.

It's important that the scan files get consumed in the correct order, and one at a time.
You therefore need to make sure that Paperless is running while you upload the files into
the directory; and if you're using [polling](configuration.md#polling), make sure that
`CONSUMER_POLLING` is set to a value lower than the time it takes for the second scan to appear,
like 5-10 or even lower.

Another thing that might happen is that you start a double-sided scan, but then forget
to upload the second file. To avoid collating the wrong documents if you then come back
a day later to scan a new double-sided document, Paperless will only keep an "odd numbered
pages" file for up to 30 minutes. If more time passes, it will consider the next incoming
scan a completely new "odd numbered pages" one. The old staging file will get discarded.

### Interaction with "subdirs as tags"

The collation feature can be used together with the [subdirs as tags](configuration.md#consume_config)
feature (but this is not a requirement). Just create a correctly named double-sided subdir
in the hierarchy and upload your scans there. For example, both `double-sided/foo/bar` as
well as `foo/bar/double-sided` will cause the collated document to be treated as if it
were uploaded into `foo/bar` and receive both `foo` and `bar` tags, but not `double-sided`.

### Interaction with document splitting

You can use the [document splitting](#document-splitting) feature, but if you use a normal
single-sided split marker page, the split document(s) will have an empty page at the front (or
whatever else was on the backside of the split marker page.) You can work around that by having
a split marker page that has the split barcode on _both_ sides. This way, the extra page will
get automatically removed.
53
docs/api.md
@@ -6,7 +6,7 @@ provides a browsable API for most of its endpoints, which you can
inspect at `http://<paperless-host>:<port>/api/`. This also documents
most of the available filters and ordering fields.

The API provides the following main endpoints:

- `/api/documents/`: Full CRUD support, except POSTing new documents.
  See below.
@@ -19,6 +19,8 @@ The API provides 7 main endpoints:
- `/api/mail_rules/`: Full CRUD support.
- `/api/users/`: Full CRUD support.
- `/api/groups/`: Full CRUD support.
- `/api/share_links/`: Full CRUD support.
- `/api/custom_fields/`: Full CRUD support.

All of these endpoints except for the logging endpoint allow you to
fetch (and edit and delete where appropriate) individual objects by
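
For instance (a sketch using token authentication; the host, token, and document id are
placeholders), an individual document can be fetched with:

```bash
curl -H "Authorization: Token <your-token>" \
  "http://<paperless-host>:<port>/api/documents/1234/"
```
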
@@ -47,6 +49,11 @@ fields:
  Read-only.
- `archived_file_name`: Verbose filename of the archived document.
  Read-only. Null if no archived document is available.
- `notes`: Array of notes associated with the document.
- `set_permissions`: Allows setting document permissions. Optional,
  write-only. See [below](#permissions).
- `custom_fields`: Array of custom fields & values, specified as
  { field: CUSTOM_FIELD_ID, value: VALUE }
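As a sketch (the ids and value are placeholders), a custom field can be set on an existing
document with a PATCH request using exactly that format:

```bash
curl -X PATCH \
  -H "Authorization: Token <your-token>" \
  -H "Content-Type: application/json" \
  -d '{"custom_fields": [{"field": 3, "value": "ACME"}]}' \
  "http://<paperless-host>:<port>/api/documents/1234/"
```
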
## Downloading documents

@@ -122,6 +129,11 @@ File metadata is reported as a list of objects in the following form:
depends on the file type and the metadata available in that specific
document. Paperless only reports PDF metadata at this point.

## Documents additional endpoints

- `/api/documents/<id>/notes/`: Retrieve notes for a document.
- `/api/documents/<id>/share_links/`: Retrieve share links for a document.

## Authorization

The REST api provides three different forms of authentication.
@@ -165,7 +177,7 @@ specific query parameters cause the API to return full text search
results:

- `/api/documents/?query=your%20search%20query`: Search for a document
  using a full text query. For details on the syntax, see [Basic Usage - Searching](usage.md#basic-usage_searching).
- `/api/documents/?more_like=1234`: Search for documents similar to
  the document with id 1234.

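For example (a sketch; the host, token, and query are placeholders, and the query must be
URL-encoded):

```bash
curl -H "Authorization: Token <your-token>" \
  "http://<paperless-host>:<port>/api/documents/?query=invoice%202023"
```
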
@@ -267,6 +279,43 @@ However, querying the tasks endpoint with the returned UUID e.g.
`/api/tasks/?task_id={uuid}` will provide information on the state of the
consumption including the ID of a created document if consumption succeeded.

## Permissions

All objects (documents, tags, etc.) allow setting object-level permissions
with optional `owner` and/or `set_permissions` parameters, which are of
the form:

```
"owner": ...,
"set_permissions": {
    "view": {
        "users": [...],
        "groups": [...],
    },
    "change": {
        "users": [...],
        "groups": [...],
    },
}
```

!!! note

    Arrays should contain user or group ID numbers.

If these parameters are supplied, the object's permissions will be overwritten,
assuming the authenticated user has permission to do so (the user must be
the object owner or a superuser).

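As a sketch (all ids are placeholders), granting one user view access to a document while
keeping yourself as owner could look like:

```bash
curl -X PATCH \
  -H "Authorization: Token <your-token>" \
  -H "Content-Type: application/json" \
  -d '{"owner": 1, "set_permissions": {"view": {"users": [2], "groups": []}, "change": {"users": [], "groups": []}}}' \
  "http://<paperless-host>:<port>/api/documents/1234/"
```
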
### Retrieving full permissions

By default, the API will return a truncated version of object-level
permissions, returning `user_can_change` indicating whether the current user
can edit the object (either because they are the object owner or have permissions
granted). You can pass the parameter `full_perms=true` to API calls to view the
full permissions of objects in a format that mirrors the `set_permissions`
parameter above.

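For example (a sketch with placeholder host, token, and id):

```bash
curl -H "Authorization: Token <your-token>" \
  "http://<paperless-host>:<port>/api/documents/1234/?full_perms=true"
```
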
## API Versioning

The REST API is versioned since Paperless-ngx 1.3.0.

@@ -20,6 +20,28 @@
        margin-left: 4%;
        float: left;
    }

    .grid-flipped-left {
        width: 66%;
        float: left;
    }

    .grid-flipped-right {
        width: 29%;
        margin-left: 4%;
        float: left;
    }

    .grid-half-left {
        width: 48%;
        float: left;
    }

    .grid-half-right {
        width: 48%;
        margin-left: 4%;
        float: left;
    }
}

.grid-left > p {
@@ -31,6 +53,48 @@
    margin: 0;
}

.clear {
    clear: both;
    margin-bottom: 20px;
    display: block;
}

.index-callout {
    margin-right: .5rem;
}

/* make code in headers not bold */
h4 code {
    font-weight: normal;
}

/* Hide config vars from the sidebar / toc, and move the border on mobile since they're hidden */
.md-nav.md-nav--secondary .md-nav__item .md-nav__link[href*="PAPERLESS_"],
.md-nav.md-nav--secondary .md-nav__item .md-nav__link[href*="USERMAP_"] {
    display: none;
}

@media screen and (max-width: 76.1875em) {
    .md-nav--primary .md-nav__item {
        border-top: none;
    }

    .md-nav--primary .md-nav__link {
        border-top: .05rem solid var(--md-default-fg-color--lightest);
    }
}

/* Show search shortcut key */
[data-md-toggle="search"]:not(:checked) ~ .md-header .md-search__form::after {
    position: absolute;
    top: .3rem;
    right: .3rem;
    display: block;
    padding: .1rem .4rem;
    color: var(--md-default-fg-color--lighter);
    font-weight: bold;
    font-size: .8rem;
    border: .05rem solid var(--md-default-fg-color--lighter);
    border-radius: .1rem;
    content: "/";
}
BIN docs/assets/screenshots/consumption_template.png (new file)
BIN docs/assets/screenshots/custom_field1.png (new file)
BIN docs/assets/screenshots/custom_field2.png (new file)
BIN docs/assets/screenshots/documents-smallcards-slimsidebar.png (new file)
BIN docs/assets/screenshots/mobile1.png (new file)
BIN docs/assets/screenshots/mobile2.png (new file)
BIN docs/assets/screenshots/mobile3.png (new file)
BIN docs/assets/screenshots/new-correspondent.png (new file)
BIN docs/assets/screenshots/new-document_type.png (new file)
BIN docs/assets/screenshots/new-storage_path.png (new file)
BIN docs/assets/screenshots/permissions_document.png (new file)
BIN docs/assets/screenshots/permissions_global.png (new file)
(several existing screenshot images were also updated; image size details omitted)
@@ -1,5 +1,497 @@
# Changelog

## paperless-ngx 1.17.4

### Bug Fixes

- Fix: ghostscript rendering error doesnt trigger frontend failure message [@shamoon](https://github.com/shamoon) ([#4092](https://github.com/paperless-ngx/paperless-ngx/pull/4092))

### All App Changes

- Fix: ghostscript rendering error doesnt trigger frontend failure message [@shamoon](https://github.com/shamoon) ([#4092](https://github.com/paperless-ngx/paperless-ngx/pull/4092))

## paperless-ngx 1.17.3

### Bug Fixes

- Fix: When PDF/A rendering fails, add a consideration for the user to add args to override [@stumpylog](https://github.com/stumpylog) ([#4083](https://github.com/paperless-ngx/paperless-ngx/pull/4083))

### Dependencies

- Chore: update frontend PDF viewer (including pdf-js) [@shamoon](https://github.com/shamoon) ([#4065](https://github.com/paperless-ngx/paperless-ngx/pull/4065))

### Maintenance

- Dev: Upload code coverage in the same job [@stumpylog](https://github.com/stumpylog) ([#4084](https://github.com/paperless-ngx/paperless-ngx/pull/4084))

### All App Changes

<details>
<summary>3 changes</summary>

- Fix: When PDF/A rendering fails, add a consideration for the user to add args to override [@stumpylog](https://github.com/stumpylog) ([#4083](https://github.com/paperless-ngx/paperless-ngx/pull/4083))
- Chore: update frontend PDF viewer (including pdf-js) [@shamoon](https://github.com/shamoon) ([#4065](https://github.com/paperless-ngx/paperless-ngx/pull/4065))
- Chore: Prepare for Python 3.11 support [@stumpylog](https://github.com/stumpylog) ([#4066](https://github.com/paperless-ngx/paperless-ngx/pull/4066))
</details>

## paperless-ngx 1.17.2

### Features

- Enhancement: Allow to set a prefix for keys and channels in redis [@amo13](https://github.com/amo13) ([#3993](https://github.com/paperless-ngx/paperless-ngx/pull/3993))

### Bug Fixes

- Fix: Increase the HTTP timeouts for Tika/Gotenberg to maximum task time [@stumpylog](https://github.com/stumpylog) ([#4061](https://github.com/paperless-ngx/paperless-ngx/pull/4061))
- Fix: Allow adding an SSL certificate for IMAP SSL context [@stumpylog](https://github.com/stumpylog) ([#4048](https://github.com/paperless-ngx/paperless-ngx/pull/4048))
- Fix: tag creation sometimes retained search text [@shamoon](https://github.com/shamoon) ([#4038](https://github.com/paperless-ngx/paperless-ngx/pull/4038))
- Fix: enforce permissions on bulk_edit operations [@shamoon](https://github.com/shamoon) ([#4007](https://github.com/paperless-ngx/paperless-ngx/pull/4007))

### All App Changes

<details>
<summary>6 changes</summary>

- Fix: Increase the HTTP timeouts for Tika/Gotenberg to maximum task time [@stumpylog](https://github.com/stumpylog) ([#4061](https://github.com/paperless-ngx/paperless-ngx/pull/4061))
- Enhancement: disable / hide some UI buttons / elements if insufficient permissions, show errors [@shamoon](https://github.com/shamoon) ([#4014](https://github.com/paperless-ngx/paperless-ngx/pull/4014))
- Fix: Allow adding an SSL certificate for IMAP SSL context [@stumpylog](https://github.com/stumpylog) ([#4048](https://github.com/paperless-ngx/paperless-ngx/pull/4048))
- Fix: tag creation sometimes retained search text [@shamoon](https://github.com/shamoon) ([#4038](https://github.com/paperless-ngx/paperless-ngx/pull/4038))
- Fix: enforce permissions on bulk_edit operations [@shamoon](https://github.com/shamoon) ([#4007](https://github.com/paperless-ngx/paperless-ngx/pull/4007))
- Enhancement: Allow to set a prefix for keys and channels in redis [@amo13](https://github.com/amo13) ([#3993](https://github.com/paperless-ngx/paperless-ngx/pull/3993))
</details>

## paperless-ngx 1.17.1

### Features

- Fix / Enhancement: restrict status messages by owner if set \& improve 404 page [@shamoon](https://github.com/shamoon) ([#3959](https://github.com/paperless-ngx/paperless-ngx/pull/3959))
- Feature: Add Ukrainian translation [@shamoon](https://github.com/shamoon) ([#3941](https://github.com/paperless-ngx/paperless-ngx/pull/3941))

### Bug Fixes

- Fix: handle ASN = 0 on frontend cards [@shamoon](https://github.com/shamoon) ([#3988](https://github.com/paperless-ngx/paperless-ngx/pull/3988))
- Fix: improve light color filled primary button text legibility [@shamoon](https://github.com/shamoon) ([#3980](https://github.com/paperless-ngx/paperless-ngx/pull/3980))
- Fix / Enhancement: restrict status messages by owner if set \& improve 404 page [@shamoon](https://github.com/shamoon) ([#3959](https://github.com/paperless-ngx/paperless-ngx/pull/3959))
- Fix: handle very old date strings in correspondent list [@shamoon](https://github.com/shamoon) ([#3953](https://github.com/paperless-ngx/paperless-ngx/pull/3953))

### Documentation

- docs(bare-metal): add new dependency [@bin101](https://github.com/bin101) ([#3931](https://github.com/paperless-ngx/paperless-ngx/pull/3931))

### Dependencies

- Chore: Loosen Pipfile restriction on some packages and update them [@stumpylog](https://github.com/stumpylog) ([#3972](https://github.com/paperless-ngx/paperless-ngx/pull/3972))

### All App Changes

<details>
<summary>6 changes</summary>

- Fix: handle ASN = 0 on frontend cards [@shamoon](https://github.com/shamoon) ([#3988](https://github.com/paperless-ngx/paperless-ngx/pull/3988))
- Fix: improve light color filled primary button text legibility [@shamoon](https://github.com/shamoon) ([#3980](https://github.com/paperless-ngx/paperless-ngx/pull/3980))
- Fix / Enhancement: restrict status messages by owner if set \& improve 404 page [@shamoon](https://github.com/shamoon) ([#3959](https://github.com/paperless-ngx/paperless-ngx/pull/3959))
- Fix: handle very old date strings in correspondent list [@shamoon](https://github.com/shamoon) ([#3953](https://github.com/paperless-ngx/paperless-ngx/pull/3953))
- Chore: Reduces the 2 mail tests flakiness [@stumpylog](https://github.com/stumpylog) ([#3949](https://github.com/paperless-ngx/paperless-ngx/pull/3949))
- Feature: Add Ukrainian translation [@shamoon](https://github.com/shamoon) ([#3941](https://github.com/paperless-ngx/paperless-ngx/pull/3941))
</details>
## paperless-ngx 1.17.0
|
||||||
|
|
||||||
|
### Features
|
||||||
|
|
||||||
|
- Add support for additional UK date formats [@brainrecursion](https://github.com/brainrecursion) ([#3887](https://github.com/paperless-ngx/paperless-ngx/pull/3887))
|
||||||
|
- Add 'doc_pk' to PAPERLESS_FILENAME_FORMAT handling [@mechanarchy](https://github.com/mechanarchy) ([#3861](https://github.com/paperless-ngx/paperless-ngx/pull/3861))
|
||||||
|
- Feature: hover buttons for saved view widgets [@shamoon](https://github.com/shamoon) ([#3875](https://github.com/paperless-ngx/paperless-ngx/pull/3875))
|
||||||
|
- Feature: collate two single-sided multipage scans [@brakhane](https://github.com/brakhane) ([#3784](https://github.com/paperless-ngx/paperless-ngx/pull/3784))
|
||||||
|
- Feature: include global and object-level permissions in export / import [@shamoon](https://github.com/shamoon) ([#3672](https://github.com/paperless-ngx/paperless-ngx/pull/3672))
|
||||||
|
- Enhancement / Fix: Migrate encrypted png thumbnails to webp [@shamoon](https://github.com/shamoon) ([#3719](https://github.com/paperless-ngx/paperless-ngx/pull/3719))
|
||||||
|
- Feature: Add Slovak translation [@shamoon](https://github.com/shamoon) ([#3722](https://github.com/paperless-ngx/paperless-ngx/pull/3722))
|
||||||
|
|
||||||
|
### Bug Fixes
|
||||||
|
|
||||||
|
- Fix: cancel possibly slow queries on doc details [@shamoon](https://github.com/shamoon) ([#3925](https://github.com/paperless-ngx/paperless-ngx/pull/3925))
|
||||||
|
- Fix: note creation / deletion should respect doc permissions [@shamoon](https://github.com/shamoon) ([#3903](https://github.com/paperless-ngx/paperless-ngx/pull/3903))
|
||||||
|
- Fix: notes show persistent scrollbars [@shamoon](https://github.com/shamoon) ([#3904](https://github.com/paperless-ngx/paperless-ngx/pull/3904))
|
||||||
|
- Fix: Provide SSL context to IMAP client [@stumpylog](https://github.com/stumpylog) ([#3886](https://github.com/paperless-ngx/paperless-ngx/pull/3886))
|
||||||
|
- Fix/enhancement: permissions for mail rules \& accounts [@shamoon](https://github.com/shamoon) ([#3869](https://github.com/paperless-ngx/paperless-ngx/pull/3869))
|
||||||
|
- Fix: Classifier special case when no items are set to automatic matching [@stumpylog](https://github.com/stumpylog) ([#3858](https://github.com/paperless-ngx/paperless-ngx/pull/3858))
|
||||||
|
- Fix: issues with copy2 or copystat and SELinux permissions [@stumpylog](https://github.com/stumpylog) ([#3847](https://github.com/paperless-ngx/paperless-ngx/pull/3847))
|
||||||
|
- Fix: Parsing office document timestamps [@stumpylog](https://github.com/stumpylog) ([#3836](https://github.com/paperless-ngx/paperless-ngx/pull/3836))
|
||||||
|
- Fix: Add warning to install script need for permissions [@shamoon](https://github.com/shamoon) ([#3835](https://github.com/paperless-ngx/paperless-ngx/pull/3835))
|
||||||
|
- Fix interaction between API and barcode archive serial number [@stumpylog](https://github.com/stumpylog) ([#3834](https://github.com/paperless-ngx/paperless-ngx/pull/3834))
|
||||||
|
- Enhancement / Fix: Migrate encrypted png thumbnails to webp [@shamoon](https://github.com/shamoon) ([#3719](https://github.com/paperless-ngx/paperless-ngx/pull/3719))
|
||||||
|
- Fix: add UI tour step padding [@hakimio](https://github.com/hakimio) ([#3791](https://github.com/paperless-ngx/paperless-ngx/pull/3791))
|
||||||
|
- Fix: translate file tasks types in footer [@shamoon](https://github.com/shamoon) ([#3749](https://github.com/paperless-ngx/paperless-ngx/pull/3749))
|
||||||
|
- Fix: limit ng-select size for addition of filter button [@shamoon](https://github.com/shamoon) ([#3731](https://github.com/paperless-ngx/paperless-ngx/pull/3731))
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
|
||||||
|
- Documentation: improvements to grammar, spelling, indentation [@mechanarchy](https://github.com/mechanarchy) ([#3844](https://github.com/paperless-ngx/paperless-ngx/pull/3844))
|
||||||
|
|
||||||
|
### Maintenance
|
||||||
|
|
||||||
|
- Bump stumpylog/image-cleaner-action from 0.1.0 to 0.2.0 [@dependabot](https://github.com/dependabot) ([#3910](https://github.com/paperless-ngx/paperless-ngx/pull/3910))
|
||||||
|
- Chore: group frontend angular dependabot updates [@shamoon](https://github.com/shamoon) ([#3750](https://github.com/paperless-ngx/paperless-ngx/pull/3750))
|
||||||
|
|
||||||
|
### Dependencies
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary>17 changes</summary>
|
||||||
|
|
||||||
|
- Chore: Bump the frontend-angular-dependencies group in /src-ui with 11 updates [@shamoon](https://github.com/shamoon) ([#3918](https://github.com/paperless-ngx/paperless-ngx/pull/3918))
|
||||||
|
- Bump stumpylog/image-cleaner-action from 0.1.0 to 0.2.0 [@dependabot](https://github.com/dependabot) ([#3910](https://github.com/paperless-ngx/paperless-ngx/pull/3910))
|
||||||
|
- Bump the frontend-eslint-dependencies group in /src-ui with 3 updates [@dependabot](https://github.com/dependabot) ([#3911](https://github.com/paperless-ngx/paperless-ngx/pull/3911))
|
||||||
|
- Bump tslib from 2.6.0 to 2.6.1 in /src-ui [@dependabot](https://github.com/dependabot) ([#3909](https://github.com/paperless-ngx/paperless-ngx/pull/3909))
|
||||||
|
- Bump jest-environment-jsdom from 29.5.0 to 29.6.2 in /src-ui [@dependabot](https://github.com/dependabot) ([#3916](https://github.com/paperless-ngx/paperless-ngx/pull/3916))
|
||||||
|
- Bump [@<!---->types/node from 20.3.3 to 20.4.5 in /src-ui @dependabot](https://github.com/<!---->types/node from 20.3.3 to 20.4.5 in /src-ui @dependabot) ([#3915](https://github.com/paperless-ngx/paperless-ngx/pull/3915))
|
||||||
|
- Bump bootstrap from 5.3.0 to 5.3.1 in /src-ui [@dependabot](https://github.com/dependabot) ([#3914](https://github.com/paperless-ngx/paperless-ngx/pull/3914))
|
||||||
|
- Bump [@<!---->playwright/test from 1.36.1 to 1.36.2 in /src-ui @dependabot](https://github.com/<!---->playwright/test from 1.36.1 to 1.36.2 in /src-ui @dependabot) ([#3912](https://github.com/paperless-ngx/paperless-ngx/pull/3912))
|
||||||
|
- Bump the frontend-jest-dependencies group in /src-ui with 1 update [@dependabot](https://github.com/dependabot) ([#3906](https://github.com/paperless-ngx/paperless-ngx/pull/3906))
|
||||||
|
- Chore: Update dependencies [@stumpylog](https://github.com/stumpylog) ([#3883](https://github.com/paperless-ngx/paperless-ngx/pull/3883))
|
||||||
|
- Chore: Update Python dependencies [@stumpylog](https://github.com/stumpylog) ([#3842](https://github.com/paperless-ngx/paperless-ngx/pull/3842))
|
||||||
|
- Bump the frontend-angular-dependencies group in /src-ui with 16 updates [@dependabot](https://github.com/dependabot) ([#3826](https://github.com/paperless-ngx/paperless-ngx/pull/3826))
|
||||||
|
- Bump [@<!---->typescript-eslint/eslint-plugin from 5.60.1 to 6.1.0 in /src-ui @dependabot](https://github.com/<!---->typescript-eslint/eslint-plugin from 5.60.1 to 6.1.0 in /src-ui @dependabot) ([#3829](https://github.com/paperless-ngx/paperless-ngx/pull/3829))
|
||||||
|
- Bump jest and [@<!---->types/jest in /src-ui @dependabot](https://github.com/<!---->types/jest in /src-ui @dependabot) ([#3828](https://github.com/paperless-ngx/paperless-ngx/pull/3828))
|
||||||
|
- Bump [@<!---->playwright/test from 1.36.0 to 1.36.1 in /src-ui @dependabot](https://github.com/<!---->playwright/test from 1.36.0 to 1.36.1 in /src-ui @dependabot) ([#3827](https://github.com/paperless-ngx/paperless-ngx/pull/3827))
|
||||||
|
- Bump semver from 5.7.1 to 5.7.2 in /src-ui [@dependabot](https://github.com/dependabot) ([#3793](https://github.com/paperless-ngx/paperless-ngx/pull/3793))
|
||||||
|
- Chore: Bump Angular to v16 and other frontend packages [@dependabot](https://github.com/dependabot) ([#3727](https://github.com/paperless-ngx/paperless-ngx/pull/3727))
|
||||||
|
</details>
|
||||||
|
|
||||||
|
### All App Changes
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary>35 changes</summary>
|
||||||
|
|
||||||
|
- Fix: cancel possibly slow queries on doc details [@shamoon](https://github.com/shamoon) ([#3925](https://github.com/paperless-ngx/paperless-ngx/pull/3925))
|
||||||
|
- [BUG] Set office document creation date with timezone, if it is naive [@a17t](https://github.com/a17t) ([#3760](https://github.com/paperless-ngx/paperless-ngx/pull/3760))
|
||||||
|
- Fix: note creation / deletion should respect doc permissions [@shamoon](https://github.com/shamoon) ([#3903](https://github.com/paperless-ngx/paperless-ngx/pull/3903))
|
||||||
|
- Chore: Bump the frontend-angular-dependencies group in /src-ui with 11 updates [@shamoon](https://github.com/shamoon) ([#3918](https://github.com/paperless-ngx/paperless-ngx/pull/3918))
|
||||||
|
- Bump the frontend-eslint-dependencies group in /src-ui with 3 updates [@dependabot](https://github.com/dependabot) ([#3911](https://github.com/paperless-ngx/paperless-ngx/pull/3911))
|
||||||
|
- Bump tslib from 2.6.0 to 2.6.1 in /src-ui [@dependabot](https://github.com/dependabot) ([#3909](https://github.com/paperless-ngx/paperless-ngx/pull/3909))
|
||||||
|
- Bump jest-environment-jsdom from 29.5.0 to 29.6.2 in /src-ui [@dependabot](https://github.com/dependabot) ([#3916](https://github.com/paperless-ngx/paperless-ngx/pull/3916))
|
||||||
|
- Bump [@<!---->types/node from 20.3.3 to 20.4.5 in /src-ui @dependabot](https://github.com/<!---->types/node from 20.3.3 to 20.4.5 in /src-ui @dependabot) ([#3915](https://github.com/paperless-ngx/paperless-ngx/pull/3915))
|
||||||
|
- Bump bootstrap from 5.3.0 to 5.3.1 in /src-ui [@dependabot](https://github.com/dependabot) ([#3914](https://github.com/paperless-ngx/paperless-ngx/pull/3914))
|
||||||
|
- Bump [@<!---->playwright/test from 1.36.1 to 1.36.2 in /src-ui @dependabot](https://github.com/<!---->playwright/test from 1.36.1 to 1.36.2 in /src-ui @dependabot) ([#3912](https://github.com/paperless-ngx/paperless-ngx/pull/3912))
|
||||||
|
- Bump the frontend-jest-dependencies group in /src-ui with 1 update [@dependabot](https://github.com/dependabot) ([#3906](https://github.com/paperless-ngx/paperless-ngx/pull/3906))
|
||||||
|
- Fix: notes show persistent scrollbars [@shamoon](https://github.com/shamoon) ([#3904](https://github.com/paperless-ngx/paperless-ngx/pull/3904))
|
||||||
|
- Add support for additional UK date formats [@brainrecursion](https://github.com/brainrecursion) ([#3887](https://github.com/paperless-ngx/paperless-ngx/pull/3887))
|
||||||
|
- Add 'doc_pk' to PAPERLESS_FILENAME_FORMAT handling [@mechanarchy](https://github.com/mechanarchy) ([#3861](https://github.com/paperless-ngx/paperless-ngx/pull/3861))
|
||||||
|
- Fix: Provide SSL context to IMAP client [@stumpylog](https://github.com/stumpylog) ([#3886](https://github.com/paperless-ngx/paperless-ngx/pull/3886))
|
||||||
|
- Feature: hover buttons for saved view widgets [@shamoon](https://github.com/shamoon) ([#3875](https://github.com/paperless-ngx/paperless-ngx/pull/3875))
|
||||||
|
- Fix/enhancement: permissions for mail rules \& accounts [@shamoon](https://github.com/shamoon) ([#3869](https://github.com/paperless-ngx/paperless-ngx/pull/3869))
|
||||||
|
- Chore: typing improvements [@stumpylog](https://github.com/stumpylog) ([#3860](https://github.com/paperless-ngx/paperless-ngx/pull/3860))
|
||||||
|
- Fix: Classifier special case when no items are set to automatic matching [@stumpylog](https://github.com/stumpylog) ([#3858](https://github.com/paperless-ngx/paperless-ngx/pull/3858))
|
||||||
|
- Fix: issues with copy2 or copystat and SELinux permissions [@stumpylog](https://github.com/stumpylog) ([#3847](https://github.com/paperless-ngx/paperless-ngx/pull/3847))
|
||||||
|
- Chore: Update Python dependencies [@stumpylog](https://github.com/stumpylog) ([#3842](https://github.com/paperless-ngx/paperless-ngx/pull/3842))
|
||||||
|
- Feature: include global and object-level permissions in export / import [@shamoon](https://github.com/shamoon) ([#3672](https://github.com/paperless-ngx/paperless-ngx/pull/3672))
|
||||||
|
- Fix: Parsing office document timestamps [@stumpylog](https://github.com/stumpylog) ([#3836](https://github.com/paperless-ngx/paperless-ngx/pull/3836))
|
||||||
|
- Fix interaction between API and barcode archive serial number [@stumpylog](https://github.com/stumpylog) ([#3834](https://github.com/paperless-ngx/paperless-ngx/pull/3834))
|
||||||
|
- Bump the frontend-angular-dependencies group in /src-ui with 16 updates [@dependabot](https://github.com/dependabot) ([#3826](https://github.com/paperless-ngx/paperless-ngx/pull/3826))
|
||||||
|
- Enhancement / Fix: Migrate encrypted png thumbnails to webp [@shamoon](https://github.com/shamoon) ([#3719](https://github.com/paperless-ngx/paperless-ngx/pull/3719))
|
||||||
|
- Bump [@<!---->typescript-eslint/eslint-plugin from 5.60.1 to 6.1.0 in /src-ui @dependabot](https://github.com/<!---->typescript-eslint/eslint-plugin from 5.60.1 to 6.1.0 in /src-ui @dependabot) ([#3829](https://github.com/paperless-ngx/paperless-ngx/pull/3829))
|
||||||
|
- Bump jest and [@<!---->types/jest in /src-ui @dependabot](https://github.com/<!---->types/jest in /src-ui @dependabot) ([#3828](https://github.com/paperless-ngx/paperless-ngx/pull/3828))
|
||||||
|
- Bump [@<!---->playwright/test from 1.36.0 to 1.36.1 in /src-ui @dependabot](https://github.com/<!---->playwright/test from 1.36.0 to 1.36.1 in /src-ui @dependabot) ([#3827](https://github.com/paperless-ngx/paperless-ngx/pull/3827))
|
||||||
|
- Bump semver from 5.7.1 to 5.7.2 in /src-ui [@dependabot](https://github.com/dependabot) ([#3793](https://github.com/paperless-ngx/paperless-ngx/pull/3793))
|
||||||
|
- Fix: add UI tour step padding [@hakimio](https://github.com/hakimio) ([#3791](https://github.com/paperless-ngx/paperless-ngx/pull/3791))
|
||||||
|
- Fix: translate file tasks types in footer [@shamoon](https://github.com/shamoon) ([#3749](https://github.com/paperless-ngx/paperless-ngx/pull/3749))
|
||||||
|
- Feature: Add Slovak translation [@shamoon](https://github.com/shamoon) ([#3722](https://github.com/paperless-ngx/paperless-ngx/pull/3722))
|
||||||
|
- Fix: limit ng-select size for addition of filter button [@shamoon](https://github.com/shamoon) ([#3731](https://github.com/paperless-ngx/paperless-ngx/pull/3731))
|
||||||
|
- Chore: Bump Angular to v16 and other frontend packages [@dependabot](https://github.com/dependabot) ([#3727](https://github.com/paperless-ngx/paperless-ngx/pull/3727))
|
||||||
|
</details>
|
||||||
|
|
||||||

## paperless-ngx 1.16.5

### Features

- Feature: support barcode upscaling for better detection of small barcodes [@bmachek](https://github.com/bmachek) ([#3655](https://github.com/paperless-ngx/paperless-ngx/pull/3655))

### Bug Fixes

- Fix: owner removed when set_permissions passed on object create [@shamoon](https://github.com/shamoon) ([#3702](https://github.com/paperless-ngx/paperless-ngx/pull/3702))

### All App Changes

<details>
<summary>2 changes</summary>

- Feature: support barcode upscaling for better detection of small barcodes [@bmachek](https://github.com/bmachek) ([#3655](https://github.com/paperless-ngx/paperless-ngx/pull/3655))
- Fix: owner removed when set_permissions passed on object create [@shamoon](https://github.com/shamoon) ([#3702](https://github.com/paperless-ngx/paperless-ngx/pull/3702))
</details>

## paperless-ngx 1.16.4

### Bug Fixes

- Fix: prevent button wrapping when sidebar narrows in MS Edge [@shamoon](https://github.com/shamoon) ([#3682](https://github.com/paperless-ngx/paperless-ngx/pull/3682))
- Fix: Handling for filenames with non-ascii and no content attribute [@stumpylog](https://github.com/stumpylog) ([#3695](https://github.com/paperless-ngx/paperless-ngx/pull/3695))
- Fix: Generation of thumbnails for existing stored emails [@stumpylog](https://github.com/stumpylog) ([#3696](https://github.com/paperless-ngx/paperless-ngx/pull/3696))
- Fix: Use row gap for filter editor [@kleinweby](https://github.com/kleinweby) ([#3662](https://github.com/paperless-ngx/paperless-ngx/pull/3662))

### Documentation

- Documentation: update API docs re permissions [@shamoon](https://github.com/shamoon) ([#3697](https://github.com/paperless-ngx/paperless-ngx/pull/3697))

### Maintenance

- Chore: Updates codecov configuration for the flag settings and notification delay [@stumpylog](https://github.com/stumpylog) ([#3656](https://github.com/paperless-ngx/paperless-ngx/pull/3656))

### All App Changes

<details>
<summary>4 changes</summary>

- Fix: prevent button wrapping when sidebar narrows in MS Edge [@shamoon](https://github.com/shamoon) ([#3682](https://github.com/paperless-ngx/paperless-ngx/pull/3682))
- Fix: Handling for filenames with non-ascii and no content attribute [@stumpylog](https://github.com/stumpylog) ([#3695](https://github.com/paperless-ngx/paperless-ngx/pull/3695))
- Fix: Generation of thumbnails for existing stored emails [@stumpylog](https://github.com/stumpylog) ([#3696](https://github.com/paperless-ngx/paperless-ngx/pull/3696))
- Fix: Use row gap for filter editor [@kleinweby](https://github.com/kleinweby) ([#3662](https://github.com/paperless-ngx/paperless-ngx/pull/3662))
</details>

## paperless-ngx 1.16.3

### Bug Fixes

- Fix: Set user and home environment through supervisord [@stumpylog](https://github.com/stumpylog) ([#3638](https://github.com/paperless-ngx/paperless-ngx/pull/3638))
- Fix: Ignore errors when trying to copy the original file's stats [@stumpylog](https://github.com/stumpylog) ([#3652](https://github.com/paperless-ngx/paperless-ngx/pull/3652))
- Fix: Copy default thumbnail if thumbnail generation fails [@plu](https://github.com/plu) ([#3632](https://github.com/paperless-ngx/paperless-ngx/pull/3632))
- Fix: Set user and home environment through supervisord [@stumpylog](https://github.com/stumpylog) ([#3638](https://github.com/paperless-ngx/paperless-ngx/pull/3638))
- Fix: Fix quick install with external database not being fully ready [@stumpylog](https://github.com/stumpylog) ([#3637](https://github.com/paperless-ngx/paperless-ngx/pull/3637))

### Maintenance

- Chore: Update default Postgres version for new installs [@stumpylog](https://github.com/stumpylog) ([#3640](https://github.com/paperless-ngx/paperless-ngx/pull/3640))

### All App Changes

<details>
<summary>2 changes</summary>

- Fix: Ignore errors when trying to copy the original file's stats [@stumpylog](https://github.com/stumpylog) ([#3652](https://github.com/paperless-ngx/paperless-ngx/pull/3652))
- Fix: Copy default thumbnail if thumbnail generation fails [@plu](https://github.com/plu) ([#3632](https://github.com/paperless-ngx/paperless-ngx/pull/3632))
</details>

## paperless-ngx 1.16.2

### Bug Fixes

- Fix: Increase httpx operation timeouts to 30s [@stumpylog](https://github.com/stumpylog) ([#3627](https://github.com/paperless-ngx/paperless-ngx/pull/3627))
- Fix: Better error handling and checking when parsing documents via Tika [@stumpylog](https://github.com/stumpylog) ([#3617](https://github.com/paperless-ngx/paperless-ngx/pull/3617))

### Development

- Development: frontend unit testing [@shamoon](https://github.com/shamoon) ([#3597](https://github.com/paperless-ngx/paperless-ngx/pull/3597))

### Maintenance

- Chore: Bumps the CI/Docker pipenv version [@stumpylog](https://github.com/stumpylog) ([#3622](https://github.com/paperless-ngx/paperless-ngx/pull/3622))
- Chore: Set CI artifact retention days [@stumpylog](https://github.com/stumpylog) ([#3621](https://github.com/paperless-ngx/paperless-ngx/pull/3621))

### All App Changes

<details>
<summary>3 changes</summary>

- Fix: Increase httpx operation timeouts to 30s [@stumpylog](https://github.com/stumpylog) ([#3627](https://github.com/paperless-ngx/paperless-ngx/pull/3627))
- Fix: Better error handling and checking when parsing documents via Tika [@stumpylog](https://github.com/stumpylog) ([#3617](https://github.com/paperless-ngx/paperless-ngx/pull/3617))
- Development: frontend unit testing [@shamoon](https://github.com/shamoon) ([#3597](https://github.com/paperless-ngx/paperless-ngx/pull/3597))
</details>

## paperless-ngx 1.16.1

### Bug Fixes

- Fix: PIL ImportError on ARM devices with Docker [@stumpylog](https://github.com/stumpylog) ([#3605](https://github.com/paperless-ngx/paperless-ngx/pull/3605))

### Maintenance

- Chore: Enable the image cleanup action [@stumpylog](https://github.com/stumpylog) ([#3606](https://github.com/paperless-ngx/paperless-ngx/pull/3606))

## paperless-ngx 1.16.0

### Notable Changes

- Chore: Update base image to Debian bookworm [@stumpylog](https://github.com/stumpylog) ([#3469](https://github.com/paperless-ngx/paperless-ngx/pull/3469))

### Features

- Feature: Update to a simpler Tika library [@stumpylog](https://github.com/stumpylog) ([#3517](https://github.com/paperless-ngx/paperless-ngx/pull/3517))
- Feature: Allow to filter documents by original filename and checksum [@jayme-github](https://github.com/jayme-github) ([#3485](https://github.com/paperless-ngx/paperless-ngx/pull/3485))

### Bug Fixes

- Fix: return user first / last name from backend [@shamoon](https://github.com/shamoon) ([#3579](https://github.com/paperless-ngx/paperless-ngx/pull/3579))
- Fix use of `PAPERLESS_DB_TIMEOUT` for all db types [@shamoon](https://github.com/shamoon) ([#3576](https://github.com/paperless-ngx/paperless-ngx/pull/3576))
- Fix: handle mail rules with no filters on some imap servers [@shamoon](https://github.com/shamoon) ([#3554](https://github.com/paperless-ngx/paperless-ngx/pull/3554))

### Dependencies

- Chore: Python dependency updates (celery 5.3.0 in particular) [@stumpylog](https://github.com/stumpylog) ([#3584](https://github.com/paperless-ngx/paperless-ngx/pull/3584))

### All App Changes

<details>
<summary>8 changes</summary>

- Chore: Python dependency updates (celery 5.3.0 in particular) [@stumpylog](https://github.com/stumpylog) ([#3584](https://github.com/paperless-ngx/paperless-ngx/pull/3584))
- Fix: return user first / last name from backend [@shamoon](https://github.com/shamoon) ([#3579](https://github.com/paperless-ngx/paperless-ngx/pull/3579))
- Fix use of `PAPERLESS_DB_TIMEOUT` for all db types [@shamoon](https://github.com/shamoon) ([#3576](https://github.com/paperless-ngx/paperless-ngx/pull/3576))
- Fix: handle mail rules with no filters on some imap servers [@shamoon](https://github.com/shamoon) ([#3554](https://github.com/paperless-ngx/paperless-ngx/pull/3554))
- Chore: Copy file stats from original file [@stumpylog](https://github.com/stumpylog) ([#3551](https://github.com/paperless-ngx/paperless-ngx/pull/3551))
- Chore: Adds test for barcode ASN when it already exists [@stumpylog](https://github.com/stumpylog) ([#3550](https://github.com/paperless-ngx/paperless-ngx/pull/3550))
- Feature: Update to a simpler Tika library [@stumpylog](https://github.com/stumpylog) ([#3517](https://github.com/paperless-ngx/paperless-ngx/pull/3517))
- Feature: Allow to filter documents by original filename and checksum [@jayme-github](https://github.com/jayme-github) ([#3485](https://github.com/paperless-ngx/paperless-ngx/pull/3485))
</details>

## paperless-ngx 1.15.1

### Bug Fixes

- Fix incorrect colors in v1.15.0 [@shamoon](https://github.com/shamoon) ([#3523](https://github.com/paperless-ngx/paperless-ngx/pull/3523))

### All App Changes

- Fix incorrect colors in v1.15.0 [@shamoon](https://github.com/shamoon) ([#3523](https://github.com/paperless-ngx/paperless-ngx/pull/3523))

## paperless-ngx 1.15.0

### Features

- Feature: quick filters from document detail [@shamoon](https://github.com/shamoon) ([#3476](https://github.com/paperless-ngx/paperless-ngx/pull/3476))
- Feature: Add explanations to relative dates [@shamoon](https://github.com/shamoon) ([#3471](https://github.com/paperless-ngx/paperless-ngx/pull/3471))
- Enhancement: paginate frontend tasks [@shamoon](https://github.com/shamoon) ([#3445](https://github.com/paperless-ngx/paperless-ngx/pull/3445))
- Feature: Better encapsulation of barcode logic [@stumpylog](https://github.com/stumpylog) ([#3425](https://github.com/paperless-ngx/paperless-ngx/pull/3425))
- Enhancement: Improve frontend error handling [@shamoon](https://github.com/shamoon) ([#3413](https://github.com/paperless-ngx/paperless-ngx/pull/3413))

### Bug Fixes

- Fix: KeyError error on unauthenticated API calls \& persist authentication when enabled [@ajgon](https://github.com/ajgon) ([#3516](https://github.com/paperless-ngx/paperless-ngx/pull/3516))
- Fix: exclude consumer \& AnonymousUser users from export manifest [@shamoon](https://github.com/shamoon) ([#3487](https://github.com/paperless-ngx/paperless-ngx/pull/3487))
- Fix: prevent date suggestion search if disabled [@shamoon](https://github.com/shamoon) ([#3472](https://github.com/paperless-ngx/paperless-ngx/pull/3472))
- Sync Pipfile.lock based on latest Pipfile [@adamantike](https://github.com/adamantike) ([#3475](https://github.com/paperless-ngx/paperless-ngx/pull/3475))
- Fix: DocumentSerializer should return correct original filename [@jayme-github](https://github.com/jayme-github) ([#3473](https://github.com/paperless-ngx/paperless-ngx/pull/3473))
- consumer.py: read from original file (instead of temp copy) [@chrisblech](https://github.com/chrisblech) ([#3466](https://github.com/paperless-ngx/paperless-ngx/pull/3466))
- Bugfix: Catch an nltk AttributeError and handle it [@stumpylog](https://github.com/stumpylog) ([#3453](https://github.com/paperless-ngx/paperless-ngx/pull/3453))

### Documentation

- Adding doc on how to setup Fail2ban [@GuillaumeHullin](https://github.com/GuillaumeHullin) ([#3414](https://github.com/paperless-ngx/paperless-ngx/pull/3414))
- Docs: Fix typo [@MarcelBochtler](https://github.com/MarcelBochtler) ([#3437](https://github.com/paperless-ngx/paperless-ngx/pull/3437))
- [Documentation] Move nginx [@shamoon](https://github.com/shamoon) ([#3420](https://github.com/paperless-ngx/paperless-ngx/pull/3420))
- Documentation: Note possible dependency removal for bare metal [@stumpylog](https://github.com/stumpylog) ([#3408](https://github.com/paperless-ngx/paperless-ngx/pull/3408))

### Development

- Development: migrate frontend tests to playwright [@shamoon](https://github.com/shamoon) ([#3401](https://github.com/paperless-ngx/paperless-ngx/pull/3401))

### Dependencies

<details>
<summary>10 changes</summary>

- Bump eslint from 8.39.0 to 8.41.0 in /src-ui [@dependabot](https://github.com/dependabot) ([#3513](https://github.com/paperless-ngx/paperless-ngx/pull/3513))
- Bump concurrently from 8.0.1 to 8.1.0 in /src-ui [@dependabot](https://github.com/dependabot) ([#3510](https://github.com/paperless-ngx/paperless-ngx/pull/3510))
- Bump [@<!---->ng-bootstrap/ng-bootstrap from 14.1.0 to 14.2.0 in /src-ui @dependabot](https://github.com/<!---->ng-bootstrap/ng-bootstrap from 14.1.0 to 14.2.0 in /src-ui @dependabot) ([#3507](https://github.com/paperless-ngx/paperless-ngx/pull/3507))
- Bump [@<!---->popperjs/core from 2.11.7 to 2.11.8 in /src-ui @dependabot](https://github.com/<!---->popperjs/core from 2.11.7 to 2.11.8 in /src-ui @dependabot) ([#3508](https://github.com/paperless-ngx/paperless-ngx/pull/3508))
- Bump [@<!---->typescript-eslint/parser from 5.59.2 to 5.59.8 in /src-ui @dependabot](https://github.com/<!---->typescript-eslint/parser from 5.59.2 to 5.59.8 in /src-ui @dependabot) ([#3505](https://github.com/paperless-ngx/paperless-ngx/pull/3505))
- Bump bootstrap from 5.2.3 to 5.3.0 in /src-ui [@dependabot](https://github.com/dependabot) ([#3497](https://github.com/paperless-ngx/paperless-ngx/pull/3497))
- Bump [@<!---->typescript-eslint/eslint-plugin from 5.59.2 to 5.59.8 in /src-ui @dependabot](https://github.com/<!---->typescript-eslint/eslint-plugin from 5.59.2 to 5.59.8 in /src-ui @dependabot) ([#3500](https://github.com/paperless-ngx/paperless-ngx/pull/3500))
- Bump tslib from 2.5.0 to 2.5.2 in /src-ui [@dependabot](https://github.com/dependabot) ([#3501](https://github.com/paperless-ngx/paperless-ngx/pull/3501))
- Bump [@<!---->types/node from 18.16.3 to 20.2.5 in /src-ui @dependabot](https://github.com/<!---->types/node from 18.16.3 to 20.2.5 in /src-ui @dependabot) ([#3498](https://github.com/paperless-ngx/paperless-ngx/pull/3498))
- Bump [@<!---->playwright/test from 1.33.0 to 1.34.3 in /src-ui @dependabot](https://github.com/<!---->playwright/test from 1.33.0 to 1.34.3 in /src-ui @dependabot) ([#3499](https://github.com/paperless-ngx/paperless-ngx/pull/3499))
</details>

### All App Changes

<details>
<summary>22 changes</summary>

- Fix: KeyError error on unauthenticated API calls \& persist authentication when enabled [@ajgon](https://github.com/ajgon) ([#3516](https://github.com/paperless-ngx/paperless-ngx/pull/3516))
- Bump eslint from 8.39.0 to 8.41.0 in /src-ui [@dependabot](https://github.com/dependabot) ([#3513](https://github.com/paperless-ngx/paperless-ngx/pull/3513))
- Bump concurrently from 8.0.1 to 8.1.0 in /src-ui [@dependabot](https://github.com/dependabot) ([#3510](https://github.com/paperless-ngx/paperless-ngx/pull/3510))
- Bump [@<!---->ng-bootstrap/ng-bootstrap from 14.1.0 to 14.2.0 in /src-ui @dependabot](https://github.com/<!---->ng-bootstrap/ng-bootstrap from 14.1.0 to 14.2.0 in /src-ui @dependabot) ([#3507](https://github.com/paperless-ngx/paperless-ngx/pull/3507))
- Bump [@<!---->popperjs/core from 2.11.7 to 2.11.8 in /src-ui @dependabot](https://github.com/<!---->popperjs/core from 2.11.7 to 2.11.8 in /src-ui @dependabot) ([#3508](https://github.com/paperless-ngx/paperless-ngx/pull/3508))
- Bump [@<!---->typescript-eslint/parser from 5.59.2 to 5.59.8 in /src-ui @dependabot](https://github.com/<!---->typescript-eslint/parser from 5.59.2 to 5.59.8 in /src-ui @dependabot) ([#3505](https://github.com/paperless-ngx/paperless-ngx/pull/3505))
- Bump bootstrap from 5.2.3 to 5.3.0 in /src-ui [@dependabot](https://github.com/dependabot) ([#3497](https://github.com/paperless-ngx/paperless-ngx/pull/3497))
- Bump [@<!---->typescript-eslint/eslint-plugin from 5.59.2 to 5.59.8 in /src-ui @dependabot](https://github.com/<!---->typescript-eslint/eslint-plugin from 5.59.2 to 5.59.8 in /src-ui @dependabot) ([#3500](https://github.com/paperless-ngx/paperless-ngx/pull/3500))
- Bump tslib from 2.5.0 to 2.5.2 in /src-ui [@dependabot](https://github.com/dependabot) ([#3501](https://github.com/paperless-ngx/paperless-ngx/pull/3501))
- Bump [@<!---->types/node from 18.16.3 to 20.2.5 in /src-ui @dependabot](https://github.com/<!---->types/node from 18.16.3 to 20.2.5 in /src-ui @dependabot) ([#3498](https://github.com/paperless-ngx/paperless-ngx/pull/3498))
- Bump [@<!---->playwright/test from 1.33.0 to 1.34.3 in /src-ui @dependabot](https://github.com/<!---->playwright/test from 1.33.0 to 1.34.3 in /src-ui @dependabot) ([#3499](https://github.com/paperless-ngx/paperless-ngx/pull/3499))
- Feature: quick filters from document detail [@shamoon](https://github.com/shamoon) ([#3476](https://github.com/paperless-ngx/paperless-ngx/pull/3476))
- Fix: exclude consumer \& AnonymousUser users from export manifest [@shamoon](https://github.com/shamoon) ([#3487](https://github.com/paperless-ngx/paperless-ngx/pull/3487))
- Fix: prevent date suggestion search if disabled [@shamoon](https://github.com/shamoon) ([#3472](https://github.com/paperless-ngx/paperless-ngx/pull/3472))
- Feature: Add explanations to relative dates [@shamoon](https://github.com/shamoon) ([#3471](https://github.com/paperless-ngx/paperless-ngx/pull/3471))
- Fix: DocumentSerializer should return correct original filename [@jayme-github](https://github.com/jayme-github) ([#3473](https://github.com/paperless-ngx/paperless-ngx/pull/3473))
- consumer.py: read from original file (instead of temp copy) [@chrisblech](https://github.com/chrisblech) ([#3466](https://github.com/paperless-ngx/paperless-ngx/pull/3466))
- Bugfix: Catch an nltk AttributeError and handle it [@stumpylog](https://github.com/stumpylog) ([#3453](https://github.com/paperless-ngx/paperless-ngx/pull/3453))
- Chore: Improves the logging mixin and allows it to be typed better [@stumpylog](https://github.com/stumpylog) ([#3451](https://github.com/paperless-ngx/paperless-ngx/pull/3451))
- Enhancement: paginate frontend tasks [@shamoon](https://github.com/shamoon) ([#3445](https://github.com/paperless-ngx/paperless-ngx/pull/3445))
- Add SSL Support for MariaDB [@kimdre](https://github.com/kimdre) ([#3444](https://github.com/paperless-ngx/paperless-ngx/pull/3444))
- Enhancement: Improve frontend error handling [@shamoon](https://github.com/shamoon) ([#3413](https://github.com/paperless-ngx/paperless-ngx/pull/3413))
</details>
|
## paperless-ngx 1.14.5

### Features

- Feature: owner filtering [@shamoon](https://github.com/shamoon) ([#3309](https://github.com/paperless-ngx/paperless-ngx/pull/3309))
- Enhancement: dynamic counts include all pages, hide for Any [@shamoon](https://github.com/shamoon) ([#3329](https://github.com/paperless-ngx/paperless-ngx/pull/3329))
- Enhancement: save tour completion, hide welcome widget [@shamoon](https://github.com/shamoon) ([#3321](https://github.com/paperless-ngx/paperless-ngx/pull/3321))

### Bug Fixes

- Fix: Adds better handling for files with invalid utf8 content [@stumpylog](https://github.com/stumpylog) ([#3387](https://github.com/paperless-ngx/paperless-ngx/pull/3387))
- Fix: respect permissions for autocomplete suggestions [@shamoon](https://github.com/shamoon) ([#3359](https://github.com/paperless-ngx/paperless-ngx/pull/3359))
- Fix: Transition to new library for finding IPs for failed logins [@stumpylog](https://github.com/stumpylog) ([#3382](https://github.com/paperless-ngx/paperless-ngx/pull/3382))
- [Security] Render frontend text as plain text [@shamoon](https://github.com/shamoon) ([#3366](https://github.com/paperless-ngx/paperless-ngx/pull/3366))
- Fix: default frontend to current owner, allow setting no owner on create [@shamoon](https://github.com/shamoon) ([#3347](https://github.com/paperless-ngx/paperless-ngx/pull/3347))
- Fix: dont perform mail actions when rule filename filter not met [@shamoon](https://github.com/shamoon) ([#3336](https://github.com/paperless-ngx/paperless-ngx/pull/3336))
- Fix: permission-aware bulk editing in 1.14.1+ [@shamoon](https://github.com/shamoon) ([#3345](https://github.com/paperless-ngx/paperless-ngx/pull/3345))

### Maintenance

- Chore: Rework workflows [@stumpylog](https://github.com/stumpylog) ([#3242](https://github.com/paperless-ngx/paperless-ngx/pull/3242))

### Dependencies

- Chore: Upgrade channels to v4 [@stumpylog](https://github.com/stumpylog) ([#3383](https://github.com/paperless-ngx/paperless-ngx/pull/3383))
- Chore: Upgrades Python dependencies to their latest allowed versions [@stumpylog](https://github.com/stumpylog) ([#3365](https://github.com/paperless-ngx/paperless-ngx/pull/3365))

### All App Changes

<details>

<summary>13 changes</summary>

- Fix: Adds better handling for files with invalid utf8 content [@stumpylog](https://github.com/stumpylog) ([#3387](https://github.com/paperless-ngx/paperless-ngx/pull/3387))
- Fix: respect permissions for autocomplete suggestions [@shamoon](https://github.com/shamoon) ([#3359](https://github.com/paperless-ngx/paperless-ngx/pull/3359))
- Chore: Upgrade channels to v4 [@stumpylog](https://github.com/stumpylog) ([#3383](https://github.com/paperless-ngx/paperless-ngx/pull/3383))
- Fix: Transition to new library for finding IPs for failed logins [@stumpylog](https://github.com/stumpylog) ([#3382](https://github.com/paperless-ngx/paperless-ngx/pull/3382))
- Feature: owner filtering [@shamoon](https://github.com/shamoon) ([#3309](https://github.com/paperless-ngx/paperless-ngx/pull/3309))
- [Security] Render frontend text as plain text [@shamoon](https://github.com/shamoon) ([#3366](https://github.com/paperless-ngx/paperless-ngx/pull/3366))
- Enhancement: dynamic counts include all pages, hide for Any [@shamoon](https://github.com/shamoon) ([#3329](https://github.com/paperless-ngx/paperless-ngx/pull/3329))
- Fix: default frontend to current owner, allow setting no owner on create [@shamoon](https://github.com/shamoon) ([#3347](https://github.com/paperless-ngx/paperless-ngx/pull/3347))
- [Fix] Position:fixed for .global-dropzone-overlay [@denilsonsa](https://github.com/denilsonsa) ([#3367](https://github.com/paperless-ngx/paperless-ngx/pull/3367))
- Fix: dont perform mail actions when rule filename filter not met [@shamoon](https://github.com/shamoon) ([#3336](https://github.com/paperless-ngx/paperless-ngx/pull/3336))
- Enhancement: save tour completion, hide welcome widget [@shamoon](https://github.com/shamoon) ([#3321](https://github.com/paperless-ngx/paperless-ngx/pull/3321))
- Fix: permission-aware bulk editing in 1.14.1+ [@shamoon](https://github.com/shamoon) ([#3345](https://github.com/paperless-ngx/paperless-ngx/pull/3345))
- Fix: Add proper testing for \*\_\_id\_\_in testing [@shamoon](https://github.com/shamoon) ([#3315](https://github.com/paperless-ngx/paperless-ngx/pull/3315))

</details>

## paperless-ngx 1.14.4

### Bug Fixes

- Fix: Inversion in tagged mail searching [@stumpylog](https://github.com/stumpylog) ([#3305](https://github.com/paperless-ngx/paperless-ngx/pull/3305))
- Fix dynamic count labels hidden in light mode [@shamoon](https://github.com/shamoon) ([#3303](https://github.com/paperless-ngx/paperless-ngx/pull/3303))

### All App Changes

<details>

<summary>3 changes</summary>

- New Crowdin updates [@paperlessngx-bot](https://github.com/paperlessngx-bot) ([#3298](https://github.com/paperless-ngx/paperless-ngx/pull/3298))
- Fix: Inversion in tagged mail searching [@stumpylog](https://github.com/stumpylog) ([#3305](https://github.com/paperless-ngx/paperless-ngx/pull/3305))
- Fix dynamic count labels hidden in light mode [@shamoon](https://github.com/shamoon) ([#3303](https://github.com/paperless-ngx/paperless-ngx/pull/3303))

</details>

## paperless-ngx 1.14.3

### Features

@@ -1587,7 +2079,7 @@ This is a maintenance release.
The changed to the full text searching require you to reindex your
documents. _The docker image does this automatically, you don't need to
do anything._ To do this, execute the `document_index reindex`
-management command (see [Managing the document search index](/administration#index)).
+management command (see [Managing the document search index](administration.md#index)).

### paperless-ng 1.3.2

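For illustration only, a reindex can be triggered like this — assuming the standard Docker Compose setup with a `webserver` service (bare-metal installs run `document_index reindex` directly):

```bash
# Rebuild the whole-document search index; safe to repeat at any time
docker compose run --rm webserver document_index reindex
```
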
@@ -1626,7 +2118,7 @@ This release contains new database migrations.
- Changes
- The REST API is versioned from this point onwards. This will
allow me to make changes without breaking existing clients. See
-the documentation about [API versioning](/api#api-versioning) for details.
+the documentation about [API versioning](api.md#api-versioning) for details.
- Added a color picker for tag colors.
- Added the ability to use the filter for searching the document
content as well.

@@ -1660,7 +2152,7 @@ This release contains new database migrations.
- Changes to the OCRmyPDF integration
- Added support for deskewing and automatic rotation of
incorrectly rotated pages. This is enabled by default, see
-[OCR settings](/configuration#ocr).
+[OCR settings](configuration.md#ocr).
- Better support for encrypted files.
- Better support for various other PDF files: Paperless will now
attempt to force OCR with safe options when OCR fails with the

@@ -1687,7 +2179,7 @@ This release contains new database migrations.

- Added a docker-specific configuration option to adjust the number of
worker processes of the web server. See
-[Docker options](/configuration#docker).
+[Docker options](configuration.md#docker).
- Some more memory usage optimizations.
- Don't show inbox statistics if no inbox tag is defined.

@@ -1696,7 +2188,7 @@ This release contains new database migrations.
- Always show top left corner of thumbnails, even for extra wide
documents.
- Added a management command for executing the sanity checker
-directly. See [management utilities](/administration#sanity-checker).
+directly. See [management utilities](administration.md#sanity-checker).
- The weekly sanity check now reports messages in the log files.
- Fixed an issue with the metadata tab not reporting anything in case
of missing files.

@@ -1730,7 +2222,7 @@ This release contains new database migrations.
management commands, since these also ensure that they're always
executed as the paperless user and you're less likely to run into
permission issues. See
-[management commands](/administration#management-commands).
+[management commands](administration.md#management-commands).

### paperless-ng 1.1.0

@@ -1772,7 +2264,7 @@ This release contains new database migrations.
status notifications.

Apache `mod_wsgi` users, see
-[this note](/faq#how-do-i-get-websocket-support-with-apache-mod_wsgi).
+[this note](faq.md#how-do-i-get-websocket-support-with-apache-mod_wsgi).

- Paperless now offers suggestions for tags, correspondents and types
on the document detail page.

@@ -1817,7 +2309,7 @@ bug reports coming in, I think that this is reasonably stable.
- The document exporter locks the media directory and the database
during execution to ensure that the resulting export is
consistent.
-- See the [updated documentation](/administration#exporter) for more details.
+- See the [updated documentation](administration.md#exporter) for more details.
- Other changes and additions
- Added a language selector to the settings.
- Added date format options to the settings.
@@ -1906,7 +2398,7 @@ paperless.
- Thanks to [Jo Vandeginste](https://github.com/jovandeginste),
Paperless has optional support for Office documents such as .docx,
.doc, .odt and more.
-- See the [Tika settings](/configuration#tika) on how to enable this
+- See the [Tika settings](configuration.md#tika) on how to enable this
feature. This feature requires two additional services (one for
parsing Office documents and metadata extraction and another for
converting Office documents to PDF), and is therefore not enabled

@@ -1993,7 +2485,7 @@ paperless.

However, this change is not retroactive: If you used the delete method
of the bulk editor, you need to reindex your search index by
-[running the management command `document_index` with the argument `reindex`](/administration#index).
+[running the management command `document_index` with the argument `reindex`](administration.md#index).

### paperless-ng 0.9.9

@@ -2150,13 +2642,13 @@ primarily.
edit page. If available, a dropdown menu will appear next to the
download button.
- Many of the configuration options regarding OCR have changed.
-See [OCR settings](/configuration#ocr) for details.
+See [OCR settings](configuration.md#ocr) for details.
- Paperless no longer guesses the language of your documents. It
always uses the language that you specified with
`PAPERLESS_OCR_LANGUAGE`. Be sure to set this to the language
the majority of your documents are in. Multiple languages can be
specified, but that requires more CPU time.
-- The management command [`document_archiver`](/administration#archiver)
+- The management command [`document_archiver`](administration.md#archiver)
can be used to create archived versions for already existing documents.
- Tags from consumption folder.
- Thanks to [jayme-github](https://github.com/jayme-github),
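To make the two settings above concrete — a sketch only, assuming a Docker Compose install with a `webserver` service — the OCR language goes into the configuration, and existing documents can then be archived:

```bash
# In paperless.conf or the docker-compose environment: OCR in German and English.
# Multiple languages are joined with '+', at the cost of extra CPU time.
PAPERLESS_OCR_LANGUAGE=deu+eng

# Create archived versions for documents that already exist
# (bare-metal installs call document_archiver directly)
docker compose run --rm webserver document_archiver
```
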
@@ -2170,7 +2662,7 @@ primarily.
- The endpoint for uploading documents now supports specifying
custom titles, correspondents, tags and types. This can be used
by clients to override the default behavior of paperless. See
-[POSTing documents](/api#file-uploads).
+[POSTing documents](api.md#file-uploads).
- The document endpoint of API now serves documents in this form:
- correspondents, document types and tags are referenced by
their ID in the fields `correspondent`, `document_type` and

@@ -2204,14 +2696,14 @@ primarily.
- Paperless now supports searching by tags, types and dates and
correspondents. In order to have this applied to your existing
documents, you need to perform a `document_index reindex`
-management command (see [document search index](/administration#index))
+management command (see [document search index](administration.md#index))
that adds the data to the search index. You only need to do this
once, since the schema of the search index changed. Paperless
keeps the index updated after that whenever something changes.
- Paperless now has spelling corrections ("Did you mean") for
miss-typed queries.
- The documentation contains
-[information about the query syntax](/usage#basic-usage_searching).
+[information about the query syntax](usage.md#basic-usage_searching).
- Front end:
- Clickable tags, correspondents and types allow quick filtering
for related documents.

@@ -2272,7 +2764,7 @@ primarily.

### paperless-ng 0.9.0

-- **Deprecated:** GnuPG. [See this note on the state of GnuPG in paperless-ng.](/administration#encryption)
+- **Deprecated:** GnuPG. [See this note on the state of GnuPG in paperless-ng.](administration.md#encryption)
This features will most likely be removed in future versions.
- **Added:** New frontend. Features:
- Single page application: It's much more responsive than the

@@ -2330,7 +2822,7 @@ primarily.
uses PostgreSQL instead of SQLite. Username, database and
password all default to `paperless` if not specified.
- **Modified \[breaking\]:** document_retagger management command
-rework. See [Document retagger](/administration#retagger) for
+rework. See [Document retagger](administration.md#retagger) for
details. Replaces `document_correspondents` management command.
- **Removed \[breaking\]:** Reminders.
- **Removed:** All customizations made to the django admin pages.

@@ -2799,7 +3291,7 @@ this big change.
wherein Paperless wasn't recognising `.tif` files properly. Thanks
to [ayounggun](https://github.com/ayounggun) for reporting this one
and to [Kusti Skytén](https://github.com/kskyten) for posting the
-correct solution in the Github issue.
+correct solution in the GitHub issue.

### 0.6.0

@@ -58,10 +58,10 @@ first-time setup.

!!! note

Every command is executed directly from the root folder of the project unless specified otherwise.

1. Install prerequisites + pipenv as mentioned in
-[Bare metal route](/setup#bare_metal).
+[Bare metal route](setup.md#bare_metal).

2. Copy `paperless.conf.example` to `paperless.conf` and enable debug
mode within the file via `PAPERLESS_DEBUG=true`.
@@ -177,69 +177,69 @@ The front end is built using AngularJS. In order to get started, you need Node.j

The following commands are all performed in the `src-ui`-directory. You will need a running back end (including an active session) to connect to the back end API. To spin it up refer to the commands under the section [above](#back-end-development).

-1. Install the Angular CLI. You might need sudo privileges
-to perform this command:
+1. Install the Angular CLI. You might need sudo privileges to perform this command:

```bash
$ npm install -g @angular/cli
```

2. Make sure that it's on your path.

-3. Install all neccessary modules:
+3. Install all necessary modules:

```bash
$ npm install
```

4. You can launch a development server by running:

```bash
$ ng serve
```

This will automatically update whenever you save. However, in-place
compilation might fail on syntax errors, in which case you need to
restart it.

By default, the development server is available on `http://localhost:4200/` and is configured to access the API at
`http://localhost:8000/api/`, which is the default of the backend. If you enabled `DEBUG` on the back end, several security overrides for allowed hosts, CORS and X-Frame-Options are in place so that the front end behaves exactly as in production.

### Testing and code style

-- The front end code (.ts, .html, .scss) use `prettier` for code
+The front end code (.ts, .html, .scss) use `prettier` for code
formatting via the Git `pre-commit` hooks which run automatically on
commit. See [above](#code-formatting-with-pre-commit-hooks) for installation instructions. You can also run this via the CLI with a
command such as

```bash
$ git ls-files -- '*.ts' | xargs pre-commit run prettier --files
```

-- Front end testing uses jest and cypress. There is currently a need
-for significantly more front end tests. Unit tests and e2e tests,
-respectively, can be run non-interactively with:
+Front end testing uses Jest and Playwright. Unit tests and e2e tests,
+respectively, can be run non-interactively with:

```bash
$ ng test
-$ npm run e2e:ci
+$ npx playwright test
```

-- Cypress also includes a UI which can be run with:
+Playwright also includes a UI which can be run with:

```bash
-$ ./node_modules/.bin/cypress open
+$ npx playwright test --ui
```

-- In order to build the front end and serve it as part of Django, execute:
+### Building the frontend
+
+In order to build the front end and serve it as part of Django, execute:

```bash
$ ng build --configuration production
```

This will build the front end and put it in a location from which the
Django server will serve it as static content. This way, you can verify
that authentication is working.

## Localization

@@ -362,7 +362,7 @@ If you want to build the documentation locally, this is how you do it:

3. Serve the documentation. This will spin up a
copy of the documentation at http://127.0.0.1:8000
-that will automatically refresh everytime you change
+that will automatically refresh every time you change
something.

```bash
@@ -374,13 +374,10 @@ If you want to build the documentation locally, this is how you do it:
The docker image is primarily built by the GitHub actions workflow, but
it can be faster when developing to build and tag an image locally.

-To provide the build arguments automatically, build the image using the
-helper script `build-docker-image.sh`.
-
-Building the docker image from source:
-
-```bash
-./build-docker-image.sh Dockerfile -t <your-tag>
-```
+Building the image works as with any image:
+
+```
+docker build --file Dockerfile --tag paperless:local --progress simple .
+```

## Extending Paperless-ngx

@@ -398,7 +395,7 @@ responsible for:
- Retrieving the content from the original
- Creating a thumbnail
- _optional:_ Retrieving a created date from the original
-- _optional:_ Creainge an archived document from the original
+- _optional:_ Creating an archived document from the original

Custom parsers can be added to Paperless-ngx to support more file types. In
order to do that, you need to write the parser itself and announce its

docs/faq.md (43 changed lines)
@@ -3,15 +3,16 @@
## _What's the general plan for Paperless-ngx?_

**A:** While Paperless-ngx is already considered largely
-"feature-complete" it is a community-driven project and development
-will be guided in this way. New features can be submitted via GitHub
-discussions and "up-voted" by the community but this is not a
-guarantee the feature will be implemented. This project will always be
+"feature-complete", it is a community-driven project and development
+will be guided in this way. New features can be submitted via
+[GitHub discussions](https://github.com/paperless-ngx/paperless-ngx/discussions)
+and "up-voted" by the community, but this is not a
+guarantee that the feature will be implemented. This project will always be
open to collaboration in the form of PRs, ideas etc.

## _I'm using docker. Where are my documents?_

-**A:** Your documents are stored inside the docker volume
+**A:** By default, your documents are stored inside the docker volume
`paperless_media`. Docker manages this volume automatically for you. It
is a persistent storage and will persist as long as you don't
explicitly delete it. The actual location depends on your host operating
@@ -27,6 +28,12 @@ system. On Linux, chances are high that this location is
files around manually. This folder is meant to be entirely managed by
docker and paperless.

+!!! note
+
+Files consumed from the consumption directory are re-created inside
+this media directory and are removed from the consumption directory
+itself.
+
## Let's say I want to switch tools in a year. Can I easily move to other systems?

**A:** Your documents are stored as plain files inside the media folder.
@@ -39,8 +46,8 @@ elsewhere. Here are a couple notes about that.
- By default, paperless uses the internal ID of each document as its
filename. This might not be very convenient for export. However, you
can adjust the way files are stored in paperless by
-[configuring the filename format](/advanced_usage#file-name-handling).
-- [The exporter](/administration#exporter) is
+[configuring the filename format](advanced_usage.md#file-name-handling).
+- [The exporter](administration.md#exporter) is
another easy way to get your files out of paperless with reasonable
file names.

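A hedged sketch of the two options mentioned above — the placeholder names and the export target are illustrative, so check the configuration documentation for the exact syntax:

```bash
# Store files under a human-readable path instead of the internal ID
# (set in paperless.conf or the docker-compose environment)
PAPERLESS_FILENAME_FORMAT={created_year}/{correspondent}/{title}

# Or dump everything, with reasonable file names and a manifest,
# into a mounted export directory (Docker Compose setup assumed)
docker compose run --rm webserver document_exporter ../export
```
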
@@ -52,7 +59,7 @@ elsewhere. Here are a couple notes about that.
WebP images are processed with OCR and converted into PDF documents.
- Plain text documents are supported as well and are added verbatim to
paperless.
-- With the optional Tika integration enabled (see [Tika configuration](/configuration#tika),
+- With the optional Tika integration enabled (see [Tika configuration](https://docs.paperless-ngx.com/configuration#tika)),
Paperless also supports various Office documents (.docx, .doc, odt,
.ppt, .pptx, .odp, .xls, .xlsx, .ods).

@@ -71,12 +78,12 @@ has to do much less work to serve the data.
!!! note

You can adjust some of the settings so that paperless uses less
-processing power. See [setup](/setup#less-powerful-devices) for details.
+processing power. See [setup](setup.md#less-powerful-devices) for details.

## _How do I install paperless-ngx on Raspberry Pi?_

-**A:** Docker images are available for armv7 and arm64 hardware, so just
-follow the docker-compose instructions. Apart from more required disk
+**A:** Docker images are available for arm64 hardware, so just
+follow the [Docker Compose instructions](https://docs.paperless-ngx.com/setup/#installation). Apart from more required disk
space compared to a bare metal installation, docker comes with close to
zero overhead, even on Raspberry Pi.

@@ -85,6 +92,13 @@ the python requirements do not have precompiled packages for ARM /
ARM64. Installation of these will require additional development
libraries and compilation will take a long time.

+!!! note
+
+For ARMv7 (32-bit) systems, paperless may still function, but it could require
+modifications to the Dockerfile (if using Docker) or additional
+tools for installing bare metal. It is suggested to upgrade to arm64
+instead.
+
## _How do I run this on Unraid?_

**A:** Paperless-ngx is available as [community

@@ -96,14 +110,11 @@ Fahrer](https://github.com/Tooa) created a container template for that.
**A:** I honestly don't know! As for all other devices that might be
able to run paperless, you're a bit on your own. If you can't run the
docker image, the documentation has instructions for bare metal
-installs. I'm running paperless on an i3 processor from 2015 or so.
-This is also what I use to test new releases with. Apart from that, I
-also have a Raspberry Pi, which I occasionally build the image on and
-see if it works.
+installs.

## _How do I proxy this with NGINX?_

-**A:** See [here](/setup#nginx).
+**A:** See [the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Using-a-Reverse-Proxy-with-Paperless-ngx#nginx).

## _How do I get WebSocket support with Apache mod_wsgi_?

docs/index.md (190 changed lines)
@@ -5,7 +5,7 @@
**Paperless-ngx** is a _community-supported_ open-source document management system that transforms your
physical documents into a searchable online archive so you can keep, well, _less paper_.

-[Get started](/setup){ .md-button .md-button--primary .index-callout }
+[Get started](setup.md){ .md-button .md-button--primary .index-callout }
[Demo](https://demo.paperless-ngx.com){ .md-button .md-button--secondary target=\_blank }

</div>
@@ -15,103 +15,161 @@ physical documents into a searchable online archive so you can keep, well, _less
|
|||||||
</div>
|
</div>
|
||||||
<div class="clear"></div>
|
<div class="clear"></div>
|
||||||
|
|
||||||
## Why This Exists
|
## Features
|
||||||
|
|
||||||
Paper is a nightmare. Environmental issues aside, there's no excuse for
|
- **Organize and index** your scanned documents with tags, correspondents, types, and more.
|
||||||
it in the 21st century. It takes up space, collects dust, doesn't
|
- Performs **OCR** on your documents, adding searchable and selectable text, even to documents scanned with only images.
|
||||||
support any form of a search feature, indexing is tedious, it's heavy
|
- Utilizes the open-source Tesseract engine to recognize more than 100 languages.
|
||||||
and prone to damage & loss.
|
- Documents are saved as PDF/A format which is designed for long term storage, alongside the unaltered originals.
|
||||||
|
- Uses machine-learning to automatically add tags, correspondents and document types to your documents.
|
||||||
|
- Supports PDF documents, images, plain text files, Office documents (Word, Excel, Powerpoint, and LibreOffice equivalents)[^1] and more.
|
||||||
|
- Paperless stores your documents plain on disk. Filenames and folders are managed by paperless and their format can be configured freely with different configurations assigned to different documents.
|
||||||
|
- **Beautiful, modern web application** that features:
|
||||||
|
- Customizable dashboard with statistics.
|
||||||
|
- Filtering by tags, correspondents, types, and more.
|
||||||
|
- Bulk editing of tags, correspondents, types and more.
|
||||||
|
- Drag-and-drop uploading of documents throughout the app.
|
||||||
|
- Customizable views can be saved and displayed on the dashboard and / or sidebar.
|
||||||
|
- Support for custom fields of various data types.
|
||||||
|
- Shareable public links with optional expiration.
|
||||||
|
- **Full text search** helps you find what you need:
|
||||||
|
- Auto completion suggests relevant words from your documents.
|
||||||
|
- Results are sorted by relevance to your search query.
|
||||||
|
- Highlighting shows you which parts of the document matched the query.
|
||||||
|
- Searching for similar documents ("More like this")
|
||||||
|
- **Email processing**[^1]: import documents from your email accounts:
|
||||||
|
- Configure multiple accounts and rules for each account.
|
||||||
|
- After processing, paperless can perform actions on the messages such as marking as read, deleting and more.
|
||||||
|
- A built-in robust **multi-user permissions** system that supports 'global' permissions as well as per document or object.
|
||||||
|
- A powerful templating system that gives you more control over the consumption pipeline.
|
||||||
|
- **Optimized** for multi core systems: Paperless-ngx consumes multiple documents in parallel.
|
||||||
|
- The integrated sanity checker makes sure that your document archive is in good health.
|
||||||
|
|
||||||
This software is designed to make "going paperless" easier. No more worrying
|
[^1]: Office document and email consumption support is optional and provided by Apache Tika (see [configuration](https://docs.paperless-ngx.com/configuration/#tika))
|
||||||
about finding stuff again, feed documents right from the post box into
|
|
||||||
the scanner and then shred them. Perhaps you might find it useful too.
|
|
||||||
|
|
||||||
## Paperless, a history
|
## Paperless, a history
|
||||||
|
|
||||||
Paperless is a simple Django application running in two parts: a
|
Paperless-ngx is the official successor to the original [Paperless](https://github.com/the-paperless-project/paperless) & [Paperless-ng](https://github.com/jonaswinkler/paperless-ng) projects and is designed to distribute the responsibility of advancing and supporting the project among a team of people. [Consider joining us!](https://github.com/paperless-ngx/paperless-ngx#community-support)
|
||||||
_Consumer_ (the thing that does the indexing) and the _Web server_ (the
|
|
||||||
part that lets you search & download already-indexed documents). If you
|
|
||||||
want to learn more about its functions keep on reading after the
|
|
||||||
installation section.
|
|
||||||
|
|
||||||
Paperless-ngx is a document management system that transforms your
|
Further discussion of the transition between these projects can be found at
|
||||||
physical documents into a searchable online archive so you can keep,
|
[ng#1599](https://github.com/jonaswinkler/paperless-ng/issues/1599) and [ng#1632](https://github.com/jonaswinkler/paperless-ng/issues/1632).
|
||||||
well, _less paper_.
|
|
||||||
|
|
||||||
Paperless-ngx forked from paperless-ng to continue the great work and
|
|
||||||
distribute responsibility of supporting and advancing the project among
|
|
||||||
a team of people.
|
|
||||||
|
|
||||||
NG stands for both Angular (the framework used for the Frontend) and
|
|
||||||
next-gen. Publishing this project under a different name also avoids
|
|
||||||
confusion between paperless and paperless-ngx.
|
|
||||||
|
|
||||||
If you want to learn about what's different in paperless-ngx from
|
|
||||||
Paperless, check out these resources in the documentation:
|
|
||||||
|
|
||||||
- [Some screenshots](#screenshots) of the new UI are available.
|
|
||||||
- Read [this section](/advanced_usage#automatic-matching) if you want to learn about how paperless automates all
|
|
||||||
tagging using machine learning.
|
|
||||||
- Paperless now comes with a [proper email consumer](/usage#usage-email) that's fully tested and production ready.
|
|
||||||
- Paperless creates searchable PDF/A documents from whatever you put into the consumption directory. This means
|
|
||||||
that you can select text in image-only documents coming from your scanner.
|
|
||||||
- See [this note](/administration#encryption) about GnuPG encryption in paperless-ngx.
|
|
||||||
- Paperless is now integrated with a
|
|
||||||
[task processing queue](/setup#task_processor) that tells you at a glance when and why something is not working.
|
|
||||||
- The [changelog](/changelog) contains a detailed list of all changes in paperless-ngx.
|
|
||||||
|
|
||||||
## Screenshots
|
## Screenshots
|
||||||
|
|
||||||
This is what Paperless-ngx looks like.
|
Paperless-ngx aims to be as nice to use as it is useful. Check out some screenshots below.
|
||||||
|
|
||||||
The dashboard shows customizable views on your document and allows
|
<div class="grid-flipped-left" markdown>
|
||||||
document uploads:
|

|
||||||
|
</div>
|
||||||
|
<div class="grid-flipped-right" markdown>
|
||||||
|
The dashboard shows saved views which can be sorted. Documents can be uploaded with the button or dropped anywhere in the application.
|
||||||
|
</div>
|
||||||
|
<div class="clear"></div>
|
||||||
|
|
||||||
[](assets/screenshots/dashboard.png)
|
The document list provides three different styles to browse your documents.
|
||||||
|
|
||||||
The document list provides three different styles to scroll through your
|
{: style="width:32%"}
|
||||||
documents:
|
{: style="width:32%"}
|
||||||
|
{: style="width:32%"}
|
||||||
|
|
||||||
[](assets/screenshots/documents-table.png)
|
<div class="clear"></div>
|
||||||
|
|
||||||
[](assets/screenshots/documents-smallcards.png)
|
<div class="grid-left" markdown>
|
||||||
|
Use the 'slim' sidebar to focus on your docs and minimize the UI.
|
||||||
|
</div>
|
||||||
|
<div class="grid-right" markdown>
|
||||||
|

|
||||||
|
</div>
|
||||||
|
<div class="clear"></div>
|
||||||
|
|
||||||
[](assets/screenshots/documents-largecards.png)
|
Of course, Paperless-ngx also supports dark mode:
|
||||||
|
|
||||||
Paperless-ngx also supports dark mode:
|

|
||||||
|
|
||||||
[](assets/screenshots/documents-smallcards-dark.png)
|
<div class="clear"></div>
|
||||||
|
|
||||||
Extensive filtering mechanisms:
|
<div class="grid-left" markdown>
|
||||||
|
Quickly find documents with extensive filtering mechanisms.
|
||||||
|
</div>
|
||||||
|
<div class="grid-right" markdown>
|
||||||
|

|
||||||
|
</div>
|
||||||
|
<div class="clear"></div>
|
||||||
|
<div class="grid-left" markdown>
|
||||||
|
And perform bulk edit operations to set tags, correspondents, etc. as well as permissions.
|
||||||
|
</div>
|
||||||
|
<div class="grid-right" markdown>
|
||||||
|

|
||||||
|
</div>
|
||||||
|
<div class="clear"></div>
|
||||||
|
|
||||||
[](assets/screenshots/documents-filter.png)
|
Side-by-side editing of documents.
|
||||||
|
|
||||||
Bulk editing of document tags, correspondents, etc.:
|

|
||||||
|
|
||||||
[](assets/screenshots/bulk-edit.png)
|
<div class="grid-left" markdown>
|
||||||
|
Support for custom fields.
|
||||||
|
|
||||||
Side-by-side editing of documents:
|

|
||||||
|
|
||||||
[](assets/screenshots/editing.png)
|
</div>
|
||||||
|
<div class="grid-right" markdown>
|
||||||
|

|
||||||
|
</div>
|
||||||
|
<div class="clear"></div>
|
||||||
|
|
||||||
Tag editing. This looks about the same for correspondents and document
|
<div class="grid-left" markdown>
|
||||||
types.
|
A robust permissions system with support for 'global' and document / object permissions.
|
||||||
|
|
||||||
[](assets/screenshots/new-tag.png)
|

|
||||||
|
|
||||||
Searching provides auto complete and highlights the results.
|
</div>
|
||||||
|
<div class="grid-right" markdown>
|
||||||
|

|
||||||
|
</div>
|
||||||
|
<div class="clear"></div>
|
||||||
|
|
||||||
[](assets/screenshots/search-preview.png)
|
<div class="grid-left" markdown>
|
||||||
|
Searching provides auto complete and highlights the results.
|
||||||
|
|
||||||
[](assets/screenshots/search-results.png)
|

|
||||||
|
|
||||||
Fancy mail filters!
|
</div>
|
||||||
|
<div class="grid-right" markdown>
|
||||||
|

|
||||||
|
</div>
|
||||||
|
<div class="clear"></div>
|
||||||
|
|
||||||
[](assets/screenshots/mail-rules-edited.png)
|
Tag, correspondent, document type and storage path editing.
|
||||||
|
|
||||||
|
{: style="width:21%; float: left"}
|
||||||
|
{: style="width:21%; margin-left: 4%; float: left"}
|
||||||
|
{: style="width:21%; margin-left: 4%; float: left"}
|
||||||
|
{: style="width:21%; margin-left: 4%; float: left"}
|
||||||
|
|
||||||
|
<div class="clear"></div>
|
||||||
|
|
||||||
|
<div class="grid-half-left" markdown>
|
||||||
|
Mail rules support various filters and actions for incoming e-mails.
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
|
</div>
|
||||||
|
<div class="grid-half-right" markdown>
|
||||||
|
Consumption templates provide finer control over the document pipeline.
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
|
</div>
|
||||||
|
<div class="clear"></div>
|
||||||
|
|
||||||
|
<div class="clear"></div>
|
||||||
|
|
||||||
Mobile devices are supported.
|
Mobile devices are supported.
|
||||||
|
|
||||||
[](assets/screenshots/mobile.png)
|
{: style="width:32%"}
|
||||||
|
{: style="width:32%"}
|
||||||
|
{: style="width:32%"}
|
||||||
|
|
||||||
## Support
|
## Support
|
||||||
|
|
||||||
@@ -131,7 +189,7 @@ People interested in continuing the work on paperless-ngx are encouraged to reac

### Translation

-Paperless-ngx is available in many languages that are coordinated on [Crowdin](https://crwd.in/paperless-ngx). If you want to help out by translating paperless-ngx into your language, please head over to https://crwd.in/paperless-ngx, and thank you!
+Paperless-ngx is available in many languages that are coordinated on [Crowdin](https://crwd.in/paperless-ngx). If you want to help out by translating paperless-ngx into your language, please head over to the [Paperless-ngx project at Crowdin](https://crwd.in/paperless-ngx), and thank you!

## Scanners & Software

docs/requirements.txt (2 changed lines, new file)
@@ -0,0 +1,2 @@
+-i https://pypi.python.org/simple
+mkdocs-glightbox==0.3.4; python_version >= '3.8'
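
Purely as a sketch of how this new file might be used when previewing the documentation locally (assuming the project's existing MkDocs tooling is already installed):

```bash
# Install the extra, pinned documentation dependency...
pip install -r docs/requirements.txt

# ...then serve the docs locally with auto-reload
mkdocs serve
```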

docs/setup.md (187 changed lines)
@@ -25,12 +25,15 @@ necessary configuration files, pull the docker image, start paperless
and create your user account. This script essentially performs all the
steps described in [Docker setup](#docker_hub) automatically.

-1. Make sure that docker and docker-compose are installed.
+1. Make sure that Docker and Docker Compose are installed.
+
+!!! tip
+
+See the Docker installation instructions at https://docs.docker.com/engine/install/

2. Download and run the installation script:

```shell-session
-$ bash -c "$(curl -L https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/install-paperless-ngx.sh)"
+$ bash -c "$(curl --location --silent --show-error https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/install-paperless-ngx.sh)"
```

!!! note
@@ -62,19 +65,19 @@ steps described in [Docker setup](#docker_hub) automatically.
|
|||||||
For new installations, it is recommended to use PostgreSQL as the
|
For new installations, it is recommended to use PostgreSQL as the
|
||||||
database backend.
|
database backend.
|
||||||
|
|
||||||
3. Install [Docker](https://www.docker.com/) and
|
3. Install [Docker](https://docs.docker.com/engine/install/) and
|
||||||
[docker-compose](https://docs.docker.com/compose/install/).
|
[Docker Compose](https://docs.docker.com/compose/install/).
|
||||||
|
|
||||||
!!! warning
|
!!! warning
|
||||||
|
|
||||||
If you want to use the included `docker-compose.*.yml` file, you
|
If you want to use the included `docker-compose.*.yml` file, you
|
||||||
need to have at least Docker version **17.09.0** and docker-compose
|
need to have at least Docker version **17.09.0** and Docker Compose
|
||||||
version **1.17.0**. To check do: `docker-compose -v` or `docker -v`
|
version **v2**. To check do: `docker compose -v` or `docker -v`
|
||||||
|
|
||||||
See the [Docker installation guide](https://docs.docker.com/engine/install/) on how to install the current
|
See the [Docker installation guide](https://docs.docker.com/engine/install/) on how to install the current
|
||||||
version of Docker for your operating system or Linux distribution of
|
version of Docker for your operating system or Linux distribution of
|
||||||
choice. To get the latest version of docker-compose, follow the
|
choice. To get the latest version of Docker Compose, follow the
|
||||||
[docker-compose installation guide](https://docs.docker.com/compose/install/linux/) if your package repository
|
[Docker Compose installation guide](https://docs.docker.com/compose/install/linux/) if your package repository
|
||||||
doesn't include it.
|
doesn't include it.
|
||||||
|
|
||||||
4. Modify `docker-compose.yml` to your preferences. You may want to
|
4. Modify `docker-compose.yml` to your preferences. You may want to
|
||||||
@@ -124,7 +127,7 @@ steps described in [Docker setup](#docker_hub) automatically.
|
|||||||
user in the container. This value (`user_id` below), should be
|
user in the container. This value (`user_id` below), should be
|
||||||
the same id that `USERMAP_UID` and `USERMAP_GID` are set to in
|
the same id that `USERMAP_UID` and `USERMAP_GID` are set to in
|
||||||
the next step. See `USERMAP_UID` and `USERMAP_GID`
|
the next step. See `USERMAP_UID` and `USERMAP_GID`
|
||||||
[here](/configuration#docker).
|
[here](configuration.md#docker).
|
||||||
|
|
||||||
Your entry for Paperless should contain something like:
|
Your entry for Paperless should contain something like:
|
||||||
|
|
||||||
@@ -148,12 +151,12 @@ steps described in [Docker setup](#docker_hub) automatically.
!!! note

You can copy any setting from the file `paperless.conf.example` and
-paste it here. Have a look at [configuration](/configuration) to see what's available.
+paste it here. Have a look at [configuration](configuration.md) to see what's available.

!!! note

You can utilize Docker secrets for configuration settings by
-appending `_FILE` to configuration values. For example `PAPERLESS_DBUSER`
+appending `_FILE` to configuration values. For example [`PAPERLESS_DBUSER`](configuration.md#PAPERLESS_DBUSER)
can be set using `PAPERLESS_DBUSER_FILE=/var/run/secrets/password.txt`.

!!! warning
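To make the `_FILE` mechanism above concrete, here is a minimal sketch; the file paths are illustrative:

```bash
# Put the secret value in a file that is mounted into the container...
echo "paperless" > ./secrets/dbuser.txt

# ...and point the _FILE variant of the setting at it, instead of
# placing the value itself in the environment:
# PAPERLESS_DBUSER_FILE=/var/run/secrets/dbuser.txt
```
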
@@ -162,16 +165,16 @@ steps described in [Docker setup](#docker_hub) automatically.
system notifications with `inotify`. When storing the consumption
directory on such a file system, paperless will not pick up new
files with the default configuration. You will need to use
-`PAPERLESS_CONSUMER_POLLING`, which will disable inotify. See
-[here](/configuration#polling).
+[`PAPERLESS_CONSUMER_POLLING`](configuration.md#PAPERLESS_CONSUMER_POLLING), which will disable inotify. See
+[here](configuration.md#polling).

-6. Run `docker-compose pull`. This will pull the image.
+6. Run `docker compose pull`. This will pull the image.

7. To be able to login, you will need a super user. To create it,
execute the following command:

```shell-session
-$ docker-compose run --rm webserver createsuperuser
+$ docker compose run --rm webserver createsuperuser
```

or using docker exec from within the container:
@@ -183,7 +186,7 @@ steps described in [Docker setup](#docker_hub) automatically.
|
|||||||
This will prompt you to set a username, an optional e-mail address
|
This will prompt you to set a username, an optional e-mail address
|
||||||
and finally a password (at least 8 characters).
|
and finally a password (at least 8 characters).
|
||||||
|
|
||||||
8. Run `docker-compose up -d`. This will create and start the necessary containers.
|
8. Run `docker compose up -d`. This will create and start the necessary containers.
|
||||||
|
|
||||||
9. The default `docker-compose.yml` exports the webserver on your local
|
9. The default `docker-compose.yml` exports the webserver on your local
|
||||||
port
|
port
|
||||||
@@ -209,39 +212,27 @@ steps described in [Docker setup](#docker_hub) automatically.
|
|||||||
root as well.
|
root as well.
|
||||||
|
|
||||||
3. In the `docker-compose.yml` file, find the line that instructs
|
3. In the `docker-compose.yml` file, find the line that instructs
|
||||||
docker-compose to pull the paperless image from Docker Hub:
|
Docker Compose to pull the paperless image from Docker Hub:
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
webserver:
|
webserver:
|
||||||
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||||
```
|
```
|
||||||
|
|
||||||
and replace it with a line that instructs docker-compose to build
|
and replace it with a line that instructs Docker Compose to build
|
||||||
the image from the current working directory instead:
|
the image from the current working directory instead:
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
webserver:
|
webserver:
|
||||||
build:
|
build:
|
||||||
context: .
|
context: .
|
||||||
args:
|
|
||||||
QPDF_VERSION: x.y.x
|
|
||||||
PIKEPDF_VERSION: x.y.z
|
|
||||||
PSYCOPG2_VERSION: x.y.z
|
|
||||||
JBIG2ENC_VERSION: 0.29
|
|
||||||
```
|
```
|
||||||
|
|
||||||
!!! note
|
|
||||||
|
|
||||||
You should match the build argument versions to the version for the
|
|
||||||
release you have checked out. These are pre-built images with
|
|
||||||
certain, more updated software. If you want to build these images
|
|
||||||
your self, that is possible, but beyond the scope of these steps.
|
|
||||||
|
|
||||||
4. Follow steps 3 to 8 of [Docker Setup](#docker_hub). When asked to run
|
4. Follow steps 3 to 8 of [Docker Setup](#docker_hub). When asked to run
|
||||||
`docker-compose pull` to pull the image, do
|
`docker compose pull` to pull the image, do
|
||||||
|
|
||||||
```shell-session
|
```shell-session
|
||||||
$ docker-compose build
|
$ docker compose build
|
||||||
```
|
```
|
||||||
|
|
||||||
instead to build the image.
|
instead to build the image.
|
||||||
@@ -255,10 +246,11 @@ supported.

1. Install dependencies. Paperless requires the following packages.

-- `python3` 3.8, 3.9
+- `python3` - 3.9 - 3.11 are supported
- `python3-pip`
- `python3-dev`
- `default-libmysqlclient-dev` for MariaDB
+- `pkg-config` for mysqlclient (python dependency)
- `fonts-liberation` for generating thumbnails for plain text
files
- `imagemagick` >= 6 for PDF conversion

@@ -273,7 +265,7 @@ supported.
Use this list for your preferred package management:

```
-python3 python3-pip python3-dev imagemagick fonts-liberation gnupg libpq-dev default-libmysqlclient-dev libmagic-dev mime-support libzbar0 poppler-utils
+python3 python3-pip python3-dev imagemagick fonts-liberation gnupg libpq-dev default-libmysqlclient-dev pkg-config libmagic-dev mime-support libzbar0 poppler-utils
```
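On a Debian or Ubuntu system, for instance, the updated list can be installed in one go (the use of `apt-get` is an assumption; adapt the command to your distribution's package manager):

```shell-session
$ sudo apt-get update
$ sudo apt-get install python3 python3-pip python3-dev imagemagick fonts-liberation gnupg \
    libpq-dev default-libmysqlclient-dev pkg-config libmagic-dev mime-support libzbar0 poppler-utils
```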

These dependencies are required for OCRmyPDF, which is used for text

@@ -341,41 +333,41 @@ supported.
home folder of the user you created before (`/opt/paperless`).

Optional: If you cloned the git repo, you will have to
-compile the frontend yourself, see [here](/development#front-end-development)
+compile the frontend yourself, see [here](development.md#front-end-development)
and use the `build` step, not `serve`.

-6. Configure paperless. See [configuration](/configuration) for details.
+6. Configure paperless. See [configuration](configuration.md) for details.
Edit the included `paperless.conf` and adjust the settings to your
needs. Required settings for getting
paperless running are:

-- `PAPERLESS_REDIS` should point to your redis server, such as
+- [`PAPERLESS_REDIS`](configuration.md#PAPERLESS_REDIS) should point to your redis server, such as
<redis://localhost:6379>.
-- `PAPERLESS_DBENGINE` optional, and should be one of `postgres`,
+- [`PAPERLESS_DBENGINE`](configuration.md#PAPERLESS_DBENGINE) optional, and should be one of `postgres`,
`mariadb`, or `sqlite`
-- `PAPERLESS_DBHOST` should be the hostname on which your
+- [`PAPERLESS_DBHOST`](configuration.md#PAPERLESS_DBHOST) should be the hostname on which your
PostgreSQL server is running. Do not configure this to use
SQLite instead. Also configure port, database name, user and
password as necessary.
-- `PAPERLESS_CONSUMPTION_DIR` should point to a folder which
+- [`PAPERLESS_CONSUMPTION_DIR`](configuration.md#PAPERLESS_CONSUMPTION_DIR) should point to a folder which
paperless should watch for documents. You might want to have
-this somewhere else. Likewise, `PAPERLESS_DATA_DIR` and
-`PAPERLESS_MEDIA_ROOT` define where paperless stores its data.
+this somewhere else. Likewise, [`PAPERLESS_DATA_DIR`](configuration.md#PAPERLESS_DATA_DIR) and
+[`PAPERLESS_MEDIA_ROOT`](configuration.md#PAPERLESS_MEDIA_ROOT) define where paperless stores its data.
If you like, you can point both to the same directory.
-- `PAPERLESS_SECRET_KEY` should be a random sequence of
+- [`PAPERLESS_SECRET_KEY`](configuration.md#PAPERLESS_SECRET_KEY) should be a random sequence of
characters. It's used for authentication. Failure to do so
allows third parties to forge authentication credentials.
-- `PAPERLESS_URL` if you are behind a reverse proxy. This should
+- [`PAPERLESS_URL`](configuration.md#PAPERLESS_URL) if you are behind a reverse proxy. This should
point to your domain. Please see
-[configuration](/configuration) for more
+[configuration](configuration.md) for more
information.
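Put together, a minimal `paperless.conf` covering the required settings above might look like this sketch (hostnames, paths, the key and the URL are placeholder values for illustration only):

```
PAPERLESS_REDIS=redis://localhost:6379
PAPERLESS_DBHOST=localhost
PAPERLESS_CONSUMPTION_DIR=/opt/paperless/consume
PAPERLESS_DATA_DIR=/opt/paperless/data
PAPERLESS_MEDIA_ROOT=/opt/paperless/media
PAPERLESS_SECRET_KEY=replace-me-with-a-long-random-string
PAPERLESS_URL=https://paperless.example.com
```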
Many more adjustments can be made to paperless, especially the OCR
part. The following options are recommended for everyone:

-- Set `PAPERLESS_OCR_LANGUAGE` to the language most of your
+- Set [`PAPERLESS_OCR_LANGUAGE`](configuration.md#PAPERLESS_OCR_LANGUAGE) to the language most of your
documents are written in.
-- Set `PAPERLESS_TIME_ZONE` to your local time zone.
+- Set [`PAPERLESS_TIME_ZONE`](configuration.md#PAPERLESS_TIME_ZONE) to your local time zone.
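For example (illustrative values; use the Tesseract code for your own language and your own zone):

```
PAPERLESS_OCR_LANGUAGE=deu
PAPERLESS_TIME_ZONE=Europe/Berlin
```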

!!! warning

@@ -483,7 +475,7 @@ supported.
in front of gunicorn instead.

For instructions on how to use nginx for that,
-[see the instructions below](/setup#nginx).
+[see the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Using-a-Reverse-Proxy-with-Paperless-ngx#nginx).

!!! warning

@@ -521,7 +513,7 @@ supported.
not available for most distributions.

15. Optional: If using the NLTK machine learning processing (see
-`PAPERLESS_ENABLE_NLTK` in [configuration](/configuration#software_tweaks) for details),
+[`PAPERLESS_ENABLE_NLTK`](configuration.md#PAPERLESS_ENABLE_NLTK) for details),
download the NLTK data for the Snowball
Stemmer, Stopwords and Punkt tokenizer to your
`PAPERLESS_DATA_DIR/nltk`. Refer to the [NLTK

@@ -552,14 +544,14 @@ to
image: ghcr.io/paperless-ngx/paperless-ngx:latest
```

-and then run `docker-compose up -d` which will pull the new image
+and then run `docker compose up -d` which will pull the new image
recreate the container. That's it!

Users who installed with the bare-metal route should also update their
Git clone to point to `https://github.com/paperless-ngx/paperless-ngx`,
e.g. using the command
`git remote set-url origin https://github.com/paperless-ngx/paperless-ngx`
-and then pull the lastest version.
+and then pull the latest version.

## Migrating from Paperless

@@ -570,7 +562,7 @@ your setup depending on how you installed paperless.
This setup describes how to update an existing paperless Docker
installation. The important things to keep in mind are as follows:

-- Read the [changelog](/changelog) and
+- Read the [changelog](changelog.md) and
take note of breaking changes.
- You should decide if you want to stick with SQLite or want to
migrate your database to PostgreSQL. See [documentation](#sqlite_to_psql)

@@ -581,7 +573,7 @@ installation. The important things to keep in mind are as follows:
- The task scheduler of paperless, which is used to execute periodic
tasks such as email checking and maintenance, requires a
[redis](https://redis.io/) message broker instance. The
-docker-compose route takes care of that.
+Docker Compose route takes care of that.
- The layout of the folder structure for your documents and data
remains the same, so you can just plug your old docker volumes into
paperless-ngx and expect it to find everything where it should be.

@@ -592,7 +584,7 @@ Migration to paperless-ngx is then performed in a few simple steps:

```bash
$ cd /path/to/current/paperless
-$ docker-compose down
+$ docker compose down
```

2. Do a backup for two purposes: If something goes wrong, you still

@@ -600,7 +592,7 @@ Migration to paperless-ngx is then performed in a few simple steps:
switch back to paperless.

3. Download the latest release of paperless-ngx. You can either go with
-the docker-compose files from
+the Docker Compose files from
[here](https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose)
or clone the repository to build the image yourself (see
[above](#docker_build)). You can

@@ -630,14 +622,14 @@ Migration to paperless-ngx is then performed in a few simple steps:
See [Docker setup](#docker_hub) details on
which edits are advised.

-6. [Update paperless.](/administration#updating)
+6. [Update paperless.](administration.md#updating)

7. In order to find your existing documents with the new search
feature, you need to invoke a one-time operation that will create
the search index:

```shell-session
-$ docker-compose run --rm webserver document_index reindex
+$ docker compose run --rm webserver document_index reindex
```

This will migrate your database and create the search index. After

@@ -646,7 +638,7 @@ Migration to paperless-ngx is then performed in a few simple steps:
8. Start paperless-ngx.

```bash
-$ docker-compose up -d
+$ docker compose up -d
```

This will run paperless in the background and automatically start it

@@ -669,28 +661,28 @@ commands as well.
1. Stop and remove the paperless container
2. If using an external database, stop the container
3. Update Redis configuration
-a) If `REDIS_URL` is already set, change it to `PAPERLESS_REDIS`
+a) If `REDIS_URL` is already set, change it to [`PAPERLESS_REDIS`](configuration.md#PAPERLESS_REDIS)
and continue to step 4.
b) Otherwise, in the `docker-compose.yml` add a new service for
Redis, following [the example compose
files](https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose)
-c) Set the environment variable `PAPERLESS_REDIS` so it points to
+c) Set the environment variable [`PAPERLESS_REDIS`](configuration.md#PAPERLESS_REDIS) so it points to
the new Redis container
4. Update user mapping
a) If set, change the environment variable `PUID` to `USERMAP_UID`
b) If set, change the environment variable `PGID` to `USERMAP_GID`
5. Update configuration paths
-a) Set the environment variable `PAPERLESS_DATA_DIR` to `/config`
+a) Set the environment variable [`PAPERLESS_DATA_DIR`](configuration.md#PAPERLESS_DATA_DIR) to `/config`
6. Update media paths
-a) Set the environment variable `PAPERLESS_MEDIA_ROOT` to
+a) Set the environment variable [`PAPERLESS_MEDIA_ROOT`](configuration.md#PAPERLESS_MEDIA_ROOT) to
`/data/media`
7. Update timezone
-a) Set the environment variable `PAPERLESS_TIME_ZONE` to the same
+a) Set the environment variable [`PAPERLESS_TIME_ZONE`](configuration.md#PAPERLESS_TIME_ZONE) to the same
value as `TZ`
8. Modify the `image:` to point to
`ghcr.io/paperless-ngx/paperless-ngx:latest` or a specific version
if preferred.
-9. Start the containers as before, using `docker-compose`.
+9. Start the containers as before, using `docker compose`.
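For step 3b above, a Redis service in the spirit of the linked example compose files could look like the sketch below (the service name `broker`, the image tag and the volume are assumptions; check the example files for the versions currently in use):

```yaml
services:
  broker:
    image: docker.io/library/redis:7
    restart: unless-stopped
    volumes:
      - redisdata:/data

volumes:
  redisdata:
```

With a service named `broker`, step 3c would then use `PAPERLESS_REDIS: redis://broker:6379`.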

## Moving data from SQLite to PostgreSQL or MySQL/MariaDB {#sqlite_to_psql}

@@ -717,7 +709,7 @@ below use PostgreSQL, but are applicable to MySQL/MariaDB with the
!!! warning

MySQL is case insensitive by default, treating values like "Name" and
-"NAME" as identical. See [MySQL caveats](/advanced_usage#mysql-caveats) for details.
+"NAME" as identical. See [MySQL caveats](advanced_usage.md#mysql-caveats) for details.

!!! warning

@@ -738,7 +730,7 @@ below use PostgreSQL, but are applicable to MySQL/MariaDB with the
file to `docker-compose.yml`. Remember to adjust the consumption
directory, if necessary.
b) Without docker, configure the database in your `paperless.conf`
-file. See [configuration](/configuration) for
+file. See [configuration](configuration.md) for
details.

3. Open a shell and initialize the database:

@@ -748,7 +740,7 @@ below use PostgreSQL, but are applicable to MySQL/MariaDB with the

``` shell-session
$ cd /path/to/paperless
-$ docker-compose run --rm webserver /bin/bash
+$ docker compose run --rm webserver /bin/bash
```

This will launch the container and initialize the PostgreSQL

@@ -801,7 +793,7 @@ Execute this:

```shell-session
$ cd /path/to/paperless
-$ docker-compose run --rm webserver migrate documents 0023
+$ docker compose run --rm webserver migrate documents 0023
```

Or without docker:

@@ -822,36 +814,36 @@ the Pi and configuring some options in paperless can help improve
performance immensely:

- Stick with SQLite to save some resources.
-- Consider setting `PAPERLESS_OCR_PAGES` to 1, so that paperless will
+- Consider setting [`PAPERLESS_OCR_PAGES`](configuration.md#PAPERLESS_OCR_PAGES) to 1, so that paperless will
only OCR the first page of your documents. In most cases, this page
contains enough information to be able to find it.
-- `PAPERLESS_TASK_WORKERS` and `PAPERLESS_THREADS_PER_WORKER` are
+- [`PAPERLESS_TASK_WORKERS`](configuration.md#PAPERLESS_TASK_WORKERS) and [`PAPERLESS_THREADS_PER_WORKER`](configuration.md#PAPERLESS_THREADS_PER_WORKER) are
configured to use all cores. The Raspberry Pi models 3 and up have 4
cores, meaning that paperless will use 2 workers and 2 threads per
worker. This may result in sluggish response times during
consumption, so you might want to lower these settings (example: 2
workers and 1 thread to always have some computing power left for
other tasks).
-- Keep `PAPERLESS_OCR_MODE` at its default value `skip` and consider
+- Keep [`PAPERLESS_OCR_MODE`](configuration.md#PAPERLESS_OCR_MODE) at its default value `skip` and consider
OCR'ing your documents before feeding them into paperless. Some
scanners are able to do this!
-- Set `PAPERLESS_OCR_SKIP_ARCHIVE_FILE` to `with_text` to skip archive
+- Set [`PAPERLESS_OCR_SKIP_ARCHIVE_FILE`](configuration.md#PAPERLESS_OCR_SKIP_ARCHIVE_FILE) to `with_text` to skip archive
file generation for already ocr'ed documents, or `always` to skip it
for all documents.
- If you want to perform OCR on the device, consider using
`PAPERLESS_OCR_CLEAN=none`. This will speed up OCR times and use
less memory at the expense of slightly worse OCR results.
-- If using docker, consider setting `PAPERLESS_WEBSERVER_WORKERS` to 1. This will save some memory.
+- If using docker, consider setting [`PAPERLESS_WEBSERVER_WORKERS`](configuration.md#PAPERLESS_WEBSERVER_WORKERS) to 1. This will save some memory.
-- Consider setting `PAPERLESS_ENABLE_NLTK` to false, to disable the
+- Consider setting [`PAPERLESS_ENABLE_NLTK`](configuration.md#PAPERLESS_ENABLE_NLTK) to false, to disable the
more advanced language processing, which can take more memory and
processing time.

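Taken together, a Raspberry-Pi-oriented tuning block based on the suggestions above (these are the example values from the list, not required settings) could look like:

```
PAPERLESS_OCR_PAGES=1
PAPERLESS_TASK_WORKERS=2
PAPERLESS_THREADS_PER_WORKER=1
PAPERLESS_OCR_MODE=skip
PAPERLESS_OCR_SKIP_ARCHIVE_FILE=with_text
PAPERLESS_OCR_CLEAN=none
PAPERLESS_WEBSERVER_WORKERS=1
PAPERLESS_ENABLE_NLTK=false
```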
-For details, refer to [configuration](/configuration).
+For details, refer to [configuration](configuration.md).

!!! note

Updating the
-[automatic matching algorithm](/advanced_usage#automatic-matching) takes quite a bit of time. However, the update mechanism
+[automatic matching algorithm](advanced_usage.md#automatic-matching) takes quite a bit of time. However, the update mechanism
checks if your data has changed before doing the heavy lifting. If you
experience the algorithm taking too much cpu time, consider changing the
schedule in the admin interface to daily. You can also manually invoke

@@ -862,45 +854,8 @@ For details, refer to [configuration](/configuration).

# Using nginx as a reverse proxy {#nginx}

-If you want to expose paperless to the internet, you should hide it
-behind a reverse proxy with SSL enabled.
-
-In addition to the usual configuration for SSL, the following
-configuration is required for paperless to operate:
-
-```nginx
-http {
-
-    # Adjust as required. This is the maximum size for file uploads.
-    # The default value 1M might be a little too small.
-    client_max_body_size 10M;
-
-    server {
-
-        location / {
-
-            # Adjust host and port as required.
-            proxy_pass http://localhost:8000/;
-
-            # These configuration options are required for WebSockets to work.
-            proxy_http_version 1.1;
-            proxy_set_header Upgrade $http_upgrade;
-            proxy_set_header Connection "upgrade";
-
-            proxy_redirect off;
-            proxy_set_header Host $host;
-            proxy_set_header X-Real-IP $remote_addr;
-            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-            proxy_set_header X-Forwarded-Host $server_name;
-            add_header P3P 'CP=""'; # may not be required in all setups
-        }
-    }
-}
-```
-
-The `PAPERLESS_URL` configuration variable is also required when using a
-reverse proxy. Please refer to the [hosting and security](/configuration#hosting-and-security) docs.
-
-Also read
-[this](https://channels.readthedocs.io/en/stable/deploying.html#nginx-supervisor-ubuntu),
-towards the end of the section.
+Please see [the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Using-a-Reverse-Proxy-with-Paperless-ngx#nginx) for user-maintained documentation of using nginx with Paperless-ngx.
+
+# Enhancing security {#security}
+
+Please see [the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Using-Security-Tools-with-Paperless-ngx) for user-maintained documentation of how to configure security tools like Fail2ban with Paperless-ngx.

@@ -6,7 +6,7 @@ Check for the following issues:

- Ensure that the directory you're putting your documents in is the
folder paperless is watching. With docker, this setting is performed
-in the `docker-compose.yml` file. Without docker, look at the
+in the `docker-compose.yml` file. Without Docker, look at the
`CONSUMPTION_DIR` setting. Don't adjust this setting if you're
using docker.

@@ -46,8 +46,7 @@ run:
If you notice that the consumer will only pickup files in the
consumption directory at startup, but won't find any other files added
later, you will need to enable filesystem polling with the configuration
-option `PAPERLESS_CONSUMER_POLLING`, see
-`[here](/configuration#polling).
+option [`PAPERLESS_CONSUMER_POLLING`](configuration.md#PAPERLESS_CONSUMER_POLLING).

This will disable listening to filesystem changes with inotify and
paperless will manually check the consumption directory for changes
@@ -121,7 +120,7 @@ Gotenberg raises this error.

You can increase the timeout by configuring a command flag for Gotenberg
(see also [here](https://gotenberg.dev/docs/modules/api#properties)). If
-using docker-compose, this is achieved by the following configuration
+using Docker Compose, this is achieved by the following configuration
change in the `docker-compose.yml` file:

```yaml

@@ -145,7 +144,7 @@ The following error occured while consuming document.pdf: [Errno 13] Permission
This happens when paperless does not have permission to delete files
inside the consumption directory. Ensure that `USERMAP_UID` and
`USERMAP_GID` are set to the user id and group id you use on the host
-operating system, if these are different from `1000`. See [Docker setup](/setup#docker_hub).
+operating system, if these are different from `1000`. See [Docker setup](setup.md#docker_hub).

Also ensure that you are able to read and write to the consumption
directory on the host.
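In a Compose setup, that typically means something like the following under the webserver service (the `1000` values are placeholders; your actual IDs can be found with `id -u` and `id -g` on the host):

```yaml
  webserver:
    environment:
      USERMAP_UID: 1000
      USERMAP_GID: 1000
```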
@@ -265,8 +264,8 @@ This probably indicates paperless tried to consume the same file twice.
This can happen for a number of reasons, depending on how documents are
placed into the consume folder. If paperless is using inotify (the
default) to check for documents, try adjusting the
-[inotify configuration](/configuration#inotify). If polling is enabled, try adjusting the
-[polling configuration](/configuration#polling).
+[inotify configuration](configuration.md#inotify). If polling is enabled, try adjusting the
+[polling configuration](configuration.md#polling).

## Consumer fails waiting for file to remain unmodified.

@@ -278,7 +277,7 @@ You might find messages like these in your log files:

This indicates paperless timed out while waiting for the file to be
completely written to the consume folder. Adjusting
-[polling configuration](/configuration#polling) values should resolve the issue.
+[polling configuration](configuration.md#polling) values should resolve the issue.

!!! note

@@ -297,8 +296,8 @@ This indicates paperless was unable to open the file, as the OS reported
the file as still being in use. To prevent a crash, paperless did not
try to consume the file. If paperless is using inotify (the default) to
check for documents, try adjusting the
-[inotify configuration](/configuration#inotify). If polling is enabled, try adjusting the
-[polling configuration](/configuration#polling).
+[inotify configuration](configuration.md#inotify). If polling is enabled, try adjusting the
+[polling configuration](configuration.md#polling).

!!! note

@@ -320,7 +319,7 @@ many workers attempting to access the database simultaneously.

Consider changing to the PostgreSQL database if you will be processing
many documents at once often. Otherwise, try tweaking the
-`PAPERLESS_DB_TIMEOUT` setting to allow more time for the database to
+[`PAPERLESS_DB_TIMEOUT`](configuration.md#PAPERLESS_DB_TIMEOUT) setting to allow more time for the database to
unlock. This may have minor performance implications.
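For example (assuming the value is a timeout in seconds; 30 is only an illustrative choice):

```
PAPERLESS_DB_TIMEOUT=30
```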

## gunicorn fails to start with "is not a valid port number"

@@ -330,7 +329,7 @@ environment variable named `${serviceName}_PORT`. This is
the same environment variable which is used by Paperless to optionally
change the port gunicorn listens on.

-To fix this, set `PAPERLESS_PORT` again to your desired port, or the
+To fix this, set [`PAPERLESS_PORT`](configuration.md#PAPERLESS_PORT) again to your desired port, or the
default of 8000.
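For instance, to pin it back to the default explicitly:

```
PAPERLESS_PORT=8000
```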

## Database Warns about unique constraint "documents_tag_name_uniq

@@ -345,3 +344,15 @@ STATEMENT: INSERT INTO "documents_tag" ("owner_id", "name", "match", "matching_

This can happen during heavy consumption when using polling. Paperless will handle it correctly and the file
will still be consumed

+## Consumption fails with "Ghostscript PDF/A rendering failed"
+
+Newer versions of OCRmyPDF will fail if they encounter errors during processing.
+This is intentional as the output archive file may differ in unexpected or undesired
+ways from the original. As the logs indicate, if you encounter this error you can set
+`PAPERLESS_OCR_USER_ARGS: '{"continue_on_soft_render_error": true}'` to try to 'force'
+processing documents with this issue.
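In a Docker Compose setup this goes under the webserver's environment, for example (service name as in the default compose files):

```yaml
  webserver:
    environment:
      PAPERLESS_OCR_USER_ARGS: '{"continue_on_soft_render_error": true}'
```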
+
+## Platform-Specific Deployment Troubleshooting
+
+A user-maintained wiki page is available to help troubleshoot issues that may arise when trying to deploy Paperless-ngx on specific platforms, for example SELinux. Please see [the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Platform%E2%80%90Specific-Troubleshooting).

docs/usage.md
@@ -62,14 +62,16 @@ following operations on your documents:
paperless to create archived versions for digital documents, you can
configure that by configuring
`PAPERLESS_OCR_SKIP_ARCHIVE_FILE=with_text`. Please read the
-[relevant section in the documentation](/configuration#ocr).
+[relevant section in the documentation](configuration.md#ocr).

!!! note

No matter which options you choose, Paperless will always store the
original document that it found in the consumption directory or in the
mail and will never overwrite that document. Archived versions are
-stored alongside the original versions.
+stored alongside the original versions. Any files found in the
+consumption directory will be stored inside the Paperless-ngx file
+structure and will not be retained in the consumption directory.

### The consumption directory

@@ -77,7 +79,9 @@ The primary method of getting documents into your database is by putting
them in the consumption directory. The consumer waits patiently, looking
for new additions to this directory. When it finds them,
the consumer goes about the process of parsing them with the OCR,
-indexing what it finds, and storing it in the media directory.
+indexing what it finds, and storing it in the media directory. You should
+think of this folder as a temporary location, as files will be re-created
+inside Paperless-ngx and removed from the consumption folder.

Getting stuff into this directory is up to you. If you're running
Paperless on your local computer, you might just want to drag and drop
@@ -88,27 +92,25 @@ Typically, you're looking at an FTP server like
[Proftpd](http://www.proftpd.org/) or a Windows folder share with
[Samba](https://www.samba.org/).

+!!! warning
+
+Files found in the consumption directory that are consumed will be
+removed from the consumption directory and stored inside the
+Paperless-ngx file structure using any settings / storage paths
+you have specified. This action is performed as safely as possible
+but this means it is expected that files in the consumption
+directory will no longer exist (there) after being consumed.

### Web UI Upload

-The dashboard has a file drop field to upload documents to paperless.
-Simply drag a file onto this field or select a file with the file
-dialog. Multiple files are supported.
-
-You can also upload documents on any other page of the web UI by
-dragging-and-dropping files into your browser window.
+The dashboard has a button to upload documents to paperless or you
+can simply drag a file anywhere into the app to initiate the consumption
+process.

### Mobile upload {#usage-mobile_upload}

-The mobile app over at [https://github.com/qcasey/paperless_share](https://github.com/qcasey/paperless_share)
-allows Android users to share any documents with paperless. This can be
-combined with any of the mobile scanning apps out there, such as Office
-Lens.
-
-Furthermore, there is the [Paperless
-App](https://github.com/bauerj/paperless_app) as well, which not only
-has document upload, but also document browsing and download features.
-
-Another option is [Paperless Mobile](https://github.com/astubenbord/paperless-mobile), an Android app that supports document upload, scanning, management of labels and more.
+Please see [the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Affiliated-Projects) for a user-maintained list of affiliated projects and
+software (e.g. for mobile devices) that is compatible with Paperless-ngx.

### IMAP (Email) {#usage-email}

@@ -132,9 +134,9 @@ These rules perform the following:
5. If documents were consumed from a mail, the rule action is performed
on that mail.

-Paperless will completely ignore mails that do not match your filters.
-It will also only perform the action on mails that it has consumed
-documents from.
+Paperless will check all emails only once and completely ignore messages
+that do not match your filters. It will also only perform the rule action
+on e-mails that it has consumed documents from.

The actions all ensure that the same mail is not consumed twice by
different means. These are as follows:

@@ -195,20 +197,31 @@ different means. These are as follows:
them further.

Paperless is set up to check your mails every 10 minutes. This can be
-configured via `PAPERLESS_EMAIL_TASK_CRON` (see [software tweaks](/configuration#software_tweaks))
+configured via [`PAPERLESS_EMAIL_TASK_CRON`](configuration.md#PAPERLESS_EMAIL_TASK_CRON)
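The value is a standard cron expression; the default 10-minute schedule corresponds to:

```
PAPERLESS_EMAIL_TASK_CRON=*/10 * * * *
```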

### REST API

-You can also submit a document using the REST API, see [POSTing documents](/api#file-uploads)
+You can also submit a document using the REST API, see [POSTing documents](api.md#file-uploads)
for details.

## Permissions

As of version 1.14.0 Paperless-ngx added core support for user / group permissions. Permissions is
-based around an object 'owner' and 'view' and 'edit' permissions can be granted to other users
-or groups.
-
-Permissions uses the built-in user model of the backend framework, Django.
+based around 'global' permissions as well as 'object-level' permissions. Global permissions designate
+which parts of the application a user can access (e.g. Documents, Tags, Settings) and object-level
+determine which objects are visible or editable. All objects have an 'owner' and 'view' and 'edit'
+permissions which can be granted to other users or groups. The paperless-ngx permissions system uses
+the built-in user model of the backend framework, Django.
+
+!!! tip
+
+Object-level permissions only apply to the object itself. In other words, setting permissions
+for a Tag will _not_ affect the permissions of documents that have the Tag.
+
+Permissions can be set using the new "Permissions" tab when editing documents, or bulk-applied
+in the UI by selecting documents and choosing the "Permissions" button. Owner can also optionally
+be set for documents uploaded via the API. Documents consumed via the consumption dir currently
+do not have an owner set.

!!! note

@@ -223,10 +236,12 @@ Permissions uses the built-in user model of the backend framework, Django.

Note that superusers have access to all objects.

-Permissions can be set using the new "Permissions" tab when editing documents, or bulk-applied
-in the UI by selecting documents and choosing the "Permissions" button. Owner can also optionally
-be set for documents uploaded via the API. Documents consumed via the consumption dir currently
-do not have an owner set.
+### Default permissions
+
+Default permissions for documents can be set using consumption templates.
+
+For objects created via the web UI (tags, doc types, etc.) the default is to set the current user
+as owner and no extra permissions, but you can explicitly set these under Settings > Permissions.

### Users and Groups

@@ -235,6 +250,112 @@ These can be found under Settings > Users & Groups, assuming the user has access
as a member of a group those permissions will be inherited and this is reflected in the UI. Explicit
permissions can be granted to limit access to certain parts of the UI (and corresponding API endpoints).

+### Password reset
+
+In order to enable the password reset feature you will need to set up an SMTP backend, see
+[`PAPERLESS_EMAIL_HOST`](configuration.md#PAPERLESS_EMAIL_HOST)
+
+## Consumption templates
+
+Consumption templates were introduced in v2.0 and allow for finer control over what metadata (tags, doc
+types) and permissions (owner, privileges) are assigned to documents during consumption. In general,
+templates are applied sequentially (by sort order) but subsequent templates will never override an
+assignment from a preceding template. The same is true for mail rules, e.g. if you set the correspondent
+in a mail rule any subsequent consumption templates that are applied _will not_ overwrite this. The
+exception to this is assignments that can be multiple e.g. tags and permissions, which will be merged.
+
+Consumption templates allow you to filter by:
+
+- Source, e.g. documents uploaded via consume folder, API (& the web UI) and mail fetch
+- File name, including wildcards e.g. \*.pdf will apply to all pdfs
+- File path, including wildcards. Note that enabling `PAPERLESS_CONSUMER_RECURSIVE` would allow, for
+example, automatically assigning documents to different owners based on the upload directory.
+- Mail rule. Choosing this option will force 'mail fetch' to be the template source.
+
+!!! note
+
+You must include a file name filter, a path filter or a mail rule filter. Use * for either to apply
+to all files.
+
+Consumption templates can assign:
+
+- Title, see [title placeholders](usage.md#title_placeholders) below
+- Tags, correspondent, document types
+- Document owner
+- View and / or edit permissions to users or groups
+
+### Consumption template permissions
+
+All users who have application permissions for editing consumption templates can see the same set
+of templates. In other words, templates themselves intentionally do not have an owner or permissions.
+
+Given their potentially far-reaching capabilities, you may want to restrict access to templates.
+
+Upon migration, existing installs will grant access to consumption templates to users who can add
+documents (and superusers who can always access all parts of the app).
+
+### Title placeholders
+
+Consumption template titles can include placeholders, _only for items that are assigned within the template_.
+This is because at the time of consumption (when the title is to be set), no automatic tags etc. have been
+applied. You can use the following placeholders:
+
+- `{correspondent}`: assigned correspondent name
+- `{document_type}`: assigned document type name
+- `{owner_username}`: assigned owner username
+- `{added}`: added datetime
+- `{added_year}`: added year
+- `{added_year_short}`: added year (short)
+- `{added_month}`: added month
+- `{added_month_name}`: added month name
+- `{added_month_name_short}`: added month short name
+- `{added_day}`: added day
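As an illustration (the combination and separators are arbitrary), a template title built from these placeholders could be:

```
{correspondent} {added_year}-{added_month}-{added_day}
```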

+## Custom Fields {#custom-fields}
+
+Paperless-ngx supports the use of custom fields for documents as of v2.0, allowing a user
+to optionally attach data to documents which does not fit in the existing set of fields
+Paperless-ngx provides.
+
+1. First, create a custom field (under "Manage"), with a given name and data type. This could be something like "Invoice Number" or "Date Paid", with a data type of "Number", "Date", "String", etc.
+2. Once created, a field can be used with documents and data stored. To do so, use the "Custom Fields" menu on the document detail page, choose your existing field and click "Add". Once the field is visible in the form you can enter the appropriate
+data which will be validated according to the custom field "data type".
+3. Fields can be removed by hovering over the field name revealing a "Remove" button.
+
+!!! important
+
+Added / removed fields, as well as any data, are not saved to the document until you
+actually hit the "Save" button, similar to other changes on the document details page.
+
+!!! note
+
+Once the data type for a field is set, it cannot be changed.
+
+Multiple fields may be attached to a document but the same field name cannot be assigned multiple times to a single document.
+
+The following custom field types are supported:
+
+- `Text`: any text
+- `Boolean`: true / false (check / unchecked) field
+- `Date`: date
+- `URL`: a valid url
+- `Integer`: integer number e.g. 12
+- `Number`: float number e.g. 12.3456
+- `Monetary`: float number with exactly two decimals, e.g. 12.30
+
+## Share Links
+
+Paperless-ngx added the ability to create shareable links to files in version 2.0. You can find the button for this on the document detail screen.
+
+- Share links do not require a user to login and thus link directly to a file.
+- Links are unique and are of the form `{paperless-url}/share/{randomly-generated-slug}`.
+- Links can optionally have an expiration time set.
+- After a link expires or is deleted users will be redirected to the regular paperless-ngx login.
+
+!!! tip
+
+If your paperless-ngx instance is behind a reverse-proxy you may want to create an exception to bypass any authentication layers that are part of your setup in order to make links truly publicly-accessible. Of course, do so with caution.

## Best practices {#basic-searching}

Paperless offers a couple tools that help you organize your document
@@ -443,7 +564,7 @@ Once you have scanned in a document, proceed in paperless as follows.
paperless will assign them automatically. After consuming a couple
documents, you can even ask paperless to *learn* when to assign tags and
correspondents by itself. For details on this feature, see
-[advanced matching](/advanced_usage#matching).
+[advanced matching](advanced_usage.md#matching).

### Task management

@@ -38,7 +38,6 @@ ask_docker_folder() {
echo "Invalid folder: $result"
fi

done
}

@@ -57,14 +56,9 @@ if ! command -v docker &> /dev/null ; then
exit 1
fi

-DOCKER_COMPOSE_CMD="docker-compose"
-if ! command -v ${DOCKER_COMPOSE_CMD} ; then
-    if docker compose version &> /dev/null ; then
-        DOCKER_COMPOSE_CMD="docker compose"
-    else
-        echo "docker-compose executable not found. Is docker-compose installed?"
-        exit 1
-    fi
+if ! command -v docker compose &> /dev/null ; then
+    echo "docker compose executable not found. Is docker compose installed?"
+    exit 1
fi

# Check if user has permissions to run Docker by trying to get the status of Docker (docker status).
@@ -72,7 +66,7 @@ fi
if ! docker stats --no-stream &> /dev/null ; then
    echo ""
    echo "WARN: It look like the current user does not have Docker permissions."
-    echo "WARN: Use 'sudo usermod -aG docker $USER' to assign Docker permissions to the user."
+    echo "WARN: Use 'sudo usermod -aG docker $USER' to assign Docker permissions to the user (may require restarting shell)."
    echo ""
    sleep 3
fi
@@ -321,7 +315,8 @@ fi
wget "https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/docker/compose/docker-compose.$DOCKER_COMPOSE_VERSION.yml" -O docker-compose.yml
wget "https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/docker/compose/.env" -O .env

-SECRET_KEY=$(tr --delete --complement 'a-zA-Z0-9' < /dev/urandom 2>/dev/null | head --bytes 64)
+SECRET_KEY=$(LC_ALL=C tr -dc 'a-zA-Z0-9!"#$%&'\''()*+,-./:;<=>?@[\]^_`{|}~' < /dev/urandom | head --bytes 64)

DEFAULT_LANGUAGES=("deu eng fra ita spa")

@@ -382,8 +377,16 @@ if [ "$l1" -eq "$l2" ] ; then
fi

-${DOCKER_COMPOSE_CMD} pull
+docker compose pull

-${DOCKER_COMPOSE_CMD} run --rm -e DJANGO_SUPERUSER_PASSWORD="$PASSWORD" webserver createsuperuser --noinput --username "$USERNAME" --email "$EMAIL"
-
-${DOCKER_COMPOSE_CMD} up --detach
+if [ "$DATABASE_BACKEND" == "postgres" ] || [ "$DATABASE_BACKEND" == "mariadb" ] ; then
+    echo "Starting DB first for initialization"
+    docker compose up --detach db
+    # hopefully enough time for even the slower systems
+    sleep 15
+    docker compose stop
+fi
+
+docker compose run --rm -e DJANGO_SUPERUSER_PASSWORD="$PASSWORD" webserver createsuperuser --noinput --username "$USERNAME" --email "$EMAIL"
+
+docker compose up --detach
@@ -28,6 +28,7 @@ theme:
repo: fontawesome/brands/github
favicon: assets/favicon.png
repo_url: https://github.com/paperless-ngx/paperless-ngx
+repo_name: paperless-ngx/paperless-ngx
edit_uri: blob/main/docs/
extra_css:
- assets/extra.css

@@ -42,6 +43,7 @@ markdown_extensions:
- pymdownx.superfences
- pymdownx.inlinehilite
- pymdownx.snippets
+- footnotes
strict: true
nav:
- index.md

@@ -64,3 +66,5 @@ extra:
link: https://hub.docker.com/r/paperlessngx/paperless-ngx
- icon: material/chat
link: https://matrix.to/#/#paperless:matrix.org
+plugins:
+- glightbox
@@ -66,6 +66,11 @@
 #PAPERLESS_CONSUMER_SUBDIRS_AS_TAGS=false
 #PAPERLESS_CONSUMER_ENABLE_BARCODES=false
 #PAPERLESS_CONSUMER_BARCODE_STRING=PATCHT
+#PAPERLESS_CONSUMER_BARCODE_UPSCALE=0.0
+#PAPERLESS_CONSUMER_BARCODE_DPI=300
+#PAPERLESS_CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED=false
+#PAPERLESS_CONSUMER_COLLATE_DOUBLE_SIDED_SUBDIR_NAME=double-sided
+#PAPERLESS_CONSUMER_COLLATE_DOUBLE_SIDED_TIFF_SUPPORT=false
 #PAPERLESS_PRE_CONSUME_SCRIPT=/path/to/an/arbitrary/script.sh
 #PAPERLESS_POST_CONSUME_SCRIPT=/path/to/an/arbitrary/script.sh
 #PAPERLESS_FILENAME_DATE_ORDER=YMD
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash

-docker run -p 5432:5432 -e POSTGRES_PASSWORD=password -v paperless_pgdata:/var/lib/postgresql/data -d postgres:13
+docker run -p 5432:5432 -e POSTGRES_PASSWORD=password -v paperless_pgdata:/var/lib/postgresql/data -d postgres:15
 docker run -d -p 6379:6379 redis:latest
 docker run -p 3000:3000 -d gotenberg/gotenberg:7.8 gotenberg --chromium-disable-javascript=true --chromium-allow-list="file:///tmp/.*"
 docker run -p 9998:9998 -d ghcr.io/paperless-ngx/tika:latest
@@ -24,7 +24,7 @@
 "error",
 {
 "type": "attribute",
-"prefix": "app",
+"prefix": "pngx",
 "style": "camelCase"
 }
 ],
@@ -32,7 +32,7 @@
 "error",
 {
 "type": "element",
-"prefix": "app",
+"prefix": "pngx",
 "style": "kebab-case"
 }
 ]
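The two lint rules above tighten the Angular selector checks from the generic "app" prefix to "pngx". As a rough, illustrative sketch only (the component and directive names here are made up, not taken from the codebase), code that satisfies the updated rules would look like:

import { Component, Directive } from '@angular/core'

// Element selectors: kebab-case with the "pngx" prefix, per the updated rule.
@Component({
  selector: 'pngx-example-widget',
  template: '<span>example</span>',
})
export class ExampleWidgetComponent {}

// Attribute selectors: camelCase with the "pngx" prefix.
@Directive({
  selector: '[pngxExampleHighlight]',
})
export class ExampleHighlightDirective {}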
src-ui/.gitignore (vendored, 3 changes)
@@ -49,3 +49,6 @@ Thumbs.db
 # Cypress
 cypress/videos/**/*
 cypress/screenshots/**/*
+/test-results/
+/playwright-report/
+/playwright/.cache/
@@ -12,32 +12,39 @@
 },
 "root": "",
 "sourceRoot": "src",
-"prefix": "app",
+"prefix": "pngx",
 "i18n": {
 "sourceLocale": "en-US",
 "locales": {
 "ar-AR": "src/locale/messages.ar_AR.xlf",
+"af-ZA": "src/locale/messages.af_ZA.xlf",
+"bg-BG": "src/locale/messages.bg_BG.xlf",
 "be-BY": "src/locale/messages.be_BY.xlf",
 "ca-ES": "src/locale/messages.ca_ES.xlf",
 "cs-CZ": "src/locale/messages.cs_CZ.xlf",
 "da-DK": "src/locale/messages.da_DK.xlf",
 "de-DE": "src/locale/messages.de_DE.xlf",
+"el-GR": "src/locale/messages.el_GR.xlf",
 "en-GB": "src/locale/messages.en_GB.xlf",
 "es-ES": "src/locale/messages.es_ES.xlf",
 "fi-FI": "src/locale/messages.fi_FI.xlf",
 "fr-FR": "src/locale/messages.fr_FR.xlf",
+"hu-HU": "src/locale/messages.hu_HU.xlf",
 "it-IT": "src/locale/messages.it_IT.xlf",
 "lb-LU": "src/locale/messages.lb_LU.xlf",
 "nl-NL": "src/locale/messages.nl_NL.xlf",
+"no-NO": "src/locale/messages.no_NO.xlf",
 "pl-PL": "src/locale/messages.pl_PL.xlf",
 "pt-BR": "src/locale/messages.pt_BR.xlf",
 "pt-PT": "src/locale/messages.pt_PT.xlf",
 "ro-RO": "src/locale/messages.ro_RO.xlf",
 "ru-RU": "src/locale/messages.ru_RU.xlf",
+"sk-SK": "src/locale/messages.sk_SK.xlf",
 "sl-SI": "src/locale/messages.sl_SI.xlf",
 "sr-CS": "src/locale/messages.sr_CS.xlf",
 "sv-SE": "src/locale/messages.sv_SE.xlf",
 "tr-TR": "src/locale/messages.tr_TR.xlf",
+"uk-UA": "src/locale/messages.uk_UA.xlf",
 "zh-CN": "src/locale/messages.zh_CN.xlf"
 }
 },
@@ -147,37 +154,6 @@
 "scripts": []
 }
 },
-"e2e": {
-"builder": "@cypress/schematic:cypress",
-"options": {
-"devServerTarget": "paperless-ui:serve",
-"watch": true,
-"headless": false
-},
-"configurations": {
-"production": {
-"devServerTarget": "paperless-ui:serve:production"
-}
-}
-},
-"cypress-run": {
-"builder": "@cypress/schematic:cypress",
-"options": {
-"devServerTarget": "paperless-ui:serve"
-},
-"configurations": {
-"production": {
-"devServerTarget": "paperless-ui:serve:production"
-}
-}
-},
-"cypress-open": {
-"builder": "@cypress/schematic:cypress",
-"options": {
-"watch": true,
-"headless": false
-}
-},
 "lint": {
 "builder": "@angular-eslint/builder:lint",
 "options": {
@@ -190,7 +166,6 @@
 }
 }
 },
-"defaultProject": "paperless-ui",
 "cli": {
 "schematicCollections": [
 "@angular-eslint/schematics"
@@ -1,14 +0,0 @@
-import { defineConfig } from 'cypress'
-
-export default defineConfig({
-videosFolder: 'cypress/videos',
-video: false,
-screenshotsFolder: 'cypress/screenshots',
-fixturesFolder: 'cypress/fixtures',
-e2e: {
-setupNodeEvents(on, config) {
-return require('./cypress/plugins/index.ts')(on, config)
-},
-baseUrl: 'http://localhost:4200',
-},
-})
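The Cypress config file is removed outright, and together with the new .gitignore entries above (/test-results/, /playwright-report/, /playwright/.cache/) this points to Playwright taking over the e2e role. A minimal sketch of what an equivalent Playwright spec could look like, under that assumption (the test name and base URL are illustrative, not the project's actual suite):

import { test, expect } from '@playwright/test'

// Hypothetical replacement for one of the removed Cypress permission checks.
test('restricted user cannot open settings', async ({ page }) => {
  await page.goto('http://localhost:4200/settings')
  await expect(
    page.getByText("You don't have permissions to do that")
  ).toBeVisible()
})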
@@ -1,68 +0,0 @@
-describe('settings', () => {
-beforeEach(() => {
-// also uses global fixtures from cypress/support/e2e.ts
-
-// mock restricted permissions
-cy.intercept('http://localhost:8000/api/ui_settings/', {
-fixture: 'ui_settings/settings_restricted.json',
-})
-})
-
-it('should not allow user to edit settings', () => {
-cy.visit('/dashboard')
-cy.contains('Settings').should('not.exist')
-cy.visit('/settings').wait(2000)
-cy.contains("You don't have permissions to do that").should('exist')
-})
-
-it('should not allow user to view documents', () => {
-cy.visit('/dashboard')
-cy.contains('Documents').should('not.exist')
-cy.visit('/documents').wait(2000)
-cy.contains("You don't have permissions to do that").should('exist')
-cy.visit('/documents/1').wait(2000)
-cy.contains("You don't have permissions to do that").should('exist')
-})
-
-it('should not allow user to view correspondents', () => {
-cy.visit('/dashboard')
-cy.contains('Correspondents').should('not.exist')
-cy.visit('/correspondents').wait(2000)
-cy.contains("You don't have permissions to do that").should('exist')
-})
-
-it('should not allow user to view tags', () => {
-cy.visit('/dashboard')
-cy.contains('Tags').should('not.exist')
-cy.visit('/tags').wait(2000)
-cy.contains("You don't have permissions to do that").should('exist')
-})
-
-it('should not allow user to view document types', () => {
-cy.visit('/dashboard')
-cy.contains('Document Types').should('not.exist')
-cy.visit('/documenttypes').wait(2000)
-cy.contains("You don't have permissions to do that").should('exist')
-})
-
-it('should not allow user to view storage paths', () => {
-cy.visit('/dashboard')
-cy.contains('Storage Paths').should('not.exist')
-cy.visit('/storagepaths').wait(2000)
-cy.contains("You don't have permissions to do that").should('exist')
-})
-
-it('should not allow user to view logs', () => {
-cy.visit('/dashboard')
-cy.contains('Logs').should('not.exist')
-cy.visit('/logs').wait(2000)
-cy.contains("You don't have permissions to do that").should('exist')
-})
-
-it('should not allow user to view tasks', () => {
-cy.visit('/dashboard')
-cy.contains('Tasks').should('not.exist')
-cy.visit('/tasks').wait(2000)
-cy.contains("You don't have permissions to do that").should('exist')
-})
-})
@@ -1,118 +0,0 @@
-describe('document-detail', () => {
-beforeEach(() => {
-// also uses global fixtures from cypress/support/e2e.ts
-
-this.modifiedDocuments = []
-
-cy.fixture('documents/documents.json').then((documentsJson) => {
-cy.intercept(
-'GET',
-'http://localhost:8000/api/documents/1/?full_perms=true',
-(req) => {
-let response = { ...documentsJson }
-response = response.results.find((d) => d.id == 1)
-req.reply(response)
-}
-)
-})
-
-cy.intercept('PUT', 'http://localhost:8000/api/documents/1/', (req) => {
-this.modifiedDocuments.push(req.body) // store this for later
-req.reply({ result: 'OK' })
-}).as('saveDoc')
-
-cy.fixture('documents/1/notes.json').then((notesJson) => {
-cy.intercept(
-'GET',
-'http://localhost:8000/api/documents/1/notes/',
-(req) => {
-req.reply(notesJson.filter((c) => c.id != 10)) // 3
-}
-)
-
-cy.intercept(
-'DELETE',
-'http://localhost:8000/api/documents/1/notes/?id=9',
-(req) => {
-req.reply(notesJson.filter((c) => c.id != 9 && c.id != 10)) // 2
-}
-)
-
-cy.intercept(
-'POST',
-'http://localhost:8000/api/documents/1/notes/',
-(req) => {
-req.reply(notesJson) // 4
-}
-)
-})
-
-cy.viewport(1024, 1024)
-cy.visit('/documents/1/').wait('@ui-settings')
-})
-
-it('should activate / deactivate save button when changes are saved', () => {
-cy.contains('button', 'Save').should('be.disabled')
-cy.get('app-input-text[formcontrolname="title"]')
-.type(' additional')
-.wait(1500) // this delay is for frontend debounce
-cy.contains('button', 'Save').should('not.be.disabled')
-})
-
-it('should warn on unsaved changes', () => {
-cy.get('app-input-text[formcontrolname="title"]')
-.type(' additional')
-.wait(1500) // this delay is for frontend debounce
-cy.get('button[title="Close"]').click()
-cy.contains('You have unsaved changes')
-cy.contains('button', 'Cancel').click().wait(150)
-cy.contains('button', 'Save').click().wait('@saveDoc').wait(2000) // navigates away after saving
-cy.contains('You have unsaved changes').should('not.exist')
-})
-
-it('should show a mobile preview', () => {
-cy.viewport(440, 1000)
-cy.get('a')
-.contains('Preview')
-.scrollIntoView({ offset: { top: 150, left: 0 } })
-.click()
-cy.get('pdf-viewer').should('be.visible')
-})
-
-it('should show a list of notes', () => {
-cy.wait(1000).get('a').contains('Notes').click({ force: true }).wait(1000)
-cy.get('app-document-notes').find('.card').its('length').should('eq', 3)
-})
-
-it('should support note deletion', () => {
-cy.wait(1000).get('a').contains('Notes').click().wait(1000)
-cy.get('app-document-notes')
-.find('.card')
-.first()
-.find('button')
-.click({ force: true })
-.wait(500)
-cy.get('app-document-notes').find('.card').its('length').should('eq', 2)
-})
-
-it('should support note insertion', () => {
-cy.wait(1000).get('a').contains('Notes').click().wait(1000)
-cy.get('app-document-notes')
-.find('form textarea')
-.type('Testing new note')
-.wait(500)
-cy.get('app-document-notes').find('form button').click().wait(1500)
-cy.get('app-document-notes').find('.card').its('length').should('eq', 4)
-})
-
-it('should support navigation to notes tab by url', () => {
-cy.visit('/documents/1/notes')
-cy.get('app-document-notes').should('exist')
-})
-
-it('should dynamically update note counts', () => {
-cy.visit('/documents/1/notes')
-cy.get('app-document-notes').within(() => cy.contains('Delete').click())
-cy.get('ul.nav').find('li').contains('Notes').find('.badge').contains('2')
-})
-})
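The deleted spec above leans heavily on cy.intercept() to stub the documents and notes endpoints. If these flows are rebuilt on Playwright, the closest analogue is page.route(); the following is only a rough, hypothetical sketch (endpoint pattern and payload are placeholders), not the project's actual replacement test:

import { test, expect } from '@playwright/test'

// Hypothetical page.route() stub standing in for the removed cy.intercept() mocks.
test('notes tab renders stubbed notes', async ({ page }) => {
  await page.route('**/api/documents/1/notes/', (route) =>
    route.fulfill({
      contentType: 'application/json',
      body: JSON.stringify([{ id: 1, note: 'Testing new note' }]),
    })
  )
  await page.goto('http://localhost:4200/documents/1/notes')
  await expect(page.getByText('Testing new note')).toBeVisible()
})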
@@ -1,196 +0,0 @@
-describe('documents-list', () => {
-beforeEach(() => {
-// also uses global fixtures from cypress/support/e2e.ts
-
-this.bulkEdits = {}
-
-cy.fixture('documents/documents.json').then((documentsJson) => {
-// bulk edit
-cy.intercept(
-'POST',
-'http://localhost:8000/api/documents/bulk_edit/',
-(req) => {
-this.bulkEdits = req.body // store this for later
-req.reply({ result: 'OK' })
-}
-)
-
-cy.intercept('GET', 'http://localhost:8000/api/documents/*', (req) => {
-let response = { ...documentsJson }
-
-// bulkEdits was set earlier by bulk_edit intercept
-if (this.bulkEdits.hasOwnProperty('documents')) {
-response.results = response.results.map((d) => {
-if ((this.bulkEdits['documents'] as Array<number>).includes(d.id)) {
-switch (this.bulkEdits['method']) {
-case 'modify_tags':
-d.tags = (d.tags as Array<number>).concat([
-this.bulkEdits['parameters']['add_tags'],
-])
-break
-case 'set_correspondent':
-d.correspondent =
-this.bulkEdits['parameters']['correspondent']
-break
-case 'set_document_type':
-d.document_type =
-this.bulkEdits['parameters']['document_type']
-break
-}
-}
-
-return d
-})
-} else if (req.query.hasOwnProperty('tags__id__all')) {
-// filtering e.g. http://localhost:8000/api/documents/?page=1&page_size=50&ordering=-created&tags__id__all=2
-const tag_id = +req.query['tags__id__all']
-response.results = (documentsJson.results as Array<any>).filter((d) =>
-(d.tags as Array<number>).includes(tag_id)
-)
-response.count = response.results.length
-} else if (req.query.hasOwnProperty('correspondent__id__in')) {
-// filtering e.g. http://localhost:8000/api/documents/?page=1&page_size=50&ordering=-created&correspondent__id__in=9,14
-const correspondent_ids = req.query['correspondent__id__in']
-.toString()
-.split(',')
-.map((c) => +c)
-response.results = (documentsJson.results as Array<any>).filter((d) =>
-correspondent_ids.includes(d.correspondent)
-)
-response.count = response.results.length
-} else if (req.query.hasOwnProperty('correspondent__id__none')) {
-// filtering e.g. http://localhost:8000/api/documents/?page=1&page_size=50&ordering=-created&correspondent__id__none=9,14
-const correspondent_ids = req.query['correspondent__id__none']
-.toString()
-.split(',')
-.map((c) => +c)
-response.results = (documentsJson.results as Array<any>).filter(
-(d) => !correspondent_ids.includes(d.correspondent)
-)
-response.count = response.results.length
-}
-
-req.reply(response)
-})
-
-cy.intercept('http://localhost:8000/api/documents/selection_data/', {
-fixture: 'documents/selection_data.json',
-}).as('selection-data')
-})
-
-cy.viewport(1280, 1024)
-cy.visit('/documents')
-})
-
-it('should show a list of documents rendered as cards with thumbnails', () => {
-cy.contains('3 documents')
-cy.contains('lorem ipsum')
-cy.get('app-document-card-small:first-of-type img')
-.invoke('attr', 'src')
-.should('eq', 'http://localhost:8000/api/documents/1/thumb/')
-})
-
-it('should change to table "details" view', () => {
-cy.get('div.btn-group input[value="details"]').next().click()
-cy.get('table')
-})
-
-it('should change to large cards view', () => {
-cy.get('div.btn-group input[value="largeCards"]').next().click()
-cy.get('app-document-card-large')
-})
-
-it('should show partial tag selection', () => {
-cy.get('app-document-card-small:nth-child(1)').click()
-cy.get('app-document-card-small:nth-child(4)').click()
-cy.get('app-bulk-editor button')
-.contains('Tags')
-.click()
-.wait('@selection-data')
-cy.get('svg.bi-dash').should('be.visible')
-cy.get('svg.bi-check').should('be.visible')
-})
-
-it('should allow bulk removal', () => {
-cy.get('app-document-card-small:nth-child(1)').click()
-cy.get('app-document-card-small:nth-child(4)').click()
-cy.get('app-bulk-editor').within(() => {
-cy.get('button').contains('Tags').click().wait('@selection-data')
-cy.get('button').contains('Another Sample Tag').click()
-cy.get('button').contains('Apply').click()
-})
-cy.contains('operation will remove the tag')
-})
-
-it('should filter tags', () => {
-cy.get('app-filter-editor app-filterable-dropdown[title="Tags"]').within(
-() => {
-cy.contains('button', 'Tags').click()
-cy.contains('button', 'Tag 2').click()
-}
-)
-cy.contains('One document')
-})
-
-it('should filter including multiple correspondents', () => {
-cy.get('app-filter-editor app-filterable-dropdown[title="Correspondent"]')
-.click()
-.within(() => {
-cy.contains('button', 'ABC Test Correspondent').click()
-cy.contains('button', 'Corresp 11').click()
-})
-cy.contains('3 documents')
-})
-
-it('should filter excluding multiple correspondents', () => {
-cy.get('app-filter-editor app-filterable-dropdown[title="Correspondent"]')
-.click()
-.within(() => {
-cy.contains('button', 'ABC Test Correspondent').click()
-cy.contains('button', 'Corresp 11').click()
-cy.contains('label', 'Exclude').click()
-})
-cy.contains('One document')
-})
-
-it('should apply tags', () => {
-cy.get('app-document-card-small:first-of-type').click()
-cy.get('app-bulk-editor app-filterable-dropdown[title="Tags"]').within(
-() => {
-cy.contains('button', 'Tags').click()
-cy.contains('button', 'Test Tag').click()
-cy.contains('button', 'Apply').click()
-}
-)
-cy.contains('button', 'Confirm').click()
-cy.get('app-document-card-small:first-of-type').contains('Test Tag')
-})
-
-it('should apply correspondent', () => {
-cy.get('app-document-card-small:first-of-type').click()
-cy.get(
-'app-bulk-editor app-filterable-dropdown[title="Correspondent"]'
-).within(() => {
-cy.contains('button', 'Correspondent').click()
-cy.contains('button', 'ABC Test Correspondent').click()
-cy.contains('button', 'Apply').click()
-})
-cy.contains('button', 'Confirm').click()
-cy.get('app-document-card-small:first-of-type').contains(
-'ABC Test Correspondent'
-)
-})
-
-it('should apply document type', () => {
-cy.get('app-document-card-small:first-of-type').click()
-cy.get(
-'app-bulk-editor app-filterable-dropdown[title="Document type"]'
-).within(() => {
-cy.contains('button', 'Document type').click()
-cy.contains('button', 'Test Doc Type').click()
-cy.contains('button', 'Apply').click()
-})
-cy.contains('button', 'Confirm').click()
-cy.get('app-document-card-small:first-of-type').contains('Test Doc Type')
-})
-})
@@ -1,341 +0,0 @@
-import { PaperlessDocument } from 'src/app/data/paperless-document'
-
-describe('documents query params', () => {
-beforeEach(() => {
-// also uses global fixtures from cypress/support/e2e.ts
-
-cy.fixture('documents/documents.json').then((documentsJson) => {
-// mock api filtering
-cy.intercept('GET', 'http://localhost:8000/api/documents/*', (req) => {
-let response = { ...documentsJson }
-
-if (req.query.hasOwnProperty('ordering')) {
-const sort_field = req.query['ordering'].toString().replace('-', '')
-const reverse = req.query['ordering'].toString().indexOf('-') !== -1
-response.results = (
-documentsJson.results as Array<PaperlessDocument>
-).sort((docA, docB) => {
-let result = 0
-switch (sort_field) {
-case 'created':
-case 'added':
-result =
-new Date(docA[sort_field]) < new Date(docB[sort_field])
-? -1
-: 1
-break
-case 'archive_serial_number':
-result = docA[sort_field] < docB[sort_field] ? -1 : 1
-break
-}
-if (reverse) result = -result
-return result
-})
-}
-
-if (req.query.hasOwnProperty('tags__id__in')) {
-const tag_ids: Array<number> = req.query['tags__id__in']
-.toString()
-.split(',')
-.map((v) => +v)
-response.results = (
-documentsJson.results as Array<PaperlessDocument>
-).filter(
-(d) =>
-d.tags.length > 0 &&
-d.tags.filter((t) => tag_ids.includes(t)).length > 0
-)
-response.count = response.results.length
-} else if (req.query.hasOwnProperty('tags__id__none')) {
-const tag_ids: Array<number> = req.query['tags__id__none']
-.toString()
-.split(',')
-.map((v) => +v)
-response.results = (
-documentsJson.results as Array<PaperlessDocument>
-).filter((d) => d.tags.filter((t) => tag_ids.includes(t)).length == 0)
-response.count = response.results.length
-} else if (
-req.query.hasOwnProperty('is_tagged') &&
-req.query['is_tagged'] == '0'
-) {
-response.results = (
-documentsJson.results as Array<PaperlessDocument>
-).filter((d) => d.tags.length == 0)
-response.count = response.results.length
-}
-
-if (req.query.hasOwnProperty('document_type__id')) {
-const doctype_id = +req.query['document_type__id']
-response.results = (
-documentsJson.results as Array<PaperlessDocument>
-).filter((d) => d.document_type == doctype_id)
-response.count = response.results.length
-} else if (
-req.query.hasOwnProperty('document_type__isnull') &&
-req.query['document_type__isnull'] == '1'
-) {
-response.results = (
-documentsJson.results as Array<PaperlessDocument>
-).filter((d) => d.document_type == undefined)
-response.count = response.results.length
-}
-
-if (req.query.hasOwnProperty('correspondent__id')) {
-const correspondent_id = +req.query['correspondent__id']
-response.results = (
-documentsJson.results as Array<PaperlessDocument>
-).filter((d) => d.correspondent == correspondent_id)
-response.count = response.results.length
-} else if (
-req.query.hasOwnProperty('correspondent__isnull') &&
-req.query['correspondent__isnull'] == '1'
-) {
-response.results = (
-documentsJson.results as Array<PaperlessDocument>
-).filter((d) => d.correspondent == undefined)
-response.count = response.results.length
-}
-
-if (req.query.hasOwnProperty('storage_path__id')) {
-const storage_path_id = +req.query['storage_path__id']
-response.results = (
-documentsJson.results as Array<PaperlessDocument>
-).filter((d) => d.storage_path == storage_path_id)
-response.count = response.results.length
-} else if (
-req.query.hasOwnProperty('storage_path__isnull') &&
-req.query['storage_path__isnull'] == '1'
-) {
-response.results = (
-documentsJson.results as Array<PaperlessDocument>
-).filter((d) => d.storage_path == undefined)
-response.count = response.results.length
-}
-
-if (req.query.hasOwnProperty('created__date__gt')) {
-const date = new Date(req.query['created__date__gt'])
-response.results = (
-documentsJson.results as Array<PaperlessDocument>
-).filter((d) => new Date(d.created) > date)
-response.count = response.results.length
-} else if (req.query.hasOwnProperty('created__date__lt')) {
-const date = new Date(req.query['created__date__lt'])
-response.results = (
-documentsJson.results as Array<PaperlessDocument>
-).filter((d) => new Date(d.created) < date)
-response.count = response.results.length
-}
-
-if (req.query.hasOwnProperty('added__date__gt')) {
-const date = new Date(req.query['added__date__gt'])
-response.results = (
-documentsJson.results as Array<PaperlessDocument>
-).filter((d) => new Date(d.added) > date)
-response.count = response.results.length
-} else if (req.query.hasOwnProperty('added__date__lt')) {
-const date = new Date(req.query['added__date__lt'])
-response.results = (
-documentsJson.results as Array<PaperlessDocument>
-).filter((d) => new Date(d.added) < date)
-response.count = response.results.length
-}
-
-if (req.query.hasOwnProperty('title_content')) {
-const title_content_regexp = new RegExp(
-req.query['title_content'].toString(),
-'i'
-)
-response.results = (
-documentsJson.results as Array<PaperlessDocument>
-).filter(
-(d) =>
-title_content_regexp.test(d.title) ||
-title_content_regexp.test(d.content)
-)
-response.count = response.results.length
-}
-
-if (req.query.hasOwnProperty('archive_serial_number')) {
-const asn = +req.query['archive_serial_number']
-response.results = (
-documentsJson.results as Array<PaperlessDocument>
-).filter((d) => d.archive_serial_number == asn)
-response.count = response.results.length
-} else if (req.query.hasOwnProperty('archive_serial_number__isnull')) {
-const isnull = req.query['storage_path__isnull'] == '1'
-response.results = (
-documentsJson.results as Array<PaperlessDocument>
-).filter((d) =>
-isnull
-? d.archive_serial_number == undefined
-: d.archive_serial_number != undefined
-)
-response.count = response.results.length
-} else if (req.query.hasOwnProperty('archive_serial_number__gt')) {
-const asn = +req.query['archive_serial_number__gt']
-response.results = (
-documentsJson.results as Array<PaperlessDocument>
-).filter(
-(d) => d.archive_serial_number > 0 && d.archive_serial_number > asn
-)
-response.count = response.results.length
-} else if (req.query.hasOwnProperty('archive_serial_number__lt')) {
-const asn = +req.query['archive_serial_number__lt']
-response.results = (
-documentsJson.results as Array<PaperlessDocument>
-).filter(
-(d) => d.archive_serial_number > 0 && d.archive_serial_number < asn
-)
-response.count = response.results.length
-}
-
-req.reply(response)
-})
-})
-})
-
-it('should show a list of documents sorted by created', () => {
-cy.visit('/documents?sort=created')
-cy.get('app-document-card-small').first().contains('No latin title')
-})
-
-it('should show a list of documents reverse sorted by created', () => {
-cy.visit('/documents?sort=created&reverse=true')
-cy.get('app-document-card-small').first().contains('sit amet')
-})
-
-it('should show a list of documents sorted by added', () => {
-cy.visit('/documents?sort=added')
-cy.get('app-document-card-small').first().contains('No latin title')
-})
-
-it('should show a list of documents reverse sorted by added', () => {
-cy.visit('/documents?sort=added&reverse=true')
-cy.get('app-document-card-small').first().contains('sit amet')
-})
-
-it('should show a list of documents filtered by any tags', () => {
-cy.visit('/documents?sort=created&reverse=true&tags__id__in=2,4,5')
-cy.contains('3 documents')
-})
-
-it('should show a list of documents filtered by excluded tags', () => {
-cy.visit('/documents?sort=created&reverse=true&tags__id__none=2,4')
-cy.contains('One document')
-})
-
-it('should show a list of documents filtered by no tags', () => {
-cy.visit('/documents?sort=created&reverse=true&is_tagged=0')
-cy.contains('One document')
-})
-
-it('should show a list of documents filtered by document type', () => {
-cy.visit('/documents?sort=created&reverse=true&document_type__id=1')
-cy.contains('2 documents')
-})
-
-it('should show a list of documents filtered by multiple correspondents', () => {
-cy.visit('/documents?sort=created&reverse=true&document_type__id__in=1,2')
-cy.contains('3 documents')
-})
-
-it('should show a list of documents filtered by no document type', () => {
-cy.visit('/documents?sort=created&reverse=true&document_type__isnull=1')
-cy.contains('One document')
-})
-
-it('should show a list of documents filtered by correspondent', () => {
-cy.visit('/documents?sort=created&reverse=true&correspondent__id=9')
-cy.contains('2 documents')
-})
-
-it('should show a list of documents filtered by multiple correspondents', () => {
-cy.visit('/documents?sort=created&reverse=true&correspondent__id__in=9,14')
-cy.contains('3 documents')
-})
-
-it('should show a list of documents filtered by no correspondent', () => {
-cy.visit('/documents?sort=created&reverse=true&correspondent__isnull=1')
-cy.contains('One document')
-})
-
-it('should show a list of documents filtered by storage path', () => {
-cy.visit('/documents?sort=created&reverse=true&storage_path__id=2')
-cy.contains('One document')
-})
-
-it('should show a list of documents filtered by no storage path', () => {
-cy.visit('/documents?sort=created&reverse=true&storage_path__isnull=1')
-cy.contains('3 documents')
-})
-
-it('should show a list of documents filtered by title or content', () => {
-cy.visit('/documents?sort=created&reverse=true&title_content=lorem')
-cy.contains('2 documents')
-})
-
-it('should show a list of documents filtered by asn', () => {
-cy.visit('/documents?sort=created&reverse=true&archive_serial_number=12345')
-cy.contains('One document')
-})
-
-it('should show a list of documents filtered by empty asn', () => {
-cy.visit(
-'/documents?sort=created&reverse=true&archive_serial_number__isnull=1'
-)
-cy.contains('2 documents')
-})
-
-it('should show a list of documents filtered by non-empty asn', () => {
-cy.visit(
-'/documents?sort=created&reverse=true&archive_serial_number__isnull=0'
-)
-cy.contains('2 documents')
-})
-
-it('should show a list of documents filtered by asn greater than', () => {
-cy.visit(
-'/documents?sort=created&reverse=true&archive_serial_number__gt=12346'
-)
-cy.contains('One document')
-})
-
-it('should show a list of documents filtered by asn less than', () => {
-cy.visit(
-'/documents?sort=created&reverse=true&archive_serial_number__lt=12346'
-)
-cy.contains('One document')
-})
-
-it('should show a list of documents filtered by created date greater than', () => {
-cy.visit(
-'/documents?sort=created&reverse=true&created__date__gt=2022-03-23'
-)
-cy.contains('3 documents')
-})
-
-it('should show a list of documents filtered by created date less than', () => {
-cy.visit(
-'/documents?sort=created&reverse=true&created__date__lt=2022-03-23'
-)
-cy.contains('One document')
-})
-
-it('should show a list of documents filtered by added date greater than', () => {
-cy.visit('/documents?sort=created&reverse=true&added__date__gt=2022-03-24')
-cy.contains('2 documents')
-})
-
-it('should show a list of documents filtered by added date less than', () => {
-cy.visit('/documents?sort=created&reverse=true&added__date__lt=2022-03-24')
-cy.contains('2 documents')
-})
-
-it('should show a list of documents filtered by multiple filters', () => {
-cy.visit(
-'/documents?sort=created&reverse=true&document_type__id=1&correspondent__id=9&tags__id__in=4,5'
-)
-cy.contains('2 documents')
-})
-})