Compare commits: v1.11.2 ... archive/v1 (1282 commits)
[Commit table omitted: the extracted Author / SHA1 / Date table preserved only the abbreviated commit SHAs (f216b322c2 through 4603813896); the author, date, and message columns were empty in the extraction.]
Deleted file (9 lines removed):

```diff
@@ -1,9 +0,0 @@
-{
-  "qpdf": {
-    "version": "11.2.0"
-  },
-  "jbig2enc": {
-    "version": "0.29",
-    "git_tag": "0.29"
-  }
-}
```
.codecov.yml (new file, +33 lines):

```diff
@@ -0,0 +1,33 @@
+codecov:
+  require_ci_to_pass: true
+# https://docs.codecov.com/docs/flags#recommended-automatic-flag-management
+# Require each flag to have 1 upload before notification
+flag_management:
+  default_rules:
+    after_n_builds: 1
+  individual_flags:
+    - name: backend
+      paths:
+        - src/
+    - name: frontend
+      paths:
+        - src-ui/
+# https://docs.codecov.com/docs/pull-request-comments
+# codecov will only comment if coverage changes
+comment:
+  require_changes: true
+coverage:
+  status:
+    project:
+      default:
+        # https://docs.codecov.com/docs/commit-status#threshold
+        threshold: 1%
+        # https://docs.codecov.com/docs/commit-status#only_pulls
+        only_pulls: true
+    patch:
+      default:
+        # For the changed lines only, target 75% covered, but
+        # allow as low as 50%
+        target: 75%
+        threshold: 25%
+        only_pulls: true
```
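For the `patch` status above, the `target` and `threshold` values combine into an effective floor: a 75% target with a 25% allowance means changed-line coverage may drop to 50% before the status fails. A minimal sketch of that arithmetic, illustrating the rule as described in the file's own comments (not Codecov's actual implementation):

```python
def patch_status_passes(
    patch_coverage: float,
    target: float = 75.0,
    threshold: float = 25.0,
) -> bool:
    """True if changed-line coverage meets the target, allowing it to
    fall short by up to `threshold` percentage points."""
    return patch_coverage >= target - threshold


# 75% target with a 25% threshold gives an effective floor of 50%:
assert patch_status_passes(60.0)        # 60 >= 50, status passes
assert not patch_status_passes(49.0)    # 49 < 50, status fails
```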
.github/DISCUSSION_TEMPLATE/feature-requests.yml (new file, vendored, +14 lines):

```diff
@@ -0,0 +1,14 @@
+title: "[Feature Request] "
+body:
+  - type: textarea
+    id: description
+    attributes:
+      label: Description
+      description: A clear and concise description of what you would like to see.
+    validations:
+      required: true
+  - type: textarea
+    id: other
+    attributes:
+      label: Other
+      description: Add any other context or information about the feature request here.
```
.github/PULL_REQUEST_TEMPLATE.md (vendored, 7 changed lines):

```diff
@@ -20,11 +20,16 @@ NOTE: Please check only one box!
 - [ ] Bug fix (non-breaking change which fixes an issue)
 - [ ] New feature (non-breaking change which adds functionality)
 - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
-- [ ] Other (please explain)
+- [ ] Other (please explain):

 ## Checklist:

+<!--
+NOTE: PRs that do not address the following will not be merged, please do not skip any relevant items.
+-->
+
 - [ ] I have read & agree with the [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/main/CONTRIBUTING.md).
+- [ ] If applicable, I have included testing coverage for new code in this PR, for [backend](https://docs.paperless-ngx.com/development/#testing) and / or [front-end](https://docs.paperless-ngx.com/development/#testing-and-code-style) changes.
 - [ ] If applicable, I have tested my code for new features & regressions on both mobile & desktop devices, using the latest version of major browsers.
 - [ ] If applicable, I have checked that all tests pass, see [documentation](https://docs.paperless-ngx.com/development/#back-end-development).
 - [ ] I have run all `pre-commit` hooks, see [documentation](https://docs.paperless-ngx.com/development/#code-formatting-with-pre-commit-hooks).
```
.github/dependabot.yml (vendored, 15 changed lines):

```diff
@@ -17,6 +17,21 @@ updates:
     # Add reviewers
     reviewers:
       - "paperless-ngx/frontend"
+    groups:
+      frontend-angular-dependencies:
+        patterns:
+          - "@angular*"
+          - "@ng-*"
+          - "ngx-*"
+          - "ng2-pdf-viewer"
+      frontend-jest-dependencies:
+        patterns:
+          - "@types/jest"
+          - "jest*"
+      frontend-eslint-dependencies:
+        patterns:
+          - "@typescript-eslint*"
+          - "eslint"

   # Enable version updates for Python
   - package-ecosystem: "pip"
```
.github/release-drafter.yml (vendored, 4 changed lines):

```diff
@@ -40,7 +40,7 @@ categories:
     labels:
       - 'frontend'
       - 'backend'
-    collapse-after: 0
+    collapse-after: 1
 include-labels:
   - 'enhancement'
   - 'bug'
@@ -54,6 +54,8 @@ include-labels:
   - 'ci-cd'
   - 'breaking-change'
   - 'notable'
+exclude-labels:
+  - 'skip-changelog'
 category-template: '### $TITLE'
 change-template: '- $TITLE @$AUTHOR ([#$NUMBER]($URL))'
 change-title-escapes: '\<*_&#@'
```
|
402
.github/scripts/cleanup-tags.py
vendored
@@ -1,402 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
from argparse import ArgumentParser
|
|
||||||
from typing import Dict
|
|
||||||
from typing import Final
|
|
||||||
from typing import List
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from common import get_log_level
|
|
||||||
from github import ContainerPackage
|
|
||||||
from github import GithubBranchApi
|
|
||||||
from github import GithubContainerRegistryApi
|
|
||||||
|
|
||||||
logger = logging.getLogger("cleanup-tags")
|
|
||||||
|
|
||||||
|
|
||||||
class DockerManifest2:
|
|
||||||
"""
|
|
||||||
Data class wrapping the Docker Image Manifest Version 2.
|
|
||||||
|
|
||||||
See https://docs.docker.com/registry/spec/manifest-v2-2/
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, data: Dict) -> None:
|
|
||||||
self._data = data
|
|
||||||
# This is the sha256: digest string. Corresponds to GitHub API name
|
|
||||||
# if the package is an untagged package
|
|
||||||
self.digest = self._data["digest"]
|
|
||||||
platform_data_os = self._data["platform"]["os"]
|
|
||||||
platform_arch = self._data["platform"]["architecture"]
|
|
||||||
platform_variant = self._data["platform"].get(
|
|
||||||
"variant",
|
|
||||||
"",
|
|
||||||
)
|
|
||||||
self.platform = f"{platform_data_os}/{platform_arch}{platform_variant}"
|
|
||||||
|
|
||||||
|
|
||||||
class RegistryTagsCleaner:
|
|
||||||
"""
|
|
||||||
This is the base class for the image registry cleaning. Given a package
|
|
||||||
name, it will keep all images which are tagged and all untagged images
|
|
||||||
referred to by a manifest. This results in only images which have been untagged
|
|
||||||
and cannot be referenced except by their SHA in being removed. None of these
|
|
||||||
images should be referenced, so it is fine to delete them.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
package_name: str,
|
|
||||||
repo_owner: str,
|
|
||||||
repo_name: str,
|
|
||||||
package_api: GithubContainerRegistryApi,
|
|
||||||
branch_api: Optional[GithubBranchApi],
|
|
||||||
):
|
|
||||||
self.actually_delete = False
|
|
||||||
self.package_api = package_api
|
|
||||||
self.branch_api = branch_api
|
|
||||||
self.package_name = package_name
|
|
||||||
self.repo_owner = repo_owner
|
|
||||||
self.repo_name = repo_name
|
|
||||||
self.tags_to_delete: List[str] = []
|
|
||||||
self.tags_to_keep: List[str] = []
|
|
||||||
|
|
||||||
# Get the information about all versions of the given package
|
|
||||||
# These are active, not deleted, the default returned from the API
|
|
||||||
self.all_package_versions = self.package_api.get_active_package_versions(
|
|
||||||
self.package_name,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Get a mapping from a tag like "1.7.0" or "feature-xyz" to the ContainerPackage
|
|
||||||
# tagged with it. It makes certain lookups easy
|
|
||||||
self.all_pkgs_tags_to_version: Dict[str, ContainerPackage] = {}
|
|
||||||
for pkg in self.all_package_versions:
|
|
||||||
for tag in pkg.tags:
|
|
||||||
self.all_pkgs_tags_to_version[tag] = pkg
|
|
||||||
logger.info(
|
|
||||||
f"Located {len(self.all_package_versions)} versions of package {self.package_name}",
|
|
||||||
)
|
|
||||||
|
|
||||||
self.decide_what_tags_to_keep()
|
|
||||||
|
|
||||||
def clean(self):
|
|
||||||
"""
|
|
||||||
This method will delete image versions, based on the selected tags to delete
|
|
||||||
"""
|
|
||||||
for tag_to_delete in self.tags_to_delete:
|
|
||||||
package_version_info = self.all_pkgs_tags_to_version[tag_to_delete]
|
|
||||||
|
|
||||||
if self.actually_delete:
|
|
||||||
logger.info(
|
|
||||||
f"Deleting {tag_to_delete} (id {package_version_info.id})",
|
|
||||||
)
|
|
||||||
self.package_api.delete_package_version(
|
|
||||||
package_version_info,
|
|
||||||
)
|
|
||||||
|
|
||||||
else:
|
|
||||||
logger.info(
|
|
||||||
f"Would delete {tag_to_delete} (id {package_version_info.id})",
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
logger.info("No tags to delete")
|
|
||||||
|
|
||||||
def clean_untagged(self, is_manifest_image: bool):
|
|
||||||
"""
|
|
||||||
This method will delete untagged images, that is those which are not named. It
|
|
||||||
handles if the image tag is actually a manifest, which points to images that look otherwise
|
|
||||||
untagged.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def _clean_untagged_manifest():
|
|
||||||
"""
|
|
||||||
|
|
||||||
Handles the deletion of untagged images, but where the package is a manifest, ie a multi
|
|
||||||
arch image, which means some "untagged" images need to exist still.
|
|
||||||
|
|
||||||
Ok, bear with me, these are annoying.
|
|
||||||
|
|
||||||
Our images are multi-arch, so the manifest is more like a pointer to a sha256 digest.
|
|
||||||
These images are untagged, but pointed to, and so should not be removed (or every pull fails).
|
|
||||||
|
|
||||||
So for each image getting kept, parse the manifest to find the digest(s) it points to. Then
|
|
||||||
remove those from the list of untagged images. The final result is the untagged, not pointed to
|
|
||||||
version which should be safe to remove.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
Tag: ghcr.io/paperless-ngx/paperless-ngx:1.7.1 refers to
|
|
||||||
amd64: sha256:b9ed4f8753bbf5146547671052d7e91f68cdfc9ef049d06690b2bc866fec2690
|
|
||||||
armv7: sha256:81605222df4ba4605a2ba4893276e5d08c511231ead1d5da061410e1bbec05c3
|
|
||||||
arm64: sha256:374cd68db40734b844705bfc38faae84cc4182371de4bebd533a9a365d5e8f3b
|
|
||||||
each of which appears as untagged image, but isn't really.
|
|
||||||
|
|
||||||
So from the list of untagged packages, remove those digests. Once all tags which
|
|
||||||
are being kept are checked, the remaining untagged packages are actually untagged
|
|
||||||
with no referrals in a manifest to them.
|
|
||||||
"""
|
|
||||||
# Simplify the untagged data, mapping name (which is a digest) to the version
|
|
||||||
# At the moment, these are the images which APPEAR untagged.
|
|
||||||
untagged_versions = {}
|
|
||||||
for x in self.all_package_versions:
|
|
||||||
if x.untagged:
|
|
||||||
untagged_versions[x.name] = x
|
|
||||||
|
|
||||||
skips = 0
|
|
||||||
|
|
||||||
# Parse manifests to locate digests pointed to
|
|
||||||
for tag in sorted(self.tags_to_keep):
|
|
||||||
full_name = f"ghcr.io/{self.repo_owner}/{self.package_name}:{tag}"
|
|
||||||
logger.info(f"Checking manifest for {full_name}")
|
|
||||||
try:
|
|
||||||
proc = subprocess.run(
|
|
||||||
[
|
|
||||||
shutil.which("docker"),
|
|
||||||
"manifest",
|
|
||||||
"inspect",
|
|
||||||
full_name,
|
|
||||||
],
|
|
||||||
capture_output=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
manifest_list = json.loads(proc.stdout)
|
|
||||||
for manifest_data in manifest_list["manifests"]:
|
|
||||||
manifest = DockerManifest2(manifest_data)
|
|
||||||
|
|
||||||
if manifest.digest in untagged_versions:
|
|
||||||
logger.info(
|
|
||||||
f"Skipping deletion of {manifest.digest},"
|
|
||||||
f" referred to by {full_name}"
|
|
||||||
f" for {manifest.platform}",
|
|
||||||
)
|
|
||||||
del untagged_versions[manifest.digest]
|
|
||||||
skips += 1
|
|
||||||
|
|
||||||
except Exception as err:
|
|
||||||
self.actually_delete = False
|
|
||||||
logger.exception(err)
|
|
||||||
return
|
|
||||||
|
|
||||||
logger.info(
|
|
||||||
f"Skipping deletion of {skips} packages referred to by a manifest",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Delete the untagged and not pointed at packages
|
|
||||||
logger.info(f"Deleting untagged packages of {self.package_name}")
|
|
||||||
for to_delete_name in untagged_versions:
|
|
||||||
to_delete_version = untagged_versions[to_delete_name]
|
|
||||||
|
|
||||||
if self.actually_delete:
|
|
||||||
logger.info(
|
|
||||||
f"Deleting id {to_delete_version.id} named {to_delete_version.name}",
|
|
||||||
)
|
|
||||||
self.package_api.delete_package_version(
|
|
||||||
to_delete_version,
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
logger.info(
|
|
||||||
f"Would delete {to_delete_name} (id {to_delete_version.id})",
|
|
||||||
)
|
|
||||||
|
|
||||||
def _clean_untagged_non_manifest():
|
|
||||||
"""
|
|
||||||
If the package is not a multi-arch manifest, images without tags are safe to delete.
|
|
||||||
"""
|
|
||||||
|
|
||||||
for package in self.all_package_versions:
|
|
||||||
if package.untagged:
|
|
||||||
if self.actually_delete:
|
|
||||||
logger.info(
|
|
||||||
f"Deleting id {package.id} named {package.name}",
|
|
||||||
)
|
|
||||||
self.package_api.delete_package_version(
|
|
||||||
package,
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
logger.info(
|
|
||||||
f"Would delete {package.name} (id {package.id})",
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
logger.info(
|
|
||||||
f"Not deleting tag {package.tags[0]} of package {self.package_name}",
|
|
||||||
)
|
|
||||||
|
|
||||||
logger.info("Beginning untagged image cleaning")
|
|
||||||
|
|
||||||
if is_manifest_image:
|
|
||||||
_clean_untagged_manifest()
|
|
||||||
else:
|
|
||||||
_clean_untagged_non_manifest()
|
|
||||||
|
|
||||||
def decide_what_tags_to_keep(self):
|
|
||||||
"""
|
|
||||||
This method holds the logic to delete what tags to keep and there fore
|
|
||||||
what tags to delete.
|
|
||||||
|
|
||||||
By default, any image with at least 1 tag will be kept
|
|
||||||
"""
|
|
||||||
# By default, keep anything which is tagged
|
|
||||||
self.tags_to_keep = list(set(self.all_pkgs_tags_to_version.keys()))
|
|
||||||
|
|
||||||
|
|
||||||
class MainImageTagsCleaner(RegistryTagsCleaner):
|
|
||||||
def decide_what_tags_to_keep(self):
|
|
||||||
"""
|
|
||||||
Overrides the default logic for deciding what images to keep. Images tagged as "feature-"
|
|
||||||
will be removed, if the corresponding branch no longer exists.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Default to everything gets kept still
|
|
||||||
super().decide_what_tags_to_keep()
|
|
||||||
|
|
||||||
# Locate the feature branches
|
|
||||||
feature_branches = {}
|
|
||||||
for branch in self.branch_api.get_branches(
|
|
||||||
repo=self.repo_name,
|
|
||||||
):
|
|
||||||
if branch.name.startswith("feature-"):
|
|
||||||
logger.debug(f"Found feature branch {branch.name}")
|
|
||||||
feature_branches[branch.name] = branch
|
|
||||||
|
|
||||||
logger.info(f"Located {len(feature_branches)} feature branches")
|
|
||||||
|
|
||||||
if not len(feature_branches):
|
|
||||||
# Our work here is done, delete nothing
|
|
||||||
return
|
|
||||||
|
|
||||||
# Filter to packages which are tagged with feature-*
|
|
||||||
packages_tagged_feature: List[ContainerPackage] = []
|
|
||||||
for package in self.all_package_versions:
|
|
||||||
if package.tag_matches("feature-"):
|
|
||||||
packages_tagged_feature.append(package)
|
|
||||||
|
|
||||||
# Map tags like "feature-xyz" to a ContainerPackage
|
|
||||||
feature_pkgs_tags_to_versions: Dict[str, ContainerPackage] = {}
|
|
||||||
for pkg in packages_tagged_feature:
|
|
||||||
for tag in pkg.tags:
|
|
||||||
feature_pkgs_tags_to_versions[tag] = pkg
|
|
||||||
|
|
||||||
logger.info(
|
|
||||||
f'Located {len(feature_pkgs_tags_to_versions)} versions of package {self.package_name} tagged "feature-"',
|
|
||||||
)
|
|
||||||
|
|
||||||
# All the feature tags minus all the feature branches leaves us feature tags
|
|
||||||
# with no corresponding branch
|
|
||||||
self.tags_to_delete = list(
|
|
||||||
set(feature_pkgs_tags_to_versions.keys()) - set(feature_branches.keys()),
|
|
||||||
)
|
|
||||||
|
|
||||||
# All the tags minus the set of going to be deleted tags leaves us the
|
|
||||||
# tags which will be kept around
|
|
||||||
self.tags_to_keep = list(
|
|
||||||
set(self.all_pkgs_tags_to_version.keys()) - set(self.tags_to_delete),
|
|
||||||
)
|
|
||||||
logger.info(
|
|
||||||
f"Located {len(self.tags_to_delete)} versions of package {self.package_name} to delete",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class LibraryTagsCleaner(RegistryTagsCleaner):
|
|
||||||
"""
|
|
||||||
Exists for the off change that someday, the installer library images
|
|
||||||
will need their own logic
|
|
||||||
"""
|
|
||||||
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def _main():
|
|
||||||
parser = ArgumentParser(
|
|
||||||
description="Using the GitHub API locate and optionally delete container"
|
|
||||||
" tags which no longer have an associated feature branch",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Requires an affirmative command to actually do a delete
|
|
||||||
parser.add_argument(
|
|
||||||
"--delete",
|
|
||||||
action="store_true",
|
|
||||||
default=False,
|
|
||||||
help="If provided, actually delete the container tags",
|
|
||||||
)
|
|
||||||
|
|
||||||
# When a tagged image is updated, the previous version remains, but it no longer tagged
|
|
||||||
# Add this option to remove them as well
|
|
||||||
parser.add_argument(
|
|
||||||
"--untagged",
|
|
||||||
action="store_true",
|
|
||||||
default=False,
|
|
||||||
help="If provided, delete untagged containers as well",
|
|
||||||
)
|
|
||||||
|
|
||||||
# If given, the package is assumed to be a multi-arch manifest. Cache packages are
|
|
||||||
# not multi-arch, all other types are
|
|
||||||
parser.add_argument(
|
|
||||||
"--is-manifest",
|
|
||||||
action="store_true",
|
|
||||||
default=False,
|
|
||||||
help="If provided, the package is assumed to be a multi-arch manifest following schema v2",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Allows configuration of log level for debugging
|
|
||||||
parser.add_argument(
|
|
||||||
"--loglevel",
|
|
||||||
default="info",
|
|
||||||
help="Configures the logging level",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Get the name of the package being processed this round
|
|
||||||
parser.add_argument(
|
|
||||||
"package",
|
|
||||||
help="The package to process",
|
|
||||||
)
|
|
||||||
|
|
||||||
args = parser.parse_args()
|
|
||||||
|
|
||||||
logging.basicConfig(
|
|
||||||
level=get_log_level(args),
|
|
||||||
datefmt="%Y-%m-%d %H:%M:%S",
|
|
||||||
format="%(asctime)s %(levelname)-8s %(message)s",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Must be provided in the environment
|
|
||||||
repo_owner: Final[str] = os.environ["GITHUB_REPOSITORY_OWNER"]
|
|
||||||
repo: Final[str] = os.environ["GITHUB_REPOSITORY"]
|
|
||||||
gh_token: Final[str] = os.environ["TOKEN"]
|
|
||||||
|
|
||||||
# Find all branches named feature-*
|
|
||||||
# Note: Only relevant to the main application, but simpler to
|
|
||||||
# leave in for all packages
|
|
||||||
with GithubBranchApi(gh_token) as branch_api:
|
|
||||||
with GithubContainerRegistryApi(gh_token, repo_owner) as container_api:
|
|
||||||
if args.package in {"paperless-ngx", "paperless-ngx/builder/cache/app"}:
|
|
||||||
cleaner = MainImageTagsCleaner(
|
|
||||||
args.package,
|
|
||||||
repo_owner,
|
|
||||||
repo,
|
|
||||||
container_api,
|
|
||||||
branch_api,
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
cleaner = LibraryTagsCleaner(
|
|
||||||
args.package,
|
|
||||||
repo_owner,
|
|
||||||
repo,
|
|
||||||
container_api,
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Set if actually doing a delete vs dry run
|
|
||||||
cleaner.actually_delete = args.delete
|
|
||||||
|
|
||||||
# Clean images with tags
|
|
||||||
cleaner.clean()
|
|
||||||
|
|
||||||
# Clean images which are untagged
|
|
||||||
cleaner.clean_untagged(args.is_manifest)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
_main()
|
|
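The heart of `MainImageTagsCleaner.decide_what_tags_to_keep` is plain set arithmetic: the feature tags minus the live feature branches yields the orphaned tags to delete. A self-contained sketch of that selection step, using hypothetical tag and branch names:

```python
# Hypothetical tags and branches, for illustration only.
all_tags = {"latest", "1.7.0", "feature-ocr", "feature-ui", "feature-api"}
feature_tags = {t for t in all_tags if t.startswith("feature-")}
live_branches = {"feature-ocr"}  # feature branches that still exist

# Feature tags with no corresponding branch get deleted...
tags_to_delete = feature_tags - live_branches
# ...and every other tag is kept.
tags_to_keep = all_tags - tags_to_delete

print(sorted(tags_to_delete))  # ['feature-api', 'feature-ui']
print(sorted(tags_to_keep))    # ['1.7.0', 'feature-ocr', 'latest']
```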
.github/scripts/common.py (vendored, deleted, 48 lines removed). The removed helpers:

```python
#!/usr/bin/env python3
import logging


def get_image_tag(
    repo_name: str,
    pkg_name: str,
    pkg_version: str,
) -> str:
    """
    Returns a string representing the normal image for a given package
    """
    return f"ghcr.io/{repo_name.lower()}/builder/{pkg_name}:{pkg_version}"


def get_cache_image_tag(
    repo_name: str,
    pkg_name: str,
    pkg_version: str,
    branch_name: str,
) -> str:
    """
    Returns a string representing the expected image cache tag for a given package

    Registry type caching is utilized for the builder images, to allow fast
    rebuilds, generally almost instant for the same version
    """
    return f"ghcr.io/{repo_name.lower()}/builder/cache/{pkg_name}:{pkg_version}"


def get_log_level(args) -> int:
    """
    Returns a logging level, based on the parsed command line arguments

    :param args: parsed arguments with a ``loglevel`` attribute
    :return: the corresponding ``logging`` level, defaulting to INFO
    """
    levels = {
        "critical": logging.CRITICAL,
        "error": logging.ERROR,
        "warn": logging.WARNING,
        "warning": logging.WARNING,
        "info": logging.INFO,
        "debug": logging.DEBUG,
    }
    level = levels.get(args.loglevel.lower())
    if level is None:
        level = logging.INFO
    return level
```
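As a usage sketch, the two tag helpers differ only in the `cache/` path segment. For the qpdf 11.2.0 builder image (repository name shown for illustration) they would have produced:

```python
# Mirrors the f-strings in get_image_tag / get_cache_image_tag above.
repo_name = "paperless-ngx/paperless-ngx"
pkg_name, pkg_version = "qpdf", "11.2.0"

print(f"ghcr.io/{repo_name.lower()}/builder/{pkg_name}:{pkg_version}")
# ghcr.io/paperless-ngx/paperless-ngx/builder/qpdf:11.2.0

print(f"ghcr.io/{repo_name.lower()}/builder/cache/{pkg_name}:{pkg_version}")
# ghcr.io/paperless-ngx/paperless-ngx/builder/cache/qpdf:11.2.0
```

Note that `get_cache_image_tag` accepts a `branch_name` parameter it never uses; the cache tag is keyed on version alone.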
92
.github/scripts/get-build-json.py
vendored
@@ -1,92 +0,0 @@
#!/usr/bin/env python3
"""
This is a helper script for the multi-stage Docker image builder.
It provides a single point of configuration for package version control.
The output JSON object is used by the CI workflow to determine what versions
to build and pull into the final Docker image.

Python package information is obtained from the Pipfile.lock.  As this is
kept updated by dependabot, it usually will need no further configuration.
The sole exception currently is pikepdf, which has a dependency on qpdf,
and is configured here to use the latest version of qpdf built by the workflow.

Other package version information is configured directly below, generally by
setting the version and Git information, if any.

"""
import argparse
import json
import os
from pathlib import Path
from typing import Final

from common import get_cache_image_tag
from common import get_image_tag


def _main():
    parser = argparse.ArgumentParser(
        description="Generate a JSON object of information required to build the given package, based on the Pipfile.lock",
    )
    parser.add_argument(
        "package",
        help="The name of the package to generate JSON for",
    )

    PIPFILE_LOCK_PATH: Final[Path] = Path("Pipfile.lock")
    BUILD_CONFIG_PATH: Final[Path] = Path(".build-config.json")

    # Read the main config file
    build_json: Final = json.loads(BUILD_CONFIG_PATH.read_text())

    # Read Pipfile.lock file
    pipfile_data: Final = json.loads(PIPFILE_LOCK_PATH.read_text())

    args: Final = parser.parse_args()

    # Read from environment variables set by GitHub Actions
    repo_name: Final[str] = os.environ["GITHUB_REPOSITORY"]
    branch_name: Final[str] = os.environ["GITHUB_REF_NAME"]

    # Default output values
    version = None
    extra_config = {}

    if args.package in pipfile_data["default"]:
        # Read the version from Pipfile.lock
        pkg_data = pipfile_data["default"][args.package]
        pkg_version = pkg_data["version"].split("==")[-1]
        version = pkg_version

        # Any extra/special values needed
        if args.package == "pikepdf":
            extra_config["qpdf_version"] = build_json["qpdf"]["version"]

    elif args.package in build_json:
        version = build_json[args.package]["version"]

    else:
        raise NotImplementedError(args.package)

    # The JSON object we'll output
    output = {
        "name": args.package,
        "version": version,
        "image_tag": get_image_tag(repo_name, args.package, version),
        "cache_tag": get_cache_image_tag(
            repo_name,
            args.package,
            version,
            branch_name,
        ),
    }

    # Add anything special a package may need
    output.update(extra_config)

    # Output the JSON info to stdout
    print(json.dumps(output))


if __name__ == "__main__":
    _main()
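For reference, the script took a single package name and printed one JSON object to stdout, which a workflow step then captured into $GITHUB_OUTPUT. An illustrative invocation follows; the version number is made up, and the tag values depend on get_image_tag()/get_cache_image_tag() from common.py, so they are elided here:

    $ export GITHUB_REPOSITORY=paperless-ngx/paperless-ngx GITHUB_REF_NAME=dev
    $ python .github/scripts/get-build-json.py pikepdf
    {"name": "pikepdf", "version": "6.2.0", "image_tag": "...", "cache_tag": "...", "qpdf_version": "..."}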
.github/scripts/github.py (vendored, 274 deletions)
@@ -1,274 +0,0 @@
#!/usr/bin/env python3
"""
This module contains some useful classes for interacting with the Github API.
The full documentation for the API can be found here: https://docs.github.com/en/rest

Mostly, this focusses on two areas, repo branches and repo packages, as the use case
is cleaning up container images which are no longer referred to.

"""
import functools
import logging
import re
import urllib.parse
from typing import Dict
from typing import List
from typing import Optional

import httpx

logger = logging.getLogger("github-api")


class _GithubApiBase:
    """
    A base class for interacting with the Github API.  It
    will handle the session and setting authorization headers.
    """

    def __init__(self, token: str) -> None:
        self._token = token
        self._client: Optional[httpx.Client] = None

    def __enter__(self) -> "_GithubApiBase":
        """
        Sets up the required headers for auth and response
        type from the API
        """
        self._client = httpx.Client()
        self._client.headers.update(
            {
                "Accept": "application/vnd.github.v3+json",
                "Authorization": f"token {self._token}",
            },
        )
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """
        Ensures the authorization token is cleaned up no matter
        the reason for the exit
        """
        if "Accept" in self._client.headers:
            del self._client.headers["Accept"]
        if "Authorization" in self._client.headers:
            del self._client.headers["Authorization"]

        # Close the session as well
        self._client.close()
        self._client = None

    def _read_all_pages(self, endpoint):
        """
        Helper function to read all pages of an endpoint, utilizing the
        next.url until exhausted.  Assumes the endpoint returns a list
        """
        internal_data = []

        while True:
            resp = self._client.get(endpoint)
            if resp.status_code == 200:
                internal_data += resp.json()
                if "next" in resp.links:
                    endpoint = resp.links["next"]["url"]
                else:
                    logger.debug("Exiting pagination loop")
                    break
            else:
                logger.warning(f"Request to {endpoint} returned HTTP {resp.status_code}")
                resp.raise_for_status()

        return internal_data


class _EndpointResponse:
    """
    For all endpoint JSON responses, store the full
    response data, for ease of extending later, if need be.
    """

    def __init__(self, data: Dict) -> None:
        self._data = data


class GithubBranch(_EndpointResponse):
    """
    Simple wrapper for a repository branch, only extracts name information
    for now.
    """

    def __init__(self, data: Dict) -> None:
        super().__init__(data)
        self.name = self._data["name"]


class GithubBranchApi(_GithubApiBase):
    """
    Wrapper around branch API.

    See https://docs.github.com/en/rest/branches/branches

    """

    def __init__(self, token: str) -> None:
        super().__init__(token)

        self._ENDPOINT = "https://api.github.com/repos/{REPO}/branches"

    def get_branches(self, repo: str) -> List[GithubBranch]:
        """
        Returns all current branches of the given repository owned by the given
        owner or organization.
        """
        # The environment GITHUB_REPOSITORY already contains the owner in the correct location
        endpoint = self._ENDPOINT.format(REPO=repo)
        internal_data = self._read_all_pages(endpoint)
        return [GithubBranch(branch) for branch in internal_data]


class ContainerPackage(_EndpointResponse):
    """
    Data class wrapping the JSON response from the package related
    endpoints
    """

    def __init__(self, data: Dict):
        super().__init__(data)
        # This is a numerical ID, required for interactions with this
        # specific package, including deletion of it or restoration
        self.id: int = self._data["id"]

        # A string name.  This might be an actual name or it could be a
        # digest string like "sha256:"
        self.name: str = self._data["name"]

        # URL to the package, including its ID, can be used for deletion
        # or restoration without needing to build up a URL ourselves
        self.url: str = self._data["url"]

        # The list of tags applied to this image.  May be an empty list
        self.tags: List[str] = self._data["metadata"]["container"]["tags"]

    @functools.cached_property
    def untagged(self) -> bool:
        """
        Returns True if the image has no tags applied to it, False otherwise
        """
        return len(self.tags) == 0

    @functools.cache
    def tag_matches(self, pattern: str) -> bool:
        """
        Returns True if the image has at least one tag which matches the given regex,
        False otherwise
        """
        for tag in self.tags:
            if re.match(pattern, tag) is not None:
                return True
        return False

    def __repr__(self):
        return f"Package {self.name}"


class GithubContainerRegistryApi(_GithubApiBase):
    """
    Class wrapper to deal with the Github packages API.  This class only deals with
    container type packages, the only type published by paperless-ngx.
    """

    def __init__(self, token: str, owner_or_org: str) -> None:
        super().__init__(token)
        self._owner_or_org = owner_or_org
        if self._owner_or_org == "paperless-ngx":
            # https://docs.github.com/en/rest/packages#get-all-package-versions-for-a-package-owned-by-an-organization
            self._PACKAGES_VERSIONS_ENDPOINT = "https://api.github.com/orgs/{ORG}/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions"
            # https://docs.github.com/en/rest/packages#delete-package-version-for-an-organization
            self._PACKAGE_VERSION_DELETE_ENDPOINT = "https://api.github.com/orgs/{ORG}/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions/{PACKAGE_VERSION_ID}"
        else:
            # https://docs.github.com/en/rest/packages#get-all-package-versions-for-a-package-owned-by-the-authenticated-user
            self._PACKAGES_VERSIONS_ENDPOINT = "https://api.github.com/user/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions"
            # https://docs.github.com/en/rest/packages#delete-a-package-version-for-the-authenticated-user
            self._PACKAGE_VERSION_DELETE_ENDPOINT = "https://api.github.com/user/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions/{PACKAGE_VERSION_ID}"
        self._PACKAGE_VERSION_RESTORE_ENDPOINT = (
            f"{self._PACKAGE_VERSION_DELETE_ENDPOINT}/restore"
        )

    def get_active_package_versions(
        self,
        package_name: str,
    ) -> List[ContainerPackage]:
        """
        Returns all the versions of a given package (container images) from
        the API
        """

        package_type: str = "container"
        # Need to quote this for slashes in the name
        package_name = urllib.parse.quote(package_name, safe="")

        endpoint = self._PACKAGES_VERSIONS_ENDPOINT.format(
            ORG=self._owner_or_org,
            PACKAGE_TYPE=package_type,
            PACKAGE_NAME=package_name,
        )

        pkgs = []

        for data in self._read_all_pages(endpoint):
            pkgs.append(ContainerPackage(data))

        return pkgs

    def get_deleted_package_versions(
        self,
        package_name: str,
    ) -> List[ContainerPackage]:
        """
        Returns all deleted versions of a given package which are still
        restorable from the API
        """
        package_type: str = "container"
        # Need to quote this for slashes in the name
        package_name = urllib.parse.quote(package_name, safe="")

        endpoint = (
            self._PACKAGES_VERSIONS_ENDPOINT.format(
                ORG=self._owner_or_org,
                PACKAGE_TYPE=package_type,
                PACKAGE_NAME=package_name,
            )
            + "?state=deleted"
        )

        pkgs = []

        for data in self._read_all_pages(endpoint):
            pkgs.append(ContainerPackage(data))

        return pkgs

    def delete_package_version(self, package_data: ContainerPackage):
        """
        Deletes the given package version from the GHCR
        """
        resp = self._client.delete(package_data.url)
        if resp.status_code != 204:
            logger.warning(
                f"Request to delete {package_data.url} returned HTTP {resp.status_code}",
            )

    def restore_package_version(
        self,
        package_name: str,
        package_data: ContainerPackage,
    ):
        """
        Restores the given package version in the GHCR
        """
        package_type: str = "container"
        endpoint = self._PACKAGE_VERSION_RESTORE_ENDPOINT.format(
            ORG=self._owner_or_org,
            PACKAGE_TYPE=package_type,
            PACKAGE_NAME=package_name,
            PACKAGE_VERSION_ID=package_data.id,
        )

        resp = self._client.post(endpoint)
        if resp.status_code != 204:
            logger.warning(
                f"Request to restore {endpoint} returned HTTP {resp.status_code}",
            )
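A short usage sketch of these wrappers, along the lines of how the (also removed) cleanup-tags.py script consumed them; the token value is a placeholder:

    token = "ghp_..."  # personal access token with the packages OAuth scopes

    with GithubContainerRegistryApi(token, "paperless-ngx") as api:
        # Fetch every version of the container package, keep only the
        # ones with no tags pointing at them, and delete those
        versions = api.get_active_package_versions("paperless-ngx")
        for pkg in versions:
            if pkg.untagged:
                api.delete_package_version(pkg)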
.github/stale.yml (vendored, 23 deletions)
@@ -1,23 +0,0 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 30

# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7

# Only issues or pull requests with all of these labels are checked for staleness. Defaults to `[]` (disabled)
onlyLabels: [cant-reproduce]

# Label to use when marking an issue as stale
staleLabel: stale

# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity. It will be closed if no further activity occurs. Thank you
  for your contributions.

# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false

# See https://github.com/marketplace/stale for more info on the app
# and https://github.com/probot/stale for the configuration docs
.github/workflows/ci.yml (vendored, 367 changes)
@@ -13,6 +13,14 @@ on:
     branches-ignore:
       - 'translations**'
 
+env:
+  # This is the version of pipenv all the steps will use
+  # If changing this, change Dockerfile
+  DEFAULT_PIP_ENV_VERSION: "2023.7.23"
+  # This is the default version of Python to use in most steps
+  # If changing this, change Dockerfile
+  DEFAULT_PYTHON_VERSION: "3.9"
+
 jobs:
   pre-commit:
     name: Linting Checks
@@ -21,13 +29,11 @@ jobs:
       -
         name: Checkout repository
         uses: actions/checkout@v3
 
       -
-        name: Install tools
+        name: Install python
         uses: actions/setup-python@v4
         with:
-          python-version: "3.9"
+          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
 
       -
         name: Check files
         uses: pre-commit/action@v3.0.0
@@ -41,35 +47,37 @@ jobs:
       -
         name: Checkout
         uses: actions/checkout@v3
-      -
-        name: Install pipenv
-        run: |
-          pipx install pipenv==2022.11.30
       -
         name: Set up Python
+        id: setup-python
         uses: actions/setup-python@v4
         with:
-          python-version: 3.8
+          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
           cache: "pipenv"
           cache-dependency-path: 'Pipfile.lock'
+      -
+        name: Install pipenv
+        run: |
+          pip install --user pipenv==${DEFAULT_PIP_ENV_VERSION}
       -
         name: Install dependencies
         run: |
-          pipenv sync --dev
+          pipenv --python ${{ steps.setup-python.outputs.python-version }} sync --dev
       -
         name: List installed Python dependencies
         run: |
-          pipenv run pip list
+          pipenv --python ${{ steps.setup-python.outputs.python-version }} run pip list
       -
         name: Make documentation
         run: |
-          pipenv run mkdocs build --config-file ./mkdocs.yml
+          pipenv --python ${{ steps.setup-python.outputs.python-version }} run mkdocs build --config-file ./mkdocs.yml
       -
         name: Upload artifact
         uses: actions/upload-artifact@v3
         with:
           name: documentation
           path: site/
+          retention-days: 7
 
   documentation-deploy:
     name: "Deploy Documentation"
@@ -99,79 +107,65 @@ jobs:
       matrix:
         python-version: ['3.8', '3.9', '3.10']
       fail-fast: false
-    env:
-      # Enable Tika end to end testing
-      TIKA_LIVE: 1
-      # Enable paperless_mail testing against real server
-      PAPERLESS_MAIL_TEST_HOST: ${{ secrets.TEST_MAIL_HOST }}
-      PAPERLESS_MAIL_TEST_USER: ${{ secrets.TEST_MAIL_USER }}
-      PAPERLESS_MAIL_TEST_PASSWD: ${{ secrets.TEST_MAIL_PASSWD }}
-      # Skip Tests which require convert
-      PAPERLESS_TEST_SKIP_CONVERT: 1
-      # Enable Gotenberg end to end testing
-      GOTENBERG_LIVE: 1
     steps:
       -
         name: Checkout
         uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
       -
         name: Start containers
         run: |
           docker compose --file ${GITHUB_WORKSPACE}/docker/compose/docker-compose.ci-test.yml pull --quiet
           docker compose --file ${GITHUB_WORKSPACE}/docker/compose/docker-compose.ci-test.yml up --detach
-      -
-        name: Install pipenv
-        run: |
-          pipx install pipenv==2022.11.30
       -
         name: Set up Python
+        id: setup-python
         uses: actions/setup-python@v4
         with:
           python-version: "${{ matrix.python-version }}"
           cache: "pipenv"
           cache-dependency-path: 'Pipfile.lock'
+      -
+        name: Install pipenv
+        run: |
+          pip install --user pipenv==${DEFAULT_PIP_ENV_VERSION}
       -
         name: Install system dependencies
         run: |
           sudo apt-get update -qq
           sudo apt-get install -qq --no-install-recommends unpaper tesseract-ocr imagemagick ghostscript libzbar0 poppler-utils
+      -
+        name: Configure ImageMagick
+        run: |
+          sudo cp docker/imagemagick-policy.xml /etc/ImageMagick-6/policy.xml
       -
         name: Install Python dependencies
         run: |
-          pipenv sync --dev
+          pipenv --python ${{ steps.setup-python.outputs.python-version }} run python --version
+          pipenv --python ${{ steps.setup-python.outputs.python-version }} sync --dev
       -
         name: List installed Python dependencies
         run: |
-          pipenv run pip list
+          pipenv --python ${{ steps.setup-python.outputs.python-version }} run pip list
       -
         name: Tests
-        run: |
-          cd src/
-          pipenv run pytest -rfEp
-      -
-        name: Get changed files
-        id: changed-files-specific
-        uses: tj-actions/changed-files@v34
-        with:
-          files: |
-            src/**
-      -
-        name: List all changed files
-        run: |
-          for file in ${{ steps.changed-files-specific.outputs.all_changed_files }}; do
-            echo "${file} was changed"
-          done
-      -
-        name: Publish coverage results
-        if: matrix.python-version == '3.9' && steps.changed-files-specific.outputs.any_changed == 'true'
         env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          # https://github.com/coveralls-clients/coveralls-python/issues/251
+          PAPERLESS_CI_TEST: 1
+          # Enable paperless_mail testing against real server
+          PAPERLESS_MAIL_TEST_HOST: ${{ secrets.TEST_MAIL_HOST }}
+          PAPERLESS_MAIL_TEST_USER: ${{ secrets.TEST_MAIL_USER }}
+          PAPERLESS_MAIL_TEST_PASSWD: ${{ secrets.TEST_MAIL_PASSWD }}
         run: |
           cd src/
-          pipenv run coveralls --service=github
+          pipenv --python ${{ steps.setup-python.outputs.python-version }} run pytest -ra
+      -
+        name: Upload coverage
+        if: ${{ matrix.python-version == env.DEFAULT_PYTHON_VERSION }}
+        uses: actions/upload-artifact@v3
+        with:
+          name: backend-coverage-report
+          path: src/coverage.xml
+          retention-days: 7
+          if-no-files-found: warn
       -
         name: Stop containers
         if: always()
@@ -194,101 +188,94 @@ jobs:
         uses: actions/setup-node@v3
         with:
           node-version: ${{ matrix.node-version }}
-      - run: cd src-ui && npm ci
-      - run: cd src-ui && npm run lint
-      - run: cd src-ui && npm run test
-      - run: cd src-ui && npm run e2e:ci
+          cache: 'npm'
+          cache-dependency-path: 'src-ui/package-lock.json'
+      -
+        name: Install dependencies
+        run: cd src-ui && npm ci
+      -
+        name: Install Playwright
+        run: npx playwright install --with-deps
+      -
+        name: Linting checks
+        run: cd src-ui && npm run lint
+      -
+        name: Run Jest unit tests
+        run: cd src-ui && npm run test
+      -
+        name: Upload Jest coverage
+        if: always()
+        uses: actions/upload-artifact@v3
+        with:
+          name: jest-coverage-report
+          path: src-ui/coverage
+          retention-days: 7
+          if-no-files-found: warn
+      -
+        name: Run Playwright e2e tests
+        run: cd src-ui && npx playwright test
+      -
+        name: Upload Playwright test results
+        if: always()
+        uses: actions/upload-artifact@v3
+        with:
+          name: playwright-report
+          path: src-ui/playwright-report
+          retention-days: 7
 
-  prepare-docker-build:
-    name: Prepare Docker Pipeline Data
-    if: github.event_name == 'push' && (startsWith(github.ref, 'refs/heads/feature-') || github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/beta' || contains(github.ref, 'beta.rc') || startsWith(github.ref, 'refs/tags/v'))
+  tests-coverage-upload:
+    name: "Upload coverage"
     runs-on: ubuntu-22.04
-    # If the push triggered the installer library workflow, wait for it to
-    # complete here. This ensures the required versions for the final
-    # image have been built, while not waiting at all if the versions haven't changed
-    concurrency:
-      group: build-installer-library
-      cancel-in-progress: false
     needs:
-      - documentation
       - tests-backend
       - tests-frontend
     steps:
       -
-        name: Set ghcr repository name
-        id: set-ghcr-repository
-        run: |
-          ghcr_name=$(echo "${GITHUB_REPOSITORY}" | awk '{ print tolower($0) }')
-          echo "repository=${ghcr_name}" >> $GITHUB_OUTPUT
-      -
         name: Checkout
         uses: actions/checkout@v3
       -
-        name: Set up Python
-        uses: actions/setup-python@v4
+        name: Download frontend coverage
+        uses: actions/download-artifact@v3
         with:
-          python-version: "3.9"
+          name: jest-coverage-report
+          path: src-ui/
       -
-        name: Setup qpdf image
-        id: qpdf-setup
-        run: |
-          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py qpdf)
-
-          echo ${build_json}
-
-          echo "qpdf-json=${build_json}" >> $GITHUB_OUTPUT
+        name: Upload frontend coverage to Codecov
+        uses: codecov/codecov-action@v3
+        with:
+          # not required for public repos, but intermittently fails otherwise
+          token: ${{ secrets.CODECOV_TOKEN }}
+          flags: frontend
+          directory: src-ui/
       -
-        name: Setup psycopg2 image
-        id: psycopg2-setup
-        run: |
-          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py psycopg2)
-
-          echo ${build_json}
-
-          echo "psycopg2-json=${build_json}" >> $GITHUB_OUTPUT
+        name: Download backend coverage
+        uses: actions/download-artifact@v3
+        with:
+          name: backend-coverage-report
+          path: src/
       -
-        name: Setup pikepdf image
-        id: pikepdf-setup
-        run: |
-          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py pikepdf)
-
-          echo ${build_json}
-
-          echo "pikepdf-json=${build_json}" >> $GITHUB_OUTPUT
-      -
-        name: Setup jbig2enc image
-        id: jbig2enc-setup
-        run: |
-          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py jbig2enc)
-
-          echo ${build_json}
-
-          echo "jbig2enc-json=${build_json}" >> $GITHUB_OUTPUT
-
-    outputs:
-      ghcr-repository: ${{ steps.set-ghcr-repository.outputs.repository }}
-      qpdf-json: ${{ steps.qpdf-setup.outputs.qpdf-json }}
-      pikepdf-json: ${{ steps.pikepdf-setup.outputs.pikepdf-json }}
-      psycopg2-json: ${{ steps.psycopg2-setup.outputs.psycopg2-json }}
-      jbig2enc-json: ${{ steps.jbig2enc-setup.outputs.jbig2enc-json}}
+        name: Upload coverage to Codecov
+        uses: codecov/codecov-action@v3
+        with:
+          # not required for public repos, but intermittently fails otherwise
+          token: ${{ secrets.CODECOV_TOKEN }}
+          # future expansion
+          flags: backend
+          directory: src/
 
-  # build and push image to docker hub.
   build-docker-image:
+    name: Build Docker image for ${{ github.ref_name }}
     runs-on: ubuntu-22.04
+    if: github.event_name == 'push' && (startsWith(github.ref, 'refs/heads/feature-') || github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/beta' || contains(github.ref, 'beta.rc') || startsWith(github.ref, 'refs/tags/v'))
     concurrency:
       group: ${{ github.workflow }}-build-docker-image-${{ github.ref_name }}
       cancel-in-progress: true
     needs:
-      - prepare-docker-build
+      - tests-backend
+      - tests-frontend
     steps:
      -
         name: Check pushing to Docker Hub
-        id: docker-hub
+        id: push-other-places
         # Only push to Dockerhub from the main repo AND the ref is either:
         # main
         # dev
@@ -296,21 +283,29 @@ jobs:
         # a tag
         # Otherwise forks would require a Docker Hub account and secrets setup
         run: |
-          if [[ ${{ needs.prepare-docker-build.outputs.ghcr-repository }} == "paperless-ngx/paperless-ngx" && ( ${{ github.ref_name }} == "main" || ${{ github.ref_name }} == "dev" || ${{ github.ref_name }} == "beta" || ${{ startsWith(github.ref, 'refs/tags/v') }} == "true" ) ]] ; then
+          if [[ ${{ github.repository_owner }} == "paperless-ngx" && ( ${{ github.ref_name }} == "main" || ${{ github.ref_name }} == "dev" || ${{ github.ref_name }} == "beta" || ${{ startsWith(github.ref, 'refs/tags/v') }} == "true" ) ]] ; then
             echo "Enabling DockerHub image push"
             echo "enable=true" >> $GITHUB_OUTPUT
           else
             echo "Not pushing to DockerHub"
             echo "enable=false" >> $GITHUB_OUTPUT
           fi
+      -
+        name: Set ghcr repository name
+        id: set-ghcr-repository
+        run: |
+          ghcr_name=$(echo "${{ github.repository }}" | awk '{ print tolower($0) }')
+          echo "Name is ${ghcr_name}"
+          echo "ghcr-repository=${ghcr_name}" >> $GITHUB_OUTPUT
       -
         name: Gather Docker metadata
         id: docker-meta
         uses: docker/metadata-action@v4
         with:
           images: |
-            ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}
-            name=paperlessngx/paperless-ngx,enable=${{ steps.docker-hub.outputs.enable }}
+            ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}
+            name=paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
+            name=quay.io/paperlessngx/paperless-ngx,enable=${{ steps.push-other-places.outputs.enable }}
           tags: |
             # Tag branches with branch name
             type=ref,event=branch
@@ -321,6 +316,9 @@ jobs:
       -
         name: Checkout
         uses: actions/checkout@v3
+      # If https://github.com/docker/buildx/issues/1044 is resolved,
+      # the append input with a native arm64 arch could be used to
+      # significantly speed up building
       -
         name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v2
@@ -338,13 +336,22 @@ jobs:
         name: Login to Docker Hub
         uses: docker/login-action@v2
         # Don't attempt to login if not pushing to Docker Hub
-        if: steps.docker-hub.outputs.enable == 'true'
+        if: steps.push-other-places.outputs.enable == 'true'
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
+      -
+        name: Login to Quay.io
+        uses: docker/login-action@v2
+        # Don't attempt to login if not pushing to Quay.io
+        if: steps.push-other-places.outputs.enable == 'true'
+        with:
+          registry: quay.io
+          username: ${{ secrets.QUAY_USERNAME }}
+          password: ${{ secrets.QUAY_ROBOT_TOKEN }}
       -
         name: Build and push
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v4
         with:
           context: .
           file: ./Dockerfile
@@ -352,19 +359,13 @@ jobs:
           push: ${{ github.event_name != 'pull_request' }}
           tags: ${{ steps.docker-meta.outputs.tags }}
           labels: ${{ steps.docker-meta.outputs.labels }}
-          build-args: |
-            JBIG2ENC_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.jbig2enc-json).version }}
-            QPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.qpdf-json).version }}
-            PIKEPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.pikepdf-json).version }}
-            PSYCOPG2_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.psycopg2-json).version }}
-          # Get cache layers from this branch, then dev, then main
+          # Get cache layers from this branch, then dev
           # This allows new branches to get at least some cache benefits, generally from dev
           cache-from: |
-            type=registry,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
-            type=registry,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:dev
-            type=registry,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:main
+            type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
+            type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:dev
           cache-to: |
-            type=registry,mode=max,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
+            type=registry,mode=max,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
       -
         name: Inspect image
         run: |
@@ -380,6 +381,7 @@ jobs:
         with:
           name: frontend-compiled
           path: src/documents/static/frontend/
+          retention-days: 7
 
   build-release:
     needs:
@@ -389,22 +391,22 @@ jobs:
       -
         name: Checkout
         uses: actions/checkout@v3
-      -
-        name: Install pipenv
-        run: |
-          pip3 install --upgrade pip setuptools wheel pipx
-          pipx install pipenv
       -
         name: Set up Python
+        id: setup-python
         uses: actions/setup-python@v4
         with:
-          python-version: 3.9
+          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
           cache: "pipenv"
           cache-dependency-path: 'Pipfile.lock'
+      -
+        name: Install pipenv + tools
+        run: |
+          pip install --upgrade --user pipenv==${DEFAULT_PIP_ENV_VERSION} setuptools wheel
       -
         name: Install Python dependencies
         run: |
-          pipenv sync --dev
+          pipenv --python ${{ steps.setup-python.outputs.python-version }} sync --dev
       -
         name: Install system dependencies
         run: |
@@ -425,35 +427,62 @@ jobs:
       -
         name: Generate requirements file
         run: |
-          pipenv requirements > requirements.txt
+          pipenv --python ${{ steps.setup-python.outputs.python-version }} requirements > requirements.txt
       -
         name: Compile messages
         run: |
           cd src/
-          pipenv run python3 manage.py compilemessages
+          pipenv --python ${{ steps.setup-python.outputs.python-version }} run python3 manage.py compilemessages
       -
         name: Collect static files
         run: |
           cd src/
-          pipenv run python3 manage.py collectstatic --no-input
+          pipenv --python ${{ steps.setup-python.outputs.python-version }} run python3 manage.py collectstatic --no-input
       -
         name: Move files
         run: |
-          mkdir dist
-          mkdir dist/paperless-ngx
-          mkdir dist/paperless-ngx/scripts
-          cp .dockerignore .env Dockerfile Pipfile Pipfile.lock requirements.txt LICENSE README.md dist/paperless-ngx/
-          cp paperless.conf.example dist/paperless-ngx/paperless.conf
-          cp gunicorn.conf.py dist/paperless-ngx/gunicorn.conf.py
-          cp -r docker/ dist/paperless-ngx/docker
-          cp scripts/*.service scripts/*.sh dist/paperless-ngx/scripts/
-          cp -r src/ dist/paperless-ngx/src
-          cp -r docs/_build/html/ dist/paperless-ngx/docs
-          mv static dist/paperless-ngx
+          echo "Making dist folders"
+          for directory in dist \
+                           dist/paperless-ngx \
+                           dist/paperless-ngx/scripts;
+          do
+            mkdir --verbose --parents ${directory}
+          done
+
+          echo "Copying basic files"
+          for file_name in .dockerignore \
+                           .env \
+                           Dockerfile \
+                           Pipfile \
+                           Pipfile.lock \
+                           requirements.txt \
+                           LICENSE \
+                           README.md \
+                           paperless.conf.example \
+                           gunicorn.conf.py
+          do
+            cp --verbose ${file_name} dist/paperless-ngx/
+          done
+          mv --verbose dist/paperless-ngx/paperless.conf.example dist/paperless-ngx/paperless.conf
+
+          echo "Copying Docker related files"
+          cp --recursive docker/ dist/paperless-ngx/docker
+
+          echo "Copying startup scripts"
+          cp --verbose scripts/*.service scripts/*.sh scripts/*.socket dist/paperless-ngx/scripts/
+
+          echo "Copying source files"
+          cp --recursive src/ dist/paperless-ngx/src
+          echo "Copying documentation"
+          cp --recursive docs/_build/html/ dist/paperless-ngx/docs
+
+          mv --verbose static dist/paperless-ngx
       -
         name: Make release package
         run: |
+          echo "Creating release archive"
           cd dist
+          sudo chown -R 1000:1000 paperless-ngx/
           tar -cJf paperless-ngx.tar.xz paperless-ngx/
       -
         name: Upload release artifact
@@ -461,6 +490,7 @@ jobs:
         with:
           name: release
           path: dist/paperless-ngx.tar.xz
+          retention-days: 7
 
   publish-release:
     runs-on: ubuntu-22.04
@@ -491,7 +521,7 @@ jobs:
       -
         name: Create Release and Changelog
         id: create-release
-        uses: paperless-ngx/release-drafter@master
+        uses: release-drafter/release-drafter@v5
         with:
           name: Paperless-ngx ${{ steps.get_version.outputs.version }}
           tag: ${{ steps.get_version.outputs.version }}
@@ -522,18 +552,17 @@ jobs:
         uses: actions/checkout@v3
         with:
           ref: main
-      -
-        name: Install pipenv
-        run: |
-          pip3 install --upgrade pip setuptools wheel pipx
-          pipx install pipenv
       -
         name: Set up Python
         uses: actions/setup-python@v4
         with:
-          python-version: 3.9
+          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
           cache: "pipenv"
           cache-dependency-path: 'Pipfile.lock'
+      -
+        name: Install pipenv + tools
+        run: |
+          pip install --upgrade --user pipenv==${DEFAULT_PIP_ENV_VERSION} setuptools wheel
       -
         name: Append Changelog to docs
         id: append-Changelog
@@ -570,5 +599,5 @@ jobs:
               owner,
               repo,
               issue_number: result.data.number,
-              labels: ['documentation']
+              labels: ['documentation', 'skip-changelog']
             });
.github/workflows/cleanup-tags.yml (vendored, 108 changes)
@@ -12,9 +12,6 @@ on:
   push:
     paths:
       - ".github/workflows/cleanup-tags.yml"
-      - ".github/scripts/cleanup-tags.py"
-      - ".github/scripts/github.py"
-      - ".github/scripts/common.py"
 
 concurrency:
   group: registry-tags-cleanup
@@ -22,72 +19,59 @@ concurrency:
 
 jobs:
   cleanup-images:
-    name: Cleanup Image Tags for ${{ matrix.primary-name }}
+    name: Cleanup Image Tags for paperless-ngx
     if: github.repository_owner == 'paperless-ngx'
     runs-on: ubuntu-22.04
-    strategy:
-      matrix:
-        include:
-          - primary-name: "paperless-ngx"
-            cache-name: "paperless-ngx/builder/cache/app"
-
-          - primary-name: "paperless-ngx/builder/qpdf"
-            cache-name: "paperless-ngx/builder/cache/qpdf"
-
-          - primary-name: "paperless-ngx/builder/pikepdf"
-            cache-name: "paperless-ngx/builder/cache/pikepdf"
-
-          - primary-name: "paperless-ngx/builder/jbig2enc"
-            cache-name: "paperless-ngx/builder/cache/jbig2enc"
-
-          - primary-name: "paperless-ngx/builder/psycopg2"
-            cache-name: "paperless-ngx/builder/cache/psycopg2"
     env:
       # Requires a personal access token with the OAuth scope delete:packages
       TOKEN: ${{ secrets.GHA_CONTAINER_DELETE_TOKEN }}
     steps:
       -
-        name: Checkout
-        uses: actions/checkout@v3
-      -
-        name: Login to Github Container Registry
-        uses: docker/login-action@v2
-        with:
-          registry: ghcr.io
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-      -
-        name: Set up Python
-        uses: actions/setup-python@v4
-        with:
-          python-version: "3.10"
-      -
-        name: Install httpx
-        run: |
-          python -m pip install httpx
-      #
-      # Clean up primary package
-      #
-      -
-        name: Cleanup for package "${{ matrix.primary-name }}"
+        name: Clean temporary images
         if: "${{ env.TOKEN != '' }}"
-        run: |
-          python ${GITHUB_WORKSPACE}/.github/scripts/cleanup-tags.py --untagged --is-manifest --delete "${{ matrix.primary-name }}"
-      #
-      # Clean up registry cache package
-      #
+        uses: stumpylog/image-cleaner-action/ephemeral@v0.2.0
+        with:
+          token: "${{ env.TOKEN }}"
+          owner: "${{ github.repository_owner }}"
+          is_org: "true"
+          package_name: "paperless-ngx"
+          scheme: "branch"
+          repo_name: "paperless-ngx"
+          match_regex: "feature-"
+          do_delete: "true"
+
+  cleanup-untagged-images:
+    name: Cleanup Untagged Images Tags for ${{ matrix.primary-name }}
+    if: github.repository_owner == 'paperless-ngx'
+    runs-on: ubuntu-22.04
+    needs:
+      - cleanup-images
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - primary-name: "paperless-ngx"
+          - primary-name: "paperless-ngx/builder/cache/app"
+          # TODO: Remove the above and replace with the below
+          # - primary-name: "builder/qpdf"
+          # - primary-name: "builder/cache/qpdf"
+          # - primary-name: "builder/pikepdf"
+          # - primary-name: "builder/cache/pikepdf"
+          # - primary-name: "builder/jbig2enc"
+          # - primary-name: "builder/cache/jbig2enc"
+          # - primary-name: "builder/psycopg2"
+          # - primary-name: "builder/cache/psycopg2"
+    env:
+      # Requires a personal access token with the OAuth scope delete:packages
+      TOKEN: ${{ secrets.GHA_CONTAINER_DELETE_TOKEN }}
+    steps:
       -
-        name: Cleanup for package "${{ matrix.cache-name }}"
+        name: Clean untagged images
         if: "${{ env.TOKEN != '' }}"
-        run: |
-          python ${GITHUB_WORKSPACE}/.github/scripts/cleanup-tags.py --untagged --delete "${{ matrix.cache-name }}"
-      #
-      # Verify tags which are left still pull
-      #
-      -
-        name: Check all tags still pull
-        run: |
-          ghcr_name=$(echo "ghcr.io/${GITHUB_REPOSITORY_OWNER}/${{ matrix.primary-name }}" | awk '{ print tolower($0) }')
-          echo "Pulling all tags of ${ghcr_name}"
-          docker pull --quiet --all-tags ${ghcr_name}
-          docker image list
+        uses: stumpylog/image-cleaner-action/untagged@v0.2.0
+        with:
+          token: "${{ env.TOKEN }}"
+          owner: "${{ github.repository_owner }}"
+          is_org: "true"
+          package_name: "${{ matrix.primary-name }}"
+          do_delete: "true"
.github/workflows/installer-library.yml (vendored, 171 deletions)
@@ -1,171 +0,0 @@
# This workflow will run to update the installer library of
# Docker images. These are the images which provide updated wheels,
# .deb installation packages, or maybe just some compiled library

name: Build Image Library

on:
  push:
    # Must match one of these branches AND one of the paths
    # to be triggered
    branches:
      - "main"
      - "dev"
      - "library-*"
      - "feature-*"
    paths:
      # Trigger the workflow if a Dockerfile changed
      - "docker-builders/**"
      # Trigger if a package was updated
      - ".build-config.json"
      - "Pipfile.lock"
      # Also trigger on workflow changes related to the library
      - ".github/workflows/installer-library.yml"
      - ".github/workflows/reusable-workflow-builder.yml"
      - ".github/scripts/**"

# Set a workflow level concurrency group so primary workflow
# can wait for this to complete if needed
# DO NOT CHANGE without updating main workflow group
concurrency:
  group: build-installer-library
  cancel-in-progress: false

jobs:
  prepare-docker-build:
    name: Prepare Docker Image Version Data
    runs-on: ubuntu-22.04
    steps:
      -
        name: Set ghcr repository name
        id: set-ghcr-repository
        run: |
          ghcr_name=$(echo "${GITHUB_REPOSITORY}" | awk '{ print tolower($0) }')
          echo "repository=${ghcr_name}" >> $GITHUB_OUTPUT
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.9"
      -
        name: Install jq
        run: |
          sudo apt-get update
          sudo apt-get install jq
      -
        name: Setup qpdf image
        id: qpdf-setup
        run: |
          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py qpdf)

          echo ${build_json}

          echo "qpdf-json=${build_json}" >> $GITHUB_OUTPUT
      -
        name: Setup psycopg2 image
        id: psycopg2-setup
        run: |
          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py psycopg2)

          echo ${build_json}

          echo "psycopg2-json=${build_json}" >> $GITHUB_OUTPUT
      -
        name: Setup pikepdf image
        id: pikepdf-setup
        run: |
          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py pikepdf)

          echo ${build_json}

          echo "pikepdf-json=${build_json}" >> $GITHUB_OUTPUT
      -
        name: Setup jbig2enc image
        id: jbig2enc-setup
        run: |
          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py jbig2enc)

          echo ${build_json}

          echo "jbig2enc-json=${build_json}" >> $GITHUB_OUTPUT
      -
        name: Setup other versions
        id: cache-bust-setup
        run: |
          pillow_version=$(jq ".default.pillow.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')
          lxml_version=$(jq ".default.lxml.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')

          echo "Pillow is ${pillow_version}"
          echo "lxml is ${lxml_version}"

          echo "pillow-version=${pillow_version}" >> $GITHUB_OUTPUT
          echo "lxml-version=${lxml_version}" >> $GITHUB_OUTPUT

    outputs:
      ghcr-repository: ${{ steps.set-ghcr-repository.outputs.repository }}
      qpdf-json: ${{ steps.qpdf-setup.outputs.qpdf-json }}
      pikepdf-json: ${{ steps.pikepdf-setup.outputs.pikepdf-json }}
      psycopg2-json: ${{ steps.psycopg2-setup.outputs.psycopg2-json }}
      jbig2enc-json: ${{ steps.jbig2enc-setup.outputs.jbig2enc-json }}
      pillow-version: ${{ steps.cache-bust-setup.outputs.pillow-version }}
      lxml-version: ${{ steps.cache-bust-setup.outputs.lxml-version }}

  build-qpdf-debs:
    name: qpdf
    needs:
      - prepare-docker-build
    uses: ./.github/workflows/reusable-workflow-builder.yml
    with:
      dockerfile: ./docker-builders/Dockerfile.qpdf
      build-platforms: linux/amd64
      build-json: ${{ needs.prepare-docker-build.outputs.qpdf-json }}
      build-args: |
        QPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.qpdf-json).version }}

  build-jbig2enc:
    name: jbig2enc
    needs:
      - prepare-docker-build
    uses: ./.github/workflows/reusable-workflow-builder.yml
    with:
      dockerfile: ./docker-builders/Dockerfile.jbig2enc
      build-json: ${{ needs.prepare-docker-build.outputs.jbig2enc-json }}
      build-args: |
        JBIG2ENC_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.jbig2enc-json).version }}

  build-psycopg2-wheel:
    name: psycopg2
    needs:
      - prepare-docker-build
    uses: ./.github/workflows/reusable-workflow-builder.yml
    with:
      dockerfile: ./docker-builders/Dockerfile.psycopg2
      build-json: ${{ needs.prepare-docker-build.outputs.psycopg2-json }}
      build-args: |
        PSYCOPG2_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.psycopg2-json).version }}

  build-pikepdf-wheel:
    name: pikepdf
    needs:
      - prepare-docker-build
      - build-qpdf-debs
    uses: ./.github/workflows/reusable-workflow-builder.yml
    with:
      dockerfile: ./docker-builders/Dockerfile.pikepdf
      build-json: ${{ needs.prepare-docker-build.outputs.pikepdf-json }}
      build-args: |
        REPO=${{ needs.prepare-docker-build.outputs.ghcr-repository }}
        QPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.qpdf-json).version }}
        PIKEPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.pikepdf-json).version }}
        PILLOW_VERSION=${{ needs.prepare-docker-build.outputs.pillow-version }}
        LXML_VERSION=${{ needs.prepare-docker-build.outputs.lxml-version }}
.github/workflows/project-actions.yml (vendored, 4 changes)
@@ -28,7 +28,7 @@ jobs:
     if: github.event_name == 'issues' && (github.event.action == 'opened' || github.event.action == 'reopened')
     steps:
       - name: Add issue to project and set status to ${{ env.todo }}
-        uses: leonsteinhaeuser/project-beta-automations@v2.0.1
+        uses: leonsteinhaeuser/project-beta-automations@v2.1.0
         with:
           gh_token: ${{ secrets.GH_TOKEN }}
           organization: paperless-ngx
@@ -44,7 +44,7 @@ jobs:
     if: github.event_name == 'pull_request_target' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request.user.login != 'dependabot'
     steps:
       - name: Add PR to project and set status to "Needs Review"
-        uses: leonsteinhaeuser/project-beta-automations@v2.0.1
+        uses: leonsteinhaeuser/project-beta-automations@v2.1.0
         with:
           gh_token: ${{ secrets.GH_TOKEN }}
           organization: paperless-ngx
.github/workflows/release-chart.yml (vendored, 31 deletions)
@@ -1,31 +0,0 @@
---
name: Release Charts

on:
  push:
    tags:
      - v*

jobs:
  release_chart:
    name: "Release Chart"
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Configure Git
        run: |
          git config user.name "$GITHUB_ACTOR"
          git config user.email "$GITHUB_ACTOR@users.noreply.github.com"
      - name: Install Helm
        uses: azure/setup-helm@v3
        with:
          version: v3.10.0

      - name: Run chart-releaser
        uses: helm/chart-releaser-action@v1.4.1
        env:
          CR_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
.github/workflows/repo-maintenance.yml (vendored, new file, 47 additions)
@@ -0,0 +1,47 @@
name: 'Repository Maintenance'

on:
  schedule:
    - cron: '0 3 * * *'
  workflow_dispatch:

permissions:
  issues: write
  pull-requests: write

concurrency:
  group: lock

jobs:
  stale:
    name: 'Stale'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@v8
        with:
          days-before-stale: 7
          days-before-close: 14
          any-of-labels: 'cant-reproduce,not a bug'
          stale-issue-label: stale
          stale-pr-label: stale
          stale-issue-message: >
            This issue has been automatically marked as stale because it has not had
            recent activity. It will be closed if no further activity occurs. Thank you
            for your contributions.
  lock-threads:
    name: 'Lock Old Threads'
    runs-on: ubuntu-latest
    steps:
      - uses: dessant/lock-threads@v4
        with:
          issue-inactive-days: '30'
          pr-inactive-days: '30'
          log-output: true
          issue-comment: >
            This issue has been automatically locked since there
            has not been any recent activity after it was closed.
            Please open a new discussion or issue for related concerns.
          pr-comment: >
            This pull request has been automatically locked since there
            has not been any recent activity after it was closed.
            Please open a new discussion or issue for related concerns.
.github/workflows/reusable-workflow-builder.yml (vendored, 57 deletions)
@@ -1,57 +0,0 @@
name: Reusable Image Builder

on:
  workflow_call:
    inputs:
      dockerfile:
        required: true
        type: string
      build-json:
        required: true
        type: string
      build-args:
        required: false
        default: ""
        type: string
      build-platforms:
        required: false
        default: linux/amd64,linux/arm64,linux/arm/v7
        type: string

concurrency:
  group: ${{ github.workflow }}-${{ fromJSON(inputs.build-json).name }}-${{ fromJSON(inputs.build-json).version }}
  cancel-in-progress: false

jobs:
  build-image:
    name: Build ${{ fromJSON(inputs.build-json).name }} @ ${{ fromJSON(inputs.build-json).version }}
    runs-on: ubuntu-22.04
    steps:
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Login to Github Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      -
        name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      -
        name: Set up QEMU
        uses: docker/setup-qemu-action@v2
      -
        name: Build ${{ fromJSON(inputs.build-json).name }}
        uses: docker/build-push-action@v3
        with:
          context: .
          file: ${{ inputs.dockerfile }}
          tags: ${{ fromJSON(inputs.build-json).image_tag }}
          platforms: ${{ inputs.build-platforms }}
          build-args: ${{ inputs.build-args }}
          push: true
          cache-from: type=registry,ref=${{ fromJSON(inputs.build-json).cache_tag }}
          cache-to: type=registry,mode=max,ref=${{ fromJSON(inputs.build-json).cache_tag }}
.gitignore (vendored, 1 line added)
@@ -73,6 +73,7 @@ virtualenv
 .venv/
 /docker-compose.env
 /docker-compose.yml
+.ruff_cache/
 
 # Used for development
 scripts/import-for-development
.pre-commit-config.yaml
@@ -11,14 +11,13 @@ repos:
       - id: check-json
         exclude: "tsconfig.*json"
       - id: check-yaml
-        exclude: "charts/paperless-ngx/templates/common.yaml"
      - id: check-toml
      - id: check-executables-have-shebangs
      - id: end-of-file-fixer
        exclude_types:
          - svg
          - pofile
-        exclude: "^(LICENSE|charts/paperless-ngx/README.md)$"
+        exclude: "(^LICENSE$)"
      - id: mixed-line-ending
        args:
          - "--fix=lf"
@@ -28,51 +27,26 @@ repos:
       - id: check-case-conflict
       - id: detect-private-key
   - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: "v2.7.1"
+    rev: 'v3.0.0'
    hooks:
      - id: prettier
        types_or:
          - javascript
          - ts
          - markdown
-        exclude: "(^Pipfile\\.lock$)|(^charts/paperless-ngx/README.md$)"
+        exclude: "(^Pipfile\\.lock$)"
  # Python hooks
-  - repo: https://github.com/asottile/reorder_python_imports
-    rev: v3.9.0
+  - repo: https://github.com/charliermarsh/ruff-pre-commit
+    rev: 'v0.0.280'
    hooks:
-      - id: reorder-python-imports
-        exclude: "(migrations)"
+      - id: ruff
-  - repo: https://github.com/asottile/yesqa
-    rev: "v1.4.0"
-    hooks:
-      - id: yesqa
-        exclude: "(migrations)"
-  - repo: https://github.com/asottile/add-trailing-comma
-    rev: "v2.4.0"
-    hooks:
-      - id: add-trailing-comma
-        exclude: "(migrations)"
-  - repo: https://github.com/PyCQA/flake8
-    rev: 6.0.0
-    hooks:
-      - id: flake8
-        files: ^src/
-        args:
-          - "--config=./src/setup.cfg"
  - repo: https://github.com/psf/black
-    rev: 22.12.0
+    rev: 23.7.0
    hooks:
      - id: black
-  - repo: https://github.com/asottile/pyupgrade
-    rev: v3.3.1
-    hooks:
-      - id: pyupgrade
-        exclude: "(migrations)"
-        args:
-          - "--py38-plus"
  # Dockerfile hooks
  - repo: https://github.com/AleksaC/hadolint-py
-    rev: v2.10.0
+    rev: v2.12.0.2
    hooks:
      - id: hadolint
  # Shell script hooks
@@ -83,6 +57,6 @@ repos:
        args:
          - "--tab"
  - repo: https://github.com/shellcheck-py/shellcheck-py
-    rev: "v0.9.0.2"
+    rev: "v0.9.0.5"
    hooks:
      - id: shellcheck
@@ -2,3 +2,5 @@
 semi: false
 # https://prettier.io/docs/en/options.html#quotes
 singleQuote: true
+# https://prettier.io/docs/en/options.html#trailing-commas
+trailingComma: "es5"
.python-version (new file, 1 line)
@@ -0,0 +1 @@
+3.8.16
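.python-version is the file pyenv consults when choosing an interpreter, so local environments can match this pin (a sketch assuming pyenv is installed; `pyenv local` simply rewrites the file):

    pyenv install 3.8.16   # one-time: build the pinned interpreter
    pyenv local 3.8.16     # select it for this directory via .python-version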
.ruff.toml (new file, 23 lines)
@@ -0,0 +1,23 @@
+# https://beta.ruff.rs/docs/settings/
+# https://beta.ruff.rs/docs/rules/
+extend-select = ["I", "W", "UP", "COM", "DJ", "EXE", "ISC", "ICN", "G201", "INP", "PIE", "RSE", "SIM", "TID", "PLC", "PLE", "RUF"]
+# TODO PTH
+ignore = ["DJ001", "SIM105", "RUF012"]
+fix = true
+line-length = 88
+respect-gitignore = true
+src = ["src"]
+target-version = "py38"
+format = "grouped"
+show-fixes = true
+
+[per-file-ignores]
+".github/scripts/*.py" = ["E501", "INP001", "SIM117"]
+"docker/wait-for-redis.py" = ["INP001"]
+"*/tests/*.py" = ["E501", "SIM117"]
+"*/migrations/*.py" = ["E501", "SIM"]
+"src/paperless_tesseract/tests/test_parser.py" = ["RUF001"]
+"src/documents/models.py" = ["SIM115"]
+
+[isort]
+force-single-line = true
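Since `fix = true` and the rule selection live in the config file, a local run needs no extra flags; ruff discovers .ruff.toml from the project root. A sketch (assuming the ruff pinned by pre-commit, or the dev dependency, is on PATH):

    # lint the whole tree with the selected rules, applying autofixes
    ruff check .
    # or limit to the Django sources
    ruff check src/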
@@ -45,7 +45,7 @@ Examples of `non-trivial` PRs might include:
 
 - Additional features
 - Large changes to many distinct files
-- Breaking or depreciation of existing features
+- Breaking or deprecation of existing features
 
 Our community review process for `non-trivial` PRs is the following:
 
Dockerfile (137 lines changed)
@@ -1,25 +1,11 @@
-# syntax=docker/dockerfile:1.4
+# syntax=docker/dockerfile:1
+# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/reference.md
 
-# Pull the installer images from the library
-# These are all built previously
-# They provide either a .deb or .whl
+# Stage: compile-frontend
+# Purpose: Compiles the frontend
+# Notes:
+#  - Does NPM stuff with Typescript and such
-ARG JBIG2ENC_VERSION
-ARG QPDF_VERSION
-ARG PIKEPDF_VERSION
-ARG PSYCOPG2_VERSION
-
-FROM ghcr.io/paperless-ngx/paperless-ngx/builder/jbig2enc:${JBIG2ENC_VERSION} as jbig2enc-builder
-FROM ghcr.io/paperless-ngx/paperless-ngx/builder/qpdf:${QPDF_VERSION} as qpdf-builder
-FROM ghcr.io/paperless-ngx/paperless-ngx/builder/pikepdf:${PIKEPDF_VERSION} as pikepdf-builder
-FROM ghcr.io/paperless-ngx/paperless-ngx/builder/psycopg2:${PSYCOPG2_VERSION} as psycopg2-builder
-
-FROM --platform=$BUILDPLATFORM node:16-bullseye-slim AS compile-frontend
-
-# This stage compiles the frontend
-# This stage runs once for the native platform, as the outputs are not
-# dependent on target arch
-# Inputs: None
+FROM --platform=$BUILDPLATFORM docker.io/node:16-bookworm-slim AS compile-frontend
 
 COPY ./src-ui /src/src-ui
 
@@ -30,14 +16,12 @@ RUN set -eux \
 RUN set -eux \
   && ./node_modules/.bin/ng build --configuration production
 
-FROM --platform=$BUILDPLATFORM python:3.9-slim-bullseye as pipenv-base
-
-# This stage generates the requirements.txt file using pipenv
-# This stage runs once for the native platform, as the outputs are not
-# dependent on target arch
-# This way, pipenv dependencies are not left in the final image
-# nor can pipenv mess up the final image somehow
-# Inputs: None
+# Stage: pipenv-base
+# Purpose: Generates a requirements.txt file for building
+# Comments:
+#  - pipenv dependencies are not left in the final image
+#  - pipenv can't touch the final image somehow
+FROM --platform=$BUILDPLATFORM docker.io/python:3.9-alpine as pipenv-base
 
 WORKDIR /usr/src/pipenv
 
@@ -45,11 +29,15 @@ COPY Pipfile* ./
 
 RUN set -eux \
   && echo "Installing pipenv" \
-  && python3 -m pip install --no-cache-dir --upgrade pipenv==2022.11.30 \
+  && python3 -m pip install --no-cache-dir --upgrade pipenv==2023.7.23 \
   && echo "Generating requirement.txt" \
   && pipenv requirements > requirements.txt
 
-FROM python:3.9-slim-bullseye as main-app
+# Stage: main-app
+# Purpose: The final image
+# Comments:
+#  - Don't leave anything extra in here
+FROM docker.io/python:3.9-slim-bookworm as main-app
 
 LABEL org.opencontainers.image.authors="paperless-ngx team <hello@paperless-ngx.com>"
 LABEL org.opencontainers.image.documentation="https://docs.paperless-ngx.com/"
@@ -58,30 +46,14 @@ LABEL org.opencontainers.image.url="https://github.com/paperless-ngx/paperless-n
 LABEL org.opencontainers.image.licenses="GPL-3.0-only"
 
 ARG DEBIAN_FRONTEND=noninteractive
-# Buildx provided
-ARG TARGETARCH
-ARG TARGETVARIANT
-
-# Workflow provided
-ARG QPDF_VERSION
 
 #
 # Begin installation and configuration
 # Order the steps below from least often changed to most
 #
 
-# copy jbig2enc
-# Basically will never change again
-COPY --from=jbig2enc-builder /usr/src/jbig2enc/src/.libs/libjbig2enc* /usr/local/lib/
-COPY --from=jbig2enc-builder /usr/src/jbig2enc/src/jbig2 /usr/local/bin/
-COPY --from=jbig2enc-builder /usr/src/jbig2enc/src/*.h /usr/local/include/
-
 # Packages need for running
 ARG RUNTIME_PACKAGES="\
-  # Python
-  python3 \
-  python3-pip \
-  python3-setuptools \
   # General utils
   curl \
   # Docker specific
@@ -98,9 +70,9 @@ ARG RUNTIME_PACKAGES="\
   # Image processing
   liblept5 \
   liblcms2-2 \
-  libtiff5 \
+  libtiff6 \
   libfreetype6 \
-  libwebp6 \
+  libwebp7 \
   libopenjp2-7 \
   libimagequant0 \
   libraqm0 \
@@ -126,6 +98,8 @@ ARG RUNTIME_PACKAGES="\
   libxml2 \
   libxslt1.1 \
   libgnutls30 \
+  libqpdf29 \
+  qpdf \
   # Mime type detection
   file \
   libmagic1 \
@@ -145,7 +119,7 @@ RUN set -eux \
   && apt-get install --yes --quiet --no-install-recommends ${RUNTIME_PACKAGES} \
   && rm -rf /var/lib/apt/lists/* \
   && echo "Installing supervisor" \
-  && python3 -m pip install --default-timeout=1000 --upgrade --no-cache-dir supervisor==4.2.4
+  && python3 -m pip install --default-timeout=1000 --upgrade --no-cache-dir supervisor==4.2.5
 
 # Copy gunicorn config
 # Changes very infrequently
@@ -154,7 +128,6 @@ WORKDIR /usr/src/paperless/
 COPY gunicorn.conf.py .
 
 # setup docker-specific things
-# Use mounts to avoid copying installer files into the image
 # These change sometimes, but rarely
 WORKDIR /usr/src/paperless/src/docker/
 
@@ -165,6 +138,7 @@ COPY [ \
   "docker/docker-prepare.sh", \
   "docker/paperless_cmd.sh", \
   "docker/wait-for-redis.py", \
+  "docker/env-from-file.sh", \
   "docker/management_script.sh", \
   "docker/flower-conditional.sh", \
   "docker/install_management_commands.sh", \
@@ -184,6 +158,8 @@ RUN set -eux \
   && chmod 755 /sbin/docker-prepare.sh \
   && mv wait-for-redis.py /sbin/wait-for-redis.py \
   && chmod 755 /sbin/wait-for-redis.py \
+  && mv env-from-file.sh /sbin/env-from-file.sh \
+  && chmod 755 /sbin/env-from-file.sh \
   && mv paperless_cmd.sh /usr/local/bin/paperless_cmd.sh \
   && chmod 755 /usr/local/bin/paperless_cmd.sh \
   && mv flower-conditional.sh /usr/local/bin/flower-conditional.sh \
@@ -192,22 +168,40 @@ RUN set -eux \
   && chmod +x install_management_commands.sh \
   && ./install_management_commands.sh
 
+# Buildx provided, must be defined to use though
+ARG TARGETARCH
+ARG TARGETVARIANT
+
+# Can be workflow provided, defaults set for manual building
+ARG JBIG2ENC_VERSION=0.29
+ARG QPDF_VERSION=11.3.0
+ARG PIKEPDF_VERSION=7.2.0
+ARG PSYCOPG2_VERSION=2.9.6
+
 # Install the built packages from the installer library images
-# Use mounts to avoid copying installer files into the image
 # These change sometimes
-RUN --mount=type=bind,from=qpdf-builder,target=/qpdf \
-  --mount=type=bind,from=psycopg2-builder,target=/psycopg2 \
-  --mount=type=bind,from=pikepdf-builder,target=/pikepdf \
-  set -eux \
-  && echo "Installing qpdf" \
-  && apt-get install --yes --no-install-recommends /qpdf/usr/src/qpdf/${QPDF_VERSION}/${TARGETARCH}${TARGETVARIANT}/libqpdf29_*.deb \
-  && apt-get install --yes --no-install-recommends /qpdf/usr/src/qpdf/${QPDF_VERSION}/${TARGETARCH}${TARGETVARIANT}/qpdf_*.deb \
+RUN set -eux \
+  && echo "Getting binaries" \
+  && mkdir paperless-ngx \
+  && curl --fail --silent --show-error --output paperless-ngx.tar.gz --location https://github.com/paperless-ngx/builder/archive/58bb061b9b3b63009852d6d875f9a305d9ae6ac9.tar.gz \
+  && tar -xf paperless-ngx.tar.gz --directory paperless-ngx --strip-components=1 \
+  && cd paperless-ngx \
+  # Setting a specific revision ensures we know what this installed
+  # and ensures cache breaking on changes
+  && echo "Installing jbig2enc" \
+  && cp ./jbig2enc/${JBIG2ENC_VERSION}/${TARGETARCH}${TARGETVARIANT}/jbig2 /usr/local/bin/ \
+  && cp ./jbig2enc/${JBIG2ENC_VERSION}/${TARGETARCH}${TARGETVARIANT}/libjbig2enc* /usr/local/lib/ \
+  && chmod a+x /usr/local/bin/jbig2 \
   && echo "Installing pikepdf and dependencies" \
-  && python3 -m pip install --no-cache-dir /pikepdf/usr/src/wheels/*.whl \
+  && python3 -m pip install --no-cache-dir ./pikepdf/${PIKEPDF_VERSION}/${TARGETARCH}${TARGETVARIANT}/*.whl \
   && python3 -m pip list \
   && echo "Installing psycopg2" \
-  && python3 -m pip install --no-cache-dir /psycopg2/usr/src/wheels/psycopg2*.whl \
-  && python3 -m pip list
+  && python3 -m pip install --no-cache-dir ./psycopg2/${PSYCOPG2_VERSION}/${TARGETARCH}${TARGETVARIANT}/psycopg2*.whl \
+  && python3 -m pip list \
+  && echo "Cleaning up image layer" \
+  && cd ../ \
+  && rm -rf paperless-ngx \
+  && rm paperless-ngx.tar.gz
 
 WORKDIR /usr/src/paperless/src/
 
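With defaults now baked into the ARG declarations, a manual build no longer has to supply every version, and any of them can still be overridden at build time. A hypothetical local invocation (tag and value are illustrative):

    docker build \
      --build-arg QPDF_VERSION=11.3.0 \
      --tag paperless-ngx:local .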
@@ -221,19 +215,21 @@ ARG BUILD_PACKAGES="\
   build-essential \
   git \
   default-libmysqlclient-dev \
-  python3-dev"
+  pkg-config"
 
-RUN set -eux \
+# hadolint ignore=DL3042
+RUN --mount=type=cache,target=/root/.cache/pip/,id=pip-cache \
+  set -eux \
   && echo "Installing build system packages" \
   && apt-get update \
   && apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
   && python3 -m pip install --no-cache-dir --upgrade wheel \
   && echo "Installing Python requirements" \
-  && python3 -m pip install --default-timeout=1000 --no-cache-dir --requirement requirements.txt \
+  && python3 -m pip install --default-timeout=1000 --requirement requirements.txt \
   && echo "Installing NLTK data" \
-  && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/local/share/nltk_data" snowball_data \
-  && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/local/share/nltk_data" stopwords \
-  && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/local/share/nltk_data" punkt \
+  && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" snowball_data \
+  && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" stopwords \
+  && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" punkt \
   && echo "Cleaning up image" \
   && apt-get -y purge ${BUILD_PACKAGES} \
   && apt-get -y autoremove --purge \
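The new --mount=type=cache on the pip install step is a BuildKit feature; on older Docker installs where BuildKit is not the default builder it must be enabled explicitly, otherwise the mount flag is rejected. A minimal sketch:

    # enable BuildKit so RUN --mount=type=cache,... is honored
    DOCKER_BUILDKIT=1 docker build --tag paperless-ngx:local .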
@@ -251,11 +247,12 @@ COPY ./src ./
 COPY --from=compile-frontend /src/src/documents/static/frontend/ ./documents/static/frontend/
 
 # add users, setup scripts
+# Mount the compiled frontend to expected location
 RUN set -eux \
   && addgroup --gid 1000 paperless \
   && useradd --uid 1000 --gid paperless --home-dir /usr/src/paperless paperless \
-  && chown -R paperless:paperless ../ \
+  && chown -R paperless:paperless /usr/src/paperless \
-  && gosu paperless python3 manage.py collectstatic --clear --no-input \
+  && gosu paperless python3 manage.py collectstatic --clear --no-input --link \
   && gosu paperless python3 manage.py compilemessages
 
 VOLUME ["/usr/src/paperless/data", \
Pipfile (85 lines changed)
@@ -10,81 +10,100 @@ name = "piwheels"
 
 [packages]
 dateparser = "~=1.1"
-django = "~=4.1"
+# WARNING: django does not use semver.
+# Only patch versions are guaranteed to not introduce breaking changes.
+django = "~=4.1.9"
 django-cors-headers = "*"
+django-celery-results = "*"
+django-compression-middleware = "*"
+django-guardian = "*"
 django-extensions = "*"
-django-filter = "~=22.1"
+django-filter = "~=23.1"
 djangorestframework = "~=3.14"
+djangorestframework-guardian = "*"
 filelock = "*"
 gunicorn = "*"
 imap-tools = "*"
 langdetect = "*"
 pathvalidate = "*"
-pillow = "~=9.3"
-pikepdf = "*"
 python-gnupg = "*"
 python-dotenv = "*"
 python-dateutil = "*"
 python-magic = "*"
+python-ipware = "*"
 psycopg2 = "*"
 rapidfuzz = "*"
 redis = {extras = ["hiredis"], version = "*"}
-scikit-learn = "~=1.1"
-numpy = "*"
-whitenoise = "~=6.2"
-watchdog = "~=2.1"
+scikit-learn = "~=1.3"
+whitenoise = "~=6.5"
+watchdog = "~=3.0"
 whoosh="~=2.7"
 inotifyrecursive = "~=0.3"
 ocrmypdf = "~=14.0"
 tqdm = "*"
-tika = "*"
+tika-client = "*"
-# TODO: This will sadly also install daphne+dependencies,
-# which an ASGI server we don't need. Adds about 15MB image size.
-channels = "~=3.0"
+channels = "~=4.0"
+channels-redis = "*"
 uvicorn = {extras = ["standard"], version = "*"}
 concurrent-log-handler = "*"
-"pdfminer.six" = "*"
-"backports.zoneinfo" = {version = "*", markers = "python_version < '3.9'"}
-"importlib-resources" = {version = "*", markers = "python_version < '3.9'"}
-zipp = {version = "*", markers = "python_version < '3.9'"}
 pyzbar = "*"
 mysqlclient = "*"
 celery = {extras = ["redis"], version = "*"}
-django-celery-results = "*"
 setproctitle = "*"
 nltk = "*"
 pdf2image = "*"
 flower = "*"
 bleach = "*"
+zxing-cpp = {version = "*", platform_machine = "== 'x86_64'"}
 #
 # Packages locked due to issues (try to check if these are fixed in a release every so often)
 #
 
 # Pin this until piwheels is building 1.9 (see https://www.piwheels.org/project/scipy/)
 scipy = "==1.8.1"
+# v4 brings in extra dependencies for features not used here
+reportlab = "==3.6.12"
-# Newer versions aren't builting yet (see https://www.piwheels.org/project/cryptography/)
-cryptography = "==38.0.1"
-# Locked version until https://github.com/django/channels_redis/issues/332
-# is resolved
-channels-redis = "==3.4.1"
+# Pin these until piwheels is building a newer version (see https://www.piwheels.org/project/{package}/)
+cryptography = "==40.0.1"
+pikepdf = "==7.2.0"
+pillow = "==9.5.0"
 
 [dev-packages]
-coveralls = "*"
+# Linting
+black = "*"
+pre-commit = "*"
+ruff = "*"
+# Testing
 factory-boy = "*"
-pycodestyle = "*"
 pytest = "*"
 pytest-cov = "*"
 pytest-django = "*"
+pytest-httpx = "*"
 pytest-env = "*"
 pytest-sugar = "*"
 pytest-xdist = "*"
-tox = "*"
-black = "*"
-pre-commit = "*"
-sphinx-autobuild = "*"
-myst-parser = "*"
+pytest-rerunfailures = "*"
+"pdfminer.six" = "*"
 imagehash = "*"
+daphne = "*"
+# Documentation
 mkdocs-material = "*"
+
+[typing-dev]
+mypy = "*"
+types-Pillow = "*"
+django-filter-stubs = "*"
+types-python-dateutil = "*"
+djangorestframework-stubs = {extras= ["compatible-mypy"], version="*"}
+celery-types = "*"
+django-stubs = {extras= ["compatible-mypy"], version="*"}
+types-dateparser = "*"
+types-bleach = "*"
+types-humanfriendly = "*"
+types-redis = "*"
+types-tqdm = "*"
+types-Markdown = "*"
+types-Pygments = "*"
+types-backports = "*"
+types-colorama = "*"
+types-psycopg2 = "*"
+types-setuptools = "*"
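A note on the tightened Django pin above: `~=4.1.9` is a PEP 440 compatible-release specifier, equivalent to `>=4.1.9, ==4.1.*`, so resolution may pick up new patch releases but never a 4.2 feature release. The same constraint can be exercised directly with pip (illustrative):

    # resolves to the newest 4.1.x at or above 4.1.9, never 4.2+
    python3 -m pip install "django~=4.1.9"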
Pipfile.lock (generated, 4419 lines changed; diff not shown)
README.md (21 lines changed)
@@ -1,7 +1,7 @@
 [](https://github.com/paperless-ngx/paperless-ngx/actions)
 [](https://crowdin.com/project/paperless-ngx)
 [](https://docs.paperless-ngx.com)
-[](https://coveralls.io/github/paperless-ngx/paperless-ngx?branch=master)
+[](https://codecov.io/gh/paperless-ngx/paperless-ngx)
 [](https://matrix.to/#/%23paperlessngx%3Amatrix.org)
 [](https://demo.paperless-ngx.com)
 
@@ -101,22 +101,9 @@ For bugs please [open an issue](https://github.com/paperless-ngx/paperless-ngx/i
 
 # Affiliated Projects
 
-Paperless has been around a while now, and people are starting to build stuff on top of it. If you're one of those people, we can add your project to this list:
+Please see [the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Affiliated-Projects) for a user-maintained list of affiliated projects and software that is compatible with Paperless-ngx.
 
-- [Paperless App](https://github.com/bauerj/paperless_app): An Android/iOS app for Paperless-ngx. Also works with the original Paperless and Paperless-ng.
-- [Paperless Share](https://github.com/qcasey/paperless_share). Share any files from your Android application with paperless. Very simple, but works with all of the mobile scanning apps out there that allow you to share scanned documents.
-- [Scan to Paperless](https://github.com/sbrunner/scan-to-paperless): Scan and prepare (crop, deskew, OCR, ...) your documents for Paperless.
-- [Paperless Mobile](https://github.com/astubenbord/paperless-mobile): A modern, feature rich mobile application for Paperless.
-
-These projects also exist, but their status and compatibility with paperless-ngx is unknown.
-
-- [paperless-cli](https://github.com/stgarf/paperless-cli): A golang command line binary to interact with a Paperless instance.
-
-This project also exists, but needs updates to be compatible with paperless-ngx.
-
-- [Paperless Desktop](https://github.com/thomasbrueggemann/paperless-desktop): A desktop UI for your Paperless installation. Runs on Mac, Linux, and Windows.
-  Known issues on Mac: (Could not load reminders and documents)
 
 # Important Note
 
-Document scanners are typically used to scan sensitive documents. Things like your social insurance number, tax records, invoices, etc. Everything is stored in the clear without encryption. This means that Paperless should never be run on an untrusted host. Instead, I recommend that if you do want to use it, run it locally on a server in your own home.
+> Document scanners are typically used to scan sensitive documents like your social insurance number, tax records, invoices, etc. **Paperless-ngx should never be run on an untrusted host** because information is stored in clear text without encryption. No guarantees are made regarding security (but we do try!) and you use the app at your own risk.
+> **The safest way to run Paperless-ngx is on a local server in your own home with backups in place**.
build-docker-image.sh (deleted, 81 lines)
@@ -1,81 +0,0 @@
-#!/usr/bin/env bash
-
-# Helper script for building the Docker image locally.
-# Parses and provides the nessecary versions of other images to Docker
-# before passing in the rest of script args.
-
-# First Argument: The Dockerfile to build
-# Other Arguments: Additional arguments to docker build
-
-# Example Usage:
-#   ./build-docker-image.sh Dockerfile -t paperless-ngx:my-awesome-feature
-
-set -eu
-
-if ! command -v jq &> /dev/null ; then
-  echo "jq required"
-  exit 1
-elif [ ! -f "$1" ]; then
-  echo "$1 is not a file, please provide the Dockerfile"
-  exit 1
-fi
-
-# Get the branch name (used for caching)
-branch_name=$(git rev-parse --abbrev-ref HEAD)
-
-# Parse eithe Pipfile.lock or the .build-config.json
-jbig2enc_version=$(jq ".jbig2enc.version" .build-config.json | sed 's/"//g')
-qpdf_version=$(jq ".qpdf.version" .build-config.json | sed 's/"//g')
-psycopg2_version=$(jq ".default.psycopg2.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')
-pikepdf_version=$(jq ".default.pikepdf.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')
-pillow_version=$(jq ".default.pillow.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')
-lxml_version=$(jq ".default.lxml.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')
-
-base_filename="$(basename -- "${1}")"
-build_args_str=""
-cache_from_str=""
-
-case "${base_filename}" in
-
-  *.jbig2enc)
-    build_args_str="--build-arg JBIG2ENC_VERSION=${jbig2enc_version}"
-    cache_from_str="--cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/jbig2enc:${jbig2enc_version}"
-    ;;
-
-  *.psycopg2)
-    build_args_str="--build-arg PSYCOPG2_VERSION=${psycopg2_version}"
-    cache_from_str="--cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/psycopg2:${psycopg2_version}"
-    ;;
-
-  *.qpdf)
-    build_args_str="--build-arg QPDF_VERSION=${qpdf_version}"
-    cache_from_str="--cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/qpdf:${qpdf_version}"
-    ;;
-
-  *.pikepdf)
-    build_args_str="--build-arg QPDF_VERSION=${qpdf_version} --build-arg PIKEPDF_VERSION=${pikepdf_version} --build-arg PILLOW_VERSION=${pillow_version} --build-arg LXML_VERSION=${lxml_version}"
-    cache_from_str="--cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/pikepdf:${pikepdf_version}"
-    ;;
-
-  Dockerfile)
-    build_args_str="--build-arg QPDF_VERSION=${qpdf_version} --build-arg PIKEPDF_VERSION=${pikepdf_version} --build-arg PSYCOPG2_VERSION=${psycopg2_version} --build-arg JBIG2ENC_VERSION=${jbig2enc_version}"
-    cache_from_str="--cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/app:${branch_name} --cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/app:dev"
-    ;;
-
-  *)
-    echo "Unable to match ${base_filename}"
-    exit 1
-    ;;
-esac
-
-read -r -a build_args_arr <<< "${build_args_str}"
-read -r -a cache_from_arr <<< "${cache_from_str}"
-
-set -eux
-
-docker buildx build --file "${1}" \
-  --progress=plain \
-  --output=type=docker \
-  "${cache_from_arr[@]}" \
-  "${build_args_arr[@]}" \
-  "${@:2}" .
charts/paperless-ngx/.helmignore (deleted, 26 lines)
@@ -1,26 +0,0 @@
-# Patterns to ignore when building packages.
-# This supports shell glob matching, relative path matching, and
-# negation (prefixed with !). Only one pattern per line.
-.DS_Store
-# Common VCS dirs
-.git/
-.gitignore
-.bzr/
-.bzrignore
-.hg/
-.hgignore
-.svn/
-# Common backup files
-*.swp
-*.bak
-*.tmp
-*~
-# Various IDEs
-.project
-.idea/
-*.tmproj
-.vscode/
-# OWNERS file for Kubernetes
-OWNERS
-# helm-docs templates
-*.gotmpl
charts/paperless-ngx/Chart.yaml (deleted, 35 lines)
@@ -1,35 +0,0 @@
----
-apiVersion: v2
-appVersion: "1.9.2"
-description: Paperless-ngx - Index and archive all of your scanned paper documents
-name: paperless
-version: 10.0.1
-kubeVersion: ">=1.16.0-0"
-keywords:
-  - paperless
-  - paperless-ngx
-  - dms
-  - document
-home: https://github.com/paperless-ngx/paperless-ngx/tree/main/charts/paperless-ngx
-icon: https://github.com/paperless-ngx/paperless-ngx/raw/main/resources/logo/web/svg/square.svg
-sources:
-  - https://github.com/paperless-ngx/paperless-ngx
-maintainers:
-  - name: Paperless-ngx maintainers
-dependencies:
-  - name: common
-    repository: https://library-charts.k8s-at-home.com
-    version: 4.5.2
-  - name: postgresql
-    version: 11.6.12
-    repository: https://charts.bitnami.com/bitnami
-    condition: postgresql.enabled
-  - name: redis
-    version: 16.13.1
-    repository: https://charts.bitnami.com/bitnami
-    condition: redis.enabled
-deprecated: false
-annotations:
-  artifacthub.io/changes: |
-    - kind: changed
-      description: Moved to Paperless-ngx ownership
charts/paperless-ngx/LICENSE (deleted, 201 lines)
@@ -1,201 +0,0 @@
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-[... the full, unmodified Apache License 2.0 text (Sections 1-9 plus the
-appendix) was removed along with the chart; only the closing notice below
-differed from the stock license text ...]
-
-   Copyright 2020 k8s@Home
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
charts/paperless-ngx/README.md (deleted, 50 lines)
@@ -1,50 +0,0 @@
-# paperless
-
- 
-
-Paperless-ngx - Index and archive all of your scanned paper documents
-
-**Homepage:** <https://github.com/paperless-ngx/paperless-ngx/tree/main/charts/paperless-ngx>
-
-## Maintainers
-
-| Name | Email | Url |
-| ---- | ------ | --- |
-| Paperless-ngx maintainers |  |  |
-
-## Source Code
-
-* <https://github.com/paperless-ngx/paperless-ngx>
-
-## Requirements
-
-Kubernetes: `>=1.16.0-0`
-
-| Repository | Name | Version |
-|------------|------|---------|
-| https://charts.bitnami.com/bitnami | postgresql | 11.6.12 |
-| https://charts.bitnami.com/bitnami | redis | 16.13.1 |
-| https://library-charts.k8s-at-home.com | common | 4.5.2 |
-
-## Values
-
-| Key | Type | Default | Description |
-|-----|------|---------|-------------|
-| env | object | See below | See the following files for additional environment variables: https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose/ https://github.com/paperless-ngx/paperless-ngx/blob/main/paperless.conf.example |
-| env.COMPOSE_PROJECT_NAME | string | `"paperless"` | Project name |
-| env.PAPERLESS_DBHOST | string | `nil` | Database host to use |
-| env.PAPERLESS_OCR_LANGUAGE | string | `"eng"` | OCR languages to install |
-| env.PAPERLESS_PORT | int | `8000` | Port to use |
-| env.PAPERLESS_REDIS | string | `nil` | Redis to use |
-| image.pullPolicy | string | `"IfNotPresent"` | image pull policy |
-| image.repository | string | `"ghcr.io/paperless-ngx/paperless-ngx"` | image repository |
-| image.tag | string | chart.appVersion | image tag |
-| ingress.main | object | See values.yaml | Enable and configure ingress settings for the chart under this key. |
-| persistence.consume | object | See values.yaml | Configure volume to monitor for new documents. |
-| persistence.data | object | See values.yaml | Configure persistence for data. |
-| persistence.export | object | See values.yaml | Configure export volume. |
-| persistence.media | object | See values.yaml | Configure persistence for media. |
-| postgresql | object | See values.yaml | Enable and configure postgresql database subchart under this key. For more options see [postgresql chart documentation](https://github.com/bitnami/charts/tree/master/bitnami/postgresql) |
-| redis | object | See values.yaml | Enable and configure redis subchart under this key. For more options see [redis chart documentation](https://github.com/bitnami/charts/tree/master/bitnami/redis) |
-| service | object | See values.yaml | Configures service settings for the chart. |
@@ -1,8 +0,0 @@
-{{- define "custom.custom.configuration.header" -}}
-## Custom configuration
-{{- end -}}
-
-{{- define "custom.custom.configuration" -}}
-{{ template "custom.custom.configuration.header" . }}
-N/A
-{{- end -}}
@@ -1,26 +0,0 @@
-env:
-  PAPERLESS_REDIS: redis://paperless-redis-headless:6379
-
-persistence:
-  data:
-    enabled: true
-    type: emptyDir
-  media:
-    enabled: true
-    type: emptyDir
-  consume:
-    enabled: true
-    type: emptyDir
-  export:
-    enabled: true
-    type: emptyDir
-
-redis:
-  enabled: true
-  architecture: standalone
-  auth:
-    enabled: false
-  master:
-    persistence:
-      enabled: false
-  fullnameOverride: paperless-redis
charts/paperless-ngx/templates/NOTES.txt (deleted, 4 lines)
@@ -1,4 +0,0 @@
-{{- include "common.notes.defaultNotes" . }}
-2. Create a super user by running the command:
-     export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "common.names.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
-     kubectl exec -it --namespace {{ .Release.Namespace }} $POD_NAME -- bash -c "python manage.py createsuperuser"
charts/paperless-ngx/templates/common.yaml (deleted, 11 lines)
@@ -1,11 +0,0 @@
-{{/* Make sure all variables are set properly */}}
-{{- include "common.values.setup" . }}
-
-{{/* Append the hardcoded settings */}}
-{{- define "paperless.harcodedValues" -}}
-env:
-  PAPERLESS_URL: http{{if ne ( len .Values.ingress.main.tls ) 0 }}s{{end}}://{{ (first .Values.ingress.main.hosts).host }}
-{{- end -}}
-{{- $_ := merge .Values (include "paperless.harcodedValues" . | fromYaml) -}}
-
-{{ include "common.all" . }}
charts/paperless-ngx/values.yaml (deleted, 107 lines)
@@ -1,107 +0,0 @@
-#
-# IMPORTANT NOTE
-#
-# This chart inherits from our common library chart. You can check the default values/options here:
-# https://github.com/k8s-at-home/library-charts/tree/main/charts/stable/common/values.yaml
-#
-
-image:
-  # -- image repository
-  repository: ghcr.io/paperless-ngx/paperless-ngx
-  # -- image pull policy
-  pullPolicy: IfNotPresent
-  # -- image tag
-  # @default -- chart.appVersion
-  tag:
-
-# -- See the following files for additional environment variables:
-#    https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose/
-#    https://github.com/paperless-ngx/paperless-ngx/blob/main/paperless.conf.example
-# @default -- See below
-env:
-  # -- Project name
-  COMPOSE_PROJECT_NAME: paperless
-  # -- Redis to use
-  PAPERLESS_REDIS:
-  # -- OCR languages to install
-  PAPERLESS_OCR_LANGUAGE: eng
-  # USERMAP_UID: 1000
-  # USERMAP_GID: 1000
-  # PAPERLESS_TIME_ZONE: Europe/London
-  # -- Database host to use
-  PAPERLESS_DBHOST:
-  # -- Port to use
-  PAPERLESS_PORT: 8000
-  # -- Username for the root user
-  # PAPERLESS_ADMIN_USER: admin
-  # -- Password for the root user
-  # PAPERLESS_ADMIN_PASSWORD: admin
-  # PAPERLESS_URL: <set to main ingress by default>
-
-# -- Configures service settings for the chart.
-# @default -- See values.yaml
-service:
-  main:
-    ports:
-      http:
-        port: 8000
-
-ingress:
-  # -- Enable and configure ingress settings for the chart under this key.
-  # @default -- See values.yaml
-  main:
-    enabled: false
-
-persistence:
-  # -- Configure persistence for data.
-  # @default -- See values.yaml
-  data:
-    enabled: false
-    mountPath: /usr/src/paperless/data
-    accessMode: ReadWriteOnce
-    emptyDir:
-      enabled: false
-  # -- Configure persistence for media.
-  # @default -- See values.yaml
-  media:
-    enabled: false
-    mountPath: /usr/src/paperless/media
-    accessMode: ReadWriteOnce
-    emptyDir:
-      enabled: false
-  # -- Configure volume to monitor for new documents.
-  # @default -- See values.yaml
-  consume:
-    enabled: false
-    mountPath: /usr/src/paperless/consume
-    accessMode: ReadWriteOnce
-    emptyDir:
-      enabled: false
-  # -- Configure export volume.
-  # @default -- See values.yaml
-  export:
-    enabled: false
-    mountPath: /usr/src/paperless/export
-    accessMode: ReadWriteOnce
-    emptyDir:
-      enabled: false
-
-# -- Enable and configure postgresql database subchart under this key.
-#    For more options see [postgresql chart documentation](https://github.com/bitnami/charts/tree/master/bitnami/postgresql)
-# @default -- See values.yaml
-postgresql:
-  enabled: false
-  postgresqlUsername: paperless
-  postgresqlPassword: paperless
-  postgresqlDatabase: paperless
-  persistence:
-    enabled: false
-    # storageClass: ""
-
-# -- Enable and configure redis subchart under this key.
-#    For more options see [redis chart documentation](https://github.com/bitnami/charts/tree/master/bitnami/redis)
-# @default -- See values.yaml
-redis:
-  enabled: false
-  auth:
-    enabled: false
@@ -1,4 +1,8 @@
 commit_message: '[ci skip]'
+pull_request_labels: [
+  "skip-changelog",
+  "translation"
+]
 files:
   - source: /src/locale/en_US/LC_MESSAGES/django.po
     translation: /src/locale/%locale_with_underscore%/LC_MESSAGES/django.po
@@ -1,35 +0,0 @@
-# This Dockerfile compiles the jbig2enc library
-# Inputs:
-#  - JBIG2ENC_VERSION - the Git tag to checkout and build
-
-FROM debian:bullseye-slim as main
-
-LABEL org.opencontainers.image.description="A intermediate image with jbig2enc built"
-
-ARG DEBIAN_FRONTEND=noninteractive
-ARG JBIG2ENC_VERSION
-
-ARG BUILD_PACKAGES="\
-  build-essential \
-  automake \
-  libtool \
-  libleptonica-dev \
-  zlib1g-dev \
-  git \
-  ca-certificates"
-
-WORKDIR /usr/src/jbig2enc
-
-RUN set -eux \
-  && echo "Installing build tools" \
-  && apt-get update --quiet \
-  && apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
-  && echo "Building jbig2enc" \
-  && git clone --quiet --branch $JBIG2ENC_VERSION https://github.com/agl/jbig2enc . \
-  && ./autogen.sh \
-  && ./configure \
-  && make \
-  && echo "Cleaning up image" \
-  && apt-get -y purge ${BUILD_PACKAGES} \
-  && apt-get -y autoremove --purge \
-  && rm -rf /var/lib/apt/lists/*
@@ -1,102 +0,0 @@
-# This Dockerfile builds the pikepdf wheel
-# Inputs:
-#  - REPO - Docker repository to pull qpdf from
-#  - QPDF_VERSION - The image qpdf version to copy .deb files from
-#  - PIKEPDF_VERSION - Version of pikepdf to build wheel for
-
-# Default to pulling from the main repo registry when manually building
-ARG REPO="paperless-ngx/paperless-ngx"
-
-ARG QPDF_VERSION
-FROM ghcr.io/${REPO}/builder/qpdf:${QPDF_VERSION} as qpdf-builder
-
-# This does nothing, except provide a name for a copy below
-
-FROM python:3.9-slim-bullseye as main
-
-LABEL org.opencontainers.image.description="A intermediate image with pikepdf wheel built"
-
-# Buildx provided
-ARG TARGETARCH
-ARG TARGETVARIANT
-
-ARG DEBIAN_FRONTEND=noninteractive
-# Workflow provided
-ARG QPDF_VERSION
-ARG PIKEPDF_VERSION
-# These are not used, but will still bust the cache if one changes
-# Otherwise, the main image will try to build thing (and fail)
-ARG PILLOW_VERSION
-ARG LXML_VERSION
-
-ARG BUILD_PACKAGES="\
-  build-essential \
-  python3-dev \
-  python3-pip \
-  # qpdf requirement - https://github.com/qpdf/qpdf#crypto-providers
-  libgnutls28-dev \
-  # lxml requrements - https://lxml.de/installation.html
-  libxml2-dev \
-  libxslt1-dev \
-  # Pillow requirements - https://pillow.readthedocs.io/en/stable/installation.html#external-libraries
-  # JPEG functionality
-  libjpeg62-turbo-dev \
-  # conpressed PNG
-  zlib1g-dev \
-  # compressed TIFF
-  libtiff-dev \
-  # type related services
-  libfreetype-dev \
-  # color management
-  liblcms2-dev \
-  # WebP format
-  libwebp-dev \
-  # JPEG 2000
-  libopenjp2-7-dev \
-  # improved color quantization
-  libimagequant-dev \
-  # complex text layout support
-  libraqm-dev"
-
-WORKDIR /usr/src
-
-COPY --from=qpdf-builder /usr/src/qpdf/${QPDF_VERSION}/${TARGETARCH}${TARGETVARIANT}/*.deb ./
-
-# As this is an base image for a multi-stage final image
-# the added size of the install is basically irrelevant
-
-RUN set -eux \
-  && echo "Installing build tools" \
-  && apt-get update --quiet \
-  && apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
-  && echo "Installing qpdf" \
-  && dpkg --install libqpdf29_*.deb \
-  && dpkg --install libqpdf-dev_*.deb \
-  && echo "Installing Python tools" \
-  && python3 -m pip install --no-cache-dir --upgrade \
-    pip \
-    wheel \
-    # https://pikepdf.readthedocs.io/en/latest/installation.html#requirements
-    pybind11 \
-  && echo "Building pikepdf wheel ${PIKEPDF_VERSION}" \
-  && mkdir wheels \
-  && python3 -m pip wheel \
-    # Build the package at the required version
-    pikepdf==${PIKEPDF_VERSION} \
-    # Look to piwheels for additional pre-built wheels
-    --extra-index-url https://www.piwheels.org/simple \
-    # Output the *.whl into this directory
-    --wheel-dir wheels \
-    # Do not use a binary packge for the package being built
-    --no-binary=pikepdf \
-    # Do use binary packages for dependencies
-    --prefer-binary \
-    # Don't cache build files
-    --no-cache-dir \
-  && ls -ahl wheels \
-  && echo "Gathering package data" \
-  && dpkg-query -f '${Package;-40}${Version}\n' -W > ./wheels/pkg-list.txt \
-  && echo "Cleaning up image" \
-  && apt-get -y purge ${BUILD_PACKAGES} \
-  && apt-get -y autoremove --purge \
-  && rm -rf /var/lib/apt/lists/*
@@ -1,50 +0,0 @@
-# This Dockerfile builds the psycopg2 wheel
-# Inputs:
-#  - PSYCOPG2_VERSION - Version to build
-
-FROM python:3.9-slim-bullseye as main
-
-LABEL org.opencontainers.image.description="A intermediate image with psycopg2 wheel built"
-
-ARG PSYCOPG2_VERSION
-ARG DEBIAN_FRONTEND=noninteractive
-
-ARG BUILD_PACKAGES="\
-  build-essential \
-  python3-dev \
-  python3-pip \
-  # https://www.psycopg.org/docs/install.html#prerequisites
-  libpq-dev"
-
-WORKDIR /usr/src
-
-# As this is an base image for a multi-stage final image
-# the added size of the install is basically irrelevant
-
-RUN set -eux \
-  && echo "Installing build tools" \
-  && apt-get update --quiet \
-  && apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
-  && echo "Installing Python tools" \
-  && python3 -m pip install --no-cache-dir --upgrade pip wheel \
-  && echo "Building psycopg2 wheel ${PSYCOPG2_VERSION}" \
-  && cd /usr/src \
-  && mkdir wheels \
-  && python3 -m pip wheel \
-    # Build the package at the required version
-    psycopg2==${PSYCOPG2_VERSION} \
-    # Output the *.whl into this directory
-    --wheel-dir wheels \
-    # Do not use a binary packge for the package being built
-    --no-binary=psycopg2 \
-    # Do use binary packages for dependencies
-    --prefer-binary \
-    # Don't cache build files
-    --no-cache-dir \
-  && ls -ahl wheels/ \
-  && echo "Gathering package data" \
-  && dpkg-query -f '${Package;-40}${Version}\n' -W > ./wheels/pkg-list.txt \
-  && echo "Cleaning up image" \
-  && apt-get -y purge ${BUILD_PACKAGES} \
-  && apt-get -y autoremove --purge \
-  && rm -rf /var/lib/apt/lists/*
@@ -1,156 +0,0 @@
-#
-# Stage: pre-build
-# Purpose:
-#  - Installs common packages
-#  - Sets common environment variables related to dpkg
-#  - Aquires the qpdf source from bookwork
-# Useful Links:
-#  - https://qpdf.readthedocs.io/en/stable/installation.html#system-requirements
-#  - https://wiki.debian.org/Multiarch/HOWTO
-#  - https://wiki.debian.org/CrossCompiling
-#
-
-FROM debian:bullseye-slim as pre-build
-
-ARG QPDF_VERSION
-
-ARG COMMON_BUILD_PACKAGES="\
-  cmake \
-  debhelper\
-  debian-keyring \
-  devscripts \
-  dpkg-dev \
-  equivs \
-  packaging-dev \
-  libtool"
-
-ENV DEB_BUILD_OPTIONS="terse nocheck nodoc parallel=2"
-
-WORKDIR /usr/src
-
-RUN set -eux \
-  && echo "Installing common packages" \
-  && apt-get update --quiet \
-  && apt-get install --yes --quiet --no-install-recommends ${COMMON_BUILD_PACKAGES} \
-  && echo "Getting qpdf source" \
-  && echo "deb-src http://deb.debian.org/debian/ bookworm main" > /etc/apt/sources.list.d/bookworm-src.list \
-  && apt-get update --quiet \
-  && apt-get source --yes --quiet qpdf=${QPDF_VERSION}-1/bookworm
-
-#
-# Stage: amd64-builder
-# Purpose: Builds qpdf for x86_64 (native build)
-#
-FROM pre-build as amd64-builder
-
-ARG AMD64_BUILD_PACKAGES="\
-  build-essential \
-  libjpeg62-turbo-dev:amd64 \
-  libgnutls28-dev:amd64 \
-  zlib1g-dev:amd64"
-
-WORKDIR /usr/src/qpdf-${QPDF_VERSION}
-
-RUN set -eux \
-  && echo "Beginning amd64" \
-  && echo "Install amd64 packages" \
-  && apt-get update --quiet \
-  && apt-get install --yes --quiet --no-install-recommends ${AMD64_BUILD_PACKAGES} \
-  && echo "Building amd64" \
-  && dpkg-buildpackage --build=binary --unsigned-source --unsigned-changes --post-clean \
-  && echo "Removing debug files" \
-  && rm -f ../libqpdf29-dbgsym* \
-  && rm -f ../qpdf-dbgsym* \
-  && echo "Gathering package data" \
-  && dpkg-query -f '${Package;-40}${Version}\n' -W > ../pkg-list.txt
-
-#
-# Stage: armhf-builder
-# Purpose:
-#  - Sets armhf specific environment
-#  - Builds qpdf for armhf (cross compile)
-#
-FROM pre-build as armhf-builder
-
-ARG ARMHF_PACKAGES="\
-  crossbuild-essential-armhf \
-  libjpeg62-turbo-dev:armhf \
-  libgnutls28-dev:armhf \
-  zlib1g-dev:armhf"
-
-WORKDIR /usr/src/qpdf-${QPDF_VERSION}
-
-ENV CXX="/usr/bin/arm-linux-gnueabihf-g++" \
-  CC="/usr/bin/arm-linux-gnueabihf-gcc"
-
-RUN set -eux \
-  && echo "Beginning armhf" \
-  && echo "Install armhf packages" \
-  && dpkg --add-architecture armhf \
-  && apt-get update --quiet \
-  && apt-get install --yes --quiet --no-install-recommends ${ARMHF_PACKAGES} \
-  && echo "Building armhf" \
-  && dpkg-buildpackage --build=binary --unsigned-source --unsigned-changes --post-clean --host-arch armhf \
-  && echo "Removing debug files" \
-  && rm -f ../libqpdf29-dbgsym* \
-  && rm -f ../qpdf-dbgsym* \
-  && echo "Gathering package data" \
-  && dpkg-query -f '${Package;-40}${Version}\n' -W > ../pkg-list.txt
-
-#
-# Stage: aarch64-builder
-# Purpose:
-#  - Sets aarch64 specific environment
-#  - Builds qpdf for aarch64 (cross compile)
-#
-FROM pre-build as aarch64-builder
-
-ARG ARM64_PACKAGES="\
-  crossbuild-essential-arm64 \
-  libjpeg62-turbo-dev:arm64 \
-  libgnutls28-dev:arm64 \
-  zlib1g-dev:arm64"
-
-ENV CXX="/usr/bin/aarch64-linux-gnu-g++" \
-  CC="/usr/bin/aarch64-linux-gnu-gcc"
-
-WORKDIR /usr/src/qpdf-${QPDF_VERSION}
-
-RUN set -eux \
-  && echo "Beginning arm64" \
-  && echo "Install arm64 packages" \
-  && dpkg --add-architecture arm64 \
-  && apt-get update --quiet \
-  && apt-get install --yes --quiet --no-install-recommends ${ARM64_PACKAGES} \
-  && echo "Building arm64" \
-  && dpkg-buildpackage --build=binary --unsigned-source --unsigned-changes --post-clean --host-arch arm64 \
-  && echo "Removing debug files" \
-  && rm -f ../libqpdf29-dbgsym* \
-  && rm -f ../qpdf-dbgsym* \
-  && echo "Gathering package data" \
-  && dpkg-query -f '${Package;-40}${Version}\n' -W > ../pkg-list.txt
-
-#
-# Stage: package
-# Purpose: Holds the compiled .deb files in arch/variant specific folders
-#
-FROM alpine:3.17 as package
-
-LABEL org.opencontainers.image.description="A image with qpdf installers stored in architecture & version specific folders"
-
-ARG QPDF_VERSION
-
-WORKDIR /usr/src/qpdf/${QPDF_VERSION}/amd64
-
-COPY --from=amd64-builder /usr/src/*.deb ./
-COPY --from=amd64-builder /usr/src/pkg-list.txt ./
-
-# Note this is ${TARGETARCH}${TARGETVARIANT} for armv7
-WORKDIR /usr/src/qpdf/${QPDF_VERSION}/armv7
-
-COPY --from=armhf-builder /usr/src/*.deb ./
-COPY --from=armhf-builder /usr/src/pkg-list.txt ./
-
-WORKDIR /usr/src/qpdf/${QPDF_VERSION}/arm64
-
-COPY --from=aarch64-builder /usr/src/*.deb ./
-COPY --from=aarch64-builder /usr/src/pkg-list.txt ./
@@ -6,7 +6,7 @@
 version: "3.7"
 services:
   gotenberg:
-    image: docker.io/gotenberg/gotenberg:7.6
+    image: docker.io/gotenberg/gotenberg:7.8
     hostname: gotenberg
     container_name: gotenberg
    network_mode: host

@@ -59,7 +59,7 @@ services:
       - gotenberg
       - tika
     ports:
-      - 8000:8000
+      - "8000:8000"
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8000"]
       interval: 30s

@@ -83,7 +83,7 @@ services:
       PAPERLESS_TIKA_ENDPOINT: http://tika:9998

   gotenberg:
-    image: docker.io/gotenberg/gotenberg:7.6
+    image: docker.io/gotenberg/gotenberg:7.8
     restart: unless-stopped
     # The gotenberg chromium route is used to convert .eml files. We do not
     # want to allow external content like tracking pixels or even javascript.

@@ -53,7 +53,7 @@ services:
       - db
       - broker
     ports:
-      - 8000:8000
+      - "8000:8000"
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8000"]
       interval: 30s

@@ -37,7 +37,7 @@ services:
       - redisdata:/data

   db:
-    image: docker.io/library/postgres:13
+    image: docker.io/library/postgres:15
     restart: unless-stopped
     volumes:
       - pgdata:/var/lib/postgresql/data

@@ -53,7 +53,7 @@ services:
       - db
       - broker
     ports:
-      - 8010:8000
+      - "8010:8000"
     healthcheck:
       test: ["CMD", "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000"]
       interval: 30s

@@ -39,7 +39,7 @@ services:
       - redisdata:/data

   db:
-    image: docker.io/library/postgres:13
+    image: docker.io/library/postgres:15
     restart: unless-stopped
     volumes:
       - pgdata:/var/lib/postgresql/data

@@ -57,7 +57,7 @@ services:
       - gotenberg
       - tika
     ports:
-      - 8000:8000
+      - "8000:8000"
     healthcheck:
       test: ["CMD", "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000"]
       interval: 30s

@@ -77,7 +77,7 @@ services:
       PAPERLESS_TIKA_ENDPOINT: http://tika:9998

   gotenberg:
-    image: docker.io/gotenberg/gotenberg:7.6
+    image: docker.io/gotenberg/gotenberg:7.8
     restart: unless-stopped

     # The gotenberg chromium route is used to convert .eml files. We do not

@@ -35,7 +35,7 @@ services:
       - redisdata:/data

   db:
-    image: docker.io/library/postgres:13
+    image: docker.io/library/postgres:15
     restart: unless-stopped
     volumes:
       - pgdata:/var/lib/postgresql/data

@@ -51,7 +51,7 @@ services:
       - db
       - broker
     ports:
-      - 8000:8000
+      - "8000:8000"
     healthcheck:
       test: ["CMD", "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000"]
       interval: 30s

@@ -46,7 +46,7 @@ services:
       - gotenberg
       - tika
     ports:
-      - 8000:8000
+      - "8000:8000"
     healthcheck:
       test: ["CMD", "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000"]
       interval: 30s

@@ -65,7 +65,7 @@ services:
       PAPERLESS_TIKA_ENDPOINT: http://tika:9998

   gotenberg:
-    image: docker.io/gotenberg/gotenberg:7.6
+    image: docker.io/gotenberg/gotenberg:7.8
     restart: unless-stopped

     # The gotenberg chromium route is used to convert .eml files. We do not

@@ -37,7 +37,7 @@ services:
     depends_on:
       - broker
     ports:
-      - 8000:8000
+      - "8000:8000"
     healthcheck:
       test: ["CMD", "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000"]
       interval: 30s
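A note on the `postgres:13` to `postgres:15` bump above: the image tag change alone does not migrate an existing data directory across PostgreSQL major versions. A hedged sketch of one dump-and-restore cycle; the `db` service name, the `paperless` user, and the `pgdata` volume are taken from the compose files above, but verify them against your own stack:

```bash
# Dump everything while the old postgres:13 container is still running.
docker-compose exec db pg_dumpall -U paperless > dump.sql

# Stop the stack, switch the db image to postgres:15, and point pgdata
# at a fresh volume (or remove the old one after verifying the dump).
docker-compose down

# Bring up only the new database and replay the dump into it.
docker-compose up -d db
docker-compose exec -T db psql -U paperless -d postgres < dump.sql

# Start the rest of the stack.
docker-compose up -d
```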
@@ -2,37 +2,6 @@
 set -e

-# Adapted from:
-# https://github.com/docker-library/postgres/blob/master/docker-entrypoint.sh
-# usage: file_env VAR
-#    ie: file_env 'XYZ_DB_PASSWORD' will allow for "$XYZ_DB_PASSWORD_FILE" to
-#    fill in the value of "$XYZ_DB_PASSWORD" from a file, especially for Docker's
-#    secrets feature
-file_env() {
-	local -r var="$1"
-	local -r fileVar="${var}_FILE"
-
-	# Basic validation
-	if [ "${!var:-}" ] && [ "${!fileVar:-}" ]; then
-		echo >&2 "error: both $var and $fileVar are set (but are exclusive)"
-		exit 1
-	fi
-
-	# Only export var if the _FILE exists
-	if [ "${!fileVar:-}" ]; then
-		# And the file exists
-		if [[ -f ${!fileVar} ]]; then
-			echo "Setting ${var} from file"
-			val="$(< "${!fileVar}")"
-			export "$var"="$val"
-		else
-			echo "File ${!fileVar} doesn't exist"
-			exit 1
-		fi
-	fi
-
-}
-
 # Source: https://github.com/sameersbn/docker-gitlab/
 map_uidgid() {
 	local -r usermap_original_uid=$(id -u paperless)
@@ -96,19 +65,11 @@ custom_container_init() {
 initialize() {

 	# Setup environment from secrets before anything else
-	for env_var in \
-		PAPERLESS_DBUSER \
-		PAPERLESS_DBPASS \
-		PAPERLESS_SECRET_KEY \
-		PAPERLESS_AUTO_LOGIN_USERNAME \
-		PAPERLESS_ADMIN_USER \
-		PAPERLESS_ADMIN_MAIL \
-		PAPERLESS_ADMIN_PASSWORD \
-		PAPERLESS_REDIS; do
 	# Check for a version of this var with _FILE appended
 	# and convert the contents to the env var value
-		file_env ${env_var}
-	done
+	# Source it so export is persistent
+	# shellcheck disable=SC1091
+	source /sbin/env-from-file.sh

 	# Change the user and group IDs if needed
 	map_uidgid
@@ -80,7 +80,7 @@ django_checks() {

 search_index() {

-	local -r index_version=1
+	local -r index_version=6
 	local -r index_version_file=${DATA_DIR}/.index_version

 	if [[ (! -f "${index_version_file}") || $(<"${index_version_file}") != "$index_version" ]]; then
docker/env-from-file.sh (new file, 42 lines)
@@ -0,0 +1,42 @@
+#!/usr/bin/env bash
+
+# Scans the environment variables for those with the suffix _FILE
+# When located, checks the file exists, and exports the contents
+# of the file as the same name, minus the suffix
+# This allows the use of Docker secrets or mounted files
+# to fill in any of the settings configurable via environment
+# variables
+
+set -eu
+
+for line in $(printenv)
+do
+	# Extract the name of the environment variable
+	env_name=${line%%=*}
+	# Check if it starts with "PAPERLESS_" and ends in "_FILE"
+	if [[ ${env_name} == PAPERLESS_*_FILE ]]; then
+		# This should have been named different..
+		if [[ ${env_name} == "PAPERLESS_OCR_SKIP_ARCHIVE_FILE" ]]; then
+			continue
+		fi
+		# Extract the value of the environment
+		env_value=${line#*=}
+
+		# Check the file exists
+		if [[ -f ${env_value} ]]; then
+
+			# Trim off the _FILE suffix
+			non_file_env_name=${env_name%"_FILE"}
+			echo "Setting ${non_file_env_name} from file"
+
+			# Reads the value from th file
+			val="$(< "${!env_name}")"
+
+			# Sets the normal name to the read file contents
+			export "${non_file_env_name}"="${val}"
+
+		else
+			echo "File ${env_value} referenced by ${env_name} doesn't exist"
+		fi
+	fi
+done
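A quick way to see what the new helper does, sketched as a session inside the container; the `/sbin/env-from-file.sh` path comes from the entrypoint hunks above, while the variable and secret value are purely illustrative:

```bash
# Point a PAPERLESS_*_FILE variable at a file, e.g. a mounted Docker secret.
echo 'super-secret' > /tmp/db_pass
export PAPERLESS_DBPASS_FILE=/tmp/db_pass

# Sourcing the helper exports PAPERLESS_DBPASS with the file's contents.
source /sbin/env-from-file.sh

echo "${PAPERLESS_DBPASS}"   # -> super-secret
```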
@@ -3,5 +3,10 @@
 echo "Checking if we should start flower..."

 if [[ -n "${PAPERLESS_ENABLE_FLOWER}" ]]; then
-	celery --app paperless flower
+	# Small delay to allow celery to be up first
+	echo "Starting flower in 5s"
+	sleep 5
+	celery --app paperless flower --conf=/usr/src/paperless/src/paperless/flowerconfig.py
+else
+	echo "Not starting flower"
 fi
@@ -3,6 +3,9 @@
 set -e

 cd /usr/src/paperless/src/
+# This ensures environment is setup
+# shellcheck disable=SC1091
+source /sbin/env-from-file.sh

 if [[ $(id -u) == 0 ]] ;
 then
@@ -15,6 +15,7 @@ stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0
 stderr_logfile=/dev/stderr
 stderr_logfile_maxbytes=0
+environment = HOME="/usr/src/paperless",USER="paperless"

 [program:consumer]
 command=python3 manage.py document_consumer

@@ -25,10 +26,11 @@ stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0
 stderr_logfile=/dev/stderr
 stderr_logfile_maxbytes=0
+environment = HOME="/usr/src/paperless",USER="paperless"

 [program:celery]

-command = celery --app paperless worker --loglevel INFO
+command = celery --app paperless worker --loglevel INFO --without-mingle --without-gossip
 user=paperless
 stopasgroup = true
 stopwaitsecs = 60

@@ -37,6 +39,7 @@ stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0
 stderr_logfile=/dev/stderr
 stderr_logfile_maxbytes=0
+environment = HOME="/usr/src/paperless",USER="paperless"

 [program:celery-beat]

@@ -48,6 +51,7 @@ stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0
 stderr_logfile=/dev/stderr
 stderr_logfile_maxbytes=0
+environment = HOME="/usr/src/paperless",USER="paperless"

 [program:celery-flower]
 command = /usr/local/bin/flower-conditional.sh

@@ -58,3 +62,4 @@ stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0
 stderr_logfile=/dev/stderr
 stderr_logfile_maxbytes=0
+environment = HOME="/usr/src/paperless",USER="paperless"
@@ -12,13 +12,12 @@ from typing import Final
 from redis import Redis

 if __name__ == "__main__":

     MAX_RETRY_COUNT: Final[int] = 5
     RETRY_SLEEP_SECONDS: Final[int] = 5

     REDIS_URL: Final[str] = os.getenv("PAPERLESS_REDIS", "redis://localhost:6379")

-    print(f"Waiting for Redis...", flush=True)
+    print("Waiting for Redis...", flush=True)

     attempt = 0
     with Redis.from_url(url=REDIS_URL) as client:

@@ -29,7 +28,7 @@ if __name__ == "__main__":
             except Exception as e:
                 print(
                     f"Redis ping #{attempt} failed.\n"
-                    f"Error: {str(e)}.\n"
+                    f"Error: {e!s}.\n"
                     f"Waiting {RETRY_SLEEP_SECONDS}s",
                     flush=True,
                 )

@@ -37,8 +36,8 @@ if __name__ == "__main__":
             attempt += 1

     if attempt >= MAX_RETRY_COUNT:
-        print(f"Failed to connect to redis using environment variable PAPERLESS_REDIS.")
+        print("Failed to connect to redis using environment variable PAPERLESS_REDIS.")
         sys.exit(os.EX_UNAVAILABLE)
     else:
-        print(f"Connected to Redis broker.")
+        print("Connected to Redis broker.")
         sys.exit(os.EX_OK)
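For context, this script is a start-up gate: it retries a Redis `ping` up to `MAX_RETRY_COUNT` times before giving up with a non-zero exit code. A sketch of how an entrypoint might use it; the install path and the supervisord invocation are assumptions, not taken from the hunks:

```bash
# Block until the broker answers, or abort container start-up.
python3 /sbin/wait-for-redis.py || exit 1

# Only reached once Redis is reachable.
exec supervisord -c /etc/supervisord.conf
```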
@@ -78,7 +78,7 @@ After that, [make a backup](#backup).
     The docker-compose files refer to the `latest` version, which is
     always the latest stable release.

-2. If you built the image yourself, do the following:
+1. If you built the image yourself, do the following:

     ```shell-session
     $ git pull

@@ -98,7 +98,7 @@ the background.
 won't automatically update to newer versions. In order to enable
 updates as described above, either get the new `docker-compose.yml`
 file from
-[here](https://github.com/paperless-ngx/paperless-ngx/tree/master/docker/compose)
+[here](https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose)
 or edit the `docker-compose.yml` file, find the line that says

 ```
@@ -148,16 +148,35 @@ following:
     $ pip install -r requirements.txt
     ```

+    !!! note
+
+        At times, some dependencies will be removed from requirements.txt.
+        Comparing the versions and removing no longer needed dependencies
+        will keep your system or virtual environment clean and prevent
+        possible conflicts.
+
 3. Migrate the database.

     ```shell-session
     $ cd src
-    $ python3 manage.py migrate
+    $ python3 manage.py migrate # (1)
     ```

+    1. Including `sudo -Hu <paperless_user>` may be required
+
     This might not actually do anything. Not every new paperless version
     comes with new database migrations.

+### Database Upgrades
+
+In general, paperless does not require a specific version of PostgreSQL or MariaDB and it is
+safe to update them to newer versions. However, you should always take a backup and follow
+the instructions from your database's documentation for how to upgrade between major versions.
+
+For PostgreSQL, refer to [Upgrading a PostgreSQL Cluster](https://www.postgresql.org/docs/current/upgrading.html).
+
+For MariaDB, refer to [Upgrading MariaDB](https://mariadb.com/kb/en/upgrading/)
+
 ## Downgrading Paperless {#downgrade-paperless}

 Downgrades are possible. However, some updates also contain database
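A minimal sketch of the "always take a backup" step mentioned in the new section, for a bare-metal PostgreSQL setup; a database and role both named `paperless` are assumptions:

```bash
# Custom-format dump; restore with pg_restore if the upgrade goes wrong.
pg_dump -U paperless -Fc paperless > "paperless-$(date -I).pgdump"
```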
@@ -210,9 +229,11 @@ Bare metal:

 ```shell-session
 $ cd /path/to/paperless/src
-$ python3 manage.py <command> <arguments>
+$ python3 manage.py <command> <arguments> # (1)
 ```

+1. Including `sudo -Hu <paperless_user>` may be required
+
 All commands have built-in help, which can be accessed by executing them
 with the argument `--help`.
@@ -227,12 +248,16 @@ is not a TTY" errors. For example:
 `docker-compose exec -T webserver document_exporter ../export`

 ```
-document_exporter target [-c] [-f] [-d]
+document_exporter target [-c] [-d] [-f] [-na] [-nt] [-p] [-sm] [-z]

 optional arguments:
 -c, --compare-checksums
--f, --use-filename-format
 -d, --delete
+-f, --use-filename-format
+-na, --no-archive
+-nt, --no-thumbnail
+-p, --use-folder-prefix
+-sm, --split-manifest
 -z --zip
 ```
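Taken together with the docker note above, a sketch of invoking the exporter with some of the new switches; this particular flag combination is illustrative, not prescribed:

```bash
# Incremental export into ../export, pruning deleted documents (-d),
# using per-type sub-folders (-p) and skipping thumbnails (-nt).
docker-compose exec -T webserver document_exporter ../export -d -p -nt
```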
@@ -249,23 +274,53 @@ will assume that the contents of the export directory are a previous
 export and will attempt to update the previous export. Paperless will
 only export changed and added files. Paperless determines whether a file
 has changed by inspecting the file attributes "date/time modified" and
-"size". If that does not work out for you, specify
+"size". If that does not work out for you, specify `-c` or
 `--compare-checksums` and paperless will attempt to compare file
 checksums instead. This is slower.

 Paperless will not remove any existing files in the export directory. If
 you want paperless to also remove files that do not belong to the
-current export such as files from deleted documents, specify `--delete`.
+current export such as files from deleted documents, specify `-d` or `--delete`.
 Be careful when pointing paperless to a directory that already contains
 other files.

-If `-z` or `--zip` is provided, the export will be a zipfile
-in the target directory, named according to the current date.
-
 The filenames generated by this command follow the format
 `[date created] [correspondent] [title].[extension]`. If you want
 paperless to use `PAPERLESS_FILENAME_FORMAT` for exported filenames
-instead, specify `--use-filename-format`.
+instead, specify `-f` or `--use-filename-format`.
+
+If `-na` or `--no-archive` is provided, no archive files will be exported,
+only the original files.
+
+If `-nt` or `--no-thumbnail` is provided, thumbnail files will not be exported.
+
+!!! note
+
+    When using the `-na`/`--no-archive` or `-nt`/`--no-thumbnail` options
+    the exporter will not output these files for backup. After importing,
+    the [sanity checker](#sanity-checker) will warn about missing thumbnails and archive files
+    until they are regenerated with `document_thumbnails` or [`document_archiver`](#archiver).
+    It can make sense to omit these files from backup as their content and checksum
+    can change (new archiver algorithm) and may then cause additional used space in
+    a deduplicated backup.
+
+If `-p` or `--use-folder-prefix` is provided, files will be exported
+in dedicated folders according to their nature: `archive`, `originals`,
+`thumbnails` or `json`
+
+If `-sm` or `--split-manifest` is provided, information about document
+will be placed in individual json files, instead of a single JSON file. The main
+manifest.json will still contain application wide information (e.g. tags, correspondent,
+documenttype, etc)
+
+If `-z` or `--zip` is provided, the export will be a zip file
+in the target directory, named according to the current date.
+
+!!! warning
+
+    If exporting with the file name format, there may be errors due to
+    your operating system's maximum path lengths. Try adjusting the export
+    target or consider not using the filename format.

 ### Document importer {#importer}
@@ -347,6 +402,14 @@ document_create_classifier

 This command takes no arguments.

+### Document thumbnails {#thumbnails}
+
+Use this command to re-create document thumbnails. Optionally include the ` --document {id}` option to generate thumbnails for a specific document only.
+
+```
+document_thumbnails
+```
+
 ### Managing the document search index {#index}

 The document search index is responsible for delivering search results
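On bare metal the new command runs through `manage.py` like the others documented above; a sketch regenerating a single document's thumbnail, where the id is illustrative:

```bash
cd /path/to/paperless/src
python3 manage.py document_thumbnails --document 123
```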
@@ -407,7 +470,7 @@ The issues detected by the sanity checker are as follows:
 - Inaccessible thumbnails due to improper permissions.
 - Documents without any content (warning).
 - Orphaned files in the media directory (warning). These are files
-  that are not referenced by any document im paperless.
+  that are not referenced by any document in paperless.

 ```
 document_sanity_checker
@@ -429,12 +492,13 @@ mail_fetcher
 The command takes no arguments and processes all your mail accounts and
 rules.

-!!! note
+!!! tip

-    As of October 2022 Microsoft no longer supports IMAP authentication
-    for Exchange servers, thus Exchange is no longer supported until a
-    solution is implemented in the Python IMAP library used by Paperless.
-    See [learn.microsoft.com](https://learn.microsoft.com/en-us/exchange/clients-and-mobile-in-exchange-online/deprecation-of-basic-authentication-exchange-online)
+    To use OAuth access tokens for mail fetching,
+    select the box to indicate the password is actually
+    a token when creating or editing a mail account. The
+    details for creating a token depend on your email
+    provider.

 ### Creating archived documents {#archiver}
@@ -1,6 +1,6 @@
 # Advanced Topics

-Paperless offers a couple features that automate certain tasks and make
+Paperless offers a couple of features that automate certain tasks and make
 your life easier.

 ## Matching tags, correspondents, document types, and storage paths {#matching}

@@ -9,7 +9,7 @@ Paperless will compare the matching algorithms defined by every tag,
 correspondent, document type, and storage path in your database to see
 if they apply to the text in a document. In other words, if you define a
 tag called `Home Utility` that had a `match` property of `bc hydro` and
-a `matching_algorithm` of `literal`, Paperless will automatically tag
+a `matching_algorithm` of `Exact`, Paperless will automatically tag
 your newly-consumed document with your `Home Utility` tag so long as the
 text `bc hydro` appears in the body of the document somewhere.
@@ -25,18 +25,19 @@ documents.
 The following algorithms are available:

+- **None:** No matching will be performed.
 - **Any:** Looks for any occurrence of any word provided in match in
   the PDF. If you define the match as `Bank1 Bank2`, it will match
   documents containing either of these terms.
 - **All:** Requires that every word provided appears in the PDF,
   albeit not in the order provided.
-- **Literal:** Matches only if the match appears exactly as provided
+- **Exact:** Matches only if the match appears exactly as provided
   (i.e. preserve ordering) in the PDF.
 - **Regular expression:** Parses the match as a regular expression and
   tries to find a match within the document.
-- **Fuzzy match:** I don't know. Look at the source.
+- **Fuzzy match:** I don't know. Look at [the source](https://github.com/paperless-ngx/paperless-ngx/blob/main/src/documents/matching.py).
 - **Auto:** Tries to automatically match new documents. This does not
-  require you to set a match. See the notes below.
+  require you to set a match. See the [notes below](#automatic-matching).

 When using the _any_ or _all_ matching algorithms, you can search for
 terms that consist of multiple words by enclosing them in double quotes.
@@ -91,7 +92,7 @@ when using this feature:
   decide when not to assign a certain tag, correspondent, document
   type, or storage path. This will usually be the case as you start
   filling up paperless with documents. Example: If all your documents
-  are either from "Webshop" and "Bank", paperless will assign one
+  are either from "Webshop" or "Bank", paperless will assign one
   of these correspondents to ANY new document, if both are set to
   automatic matching.

@@ -100,7 +101,7 @@ when using this feature:
 Sometimes you may want to do something arbitrary whenever a document is
 consumed. Rather than try to predict what you may want to do, Paperless
 lets you execute scripts of your own choosing just before or after a
-document is consumed using a couple simple hooks.
+document is consumed using a couple of simple hooks.

 Just write a script, put it somewhere that Paperless can read & execute,
 and then put the path to that script in `paperless.conf` or
@@ -121,7 +122,17 @@ Executed after the consumer sees a new document in the consumption
 folder, but before any processing of the document is performed. This
 script can access the following relevant environment variables set:

-- `DOCUMENT_SOURCE_PATH`
+| Environment Variable     | Description                                                   |
+| ------------------------ | ------------------------------------------------------------- |
+| `DOCUMENT_SOURCE_PATH`   | Original path of the consumed document                        |
+| `DOCUMENT_WORKING_PATH`  | Path to a copy of the original that consumption will work on  |
+
+!!! note
+
+    Pre-consume scripts which modify the document should only change
+    the `DOCUMENT_WORKING_PATH` file or a second consume task may
+    be triggered, leading to failures as two tasks work on the
+    same document path
+
 A simple but common example for this would be creating a simple script
 like this:

@@ -130,7 +141,7 @@ like this:

 ```bash
 #!/usr/bin/env bash
-pdf2pdfocr.py -i ${DOCUMENT_SOURCE_PATH}
+pdf2pdfocr.py -i ${DOCUMENT_WORKING_PATH}
 ```

 `/etc/paperless.conf`
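Building on the note above, a hedged sketch of a pre-consume script that respects the working-copy rule: it reads `DOCUMENT_SOURCE_PATH` only for logging and modifies `DOCUMENT_WORKING_PATH` in place. Having `qpdf` on the PATH is an assumption, not something the docs require:

```bash
#!/usr/bin/env bash
set -euo pipefail

echo "Pre-processing $(basename "${DOCUMENT_SOURCE_PATH}")"

# Decrypt and linearize the working copy in place; the original is
# untouched, so no second consume task is triggered.
qpdf --decrypt --linearize --replace-input "${DOCUMENT_WORKING_PATH}"
```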
@@ -157,26 +168,36 @@ Executed after the consumer has successfully processed a document and
 has moved it into paperless. It receives the following environment
 variables:

-- `DOCUMENT_ID`
-- `DOCUMENT_FILE_NAME`
-- `DOCUMENT_CREATED`
-- `DOCUMENT_MODIFIED`
-- `DOCUMENT_ADDED`
-- `DOCUMENT_SOURCE_PATH`
-- `DOCUMENT_ARCHIVE_PATH`
-- `DOCUMENT_THUMBNAIL_PATH`
-- `DOCUMENT_DOWNLOAD_URL`
-- `DOCUMENT_THUMBNAIL_URL`
-- `DOCUMENT_CORRESPONDENT`
-- `DOCUMENT_TAGS`
-- `DOCUMENT_ORIGINAL_FILENAME`
+| Environment Variable         | Description                                   |
+| ---------------------------- | --------------------------------------------- |
+| `DOCUMENT_ID`                | Database primary key of the document          |
+| `DOCUMENT_FILE_NAME`         | Formatted filename, not including paths       |
+| `DOCUMENT_CREATED`           | Date & time when document created             |
+| `DOCUMENT_MODIFIED`          | Date & time when document was last modified   |
+| `DOCUMENT_ADDED`             | Date & time when document was added           |
+| `DOCUMENT_SOURCE_PATH`       | Path to the original document file            |
+| `DOCUMENT_ARCHIVE_PATH`      | Path to the generate archive file (if any)    |
+| `DOCUMENT_THUMBNAIL_PATH`    | Path to the generated thumbnail               |
+| `DOCUMENT_DOWNLOAD_URL`      | URL for document download                     |
+| `DOCUMENT_THUMBNAIL_URL`     | URL for the document thumbnail                |
+| `DOCUMENT_CORRESPONDENT`     | Assigned correspondent (if any)               |
+| `DOCUMENT_TAGS`              | Comma separated list of tags applied (if any) |
+| `DOCUMENT_ORIGINAL_FILENAME` | Filename of original document                 |

-The script can be in any language, but for a simple shell script
-example, you can take a look at
-[post-consumption-example.sh](https://github.com/paperless-ngx/paperless-ngx/blob/main/scripts/post-consumption-example.sh)
-in this project.
+The script can be in any language, A simple shell script example:

-The post consumption script cannot cancel the consumption process.
+```bash title="post-consumption-example"
+--8<-- "./scripts/post-consumption-example.sh"
+```
+
+!!! note
+
+    The post consumption script cannot cancel the consumption process.
+
+!!! warning
+
+    The post consumption script should not modify the document files
+    directly.

 The script's stdout and stderr will be logged line by line to the
 webserver log, along with the exit code of the script.
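A hedged companion sketch using the variables from the table above; `BACKUP_DIR` is an invented example location, not a Paperless setting:

```bash
#!/usr/bin/env bash
set -euo pipefail

echo "Consumed document ${DOCUMENT_ID}: ${DOCUMENT_FILE_NAME} (tags: ${DOCUMENT_TAGS:-none})"

# Keep an extra copy of the original elsewhere; per the warning above,
# the document files themselves are not modified.
BACKUP_DIR=/mnt/originals-backup
mkdir -p "${BACKUP_DIR}"
cp "${DOCUMENT_SOURCE_PATH}" "${BACKUP_DIR}/${DOCUMENT_ORIGINAL_FILENAME}"
```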
@@ -288,6 +309,9 @@ Paperless provides the following placeholders within filenames:
 - `{added_month_name_short}`: Month added abbreviated name, as per
   locale
 - `{added_day}`: Day added only (number 01-31).
+- `{owner_username}`: Username of document owner, if any, or "none"
+- `{original_name}`: Document original filename, minus the extension, if any, or "none"
+- `{doc_pk}`: The paperless identifier (primary key) for the document.

 Paperless will try to conserve the information from your database as
 much as possible. However, some characters that you can use in document
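One way the new placeholders might be combined in `paperless.conf`; the layout is illustrative, not a recommendation from the docs:

```bash
# Yields paths like 2023/Bank/42_statement-january
PAPERLESS_FILENAME_FORMAT={created_year}/{correspondent}/{doc_pk}_{original_name}
```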
@@ -336,12 +360,19 @@ value.
 However, keep in mind that inside docker, if files get stored outside of
 the predefined volumes, they will be lost after a restart of paperless.

+!!! warning
+
+    When file naming handling, in particular when using `{tag_list}`,
+    you may run into the limits of your operating system's maximum
+    path lengths. Files will retain the previous path instead and
+    the issue logged.
+
 ## Storage paths

 One of the best things in Paperless is that you can not only access the
 documents via the web interface, but also via the file system.

-When as single storage layout is not sufficient for your use case,
+When a single storage layout is not sufficient for your use case,
 storage paths come to the rescue. Storage paths allow you to configure
 more precisely where each document is stored in the file system.
@@ -373,7 +404,7 @@ structure as in the previous example above.
         Statement January.pdf
         Statement February.pdf

     Insurances/ # Insurances
         Healthcare 123/
             2022-01-01 Statement January.pdf
             2022-02-02 Letter.pdf

@@ -387,13 +418,6 @@ structure as in the previous example above.
 Defining a storage path is optional. If no storage path is defined for a
 document, the global `PAPERLESS_FILENAME_FORMAT` is applied.

-!!! warning
-
-    If you adjust the format of an existing storage path, old documents
-    don't get relocated automatically. You need to run the
-    [document renamer](/administration#renamer) to
-    adjust their pathes.
-
 ## Celery Monitoring {#celery-monitoring}

 The monitoring tool
@@ -465,7 +489,7 @@ database to be case sensitive. This would prevent a user from creating a
 tag `Name` and `NAME` as they are considered the same.

 Per Django documentation, to enable this requires manual intervention.
-To enable case sensetive tables, you can execute the following command
+To enable case sensitive tables, you can execute the following command
 against each table:

 `ALTER TABLE <table_name> CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;`
@@ -474,3 +498,121 @@ You can also set the default for new tables (this does NOT affect
|
|||||||
existing tables) with:
|
existing tables) with:
|
||||||
|
|
||||||
`ALTER DATABASE <db_name> CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;`
|
`ALTER DATABASE <db_name> CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;`
|
||||||
|
|
||||||
|
!!! warning
|
||||||
|
|
||||||
|
Using mariadb version 10.4+ is recommended. Using the `utf8mb3` character set on
|
||||||
|
an older system may fix issues that can arise while setting up Paperless-ngx but
|
||||||
|
`utf8mb3` can cause issues with consumption (where `utf8mb4` does not).
|
||||||
|
|
||||||
## Barcodes {#barcodes}

Paperless is able to utilize barcodes for automatically performing some tasks.

At this time, the library utilized for detection of barcodes supports the following types:

- EAN-13/UPC-A
- UPC-E
- EAN-8
- Code 128
- Code 93
- Code 39
- Codabar
- Interleaved 2 of 5
- QR Code
- SQ Code

You may check for updates on the [zbar library homepage](https://github.com/mchehab/zbar).
For usage in Paperless, the type of barcode does not matter, only the contents of it.

For how to enable barcode usage, see [the configuration](/configuration#barcodes).
The two settings may be enabled independently, but do have interactions as explained
below.

### Document Splitting {#document-splitting}

When enabled, Paperless will look for a barcode with the configured value and create a new document
starting from the next page. The page with the barcode on it will _not_ be retained. It
is expected to be a page existing only for triggering the split.

### Archive Serial Number Assignment

When enabled, the value of the barcode (as an integer) will be used to set the document's
archive serial number, allowing quick reference back to the original, paper document.

If document splitting via barcode is also enabled, documents will be split when an ASN
barcode is located. However, differing from the splitting, the page with the
barcode _will_ be retained. This allows application of a barcode to any page, including
one which holds data to keep in the document.

## Automatic collation of double-sided documents {#collate}

!!! note

    If your scanner supports double-sided scanning natively, you do not need this feature.

This feature is turned off by default, see [configuration](/configuration#collate) on how to turn it on.

### Summary

If you have a scanner with an automatic document feeder (ADF) that only scans a single side,
this feature makes scanning double-sided documents much more convenient by automatically
collating two separate scans into one document, reordering the pages as necessary.

### Usage example

Suppose you have a double-sided document with 6 pages (3 sheets of paper). First,
put the stack into your ADF as normal, ensuring that page 1 is scanned first. Your ADF
will now scan pages 1, 3, and 5. Then you (or your scanner, if it supports it) upload
the scan into the correct sub-directory of the consume folder (`double-sided` by default;
keep in mind that Paperless will _not_ automatically create the directory for you).
Paperless will then process the scan and move it into an internal staging area.

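As a concrete sketch (the consume-folder path below is illustrative; only the `double-sided` sub-directory name is the default):

```bash
# Sketch: copy the first (odd pages) scan into the collation sub-directory.
# ~/paperless/consume is an example mount point for your consume folder.
mkdir -p ~/paperless/consume/double-sided   # Paperless won't create it for you
cp odd-pages.pdf ~/paperless/consume/double-sided/
# ...flip the stack, scan the even pages, then upload the second file:
cp even-pages.pdf ~/paperless/consume/double-sided/
```
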
The next step is to turn your stack upside down (without reordering the sheets of paper)
and scan it once again; your ADF will now scan pages 6, 4, and 2, in that order. Once this
scan is copied into the sub-directory, Paperless will collate the previous scan with the
new one, reversing the order of the pages on the second, "even numbered" scan. The
resulting document will have the pages 1-6 in the correct order, and this new file will
then be processed as normal.

!!! tip

    When scanning the even numbered pages, you can omit the last empty pages, if there are
    any. For example, if page 6 is empty, you only need to scan pages 2 and 4. _Do not_ omit
    empty pages in the middle of the document.

### Things that could go wrong

Paperless will notice when the first, "odd numbered" scan has fewer pages than the second
scan (this can happen when e.g. the ADF skipped a few pages in the first pass). In that
case, Paperless will remove the staging copy as well as the scan, and give you an error
message asking you to restart the process from scratch, by scanning the odd pages again,
followed by the even pages.

It's important that the scan files get consumed in the correct order, and one at a time.
You therefore need to make sure that Paperless is running while you upload the files into
the directory; and if you're using [polling](/configuration#polling), make sure that
`CONSUMER_POLLING` is set to a value lower than it takes for the second scan to appear,
like 5-10 or even lower.

Another thing that might happen is that you start a double-sided scan, but then forget
to upload the second file. To avoid collating the wrong documents if you then come back
a day later to scan a new double-sided document, Paperless will only keep an "odd numbered
pages" file for up to 30 minutes. If more time passes, it will consider the next incoming
scan a completely new "odd numbered pages" one. The old staging file will get discarded.

### Interaction with "subdirs as tags"

The collation feature can be used together with the [subdirs as tags](/configuration#consume_config)
feature (but this is not a requirement). Just create a correctly named double-sided subdir
in the hierarchy and upload your scans there. For example, both `double-sided/foo/bar` as
well as `foo/bar/double-sided` will cause the collated document to be treated as if it
were uploaded into `foo/bar` and receive both `foo` and `bar` tags, but not `double-sided`.

### Interaction with document splitting

You can use the [document splitting](#document-splitting) feature, but if you use a normal
single-sided split marker page, the split document(s) will have an empty page at the front (or
whatever else was on the backside of the split marker page). You can work around that by having
a split marker page that has the split barcode on _both_ sides. This way, the extra page will
get automatically removed.

docs/api.md
@@ -6,7 +6,7 @@ provides a browsable API for most of its endpoints, which you can
inspect at `http://<paperless-host>:<port>/api/`. This also documents
most of the available filters and ordering fields.

The API provides 7 main endpoints:

- `/api/documents/`: Full CRUD support, except POSTing new documents.
  See below.
@@ -14,12 +14,15 @@ The API provides 5 main endpoints:
- `/api/document_types/`: Full CRUD support.
- `/api/logs/`: Read-Only.
- `/api/tags/`: Full CRUD support.
- `/api/tasks/`: Read-only.
- `/api/mail_accounts/`: Full CRUD support.
- `/api/mail_rules/`: Full CRUD support.
- `/api/users/`: Full CRUD support.
- `/api/groups/`: Full CRUD support.

All of these endpoints except for the logging endpoint allow you to
fetch (and edit and delete where appropriate) individual objects by
appending their primary key to the path, e.g. `/api/documents/454/`.

The objects served by the document endpoint contain the following
fields:
@@ -44,6 +47,8 @@ fields:
  Read-only.
- `archived_file_name`: Verbose filename of the archived document.
  Read-only. Null if no archived document is available.
- `set_permissions`: Allows setting document permissions. Optional,
  write-only. See [below](#permissions).

## Downloading documents

@@ -254,11 +259,51 @@ The endpoint supports the following optional form fields:
- `document_type`: Similar to correspondent.
- `tags`: Similar to correspondent. Specify this multiple times to
  have multiple tags added to the document.
- `archive_serial_number`: An optional archive serial number to set.

The endpoint will immediately return HTTP 200 if the document consumption
process was started successfully, with the UUID of the consumption task
as the data. No additional status information about the consumption process
itself is available immediately, since that happens in a different process.
However, querying the tasks endpoint with the returned UUID, e.g.
`/api/tasks/?task_id={uuid}`, will provide information on the state of the
consumption, including the ID of a created document if consumption succeeded.

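A hedged sketch of this round trip with `curl` (host, token and file path are placeholders; the exact shape of the upload response may vary by version):

```bash
# Sketch: upload a document, then ask the tasks endpoint about it.
# <host> and <token> are placeholders for your instance and API token.
TASK_UUID=$(curl -s -X POST \
  -H "Authorization: Token <token>" \
  -F "document=@/path/to/scan.pdf" \
  "http://<host>/api/documents/post_document/" | tr -d '"')

curl -s -H "Authorization: Token <token>" \
  "http://<host>/api/tasks/?task_id=${TASK_UUID}"
```
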
## Permissions

All objects (documents, tags, etc.) allow setting object-level permissions
with an optional `set_permissions` parameter which is of the form:

```
{
    "owner": user_id,
    "view": {
        "users": [...],
        "groups": [...],
    },
    "change": {
        "users": [...],
        "groups": [...],
    },
}
```

!!! note

    Arrays should contain user or group ID numbers.

If this parameter is supplied the object's permissions will be overwritten,
assuming the authenticated user has permission to do so (the user must be
the object owner or a superuser).

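For example, a sketch that overwrites the permissions of document 454 (IDs, host and token are placeholders invented for illustration):

```bash
# Sketch: make user 1 the owner and give group 2 view access.
curl -s -X PATCH \
  -H "Authorization: Token <token>" \
  -H "Content-Type: application/json" \
  -d '{"set_permissions": {"owner": 1, "view": {"users": [], "groups": [2]}, "change": {"users": [], "groups": []}}}' \
  "http://<host>/api/documents/454/"
```
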
### Retrieving full permissions

By default, the API will return a truncated version of object-level
permissions, returning `user_can_change` indicating whether the current user
can edit the object (either because they are the object owner or have permissions
granted). You can pass the parameter `full_perms=true` to API calls to view the
full permissions of objects in a format that mirrors the `set_permissions`
parameter above.

## API Versioning

docs/configuration.md
@@ -17,6 +17,8 @@ run paperless, these settings have to be defined in different places.

## Required services

### Redis Broker

`PAPERLESS_REDIS=<url>`

: This is required for processing scheduled tasks such as email
@@ -33,6 +35,14 @@ matcher.
Defaults to `redis://localhost:6379`.

`PAPERLESS_REDIS_PREFIX=<prefix>`

: Prefix to be used in Redis for keys and channels. Useful for sharing one Redis server among multiple Paperless instances.

Defaults to no prefix.

### Database

`PAPERLESS_DBENGINE=<engine_name>`

: Optional, gives the ability to choose Postgres or MariaDB for
@@ -79,21 +89,108 @@ changed here.

`PAPERLESS_DBSSLMODE=<mode>`

: SSL mode to use when connecting to PostgreSQL or MariaDB.

See [the official documentation about
sslmode for PostgreSQL](https://www.postgresql.org/docs/current/libpq-ssl.html).

See [the official documentation about
sslmode for MySQL and MariaDB](https://dev.mysql.com/doc/refman/8.0/en/connection-options.html#option_general_ssl-mode).

*Note*: SSL mode values differ between PostgreSQL and MariaDB.

Default is `prefer` for PostgreSQL and `PREFERRED` for MariaDB.

`PAPERLESS_DBSSLROOTCERT=<ca-path>`

: SSL root certificate path

See [the official documentation about
sslmode for PostgreSQL](https://www.postgresql.org/docs/current/libpq-ssl.html).
Changes path of `root.crt`.

See [the official documentation about
sslmode for MySQL and MariaDB](https://dev.mysql.com/doc/refman/8.0/en/connection-options.html#option_general_ssl-ca).

Defaults to unset, using the documented path in the home directory.

`PAPERLESS_DBSSLCERT=<client-cert-path>`

: SSL client certificate path

See [the official documentation about
sslmode for PostgreSQL](https://www.postgresql.org/docs/current/libpq-ssl.html).

See [the official documentation about
sslmode for MySQL and MariaDB](https://dev.mysql.com/doc/refman/8.0/en/connection-options.html#option_general_ssl-cert).

Changes path of `postgresql.crt`.

Defaults to unset, using the documented path in the home directory.

`PAPERLESS_DBSSLKEY=<client-cert-key>`

: SSL client key path

See [the official documentation about
sslmode for PostgreSQL](https://www.postgresql.org/docs/current/libpq-ssl.html).

See [the official documentation about
sslmode for MySQL and MariaDB](https://dev.mysql.com/doc/refman/8.0/en/connection-options.html#option_general_ssl-key).

Changes path of `postgresql.key`.

Defaults to unset, using the documented path in the home directory.

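Taken together, a hedged sketch of a PostgreSQL-over-SSL configuration (all certificate paths are illustrative):

```bash
# Sketch: verify the server certificate and present a client certificate.
# The paths are examples; point them at your own certificate material.
PAPERLESS_DBSSLMODE=verify-full
PAPERLESS_DBSSLROOTCERT=/etc/paperless/ssl/root.crt
PAPERLESS_DBSSLCERT=/etc/paperless/ssl/client.crt
PAPERLESS_DBSSLKEY=/etc/paperless/ssl/client.key
```
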
`PAPERLESS_DB_TIMEOUT=<int>`

: Amount of time for a database connection to wait for the database to
unlock. Mostly applicable for sqlite based installation. Consider changing
to postgresql if you are having concurrency problems with sqlite.

Defaults to unset, keeping the Django defaults.

## Optional Services

### Tika {#tika}

Paperless can make use of [Tika](https://tika.apache.org/) and
[Gotenberg](https://gotenberg.dev/) for parsing and converting
"Office" documents (such as ".doc", ".xlsx" and ".odt").
Tika and Gotenberg are also needed to allow parsing of E-Mails (.eml).

If you wish to use this, you must provide a Tika server and a Gotenberg server,
configure their endpoints, and enable the feature.

`PAPERLESS_TIKA_ENABLED=<bool>`

: Enable (or disable) the Tika parser.

Defaults to false.

`PAPERLESS_TIKA_ENDPOINT=<url>`

: Set the endpoint URL where Paperless can reach your Tika server.

Defaults to "<http://localhost:9998>".

`PAPERLESS_TIKA_GOTENBERG_ENDPOINT=<url>`

: Set the endpoint URL where Paperless can reach your Gotenberg server.

Defaults to "<http://localhost:3000>".

If you run paperless on docker, you can add those services to the
docker-compose file (see the provided
[`docker-compose.sqlite-tika.yml`](https://github.com/paperless-ngx/paperless-ngx/blob/main/docker/compose/docker-compose.sqlite-tika.yml)
file for reference).

Add all three configuration parameters to your configuration. If using
Docker, this may be the `environment` key of the webserver or a
`docker-compose.env` file. Bare metal installations may have a `.conf` file
containing the configuration parameters. Be sure to use the correct format
and watch out for indentation if editing the YAML file.

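For instance, a minimal sketch of the three parameters in env-file form (the hostnames assume the services from the referenced compose file and may differ in your setup):

```bash
# Sketch: enable Tika/Gotenberg parsing; adjust hostnames to your deployment.
PAPERLESS_TIKA_ENABLED=1
PAPERLESS_TIKA_ENDPOINT=http://tika:9998
PAPERLESS_TIKA_GOTENBERG_ENDPOINT=http://gotenberg:3000
```
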
## Paths and folders

`PAPERLESS_CONSUMPTION_DIR=<path>`
@@ -141,7 +238,8 @@ directory.
files created using "collectstatic" manager command are stored.

Unless you're doing something fancy, there is no need to override
this. If this is changed, you may need to run
`collectstatic` again.

Defaults to "../static/", relative to the "src" directory.

@@ -179,7 +277,7 @@ Previously, the location defaulted to `PAPERLESS_DATA_DIR/nltk`.
Unless you are using this in a bare metal install or other setup,
this folder is no longer needed and can be removed manually.

Defaults to `/usr/share/nltk_data`

## Logging

@@ -217,12 +315,16 @@ not include a trailing slash. E.g. <https://paperless.domain.com>
Defaults to empty string, leaving the other settings unaffected.

!!! note

    This value cannot contain a path (e.g. domain.com/path), even if
    you are installing paperless-ngx at a subpath.

`PAPERLESS_CSRF_TRUSTED_ORIGINS=<comma-separated-list>`

: A list of trusted origins for unsafe requests (e.g. POST). As of
Django 4.0 this is required to access the Django admin via the web.
See the [Django project documentation on the settings](https://docs.djangoproject.com/en/4.1/ref/settings/#csrf-trusted-origins)

Can also be set using PAPERLESS_URL (see above).

@@ -233,8 +335,8 @@ See

: If you're planning on putting Paperless on the open internet, then
you really should set this value to the domain name you're using.
Failing to do so leaves you open to HTTP host header attacks.
You can read more about this in [the Django project's documentation](https://docs.djangoproject.com/en/4.1/topics/security/#host-header-validation)

Just remember that this is a comma-separated list, so
"example.com" is fine, as is "example.com,www.example.com", but
@@ -242,8 +344,7 @@ Failing to do so leaves you open to HTTP host header attacks:

Can also be set using PAPERLESS_URL (see above).

"localhost" is always allowed for docker healthcheck

Defaults to "\*", which is all hosts.

@@ -256,6 +357,14 @@ do CORS calls. Set this to your public domain name.

Defaults to "<http://localhost:8000>".

`PAPERLESS_TRUSTED_PROXIES=<comma-separated-list>`

: This may be needed to prevent IP address spoofing if you are using e.g.
fail2ban with log entries for failed authorization attempts. Value should be
IP address(es).

Defaults to empty string.

`PAPERLESS_FORCE_SCRIPT_NAME=<path>`

: To host paperless under a subpath url like example.com/paperless you
@@ -342,16 +451,16 @@ applications.
If you're exposing paperless to the internet directly, do not use
this.

Also see the warning [in the official documentation](https://docs.djangoproject.com/en/4.1/howto/auth-remote-user/#configuration).

Defaults to "false" which disables this feature.

`PAPERLESS_HTTP_REMOTE_USER_HEADER_NAME=<str>`

: If "PAPERLESS_ENABLE_HTTP_REMOTE_USER" is enabled, this
property allows you to customize the name of the HTTP header from which
the authenticated username is extracted. Values are in terms of
[HttpRequest.META](https://docs.djangoproject.com/en/4.1/ref/request-response/#django.http.HttpRequest.META).
Thus, the configured value must start with `HTTP_`
followed by the normalized actual header name.

@@ -365,6 +474,46 @@ redirect the user back to the SSO application's logout page.

Defaults to None, which disables this feature.

`PAPERLESS_USE_X_FORWARD_HOST=<bool>`

: Configures the Django setting [USE_X_FORWARDED_HOST](https://docs.djangoproject.com/en/4.2/ref/settings/#use-x-forwarded-host)
which may be needed for hosting behind a proxy.

Defaults to False

`PAPERLESS_USE_X_FORWARD_PORT=<bool>`

: Configures the Django setting [USE_X_FORWARDED_PORT](https://docs.djangoproject.com/en/4.2/ref/settings/#use-x-forwarded-port)
which may be needed for hosting behind a proxy.

Defaults to False

`PAPERLESS_PROXY_SSL_HEADER=<json-list>`

: Configures the Django setting [SECURE_PROXY_SSL_HEADER](https://docs.djangoproject.com/en/4.2/ref/settings/#secure-proxy-ssl-header)
which may be needed for hosting behind a proxy. The two values in the list will form the tuple of
HTTP header/value expected by Django, eg `'["HTTP_X_FORWARDED_PROTO", "https"]'`.

Defaults to None

!!! warning

    Setting this value has security implications. Read the Django documentation
    and be sure you understand its usage before setting it.

`PAPERLESS_EMAIL_CERTIFICATE_FILE=<path>`

: Configures an additional SSL certificate file containing a [certificate](https://docs.python.org/3/library/ssl.html#certificates)
or certificate chain which should be trusted for validating SSL connections against mail providers.
This is for use with self-signed certificates against local IMAP servers.

Defaults to None.

!!! warning

    Setting this value has implications for the security of your email.
    Understand what it does and be sure you need to before setting.

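As a hedged sketch, a typical combination behind a TLS-terminating reverse proxy (only set these if your proxy really sends the corresponding headers):

```bash
# Sketch: trust the proxy's forwarded host/port and its TLS header.
PAPERLESS_USE_X_FORWARD_HOST=true
PAPERLESS_USE_X_FORWARD_PORT=true
PAPERLESS_PROXY_SSL_HEADER='["HTTP_X_FORWARDED_PROTO", "https"]'
```
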
## OCR settings {#ocr}

Paperless uses [OCRmyPDF](https://ocrmypdf.readthedocs.io/en/latest/)
@@ -377,36 +526,29 @@ needs.
: Customize the language that paperless will attempt to use when
parsing documents.

It should be a 3-letter code, see the list of [languages Tesseract supports](https://tesseract-ocr.github.io/tessdoc/Data-Files-in-different-versions.html).

Set this to the language most of your documents are written in.

This can be a combination of multiple languages such as `deu+eng`,
in which case Tesseract will use whatever language matches best.
Keep in mind that Tesseract uses much more CPU time with multiple
languages enabled.

Defaults to "eng".

!!! note

    If your language contains a '-' such as chi-sim, you must use `chi_sim`.

`PAPERLESS_OCR_MODE=<mode>`

: Tell paperless when and how to perform ocr on your documents. Three
modes are available:

- `skip`: Paperless skips all pages and will perform ocr only on
  pages where no text is present. This is the safest option.

- `redo`: Paperless will OCR all pages of your documents and
  attempt to replace any existing text layers with new text. This
  will be useful for documents from scanners that already
@@ -429,6 +571,19 @@ modes are available:
Read more about this in the [OCRmyPDF
documentation](https://ocrmypdf.readthedocs.io/en/latest/advanced.html#when-ocr-is-skipped).

`PAPERLESS_OCR_SKIP_ARCHIVE_FILE=<mode>`

: Specify when you would like paperless to skip creating an archived
version of your documents. This is useful if you don't want to have two
almost-identical versions of your documents in the media folder.

- `never`: Never skip creating an archived version.
- `with_text`: Skip creating an archived version for documents
  that already have embedded text.
- `always`: Always skip creating an archived version.

The default is `never`.

`PAPERLESS_OCR_CLEAN=<mode>`

: Tells paperless to use `unpaper` to clean any input document before
@@ -571,76 +726,6 @@ they use underscores instead of dashes.
{"deskew": true, "optimize": 3, "unpaper_args": "--pre-rotate 90"}
```

## Software tweaks {#software_tweaks}

`PAPERLESS_TASK_WORKERS=<num>`
@@ -692,17 +777,10 @@ paperless will process in parallel on a single document.
on large documents within the default 1800 seconds. So extending
this timeout may prove to be useful on weak hardware setups.

`PAPERLESS_TIME_ZONE=<timezone>`

: Set the time zone here. See more details on why and how to set it
[in the Django project documentation](https://docs.djangoproject.com/en/4.1/ref/settings/#std:setting-TIME_ZONE).

Defaults to UTC.
@@ -711,52 +789,86 @@ for details on how to set it.

: Enables or disables the advanced natural language processing
used during automatic classification. If disabled, paperless will
still perform some basic text pre-processing before matching.

: See also `PAPERLESS_NLTK_DIR`.

Defaults to 1.

`PAPERLESS_EMAIL_TASK_CRON=<cron expression>`

: Configures the scheduled email fetching frequency. The value
should be a valid crontab(5) expression describing when to run.

: If set to the string "disable", no emails will be fetched automatically.

Defaults to `*/10 * * * *` or every ten minutes.

`PAPERLESS_TRAIN_TASK_CRON=<cron expression>`

: Configures the scheduled automatic classifier training frequency. The value
should be a valid crontab(5) expression describing when to run.

: If set to the string "disable", the classifier will not be trained automatically.

Defaults to `5 */1 * * *` or every hour at 5 minutes past the hour.

`PAPERLESS_INDEX_TASK_CRON=<cron expression>`

: Configures the scheduled search index update frequency. The value
should be a valid crontab(5) expression describing when to run.

: If set to the string "disable", the search index will not be automatically updated.

Defaults to `0 0 * * *` or daily at midnight.

`PAPERLESS_SANITY_TASK_CRON=<cron expression>`

: Configures the scheduled sanity checker frequency.

: If set to the string "disable", the sanity checker will not run automatically.

Defaults to `30 0 * * sun` or Sunday at 30 minutes past midnight.

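As an illustrative sketch of these four settings together (the schedules are arbitrary examples, not recommendations):

```bash
# Sketch: fetch mail every 5 minutes, keep training hourly, rebuild the
# index nightly and disable the sanity checker entirely.
PAPERLESS_EMAIL_TASK_CRON="*/5 * * * *"
PAPERLESS_TRAIN_TASK_CRON="5 */1 * * *"
PAPERLESS_INDEX_TASK_CRON="0 0 * * *"
PAPERLESS_SANITY_TASK_CRON="disable"
```
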
`PAPERLESS_ENABLE_COMPRESSION=<bool>`

: Enables compression of the responses from the webserver.

: Defaults to 1, enabling compression.

!!! note

    If you are using a proxy such as nginx, it is likely more efficient
    to enable compression in your proxy configuration rather than
    the webserver.

`PAPERLESS_CONVERT_MEMORY_LIMIT=<num>`

: On smaller systems, or even in the case of Very Large Documents, the
consumer may explode, complaining about how it's "unable to extend
pixel cache". In such cases, try setting this to a reasonably low
value, like 32. The default is to use whatever is necessary to do
everything without writing to disk, and units are in megabytes.

For more information on how to use this value, you should search the
web for "MAGICK_MEMORY_LIMIT".

Defaults to 0, which disables the limit.

`PAPERLESS_CONVERT_TMPDIR=<path>`

: Similar to the memory limit, if you've got a small system and your
OS mounts /tmp as tmpfs, you should set this to a path that's on a
physical disk, like /home/your_user/tmp or something. ImageMagick
will use this as scratch space when crunching through very large
documents.

For more information on how to use this value, you should search the
web for "MAGICK_TMPDIR".

Default is none, which disables the temporary directory.

## Document Consumption {#consume_config}

`PAPERLESS_CONSUMER_DELETE_DUPLICATES=<bool>`

@@ -787,71 +899,51 @@ don't exist yet.
Defaults to false.

`PAPERLESS_CONSUMER_IGNORE_PATTERNS=<json>`

: By default, paperless ignores certain files and folders in the
consumption directory, such as system files created by the Mac OS
or hidden folders some tools use to store data.

This can be adjusted by configuring a custom json array with
patterns to exclude.

For example, `.DS_STORE/*` will ignore any files found in a folder
named `.DS_STORE`, including `.DS_STORE/bar.pdf` and `foo/.DS_STORE/bar.pdf`.

A pattern like `._*` will ignore anything starting with `._`, including
`._foo.pdf` and `._bar/foo.pdf`.

Defaults to
`[".DS_STORE/*", "._*", ".stfolder/*", ".stversions/*", ".localized/*", "desktop.ini", "@eaDir/*"]`.

`PAPERLESS_CONSUMER_BARCODE_SCANNER=<string>`

: Sets the barcode scanner used for barcode functionality.

Currently, "PYZBAR" (the default) or "ZXING" might be selected.
If your barcodes/QR codes are not detected (especially with bad
scan quality and/or small codes), try the other one.

zxing is not available on all platforms.

`PAPERLESS_PRE_CONSUME_SCRIPT=<filename>`

: After some initial validation, Paperless can trigger an arbitrary
script if you like before beginning consumption. This script will be provided
data for it to work with via the environment.

For more information, take a look at [pre-consumption script](/advanced_usage#pre-consume-script).

The default is blank, which means nothing will be executed.

`PAPERLESS_POST_CONSUME_SCRIPT=<filename>`

: After a document is consumed, Paperless can trigger an arbitrary
script if you like. This script will be provided
data for it to work with via the environment.

For more information, take a look at [Post-consumption script](/advanced_usage#post-consume-script).

The default is blank, which means nothing will be executed.

@@ -918,16 +1010,167 @@ within your documents.
second, and year last order. Characters D, M, or Y can be shuffled
to meet the required order.

### Polling {#polling}

`PAPERLESS_CONSUMER_POLLING=<num>`

: If paperless won't find documents added to your consume folder, it
might not be able to automatically detect filesystem changes. In
that case, specify a polling interval in seconds here, which will
then cause paperless to periodically check your consumption
directory for changes. This will also disable listening for file
system changes with `inotify`.

Defaults to 0, which disables polling and uses filesystem
notifications.

`PAPERLESS_CONSUMER_POLLING_RETRY_COUNT=<num>`

: If consumer polling is enabled, sets the number of times paperless
will check for a file to remain unmodified.

Defaults to 5.

`PAPERLESS_CONSUMER_POLLING_DELAY=<num>`

: If consumer polling is enabled, sets the delay in seconds between
each check (above) paperless will do while waiting for a file to
remain unmodified.

Defaults to 5.

### iNotify {#inotify}

`PAPERLESS_CONSUMER_INOTIFY_DELAY=<num>`

: Sets the time in seconds the consumer will wait for additional
events from inotify before the consumer will consider a file ready
and begin consumption. Certain scanners or network setups may
generate multiple events for a single file, leading to multiple
consumers working on the same file. Configure this to prevent that.

Defaults to 0.5 seconds.

## Barcodes {#barcodes}

`PAPERLESS_CONSUMER_ENABLE_BARCODES=<bool>`

: Enables the scanning and page separation based on detected barcodes.
This allows for scanning and adding multiple documents per uploaded
file, which are separated by one or multiple barcode pages.

For ease of use, it is suggested to use a standardized separation
page, e.g. [here](https://www.alliancegroup.co.uk/patch-codes.htm).

If no barcodes are detected in the uploaded file, no page separation
will happen.

The original document will be removed and the separated pages will
be saved as pdf.

See additional information in the [advanced usage documentation](/advanced_usage#barcodes).

Defaults to false.

`PAPERLESS_CONSUMER_BARCODE_TIFF_SUPPORT=<bool>`

: Whether TIFF image files should be scanned for barcodes. This will
automatically convert any TIFF image(s) to pdfs for later
processing. This only has an effect if
PAPERLESS_CONSUMER_ENABLE_BARCODES has been enabled.

Defaults to false.

`PAPERLESS_CONSUMER_BARCODE_STRING=<string>`

: Defines the string to be detected as a separator barcode. If
paperless is used with the PATCH-T separator pages, users shouldn't
change this.

Defaults to "PATCHT".

`PAPERLESS_CONSUMER_ENABLE_ASN_BARCODE=<bool>`

: Enables the detection of barcodes in the scanned document and
setting the ASN (archive serial number) if a properly formatted
barcode is detected.

The barcode must consist of a (configurable) prefix and the ASN
to be set, for instance `ASN00123`.

This option is compatible with barcode page separation, since
pages will be split up before reading the ASN.

If no ASN barcodes are detected in the uploaded file, no ASN will
be set. If a barcode with an existing ASN is detected, the
document will not be consumed and an error logged.

Defaults to false.

`PAPERLESS_CONSUMER_ASN_BARCODE_PREFIX=<string>`

: Defines the prefix that is used to identify a barcode as an ASN
barcode.

Defaults to "ASN".

`PAPERLESS_CONSUMER_BARCODE_UPSCALE=<float>`

: Defines the upscale factor used in barcode detection.
Improves the detection of small barcodes, e.g. with a value of 1.5,
by upscaling the document before the detection process. Upscaling will
only take place if the value is bigger than 1.0. Otherwise upscaling will
not be performed, to save resources. Try using in combination with
PAPERLESS_CONSUMER_BARCODE_DPI set to a value higher than the default.

Defaults to 0.0.

`PAPERLESS_CONSUMER_BARCODE_DPI=<int>`

: During barcode detection every page from a PDF document needs
to be converted to an image. A dpi value can be specified in the
conversion process. Default is 300. If the detection of small barcodes
fails, a bigger dpi value, e.g. 600, can fix the issue. Try using in
combination with PAPERLESS_CONSUMER_BARCODE_UPSCALE bigger than 1.0.

Defaults to "300".

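A hedged sketch that ties these together for small or low-quality barcodes (the DPI and upscale values are illustrative starting points, not recommendations):

```bash
# Sketch: enable ASN barcodes and tune detection for small codes.
PAPERLESS_CONSUMER_ENABLE_ASN_BARCODE=true
PAPERLESS_CONSUMER_ASN_BARCODE_PREFIX=ASN
PAPERLESS_CONSUMER_BARCODE_DPI=600
PAPERLESS_CONSUMER_BARCODE_UPSCALE=1.5
```
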
## Collate Double-Sided Documents {#collate}

`PAPERLESS_CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED=<bool>`

: Enables automatic collation of two single-sided scans into a double-sided
document.

This is useful if you have an automatic document feeder that only supports
single-sided scans, but you need to scan a double-sided document. If your
ADF supports double-sided scans natively, you do not need this feature.

`PAPERLESS_CONSUMER_RECURSIVE` must be enabled for this to work.

For more information, read the [corresponding section in the advanced
documentation](/advanced_usage#collate).

Defaults to false.

`PAPERLESS_CONSUMER_COLLATE_DOUBLE_SIDED_SUBDIR_NAME=<str>`

: The name of the subdirectory that the collate feature expects documents to
arrive in.

This only has an effect if `PAPERLESS_CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED`
has been enabled. Note that Paperless will not automatically create the
directory.

Defaults to "double-sided".

`PAPERLESS_CONSUMER_COLLATE_DOUBLE_SIDED_TIFF_SUPPORT=<bool>`

: Whether TIFF image files should be supported when collating documents.
This will automatically convert any TIFF image(s) to pdfs for later
processing. This only has an effect if
`PAPERLESS_CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED` has been enabled.

Defaults to false.

|
## Binaries
|
||||||
|
|
||||||
@@ -936,7 +1179,7 @@ find on your system when it starts up. Unless you've done something
|
creative with their installation, you probably won't need to edit any
of these. However, if you've installed these programs somewhere where
simply typing the name of the program doesn't automatically execute it
(i.e. the program isn't in your $PATH), then you'll need to specify
the literal path for that program.

`PAPERLESS_CONVERT_BINARY=<path>`
@@ -1019,12 +1262,17 @@ actual group ID on the host system, which you can get by executing
: Additional OCR languages to install. By default, paperless comes
with English, German, Italian, Spanish and French. If your language
is not in this list, install additional languages with this
configuration option. You will need to [find the right LangCodes](https://tesseract-ocr.github.io/tessdoc/Data-Files-in-different-versions.html)
but note that [tesseract-ocr-\* package names](https://packages.debian.org/bullseye/graphics/)
do not always correspond with the language codes, e.g. "chi_tra" should be
specified as "chi-tra".

``` bash
PAPERLESS_OCR_LANGUAGES=tur ces chi-tra
```

Make sure it's a space separated list when using several values.

To actually use these languages, also set the default OCR language
of paperless:

docs/development.md

@@ -1,9 +1,9 @@
# Development

This section describes the steps you need to take to start development
on Paperless-ngx.

Check out the source from GitHub. The repository is organized in the
following way:

- `main` always represents the latest release and will only see
@@ -12,7 +12,7 @@ following way:
- `feature-X` contain bigger changes that will be in some release, but
  not necessarily the next one.

When making functional changes to Paperless-ngx, _always_ make your changes
on the `dev` branch.

Apart from that, the folder structure is as follows:
@@ -24,9 +24,9 @@ Apart from that, the folder structure is as follows:
|
|||||||
development.
|
development.
|
||||||
- `docker/` - Files required to build the docker image.
|
- `docker/` - Files required to build the docker image.
|
||||||
|
|
||||||
## Contributing to Paperless
|
## Contributing to Paperless-ngx
|
||||||
|
|
||||||
Maybe you've been using Paperless for a while and want to add a feature
|
Maybe you've been using Paperless-ngx for a while and want to add a feature
|
||||||
or two, or maybe you've come across a bug that you have some ideas how
|
or two, or maybe you've come across a bug that you have some ideas how
|
||||||
to solve. The beauty of open source software is that you can see what's
|
to solve. The beauty of open source software is that you can see what's
|
||||||
wrong and help to get it fixed for everyone!
|
wrong and help to get it fixed for everyone!
|
||||||
[...] conduct](https://github.com/paperless-ngx/paperless-ngx/blob/main/CODE_OF_CONDUCT.md)
and other important information in the [contributing
guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/main/CONTRIBUTING.md).

## Code formatting with pre-commit hooks

To ensure a consistent style and formatting across the project source,
the project utilizes Git [`pre-commit`](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks)
hooks to perform some formatting and linting before a commit is allowed.
That way, everyone uses the same style and some common issues can be caught
early on.

Once installed, hooks will run when you commit. If the formatting isn't
quite right or a linter catches something, the commit will be rejected.
[...] as the Python formatting tool `black`, will format failing
files, so all you need to do is `git add` those files again
and retry your commit.
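You can also trigger all hooks manually, for the whole repository rather
than just staged files; this is standard `pre-commit` usage:

```bash
# Run every configured hook against the entire repository
$ pre-commit run --all-files
```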

## General setup

After you forked and cloned the code from GitHub you need to perform a
first-time setup.

!!! note

    Every command is executed directly from the root folder of the project unless specified otherwise.

1. Install prerequisites + pipenv as mentioned in
   [Bare metal route](/setup#bare_metal).

2. Copy `paperless.conf.example` to `paperless.conf` and enable debug
   mode within the file via `PAPERLESS_DEBUG=true`.

3. Create `consume` and `media` directories:

   ```bash
   $ mkdir -p consume media
   ```

4. Install the Python dependencies:

   ```bash
   $ pipenv install --dev
   ```

   !!! note

       Using a virtual environment is highly recommended. You can spawn one via `pipenv shell`.
       Make sure you're using Python 3.10.x or lower. Otherwise you might
       get issues with building dependencies. You can use
       [pyenv](https://github.com/pyenv/pyenv) to install a specific
       Python version.

5. Install pre-commit hooks:

   ```bash
   $ pre-commit install
   ```

6. Apply migrations and create a superuser for your development instance:

   ```bash
   # src/

   $ python3 manage.py migrate
   $ python3 manage.py createsuperuser
   ```

7. You can now either ...

   - install redis or

   - use the included `scripts/start_services.sh` to use docker to fire
     up a redis instance (and some other services such as tika,
     gotenberg and a database server) or

   - spin up a bare redis container

     ```shell-session
     $ docker run -d -p 6379:6379 --restart unless-stopped redis:latest
     ```

8. Continue with either back-end or front-end development – or both :-).

## Back end development

The back end is a [Django](https://www.djangoproject.com/) application.
[PyCharm](https://www.jetbrains.com/de-de/pycharm/) as well as [Visual Studio Code](https://code.visualstudio.com)
work well for development, but you can use whatever you want.

Configure the IDE to use the `src/` folder as the base source folder.
Configure the following launch configurations in your IDE:

- `python3 manage.py runserver`
- `python3 manage.py document_consumer`
- `celery --app paperless worker -l DEBUG` (or any other log level)

To start them all:

```bash
# src/

$ python3 manage.py runserver & \
  python3 manage.py document_consumer & \
  celery --app paperless worker -l DEBUG
```

You might need the front end to test your back end code.
This assumes that you have Angular installed on your system.
Go to the [Front end development](#front-end-development) section for further details.
To build the front end once, use this command:

```bash
# src-ui/

$ npm install
$ ng build --configuration production
```

### Testing

- Run `pytest` in the `src/` directory to execute all tests. This also
  generates an HTML coverage report. When running tests, `paperless.conf`
  is loaded as well. However, the tests rely on the default
  configuration. This is not ideal. But for now, make sure no settings
  except for DEBUG are overridden when testing.

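For quicker iteration you can also point `pytest` at a single module; the
test path below is purely illustrative:

```bash
# src/ — run one test module instead of the whole suite
$ pytest documents/tests/test_api.py
```
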
!!! note

    The line length rule E501 is generally useful for getting multiple
    [...]

## Front end development

The front end is built using Angular. In order to get started, you need
Node.js (version 14.15+) and `npm`.

!!! note

    The following commands are all performed in the `src-ui` directory. You
    will need a running back end (including an active session) to connect
    to the back end API. To spin it up, refer to the commands under the
    [back end development](#back-end-development) section above.

1. Install the Angular CLI. You might need sudo privileges to perform this command:

   ```bash
   $ npm install -g @angular/cli
   ```

2. Make sure that it's on your path.

3. Install all necessary modules:

   ```bash
   $ npm install
   ```

4. You can launch a development server by running:

   ```bash
   $ ng serve
   ```

   This will automatically update whenever you save. However, in-place
   compilation might fail on syntax errors, in which case you need to
   restart it.

   By default, the development server is available on `http://localhost:4200/`
   and is configured to access the API at `http://localhost:8000/api/`, which
   is the default of the back end. If you enabled `DEBUG` on the back end,
   several security overrides for allowed hosts, CORS and X-Frame-Options
   are in place so that the front end behaves exactly as in production.

### Testing and code style

The front end code (.ts, .html, .scss) uses `prettier` for code formatting
via the Git `pre-commit` hooks, which run automatically on commit. See
[above](#code-formatting-with-pre-commit-hooks) for installation
instructions. You can also run this via the CLI with a command such as:

```bash
$ git ls-files -- '*.ts' | xargs pre-commit run prettier --files
```

Front end testing uses Jest and Playwright. Unit tests and e2e tests,
respectively, can be run non-interactively with:

```bash
$ ng test
$ npx playwright test
```

Playwright also includes a UI which can be run with:

```bash
$ npx playwright test --ui
```

### Building the frontend

In order to build the front end and serve it as part of Django, execute:

```bash
$ ng build --configuration production
```

This will build the front end and put it in a location from which the
[...]

[...] that authentication is working.

## Localization

Paperless-ngx is available in many different languages. Since Paperless-ngx
consists both of a Django application and an Angular front end, both
these parts have to be translated separately.

### Front end localization

- The Angular front end does localization according to the [Angular
  documentation](https://angular.io/guide/i18n).
- The source language of the project is "en_US".
- The source strings end up in the file `src-ui/messages.xlf`.
- The translated strings need to be placed in the
  `src-ui/src/locale/` folder.
- In order to extract added or changed strings from the source files,
  call `ng extract-i18n`.

Adding new languages requires adding the translated files in the
`src-ui/src/locale/` folder and adjusting a couple of files.

1. Adjust `src-ui/angular.json`:

   ```json
   "i18n": {
   [...]
   ```

2. Add the language to the available options in
   `src-ui/src/app/services/settings.service.ts`:

   ```typescript
   getLanguageOptions(): LanguageOption[] {
   [...]
   ```

   [...] and "yyyy".

3. Import and register the Angular data for this locale in
   `src-ui/src/app/app.module.ts`:

   ```typescript
   import localeDe from '@angular/common/locales/de'
   [...]
   ```

[...] A majority of the strings that appear in the back end appear only when
the admin is used. However, some of these are still shown on the front
end (such as error messages).

- The Django application does localization according to the [Django
  documentation](https://docs.djangoproject.com/en/3.1/topics/i18n/translation/).
- The source language of the project is "en_US".
- Localization files end up in the folder `src/locale/`.
- In order to extract strings from the application, call
  `python3 manage.py makemessages -l en_US`. This is important after
  making changes to translatable strings.
[...] command.
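Compiling the translations is done with Django's standard `compilemessages`
management command; a sketch, since the sentence above is truncated here:

```bash
# src/ — compile .po translation files into binary .mo files (standard Django)
$ python3 manage.py compilemessages
```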

Adding new languages requires adding the translated files in the
`src/locale/` folder and adjusting the file
`src/paperless/settings.py` to include the new language:

```python
LANGUAGES = [
[...]
```

[...]

The documentation is built using material-mkdocs, see their
[documentation](https://squidfunk.github.io/mkdocs-material/reference/).
If you want to build the documentation locally, this is how you do it:

1. Have an active pipenv shell (`pipenv shell`) and install Python dependencies:

   ```bash
   $ pipenv install --dev
   ```

2. Build the documentation:

   ```bash
   $ mkdocs build --config-file mkdocs.yml
   ```

   _alternatively..._

3. Serve the documentation. This will spin up a
   copy of the documentation at http://127.0.0.1:8000
   that will automatically refresh every time you change
   something.

   ```bash
   $ mkdocs serve
   ```

## Building the Docker image

The docker image is primarily built by the GitHub actions workflow, but
it can be faster when developing to build and tag an image locally.

Building the image works as with any image:

```bash
docker build --file Dockerfile --tag paperless:local --progress simple .
```

## Extending Paperless-ngx

Paperless-ngx does not have any fancy plugin systems and probably never
will. However, some parts of the application have been designed to allow
easy integration of additional features without any modification to the
base code.

### Making custom parsers

Paperless-ngx uses parsers to add documents. A parser is
responsible for:

- Retrieving the content from the original
- Creating a thumbnail
- _optional:_ Retrieving a created date from the original
- _optional:_ Creating an archived document from the original

Custom parsers can be added to Paperless-ngx to support more file types. In
order to do that, you need to write the parser itself and announce its
existence to Paperless-ngx.

The parser itself must extend `documents.parsers.DocumentParser` and
must implement the methods `parse` and `get_thumbnail`. You can provide
your own implementation to `get_date` if you don't want to rely on
Paperless-ngx's default date guessing mechanisms.

```python
class MyCustomParser(DocumentParser):
[...]
```

[...] to be empty and removed after consumption has finished. You can use
that directory to store any intermediate files and also use it to store the
thumbnail / archived document.

After that, you need to announce your parser to Paperless-ngx. You need to
connect a handler to the `document_consumer_declaration` signal. Have a
look in the file `src/paperless_tesseract/apps.py` on how that's done.
The handler is a method that returns information about your parser:
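A minimal sketch of such a handler, reconstructed around the
`myparser_consumer_declaration` signature that the original excerpt
truncates (the weight and mime-type values are illustrative, and
`MyCustomParser` is the class from the snippet above):

```python
# Sketch only: the return-value format follows the docs' description below;
# the concrete weight and mime type entries are illustrative.
def myparser_consumer_declaration(sender, **kwargs):
    return {
        "parser": MyCustomParser,   # the DocumentParser subclass from above
        "weight": 0,                # higher weights win when several parsers match
        "mime_types": {
            "application/pdf": ".pdf",  # mime type -> default file extension
        },
    }
```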

- `parser` is a reference to a class that extends `DocumentParser`.
- `weight` is used whenever two or more parsers are able to parse a
  file: The parser with the higher weight wins. This can be used to
  override the parsers provided by Paperless-ngx.
- `mime_types` is a dictionary. The keys are the mime types your
  parser supports and the value is the default file extension that
  Paperless-ngx should use when storing files and serving them for
  download. We could guess that from the file extensions, but some
  mime types have many extensions associated with them and the Python
  methods responsible for guessing the extension do not always return
  the same value.

docs/faq.md

## _What's the general plan for Paperless-ngx?_

**A:** While Paperless-ngx is already considered largely
"feature-complete", it is a community-driven project and development
will be guided in this way. New features can be submitted via
[GitHub discussions](https://github.com/paperless-ngx/paperless-ngx/discussions)
and "up-voted" by the community, but this is not a
guarantee that the feature will be implemented. This project will always be
open to collaboration in the form of PRs, ideas etc.

## _I'm using docker. Where are my documents?_

[...] system. On Linux, chances are high that this location is
[...] files around manually. This folder is meant to be entirely managed by
docker and paperless.

!!! note

    Files consumed from the consumption directory are re-created inside
    this media directory and are removed from the consumption directory
    itself.

## Let's say I want to switch tools in a year. Can I easily move to other systems?

**A:** Your documents are stored as plain files inside the media folder.

[...] elsewhere. Here are a couple notes about that.

- [...] WebP images are processed with OCR and converted into PDF documents.
- Plain text documents are supported as well and are added verbatim to
  paperless.
- With the optional Tika integration enabled (see [Tika configuration](https://docs.paperless-ngx.com/configuration#tika)),
  Paperless also supports various Office documents (.docx, .doc, .odt,
  .ppt, .pptx, .odp, .xls, .xlsx, .ods).

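As a sketch, the environment switches for that integration look like this
(the endpoint URLs assume the `tika` and `gotenberg` services from the
`-tika` docker-compose files):

```bash
# docker-compose.env — optional Tika integration (sketch)
PAPERLESS_TIKA_ENABLED=1
PAPERLESS_TIKA_ENDPOINT=http://tika:9998
PAPERLESS_TIKA_GOTENBERG_ENDPOINT=http://gotenberg:3000
```
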
[...] has to do much less work to serve the data.

## _How do I install paperless-ngx on Raspberry Pi?_

**A:** Docker images are available for armv7 and arm64 hardware, so just
follow the [docker-compose instructions](https://docs.paperless-ngx.com/setup/#installation).
Apart from more required disk space compared to a bare metal installation,
docker comes with close to zero overhead, even on Raspberry Pi.

[...] see if it works.

## _How do I proxy this with NGINX?_

**A:** See [the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Using-a-Reverse-Proxy-with-Paperless-ngx#nginx).

## _How do I get WebSocket support with Apache mod_wsgi_?

docs/setup.md

[...] steps described in [Docker setup](#docker_hub) automatically.

```
$ bash -c "$(curl -L https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/install-paperless-ngx.sh)"
```

!!! note

    macOS users will need to install e.g. [gnu-sed](https://formulae.brew.sh/formula/gnu-sed) with support
    for running as `sed`.

### From GHCR / Docker Hub {#docker_hub}

1. Login with your user and create a folder in your home-directory to have a place for your
   [...]

2. Go to the [/docker/compose directory on the project
   page](https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose)
   and download one of the `docker-compose.*.yml` files,
   depending on which database backend you want to use. Rename this
   file to `docker-compose.yml`. If you want to enable
   optional support for Office documents, download a file with
   `-tika` in the file name. Download the
   `docker-compose.env` file and the `.env` file as well and store them
   [...]

   If you want to use the included `docker-compose.*.yml` file, you
   need to have at least Docker version **17.09.0** and docker-compose
   version **1.17.0**. To check, do: `docker-compose -v` or `docker -v`.

   See the [Docker installation guide](https://docs.docker.com/engine/install/) on how to install the current
   version of Docker for your operating system or Linux distribution of
   [...]

   !!! note

       You can copy any setting from the file `paperless.conf.example` and
       paste it here. Have a look at [configuration](/configuration) to see what's available.

   !!! note

       You can utilize Docker secrets for configuration settings by
       appending `_FILE` to configuration values. For example `PAPERLESS_DBUSER`
       can be set using `PAPERLESS_DBUSER_FILE=/var/run/secrets/password.txt`.

   !!! warning

       [...] `PAPERLESS_CONSUMER_POLLING`, which will disable inotify. See
       [here](/configuration#polling).

6. Run `docker-compose pull`. This will pull the image.

7. To be able to login, you will need a super user. To create it,
   execute the following command:

   ```shell-session
   $ docker-compose run --rm webserver createsuperuser
   ```

   or using docker exec from within the container:

   ```shell-session
   $ python3 manage.py createsuperuser
   ```

   This will prompt you to set a username, an optional e-mail address
   and finally a password (at least 8 characters).

8. Run `docker-compose up -d`. This will create and start the necessary containers.

9. The default `docker-compose.yml` exports the webserver on your local
   port 8000. If you did not change this, you should now be able to visit
   [...]

   ```shell-session
   git clone https://github.com/paperless-ngx/paperless-ngx
   ```

   The main branch always reflects the latest stable version.

2. Copy one of the `docker/compose/docker-compose.*.yml` to
   `docker-compose.yml` in the root folder, depending on which database
   [...]
[...]

- `python3-pip`
- `python3-dev`
- `default-libmysqlclient-dev` for MariaDB
- `pkg-config` for mysqlclient (python dependency)
- `fonts-liberation` for generating thumbnails for plain text
  files
- `imagemagick` >= 6 for PDF conversion

Use this list for your preferred package management:

```
python3 python3-pip python3-dev imagemagick fonts-liberation gnupg libpq-dev default-libmysqlclient-dev pkg-config libmagic-dev mime-support libzbar0 poppler-utils
```

These dependencies are required for OCRmyPDF, which is used for text
[...]

[...] the [JSON1 extension](https://code.djangoproject.com/wiki/JSON1Extension) is
enabled. This is usually the case, but not always.

4. Create a system user with a new home folder under which you wish
   to run paperless.

   ```shell-session
   adduser paperless --system --home /opt/paperless --group
   ```

5. Get the release archive from
   <https://github.com/paperless-ngx/paperless-ngx/releases>, for example with

   ```shell-session
   curl -O -L https://github.com/paperless-ngx/paperless-ngx/releases/download/v1.10.2/paperless-ngx-v1.10.2.tar.xz
   ```

   Extract the archive with

   ```shell-session
   tar -xf paperless-ngx-v1.10.2.tar.xz
   ```

   and copy the contents to the home folder of the user you created
   before (`/opt/paperless`).

   Optional: If you cloned the git repo, you will have to
   compile the frontend yourself, see [here](/development#front-end-development)
   and use the `build` step, not `serve`.

6. Configure paperless. See [configuration](/configuration) for details.
   Edit the included `paperless.conf` and adjust the settings to your
   needs. Required settings for getting paperless running are:

   - [...] documents are written in.
   - Set `PAPERLESS_TIME_ZONE` to your local time zone.

   !!! warning

       Ensure your Redis instance [is secured](https://redis.io/docs/getting-started/#securing-redis).

7. Create the following directories if they are missing:

   - `/opt/paperless/media`
   - `/opt/paperless/data`
   - `/opt/paperless/consume`

   Adjust as necessary if you configured different folders.
   Ensure that the paperless user has write permissions for every one
   of these folders with

   ```shell-session
   ls -l -d /opt/paperless/media
   ```

   If needed, change the owner with

   ```shell-session
   sudo chown paperless:paperless /opt/paperless/media
   sudo chown paperless:paperless /opt/paperless/data
   sudo chown paperless:paperless /opt/paperless/consume
   ```

8. Install python requirements from the `requirements.txt` file. It is
   up to you if you wish to use a virtual environment or not. First you
   should update your pip, so it gets the actual packages.

   ```shell-session
   sudo -Hu paperless pip3 install --upgrade pip
   ```

   ```shell-session
   sudo -Hu paperless pip3 install -r requirements.txt
   ```
[...]

10. Optional: Test that paperless is working by executing

    ```bash
    # Manually starts the webserver
    sudo -Hu paperless python3 manage.py runserver
    ```

    and pointing your browser to http://localhost:8000 if
    accessing from the same device on which paperless is installed.
    If accessing from another machine, set up systemd services. You may need
    to set `PAPERLESS_DEBUG=true` in order for the development server to work
    normally in your browser.

    !!! warning

        [...]

[...] in front of gunicorn instead.

    For instructions on how to use nginx for that,
    [see the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Using-a-Reverse-Proxy-with-Paperless-ngx#nginx).

    !!! warning

        If celery won't start (check with
        `sudo systemctl status paperless-task-queue.service` for
        paperless-task-queue.service and paperless-scheduler.service),
        you need to change the path in the files. Example:
        `ExecStart=/opt/paperless/.local/bin/celery --app paperless worker --loglevel INFO`

12. Optional: Install a samba server and make the consumption folder
    available as a network share.

[...] Users who installed with the bare-metal route should also update their
Git clone to point to `https://github.com/paperless-ngx/paperless-ngx`,
e.g. using the command
`git remote set-url origin https://github.com/paperless-ngx/paperless-ngx`
and then pull the latest version.

## Migrating from Paperless

[...]

3. Download the latest release of paperless-ngx. You can either go with
   the docker-compose files from
   [here](https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose)
   or clone the repository to build the image yourself (see
   [above](#docker_build)). You can
   either replace your current paperless folder or put paperless-ngx in
   [...]

MySQL also enforces limits on maximum lengths, but does so differently than
PostgreSQL. It may not be possible to migrate to MySQL due to this.

!!! warning

    Using MariaDB version 10.4+ is recommended. Using the `utf8mb3` character
    set on an older system may fix issues that can arise while setting up
    Paperless-ngx, but `utf8mb3` can cause issues with consumption (where
    `utf8mb4` does not).

1. Stop paperless, if it is running.

2. Tell paperless to use PostgreSQL:

[...]

Paperless-ngx modified the database schema slightly, however, these
changes can be reverted while keeping your current data, so that your
current data will be compatible with original Paperless. Thumbnails
were also changed from PNG to WEBP format and will need to be
re-generated.

Execute this:

```shell-session
$ cd /path/to/paperless/src
$ python3 manage.py migrate documents 0023
```

After regenerating thumbnails, you'll need to clear your cookies
(Paperless-ngx comes with updated dependencies that do cookie-processing
differently) and probably your cache as well.

# Considerations for less powerful devices {#less-powerful-devices}

[...] performance immensely:

- [...] other tasks).
- Keep `PAPERLESS_OCR_MODE` at its default value `skip` and consider
  OCR'ing your documents before feeding them into paperless. Some
  scanners are able to do this!
- Set `PAPERLESS_OCR_SKIP_ARCHIVE_FILE` to `with_text` to skip archive
  file generation for already OCR'ed documents, or `always` to skip it
  for all documents.
- If you want to perform OCR on the device, consider using
  `PAPERLESS_OCR_CLEAN=none`. This will speed up OCR times and use
  less memory at the expense of slightly worse OCR results.
- If using docker, consider setting `PAPERLESS_WEBSERVER_WORKERS` to 1.
  This will save some memory.
- Consider setting `PAPERLESS_ENABLE_NLTK` to false, to disable the
  more advanced language processing, which can take more memory and
  processing time.
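
Taken together, a low-powered-device configuration might look like this in
`paperless.conf` or `docker-compose.env` (a sketch; tune each value to your
hardware):

```bash
# Low-memory / low-CPU configuration (sketch)
PAPERLESS_OCR_MODE=skip
PAPERLESS_OCR_SKIP_ARCHIVE_FILE=with_text
PAPERLESS_OCR_CLEAN=none
PAPERLESS_WEBSERVER_WORKERS=1
PAPERLESS_ENABLE_NLTK=false
```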

[...] For details, refer to [configuration](/configuration).

# Using nginx as a reverse proxy {#nginx}

Please see [the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Using-a-Reverse-Proxy-with-Paperless-ngx#nginx) for user-maintained documentation of using nginx with Paperless-ngx.

# Enhancing security {#security}

Please see [the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Using-Security-Tools-with-Paperless-ngx) for user-maintained documentation of how to configure security tools like Fail2ban with Paperless-ngx.

[...] change the port gunicorn listens on.

To fix this, set `PAPERLESS_PORT` again to your desired port, or the
default of 8000.
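
For example, in `docker-compose.env` (assuming the stock setup):

```bash
# Restore the webserver port the rest of the stack expects
PAPERLESS_PORT=8000
```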

## Database warns about unique constraint "documents_tag_name_uniq"

You may see database log lines like:

```
ERROR: duplicate key value violates unique constraint "documents_tag_name_uniq"
DETAIL: Key (name)=(NameF) already exists.
STATEMENT: INSERT INTO "documents_tag" ("owner_id", "name", "match", "matching_algorithm", "is_insensitive", "color", "is_inbox_tag") VALUES (NULL, 'NameF', '', 1, true, '#a6cee3', false) RETURNING "documents_tag"."id"
```

This can happen during heavy consumption when using polling. Paperless will
handle it correctly and the file will still be consumed.

## Consumption fails with "Ghostscript PDF/A rendering failed"

Newer versions of OCRmyPDF will fail if they encounter errors during
processing. This is intentional, as the output archive file may differ in
unexpected or undesired ways from the original. As the logs indicate, if you
encounter this error you can set
`PAPERLESS_OCR_USER_ARGS: '{"continue_on_soft_render_error": true}'` to try
to 'force' processing documents with this issue.

@@ -60,8 +60,8 @@ following operations on your documents:

This process can be configured to fit your needs. If you don't want
paperless to create archived versions for digital documents, you can
-configure that by configuring `PAPERLESS_OCR_MODE=skip_noarchive`.
-Please read the
+configure that by configuring
+`PAPERLESS_OCR_SKIP_ARCHIVE_FILE=with_text`. Please read the
[relevant section in the documentation](/configuration#ocr).

!!! note
@@ -69,7 +69,9 @@ following operations on your documents:

No matter which options you choose, Paperless will always store the
original document that it found in the consumption directory or in the
mail and will never overwrite that document. Archived versions are
-stored alongside the original versions.
+stored alongside the original versions. Any files found in the
+consumption directory will be stored inside the Paperless-ngx file
+structure and will not be retained in the consumption directory.
### The consumption directory

@@ -77,7 +79,9 @@ The primary method of getting documents into your database is by putting

them in the consumption directory. The consumer waits patiently, looking
for new additions to this directory. When it finds them,
the consumer goes about the process of parsing them with the OCR,
-indexing what it finds, and storing it in the media directory.
+indexing what it finds, and storing it in the media directory. You should
+think of this folder as a temporary location, as files will be re-created
+inside Paperless-ngx and removed from the consumption folder.

Getting stuff into this directory is up to you. If you're running
Paperless on your local computer, you might just want to drag and drop
@@ -88,6 +92,15 @@ Typically, you're looking at an FTP server like

[Proftpd](http://www.proftpd.org/) or a Windows folder share with
[Samba](https://www.samba.org/).

!!! warning

    Files found in the consumption directory that are consumed will be
    removed from the consumption directory and stored inside the
    Paperless-ngx file structure using any settings / storage paths
    you have specified. This action is performed as safely as possible
    but this means it is expected that files in the consumption
    directory will no longer exist (there) after being consumed.
### Web UI Upload

The dashboard has a file drop field to upload documents to paperless.

@@ -108,6 +121,8 @@ Furthermore, there is the [Paperless

App](https://github.com/bauerj/paperless_app) as well, which not only
has document upload, but also document browsing and download features.

Another option is [Paperless Mobile](https://github.com/astubenbord/paperless-mobile), an Android app that supports document upload, scanning, management of labels and more.
### IMAP (Email) {#usage-email}

You can tell paperless-ngx to consume documents from your email

@@ -151,6 +166,8 @@ different means. These are as follows:

will not consume mails already tagged. Not all mail servers support
this feature!

- **Apple Mail support:** Apple Mail clients allow differently colored tags. For this to work use `apple:<color>` (e.g. _apple:green_) as a custom tag. Available colors are _red_, _orange_, _yellow_, _blue_, _green_, _violet_ and _grey_.

!!! warning

The mail consumer will perform these actions on all mails it has
@@ -191,13 +208,52 @@ different means. These are as follows:

them further.

Paperless is set up to check your mails every 10 minutes. This can be
-configured on the 'Scheduled tasks' page in the admin.
+configured via `PAPERLESS_EMAIL_TASK_CRON` (see [software tweaks](/configuration#software_tweaks))
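For reference, a sketch of changing that schedule via the environment file; the five-field value is standard cron syntax, and hourly is just an example:

```bash
# Example only: poll the mailbox at minute 0 of every hour
# instead of the default every 10 minutes.
echo 'PAPERLESS_EMAIL_TASK_CRON=0 * * * *' >> docker-compose.env
docker compose up --detach
```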
### REST API

You can also submit a document using the REST API, see [POSTing documents](/api#file-uploads)
for details.
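As a quick sketch, assuming the standard file-upload endpoint `/api/documents/post_document/` (credentials and file path are placeholders):

```bash
# Upload one PDF as the given user; Paperless queues it for
# consumption and responds with a task id.
curl -u "username:password" \
  -F "document=@/path/to/scan.pdf" \
  http://localhost:8000/api/documents/post_document/
```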
## Permissions

As of version 1.14.0 Paperless-ngx added core support for user / group permissions. Permissions are
based around 'global' permissions as well as 'object-level' permissions. Global permissions designate
which parts of the application a user can access (e.g. Documents, Tags, Settings) and object-level
permissions determine which objects are visible or editable. All objects have an 'owner' and 'view' and 'edit'
permissions which can be granted to other users or groups. The paperless-ngx permissions system uses
the built-in user model of the backend framework, Django.

!!! tip

    Object-level permissions only apply to the object itself. In other words, setting permissions
    for a Tag will _not_ affect the permissions of documents that have the Tag.

Permissions can be set using the new "Permissions" tab when editing documents, or bulk-applied
in the UI by selecting documents and choosing the "Permissions" button. An owner can also optionally
be set for documents uploaded via the API. Documents consumed via the consumption dir currently
do not have an owner set.
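Building on the upload sketch in the REST API section above, the owner can be passed as an extra form field (the numeric user id `2` is a placeholder):

```bash
# Upload a document and assign user id 2 as its owner in one request.
curl -u "username:password" \
  -F "document=@/path/to/scan.pdf" \
  -F "owner=2" \
  http://localhost:8000/api/documents/post_document/
```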
!!! note

    After migration to version 1.14.0 all existing documents, tags etc. will have no explicit owner
    set, which means they will be visible / editable by all users. Once an object has an owner set,
    only the owner can explicitly grant / revoke permissions.

!!! note

    When first migrating to permissions it is recommended to use a 'superuser' account (which
    would usually have been set up during installation) to ensure you have full permissions.

    Note that superusers have access to all objects.

### Users and Groups

Paperless-ngx versions after 1.14.0 allow creating and editing users and groups via the 'frontend' UI.
These can be found under Settings > Users & Groups, assuming the user has access. If a user is designated
as a member of a group those permissions will be inherited and this is reflected in the UI. Explicit
permissions can be granted to limit access to certain parts of the UI (and corresponding API endpoints).
## Best practices {#basic-searching}

Paperless offers a couple tools that help you organize your document

@@ -359,6 +415,14 @@ documents in your inbox:

sorted by ASN. Don't order this binder in any other way.
5. If the document has no ASN, throw it away. Yay!

!!! tip

    Instead of writing a number on the document by hand, you may also prepare
    a spool of labels with barcodes with an ascending serial number, that are
    formatted like `ASN00001`.
    This also enables Paperless to automatically parse and process the ASN
    (if enabled in the config), so that you don't need to manually assign it.
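If you go the barcode-label route, a sketch of enabling ASN detection for a compose install; the option names are taken from the configuration reference, and `PAPERLESS_CONSUMER_ENABLE_ASN_BARCODE` is assumed to be available in your version:

```bash
# Enable barcode scanning in general, plus ASN barcode detection so
# labels like ASN00001 are parsed and assigned during consumption.
echo 'PAPERLESS_CONSUMER_ENABLE_BARCODES=true' >> docker-compose.env
echo 'PAPERLESS_CONSUMER_ENABLE_ASN_BARCODE=true' >> docker-compose.env
docker compose up --detach
```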
Over time, you will notice that your physical binder will fill up. If it
is full, label the binder with the range of ASNs in this binder (i.e.,
"Documents 1 to 343"), store the binder in your cellar or elsewhere,
```diff
@@ -30,7 +30,9 @@ def worker_int(worker):
     worker.log.info("worker received INT or QUIT signal")

     ## get traceback info
-    import threading, sys, traceback
+    import sys
+    import threading
+    import traceback

     id2name = {th.ident: th.name for th in threading.enumerate()}
     code = []
```
```diff
@@ -72,7 +72,7 @@ fi
 if ! docker stats --no-stream &> /dev/null ; then
     echo ""
     echo "WARN: It look like the current user does not have Docker permissions."
-    echo "WARN: Use 'sudo usermod -aG docker $USER' to assign Docker permissions to the user."
+    echo "WARN: Use 'sudo usermod -aG docker $USER' to assign Docker permissions to the user (may require restarting shell)."
     echo ""
     sleep 3
 fi
```
```diff
@@ -321,7 +321,7 @@ fi
 wget "https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/docker/compose/docker-compose.$DOCKER_COMPOSE_VERSION.yml" -O docker-compose.yml
 wget "https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/docker/compose/.env" -O .env

-SECRET_KEY=$(tr -dc 'a-zA-Z0-9' < /dev/urandom | fold -w 64 | head -n 1)
+SECRET_KEY=$(tr --delete --complement 'a-zA-Z0-9' < /dev/urandom 2>/dev/null | head --bytes 64)

 DEFAULT_LANGUAGES=("deu eng fra ita spa")
```
```diff
@@ -346,7 +346,7 @@ read -r -a OCR_LANGUAGES_ARRAY <<< "${_split_langs}"
 fi
 } > docker-compose.env

-sed -i "s/- 8000:8000/- $PORT:8000/g" docker-compose.yml
+sed -i "s/- \"8000:8000\"/- \"$PORT:8000\"/g" docker-compose.yml

 sed -i "s#- \./consume:/usr/src/paperless/consume#- $CONSUME_FOLDER:/usr/src/paperless/consume#g" docker-compose.yml
```
```diff
@@ -384,6 +384,14 @@ fi

 ${DOCKER_COMPOSE_CMD} pull

+if [ "$DATABASE_BACKEND" == "postgres" ] || [ "$DATABASE_BACKEND" == "mariadb" ] ; then
+    echo "Starting DB first for initialization"
+    ${DOCKER_COMPOSE_CMD} up --detach db
+    # hopefully enough time for even the slower systems
+    sleep 15
+    ${DOCKER_COMPOSE_CMD} stop
+fi

 ${DOCKER_COMPOSE_CMD} run --rm -e DJANGO_SUPERUSER_PASSWORD="$PASSWORD" webserver createsuperuser --noinput --username "$USERNAME" --email "$EMAIL"

 ${DOCKER_COMPOSE_CMD} up --detach
```
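The fixed `sleep 15` above is a guess at database startup time; a sketch of an alternative readiness poll for the postgres backend (assumes the database service is named `db` as in the compose files; mariadb would need its own check):

```bash
# Poll until postgres accepts connections instead of sleeping blindly.
${DOCKER_COMPOSE_CMD} up --detach db
until ${DOCKER_COMPOSE_CMD} exec db pg_isready --quiet; do
    sleep 1
done
${DOCKER_COMPOSE_CMD} stop
```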
```diff
@@ -41,6 +41,7 @@ markdown_extensions:
       anchor_linenums: true
   - pymdownx.superfences
   - pymdownx.inlinehilite
+  - pymdownx.snippets
 strict: true
 nav:
   - index.md
@@ -54,7 +55,7 @@ nav:
   - 'FAQs': faq.md
   - troubleshooting.md
   - changelog.md
-copyright: Copyright © 2016 - 2022 Daniel Quinn, Jonas Winkler, and the Paperless-ngx team
+copyright: Copyright © 2016 - 2023 Daniel Quinn, Jonas Winkler, and the Paperless-ngx team
 extra:
   social:
     - icon: fontawesome/brands/github
```
```diff
@@ -42,6 +42,7 @@
 #PAPERLESS_OCR_LANGUAGE=eng
 #PAPERLESS_OCR_MODE=skip
+#PAPERLESS_OCR_SKIP_ARCHIVE_FILE=never
 #PAPERLESS_OCR_OUTPUT_TYPE=pdfa
 #PAPERLESS_OCR_PAGES=1
 #PAPERLESS_OCR_IMAGE_DPI=300
@@ -65,6 +66,11 @@
 #PAPERLESS_CONSUMER_SUBDIRS_AS_TAGS=false
 #PAPERLESS_CONSUMER_ENABLE_BARCODES=false
 #PAPERLESS_CONSUMER_BARCODE_STRING=PATCHT
+#PAPERLESS_CONSUMER_BARCODE_UPSCALE=0.0
+#PAPERLESS_CONSUMER_BARCODE_DPI=300
+#PAPERLESS_CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED=false
+#PAPERLESS_CONSUMER_COLLATE_DOUBLE_SIDED_SUBDIR_NAME=double-sided
+#PAPERLESS_CONSUMER_COLLATE_DOUBLE_SIDED_TIFF_SUPPORT=false
 #PAPERLESS_PRE_CONSUME_SCRIPT=/path/to/an/arbitrary/script.sh
 #PAPERLESS_POST_CONSUME_SCRIPT=/path/to/an/arbitrary/script.sh
 #PAPERLESS_FILENAME_DATE_ORDER=YMD
```
```diff
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash

-docker run -p 5432:5432 -e POSTGRES_PASSWORD=password -v paperless_pgdata:/var/lib/postgresql/data -d postgres:13
+docker run -p 5432:5432 -e POSTGRES_PASSWORD=password -v paperless_pgdata:/var/lib/postgresql/data -d postgres:15
 docker run -d -p 6379:6379 redis:latest
-docker run -p 3000:3000 -d gotenberg/gotenberg:7.6 gotenberg --chromium-disable-javascript=true --chromium-allow-list="file:///tmp/.*"
+docker run -p 3000:3000 -d gotenberg/gotenberg:7.8 gotenberg --chromium-disable-javascript=true --chromium-allow-list="file:///tmp/.*"
 docker run -p 9998:9998 -d ghcr.io/paperless-ngx/tika:latest
```
@@ -1,18 +0,0 @@

```
# This file is used by the build system to adjust CSS and JS output to support the specified browsers below.
# For additional information regarding the format and rule options, please see:
# https://github.com/browserslist/browserslist#queries

# For the full list of supported browsers by the Angular framework, please see:
# https://angular.io/guide/browser-support

# You can see what browsers were selected by your queries by running:
# npx browserslist

last 1 Chrome version
last 1 Firefox version
last 2 Edge major versions
last 2 Safari major versions
last 2 iOS major versions
Firefox ESR
not IE 9-10 # Angular support for IE 9-10 has been deprecated and will be removed as of Angular v11. To opt-in, remove the 'not' prefix on this line.
not IE 11 # Angular supports IE 11 only as an opt-in. To opt-in, remove the 'not' prefix on this line.
```
src-ui/.gitignore

```diff
@@ -49,3 +49,6 @@ Thumbs.db
 # Cypress
 cypress/videos/**/*
 cypress/screenshots/**/*
+/test-results/
+/playwright-report/
+/playwright/.cache/
```
```diff
@@ -12,7 +12,7 @@ Run `ng generate component component-name` to generate a new component. You can

 ## Build

-Run `ng build` to build the project. The build artifacts will be stored in the `dist/` directory. Use the `--prod` flag for a production build.
+Run `ng build` to build the project. The build artifacts will be stored in the `dist/` directory. Use the `--configuration production` flag for a production build.

 ## Running unit tests
```
```diff
@@ -16,12 +16,15 @@
     "i18n": {
       "sourceLocale": "en-US",
       "locales": {
+        "ar-AR": "src/locale/messages.ar_AR.xlf",
         "be-BY": "src/locale/messages.be_BY.xlf",
+        "ca-ES": "src/locale/messages.ca_ES.xlf",
         "cs-CZ": "src/locale/messages.cs_CZ.xlf",
         "da-DK": "src/locale/messages.da_DK.xlf",
         "de-DE": "src/locale/messages.de_DE.xlf",
         "en-GB": "src/locale/messages.en_GB.xlf",
         "es-ES": "src/locale/messages.es_ES.xlf",
+        "fi-FI": "src/locale/messages.fi_FI.xlf",
         "fr-FR": "src/locale/messages.fr_FR.xlf",
         "it-IT": "src/locale/messages.it_IT.xlf",
         "lb-LU": "src/locale/messages.lb_LU.xlf",
@@ -31,10 +34,12 @@
         "pt-PT": "src/locale/messages.pt_PT.xlf",
         "ro-RO": "src/locale/messages.ro_RO.xlf",
         "ru-RU": "src/locale/messages.ru_RU.xlf",
+        "sk-SK": "src/locale/messages.sk_SK.xlf",
         "sl-SI": "src/locale/messages.sl_SI.xlf",
         "sr-CS": "src/locale/messages.sr_CS.xlf",
         "sv-SE": "src/locale/messages.sv_SE.xlf",
         "tr-TR": "src/locale/messages.tr_TR.xlf",
+        "uk-UA": "src/locale/messages.uk_UA.xlf",
         "zh-CN": "src/locale/messages.zh_CN.xlf"
       }
     },
```
```diff
@@ -144,37 +149,6 @@
         "scripts": []
       }
     },
-    "e2e": {
-      "builder": "@cypress/schematic:cypress",
-      "options": {
-        "devServerTarget": "paperless-ui:serve",
-        "watch": true,
-        "headless": false
-      },
-      "configurations": {
-        "production": {
-          "devServerTarget": "paperless-ui:serve:production"
-        }
-      }
-    },
-    "cypress-run": {
-      "builder": "@cypress/schematic:cypress",
-      "options": {
-        "devServerTarget": "paperless-ui:serve"
-      },
-      "configurations": {
-        "production": {
-          "devServerTarget": "paperless-ui:serve:production"
-        }
-      }
-    },
-    "cypress-open": {
-      "builder": "@cypress/schematic:cypress",
-      "options": {
-        "watch": true,
-        "headless": false
-      }
-    },
     "lint": {
       "builder": "@angular-eslint/builder:lint",
       "options": {
@@ -187,10 +161,18 @@
         }
       }
     },
-  "defaultProject": "paperless-ui",
   "cli": {
     "schematicCollections": [
       "@angular-eslint/schematics"
-    ]
+    ],
+    "analytics": false
+  },
+  "schematics": {
+    "@angular-eslint/schematics:application": {
+      "setParserOptionsProject": true
+    },
+    "@angular-eslint/schematics:library": {
+      "setParserOptionsProject": true
+    }
   }
 }
```
@@ -1,13 +0,0 @@

```ts
import { defineConfig } from 'cypress'

export default defineConfig({
  videosFolder: 'cypress/videos',
  screenshotsFolder: 'cypress/screenshots',
  fixturesFolder: 'cypress/fixtures',
  e2e: {
    setupNodeEvents(on, config) {
      return require('./cypress/plugins/index.ts')(on, config)
    },
    baseUrl: 'http://localhost:4200',
  },
})
```
@@ -1,94 +0,0 @@

```ts
describe('document-detail', () => {
  beforeEach(() => {
    // also uses global fixtures from cypress/support/e2e.ts

    this.modifiedDocuments = []

    cy.fixture('documents/documents.json').then((documentsJson) => {
      cy.intercept('GET', 'http://localhost:8000/api/documents/1/', (req) => {
        let response = { ...documentsJson }
        response = response.results.find((d) => d.id == 1)
        req.reply(response)
      })
    })

    cy.intercept('PUT', 'http://localhost:8000/api/documents/1/', (req) => {
      this.modifiedDocuments.push(req.body) // store this for later
      req.reply({ result: 'OK' })
    }).as('saveDoc')

    cy.fixture('documents/1/comments.json').then((commentsJson) => {
      cy.intercept(
        'GET',
        'http://localhost:8000/api/documents/1/comments/',
        (req) => {
          req.reply(commentsJson.filter((c) => c.id != 10)) // 3
        }
      )

      cy.intercept(
        'DELETE',
        'http://localhost:8000/api/documents/1/comments/?id=9',
        (req) => {
          req.reply(commentsJson.filter((c) => c.id != 9 && c.id != 10)) // 2
        }
      )

      cy.intercept(
        'POST',
        'http://localhost:8000/api/documents/1/comments/',
        (req) => {
          req.reply(commentsJson) // 4
        }
      )
    })

    cy.viewport(1024, 1024)
    cy.visit('/documents/1/')
  })

  it('should activate / deactivate save button when changes are saved', () => {
    cy.contains('button', 'Save').should('be.disabled')
    cy.get('app-input-text[formcontrolname="title"]')
      .type(' additional')
      .wait(1500) // this delay is for frontend debounce
    cy.contains('button', 'Save').should('not.be.disabled')
  })

  it('should warn on unsaved changes', () => {
    cy.get('app-input-text[formcontrolname="title"]')
      .type(' additional')
      .wait(1500) // this delay is for frontend debounce
    cy.get('button[title="Close"]').click()
    cy.contains('You have unsaved changes')
    cy.contains('button', 'Cancel').click().wait(150)
    cy.contains('button', 'Save').click().wait('@saveDoc').wait(2000) // navigates away after saving
    cy.contains('You have unsaved changes').should('not.exist')
  })

  it('should show a list of comments', () => {
    cy.wait(1000).get('a').contains('Comments').click().wait(1000)
    cy.get('app-document-comments').find('.card').its('length').should('eq', 3)
  })

  it('should support comment deletion', () => {
    cy.wait(1000).get('a').contains('Comments').click().wait(1000)
    cy.get('app-document-comments')
      .find('.card')
      .first()
      .find('button')
      .click({ force: true })
      .wait(500)
    cy.get('app-document-comments').find('.card').its('length').should('eq', 2)
  })

  it('should support comment insertion', () => {
    cy.wait(1000).get('a').contains('Comments').click().wait(1000)
    cy.get('app-document-comments')
      .find('form textarea')
      .type('Testing new comment')
      .wait(500)
    cy.get('app-document-comments').find('form button').click().wait(1500)
    cy.get('app-document-comments').find('.card').its('length').should('eq', 4)
  })
})
```
@@ -1,129 +0,0 @@

```ts
describe('documents-list', () => {
  beforeEach(() => {
    // also uses global fixtures from cypress/support/e2e.ts

    this.bulkEdits = {}

    cy.fixture('documents/documents.json').then((documentsJson) => {
      // bulk edit
      cy.intercept(
        'POST',
        'http://localhost:8000/api/documents/bulk_edit/',
        (req) => {
          this.bulkEdits = req.body // store this for later
          req.reply({ result: 'OK' })
        }
      )

      cy.intercept('GET', 'http://localhost:8000/api/documents/*', (req) => {
        let response = { ...documentsJson }

        // bulkEdits was set earlier by bulk_edit intercept
        if (this.bulkEdits.hasOwnProperty('documents')) {
          response.results = response.results.map((d) => {
            if ((this.bulkEdits['documents'] as Array<number>).includes(d.id)) {
              switch (this.bulkEdits['method']) {
                case 'modify_tags':
                  d.tags = (d.tags as Array<number>).concat([
                    this.bulkEdits['parameters']['add_tags'],
                  ])
                  break
                case 'set_correspondent':
                  d.correspondent =
                    this.bulkEdits['parameters']['correspondent']
                  break
                case 'set_document_type':
                  d.document_type =
                    this.bulkEdits['parameters']['document_type']
                  break
              }
            }

            return d
          })
        } else if (req.query.hasOwnProperty('tags__id__all')) {
          // filtering e.g. http://localhost:8000/api/documents/?page=1&page_size=50&ordering=-created&tags__id__all=2
          const tag_id = +req.query['tags__id__all']
          response.results = (documentsJson.results as Array<any>).filter((d) =>
            (d.tags as Array<number>).includes(tag_id)
          )
          response.count = response.results.length
        }

        req.reply(response)
      })
    })

    cy.viewport(1280, 1024)
    cy.visit('/documents')
  })

  it('should show a list of documents rendered as cards with thumbnails', () => {
    cy.contains('3 documents')
    cy.contains('lorem ipsum')
    cy.get('app-document-card-small:first-of-type img')
      .invoke('attr', 'src')
      .should('eq', 'http://localhost:8000/api/documents/1/thumb/')
  })

  it('should change to table "details" view', () => {
    cy.get('div.btn-group input[value="details"]').next().click()
    cy.get('table')
  })

  it('should change to large cards view', () => {
    cy.get('div.btn-group input[value="largeCards"]').next().click()
    cy.get('app-document-card-large')
  })

  it('should filter tags', () => {
    cy.get('app-filter-editor app-filterable-dropdown[title="Tags"]').within(
      () => {
        cy.contains('button', 'Tags').click()
        cy.contains('button', 'Tag 2').click()
      }
    )
    cy.contains('One document')
  })

  it('should apply tags', () => {
    cy.get('app-document-card-small:first-of-type').click()
    cy.get('app-bulk-editor app-filterable-dropdown[title="Tags"]').within(
      () => {
        cy.contains('button', 'Tags').click()
        cy.contains('button', 'Test Tag').click()
        cy.contains('button', 'Apply').click()
      }
    )
    cy.contains('button', 'Confirm').click()
    cy.get('app-document-card-small:first-of-type').contains('Test Tag')
  })

  it('should apply correspondent', () => {
    cy.get('app-document-card-small:first-of-type').click()
    cy.get(
      'app-bulk-editor app-filterable-dropdown[title="Correspondent"]'
    ).within(() => {
      cy.contains('button', 'Correspondent').click()
      cy.contains('button', 'ABC Test Correspondent').click()
      cy.contains('button', 'Apply').click()
    })
    cy.contains('button', 'Confirm').click()
    cy.get('app-document-card-small:first-of-type').contains(
      'ABC Test Correspondent'
    )
  })

  it('should apply document type', () => {
    cy.get('app-document-card-small:first-of-type').click()
    cy.get(
      'app-bulk-editor app-filterable-dropdown[title="Document type"]'
    ).within(() => {
      cy.contains('button', 'Document type').click()
      cy.contains('button', 'Test Doc Type').click()
      cy.contains('button', 'Apply').click()
    })
    cy.contains('button', 'Confirm').click()
    cy.get('app-document-card-small:first-of-type').contains('Test Doc Type')
  })
})
```
@@ -1,331 +0,0 @@

```ts
import { PaperlessDocument } from 'src/app/data/paperless-document'

describe('documents query params', () => {
  beforeEach(() => {
    // also uses global fixtures from cypress/support/e2e.ts

    cy.fixture('documents/documents.json').then((documentsJson) => {
      // mock api filtering
      cy.intercept('GET', 'http://localhost:8000/api/documents/*', (req) => {
        let response = { ...documentsJson }

        if (req.query.hasOwnProperty('ordering')) {
          const sort_field = req.query['ordering'].toString().replace('-', '')
          const reverse = req.query['ordering'].toString().indexOf('-') !== -1
          response.results = (
            documentsJson.results as Array<PaperlessDocument>
          ).sort((docA, docB) => {
            let result = 0
            switch (sort_field) {
              case 'created':
              case 'added':
                result =
                  new Date(docA[sort_field]) < new Date(docB[sort_field])
                    ? -1
                    : 1
                break
              case 'archive_serial_number':
                result = docA[sort_field] < docB[sort_field] ? -1 : 1
                break
            }
            if (reverse) result = -result
            return result
          })
        }

        if (req.query.hasOwnProperty('tags__id__in')) {
          const tag_ids: Array<number> = req.query['tags__id__in']
            .toString()
            .split(',')
            .map((v) => +v)
          response.results = (
            documentsJson.results as Array<PaperlessDocument>
          ).filter(
            (d) =>
              d.tags.length > 0 &&
              d.tags.filter((t) => tag_ids.includes(t)).length > 0
          )
          response.count = response.results.length
        } else if (req.query.hasOwnProperty('tags__id__none')) {
          const tag_ids: Array<number> = req.query['tags__id__none']
            .toString()
            .split(',')
            .map((v) => +v)
          response.results = (
            documentsJson.results as Array<PaperlessDocument>
          ).filter((d) => d.tags.filter((t) => tag_ids.includes(t)).length == 0)
          response.count = response.results.length
        } else if (
          req.query.hasOwnProperty('is_tagged') &&
          req.query['is_tagged'] == '0'
        ) {
          response.results = (
            documentsJson.results as Array<PaperlessDocument>
          ).filter((d) => d.tags.length == 0)
          response.count = response.results.length
        }

        if (req.query.hasOwnProperty('document_type__id')) {
          const doctype_id = +req.query['document_type__id']
          response.results = (
            documentsJson.results as Array<PaperlessDocument>
          ).filter((d) => d.document_type == doctype_id)
          response.count = response.results.length
        } else if (
          req.query.hasOwnProperty('document_type__isnull') &&
          req.query['document_type__isnull'] == '1'
        ) {
          response.results = (
            documentsJson.results as Array<PaperlessDocument>
          ).filter((d) => d.document_type == undefined)
          response.count = response.results.length
        }

        if (req.query.hasOwnProperty('correspondent__id')) {
          const correspondent_id = +req.query['correspondent__id']
          response.results = (
            documentsJson.results as Array<PaperlessDocument>
          ).filter((d) => d.correspondent == correspondent_id)
          response.count = response.results.length
        } else if (
          req.query.hasOwnProperty('correspondent__isnull') &&
          req.query['correspondent__isnull'] == '1'
        ) {
          response.results = (
            documentsJson.results as Array<PaperlessDocument>
          ).filter((d) => d.correspondent == undefined)
          response.count = response.results.length
        }

        if (req.query.hasOwnProperty('storage_path__id')) {
          const storage_path_id = +req.query['storage_path__id']
          response.results = (
            documentsJson.results as Array<PaperlessDocument>
          ).filter((d) => d.storage_path == storage_path_id)
          response.count = response.results.length
        } else if (
          req.query.hasOwnProperty('storage_path__isnull') &&
          req.query['storage_path__isnull'] == '1'
        ) {
          response.results = (
            documentsJson.results as Array<PaperlessDocument>
          ).filter((d) => d.storage_path == undefined)
          response.count = response.results.length
        }

        if (req.query.hasOwnProperty('created__date__gt')) {
          const date = new Date(req.query['created__date__gt'])
          response.results = (
            documentsJson.results as Array<PaperlessDocument>
          ).filter((d) => new Date(d.created) > date)
          response.count = response.results.length
        } else if (req.query.hasOwnProperty('created__date__lt')) {
          const date = new Date(req.query['created__date__lt'])
          response.results = (
            documentsJson.results as Array<PaperlessDocument>
          ).filter((d) => new Date(d.created) < date)
          response.count = response.results.length
        }

        if (req.query.hasOwnProperty('added__date__gt')) {
          const date = new Date(req.query['added__date__gt'])
          response.results = (
            documentsJson.results as Array<PaperlessDocument>
          ).filter((d) => new Date(d.added) > date)
          response.count = response.results.length
        } else if (req.query.hasOwnProperty('added__date__lt')) {
          const date = new Date(req.query['added__date__lt'])
          response.results = (
            documentsJson.results as Array<PaperlessDocument>
          ).filter((d) => new Date(d.added) < date)
          response.count = response.results.length
        }

        if (req.query.hasOwnProperty('title_content')) {
          const title_content_regexp = new RegExp(
            req.query['title_content'].toString(),
            'i'
          )
          response.results = (
            documentsJson.results as Array<PaperlessDocument>
          ).filter(
            (d) =>
              title_content_regexp.test(d.title) ||
              title_content_regexp.test(d.content)
          )
          response.count = response.results.length
        }

        if (req.query.hasOwnProperty('archive_serial_number')) {
          const asn = +req.query['archive_serial_number']
          response.results = (
            documentsJson.results as Array<PaperlessDocument>
          ).filter((d) => d.archive_serial_number == asn)
          response.count = response.results.length
        } else if (req.query.hasOwnProperty('archive_serial_number__isnull')) {
          const isnull = req.query['storage_path__isnull'] == '1'
          response.results = (
            documentsJson.results as Array<PaperlessDocument>
          ).filter((d) =>
            isnull
              ? d.archive_serial_number == undefined
              : d.archive_serial_number != undefined
          )
          response.count = response.results.length
        } else if (req.query.hasOwnProperty('archive_serial_number__gt')) {
          const asn = +req.query['archive_serial_number__gt']
          response.results = (
            documentsJson.results as Array<PaperlessDocument>
          ).filter(
            (d) => d.archive_serial_number > 0 && d.archive_serial_number > asn
          )
          response.count = response.results.length
        } else if (req.query.hasOwnProperty('archive_serial_number__lt')) {
          const asn = +req.query['archive_serial_number__lt']
          response.results = (
            documentsJson.results as Array<PaperlessDocument>
          ).filter(
            (d) => d.archive_serial_number > 0 && d.archive_serial_number < asn
          )
          response.count = response.results.length
        }

        req.reply(response)
      })
    })
  })

  it('should show a list of documents sorted by created', () => {
    cy.visit('/documents?sort=created')
    cy.get('app-document-card-small').first().contains('No latin title')
  })

  it('should show a list of documents reverse sorted by created', () => {
    cy.visit('/documents?sort=created&reverse=true')
    cy.get('app-document-card-small').first().contains('sit amet')
  })

  it('should show a list of documents sorted by added', () => {
    cy.visit('/documents?sort=added')
    cy.get('app-document-card-small').first().contains('No latin title')
  })

  it('should show a list of documents reverse sorted by added', () => {
    cy.visit('/documents?sort=added&reverse=true')
    cy.get('app-document-card-small').first().contains('sit amet')
  })

  it('should show a list of documents filtered by any tags', () => {
    cy.visit('/documents?sort=created&reverse=true&tags__id__in=2,4,5')
    cy.contains('3 documents')
  })

  it('should show a list of documents filtered by excluded tags', () => {
    cy.visit('/documents?sort=created&reverse=true&tags__id__none=2,4')
    cy.contains('One document')
  })

  it('should show a list of documents filtered by no tags', () => {
    cy.visit('/documents?sort=created&reverse=true&is_tagged=0')
    cy.contains('One document')
  })

  it('should show a list of documents filtered by document type', () => {
    cy.visit('/documents?sort=created&reverse=true&document_type__id=1')
    cy.contains('3 documents')
  })

  it('should show a list of documents filtered by no document type', () => {
    cy.visit('/documents?sort=created&reverse=true&document_type__isnull=1')
    cy.contains('One document')
  })

  it('should show a list of documents filtered by correspondent', () => {
    cy.visit('/documents?sort=created&reverse=true&correspondent__id=9')
    cy.contains('2 documents')
  })

  it('should show a list of documents filtered by no correspondent', () => {
    cy.visit('/documents?sort=created&reverse=true&correspondent__isnull=1')
    cy.contains('2 documents')
  })

  it('should show a list of documents filtered by storage path', () => {
    cy.visit('/documents?sort=created&reverse=true&storage_path__id=2')
    cy.contains('One document')
  })

  it('should show a list of documents filtered by no storage path', () => {
    cy.visit('/documents?sort=created&reverse=true&storage_path__isnull=1')
    cy.contains('3 documents')
  })

  it('should show a list of documents filtered by title or content', () => {
    cy.visit('/documents?sort=created&reverse=true&title_content=lorem')
    cy.contains('2 documents')
  })

  it('should show a list of documents filtered by asn', () => {
    cy.visit('/documents?sort=created&reverse=true&archive_serial_number=12345')
    cy.contains('One document')
  })

  it('should show a list of documents filtered by empty asn', () => {
    cy.visit(
      '/documents?sort=created&reverse=true&archive_serial_number__isnull=1'
    )
    cy.contains('2 documents')
  })

  it('should show a list of documents filtered by non-empty asn', () => {
    cy.visit(
      '/documents?sort=created&reverse=true&archive_serial_number__isnull=0'
    )
    cy.contains('2 documents')
  })

  it('should show a list of documents filtered by asn greater than', () => {
    cy.visit(
      '/documents?sort=created&reverse=true&archive_serial_number__gt=12346'
    )
    cy.contains('One document')
  })

  it('should show a list of documents filtered by asn less than', () => {
    cy.visit(
      '/documents?sort=created&reverse=true&archive_serial_number__lt=12346'
    )
    cy.contains('One document')
  })

  it('should show a list of documents filtered by created date greater than', () => {
    cy.visit(
      '/documents?sort=created&reverse=true&created__date__gt=2022-03-23'
    )
    cy.contains('3 documents')
  })

  it('should show a list of documents filtered by created date less than', () => {
    cy.visit(
      '/documents?sort=created&reverse=true&created__date__lt=2022-03-23'
    )
    cy.contains('One document')
  })

  it('should show a list of documents filtered by added date greater than', () => {
    cy.visit('/documents?sort=created&reverse=true&added__date__gt=2022-03-24')
    cy.contains('2 documents')
  })

  it('should show a list of documents filtered by added date less than', () => {
    cy.visit('/documents?sort=created&reverse=true&added__date__lt=2022-03-24')
    cy.contains('2 documents')
  })

  it('should show a list of documents filtered by multiple filters', () => {
    cy.visit(
      '/documents?sort=created&reverse=true&document_type__id=1&correspondent__id=9&tags__id__in=4,5'
    )
    cy.contains('2 documents')
  })
})
```
@@ -1,25 +0,0 @@

```ts
describe('manage', () => {
  // also uses global fixtures from cypress/support/e2e.ts

  it('should show a list of correspondents with bottom pagination as well', () => {
    cy.visit('/correspondents')
    cy.get('tbody').find('tr').its('length').should('eq', 25)
    cy.get('ngb-pagination').its('length').should('eq', 2)
  })

  it('should show a list of tags without bottom pagination', () => {
    cy.visit('/tags')
    cy.get('tbody').find('tr').its('length').should('eq', 8)
    cy.get('ngb-pagination').its('length').should('eq', 1)
  })

  it('should show a list of documents filtered by tag', () => {
    cy.intercept('http://localhost:8000/api/documents/*', (req) => {
      if (req.url.indexOf('tags__id__all=4'))
        req.reply({ count: 3, next: null, previous: null, results: [] })
    })
    cy.visit('/tags')
    cy.get('tbody').find('button:visible').contains('Documents').first().click() // id = 4
    cy.contains('3 documents')
  })
})
```
@@ -1,102 +0,0 @@

```ts
describe('settings', () => {
  beforeEach(() => {
    // also uses global fixtures from cypress/support/e2e.ts

    this.modifiedViews = []

    // mock API methods
    cy.intercept('http://localhost:8000/api/ui_settings/', {
      fixture: 'ui_settings/settings.json',
    }).then(() => {
      cy.fixture('saved_views/savedviews.json').then((savedViewsJson) => {
        // saved views PATCH
        cy.intercept(
          'PATCH',
          'http://localhost:8000/api/saved_views/*',
          (req) => {
            this.modifiedViews.push(req.body) // store this for later
            req.reply({ result: 'OK' })
          }
        )

        cy.intercept(
          'GET',
          'http://localhost:8000/api/saved_views/*',
          (req) => {
            let response = { ...savedViewsJson }
            if (this.modifiedViews.length) {
              response.results = response.results.map((v) => {
                if (this.modifiedViews.find((mv) => mv.id == v.id))
                  v = this.modifiedViews.find((mv) => mv.id == v.id)
                return v
              })
            }

            req.reply(response)
          }
        ).as('savedViews')

        cy.intercept('http://localhost:8000/api/mail_accounts/*', {
          fixture: 'mail_accounts/mail_accounts.json',
        })
        cy.intercept('http://localhost:8000/api/mail_rules/*', {
          fixture: 'mail_rules/mail_rules.json',
        }).as('mailRules')
        cy.intercept('http://localhost:8000/api/tasks/', {
          fixture: 'tasks/tasks.json',
        })
      })

      cy.fixture('documents/documents.json').then((documentsJson) => {
        cy.intercept('GET', 'http://localhost:8000/api/documents/1/', (req) => {
          let response = { ...documentsJson }
          response = response.results.find((d) => d.id == 1)
          req.reply(response)
        })
      })
    })

    cy.viewport(1024, 1600)
    cy.visit('/settings')
  })

  it('should activate / deactivate save button when settings change and are saved', () => {
    cy.contains('button', 'Save').should('be.disabled')
    cy.contains('Use system settings').click()
    cy.contains('button', 'Save').should('not.be.disabled')
    cy.contains('button', 'Save').click()
    cy.contains('button', 'Save').should('be.disabled')
  })

  it('should warn on unsaved changes', () => {
    cy.contains('Use system settings').click()
    cy.contains('a', 'Dashboard').click()
    cy.contains('You have unsaved changes')
    cy.contains('button', 'Cancel').click()
    cy.contains('button', 'Save').click().wait('@savedViews').wait(2000)
    cy.contains('a', 'Dashboard').click()
    cy.contains('You have unsaved changes').should('not.exist')
  })

  it('should apply appearance changes when set', () => {
    cy.contains('Use system settings').click()
    cy.get('body').should('not.have.class', 'color-scheme-system')
    cy.contains('Enable dark mode').click()
    cy.get('body').should('have.class', 'color-scheme-dark')
  })

  it('should remove saved view from sidebar when unset', () => {
    cy.contains('a', 'Saved views').click().wait(2000)
    cy.get('#show_in_sidebar_1').click()
    cy.contains('button', 'Save').click().wait('@savedViews').wait(2000)
    cy.contains('li', 'Inbox').should('not.exist')
  })

  it('should remove saved view from dashboard when unset', () => {
    cy.contains('a', 'Saved views').click()
    cy.get('#show_on_dashboard_1').click()
    cy.contains('button', 'Save').click().wait('@savedViews').wait(2000)
    cy.visit('/dashboard')
    cy.get('app-saved-view-widget').contains('Inbox').should('not.exist')
  })
})
```
@@ -1,93 +0,0 @@

```ts
describe('tasks', () => {
  beforeEach(() => {
    this.dismissedTasks = new Set<number>()

    cy.fixture('tasks/tasks.json').then((tasksViewsJson) => {
      // acknowledge tasks POST
      cy.intercept(
        'POST',
        'http://localhost:8000/api/acknowledge_tasks/',
        (req) => {
          req.body['tasks'].forEach((t) => this.dismissedTasks.add(t)) // store this for later
          req.reply({ result: 'OK' })
        }
      )

      cy.intercept('GET', 'http://localhost:8000/api/tasks/', (req) => {
        let response = [...tasksViewsJson]
        if (this.dismissedTasks.size) {
          response = response.filter((t) => {
            return !this.dismissedTasks.has(t.id)
          })
        }

        req.reply(response)
      }).as('tasks')
    })

    cy.visit('/tasks')
    cy.wait('@tasks')
  })

  it('should show a list of dismissable tasks in tabs', () => {
    cy.get('tbody').find('tr:visible').its('length').should('eq', 10) // double because collapsible result tr
    cy.wait(500) // stabilizes the test, for some reason...
    cy.get('tbody')
      .find('button:visible')
      .contains('Dismiss')
      .first()
      .click()
      .wait('@tasks')
      .wait(2000)
      .then(() => {
        cy.get('tbody').find('tr:visible').its('length').should('eq', 8) // double because collapsible result tr
      })
  })

  it('should correctly switch between task tabs', () => {
    cy.get('tbody').find('tr:visible').its('length').should('eq', 10) // double because collapsible result tr
    cy.wait(500) // stabilizes the test, for some reason...
    cy.get('app-tasks')
      .find('a:visible')
      .contains('Queued')
      .first()
      .click()
      .wait(2000)
      .then(() => {
        cy.get('tbody').find('tr:visible').should('not.exist')
      })
    cy.get('app-tasks')
      .find('a:visible')
      .contains('Started')
      .first()
      .click()
      .wait(2000)
      .then(() => {
        cy.get('tbody').find('tr:visible').its('length').should('eq', 2) // double because collapsible result tr
      })
    cy.get('app-tasks')
      .find('a:visible')
      .contains('Complete')
      .first()
      .click()
      .wait('@tasks')
      .wait(2000)
      .then(() => {
        cy.get('tbody').find('tr:visible').its('length').should('eq', 12) // double because collapsible result tr
      })
  })

  it('should allow toggling all tasks in list and warn on dismiss', () => {
    cy.get('thead').find('input[type="checkbox"]').first().click()
    cy.get('body').find('button').contains('Dismiss selected').first().click()
    cy.contains('Confirm')
    cy.get('.modal')
      .contains('button', 'Dismiss')
      .click()
      .wait('@tasks')
      .wait(2000)
      .then(() => {
        cy.get('tbody').find('tr:visible').should('not.exist')
      })
  })
})
```
@@ -1 +0,0 @@

```json
{"count":27,"next":"http://localhost:8000/api/correspondents/?page=2","previous":null,"results":[{"id":9,"slug":"abc-test-correspondent","name":"ABC Test Correspondent","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":13,"slug":"corresp-10","name":"Corresp 10","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":14,"slug":"corresp-11","name":"Corresp 11","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":15,"slug":"corresp-12","name":"Corresp 12","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":16,"slug":"corresp-13","name":"Corresp 13","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":18,"slug":"corresp-15","name":"Corresp 15","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":19,"slug":"corresp-16","name":"Corresp 16","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":20,"slug":"corresp-17","name":"Corresp 17","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":21,"slug":"corresp-18","name":"Corresp 18","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":22,"slug":"corresp-19","name":"Corresp 19","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":23,"slug":"corresp-20","name":"Corresp 20","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":24,"slug":"corresp-21","name":"Corresp 21","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":25,"slug":"corresp-22","name":"Corresp 22","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":26,"slug":"corresp-23","name":"Corresp 23","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":5,"slug":"corresp-3","name":"Corresp 3","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":6,"slug":"corresp-4","name":"Corresp 4","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":7,"slug":"corresp-5","name":"Corresp 5","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":8,"slug":"corresp-6","name":"Corresp 6","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":10,"slug":"corresp-7","name":"Corresp 7","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":11,"slug":"corresp-8","name":"Corresp 8","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":12,"slug":"corresp-9","name":"Corresp 9","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":17,"slug":"correspondent-14","name":"Correspondent 14","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0,"last_correspondence":null},{"id":2,"slug":"correspondent-2","name":"Correspondent 2","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":7,"last_correspondence":"2021-01-20T23:37:58.204614Z"},{"id":27,"slug":"michael-shamoon","name":"Michael Shamoon","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":1,"last_correspondence":"2022-03-16T03:48:50.089624Z"},{"id":4,"slug":"newest-correspondent","name":"Newest Correspondent","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":1,"last_correspondence":"2021-02-07T08:00:00Z"}]}
```

@@ -1 +0,0 @@

```json
{"count":1,"next":null,"previous":null,"results":[{"id":1,"slug":"test","name":"Test Doc Type","match":"","matching_algorithm":1,"is_insensitive":true,"document_count":0}]}
```