Compare commits: beta-1.7.0...v1.11.0 (1785 commits)
[Commit table (author / SHA1 / date, 1785 rows) not recoverable from the page capture.]
.build-config.json (new file, 9 lines)

@@ -0,0 +1,9 @@

```json
{
  "qpdf": {
    "version": "11.2.0"
  },
  "jbig2enc": {
    "version": "0.29",
    "git_tag": "0.29"
  }
}
```
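The pinned versions above are plain JSON, so any build step can look them up directly. A minimal sketch in Python (the helper name, and the assumption that the image build consumes the file this way, are illustrative and not taken from this diff):

```python
# Hypothetical reader for .build-config.json: return the pinned value
# for a named build dependency ("version" by default, or "git_tag").
import json
from pathlib import Path


def pinned(tool: str, key: str = "version") -> str:
    config = json.loads(Path(".build-config.json").read_text())
    return config[tool][key]


if __name__ == "__main__":
    print(pinned("qpdf"))                 # 11.2.0
    print(pinned("jbig2enc", "git_tag"))  # 0.29
```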
.editorconfig

@@ -27,11 +27,14 @@ indent_style = space

```
[*.md]
indent_style = space

[Pipfile.lock]
indent_style = space

# Tests don't get a line width restriction. It's still a good idea to follow
# the 79 character rule, but in the interests of clarity, tests often need to
# violate it.
[**/test_*.py]
max_line_length = off

[Dockerfile]
[Dockerfile*]
indent_style = space
```
.github/ISSUE_TEMPLATE/bug-report.yml (vendored, new file, 97 lines)

@@ -0,0 +1,97 @@

```yaml
name: Bug report
description: Something is not working
title: "[BUG] Concise description of the issue"
labels: ["bug", "unconfirmed"]
body:
  - type: markdown
    attributes:
      value: |
        Have a question? 👉 [Start a new discussion](https://github.com/paperless-ngx/paperless-ngx/discussions/new) or [ask in chat](https://matrix.to/#/#paperlessngx:matrix.org).

        Before opening an issue, please double check:

        - [The troubleshooting documentation](https://docs.paperless-ngx.com/troubleshooting/).
        - [The installation instructions](https://docs.paperless-ngx.com/setup/#installation).
        - [Existing issues and discussions](https://github.com/paperless-ngx/paperless-ngx/search?q=&type=issues).
        - Disable any custom container initialization scripts, if using any

        If you encounter issues while installing or configuring Paperless-ngx, please post in the ["Support" section of the discussions](https://github.com/paperless-ngx/paperless-ngx/discussions/new?category=support).
  - type: textarea
    id: description
    attributes:
      label: Description
      description: A clear and concise description of what the bug is. If applicable, add screenshots to help explain your problem.
      placeholder: |
        Currently Paperless does not work when...

        [Screenshot if applicable]
    validations:
      required: true
  - type: textarea
    id: reproduction
    attributes:
      label: Steps to reproduce
      description: Steps to reproduce the behavior.
      placeholder: |
        1. Go to '...'
        2. Click on '....'
        3. See error
    validations:
      required: true
  - type: textarea
    id: logs
    attributes:
      label: Webserver logs
      description: Logs from the web server related to your issue.
      render: bash
    validations:
      required: true
  - type: textarea
    id: logs_browser
    attributes:
      label: Browser logs
      description: Logs from the web browser related to your issue, if needed
      render: bash
  - type: input
    id: version
    attributes:
      label: Paperless-ngx version
      placeholder: e.g. 1.6.0
    validations:
      required: true
  - type: input
    id: host-os
    attributes:
      label: Host OS
      description: Host OS of the machine running paperless-ngx. Please add the architecture (uname -m) if applicable.
      placeholder: e.g. Archlinux / Ubuntu 20.04 / Raspberry Pi `arm64`
    validations:
      required: true
  - type: dropdown
    id: install-method
    attributes:
      label: Installation method
      options:
        - Docker - official image
        - Docker - linuxserver.io image
        - Bare metal
        - Other (please describe above)
      description: Note there are significant differences between the official image and linuxserver.io, please check if your issue is specific to the third-party image.
    validations:
      required: true
  - type: input
    id: browser
    attributes:
      label: Browser
      description: Which browser you are using, if relevant.
      placeholder: e.g. Chrome, Safari
  - type: input
    id: config-changes
    attributes:
      label: Configuration changes
      description: Any configuration changes you made in `docker-compose.yml`, `docker-compose.env` or `paperless.conf`.
  - type: input
    id: other
    attributes:
      label: Other
      description: Any other relevant details.
```
.github/ISSUE_TEMPLATE/bug_report.md (vendored, deleted, 50 lines)

@@ -1,50 +0,0 @@

````markdown
---
name: Bug report
about: Something is not working
title: '[BUG] Concise description of the issue'
labels: ''
assignees: ''
---

<!---
=> Before opening an issue, please check the documentation and see if it helps you resolve your issue: https://paperless-ngx.readthedocs.io/en/latest/troubleshooting.html
=> Please also make sure that you followed the installation instructions.
=> Please search the issues and look for similar issues before opening a bug report.

=> If you would like to submit a feature request please submit one under https://github.com/paperless-ngx/paperless-ngx/discussions/categories/feature-requests

=> If you encounter issues while installing of configuring Paperless-ngx, please post that in the "Support" section of the discussions. Remember that Paperless successfully runs on a variety of different systems. If paperless does not start, it's probably an issue with your system, and not an issue of paperless.

=> Don't remove the [BUG] prefix from the title.
-->

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:

1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Webserver logs**

```
If available, post any logs from the web server related to your issue.
```

**Relevant information**

- Host OS of the machine running paperless: [e.g. Archlinux / Ubuntu 20.04]
- Browser [e.g. chrome, safari]
- Version [e.g. 1.0.0]
- Installation method: [docker / bare metal]
- Any configuration changes you made in `docker-compose.yml`, `docker-compose.env` or `paperless.conf`.
````
.github/ISSUE_TEMPLATE/config.yml (vendored, new file, 11 lines)

@@ -0,0 +1,11 @@

```yaml
blank_issues_enabled: false
contact_links:
  - name: 🤔 Questions and Help
    url: https://github.com/paperless-ngx/paperless-ngx/discussions
    about: This issue tracker is not for support questions. Please refer to our Discussions.
  - name: 💬 Chat
    url: https://matrix.to/#/#paperlessngx:matrix.org
    about: Want to discuss Paperless-ngx with others? Check out our chat.
  - name: 🚀 Feature Request
    url: https://github.com/paperless-ngx/paperless-ngx/discussions/new?category=feature-requests
    about: Remember to search for existing feature requests and "up-vote" any you like
```
.github/ISSUE_TEMPLATE/other.md (vendored, deleted, 19 lines)

@@ -1,19 +0,0 @@

```markdown
---
name: Other
about: Anything that is not a feature request or bug.
title: '[Other] Title of your issue'
labels: ''
assignees: ''
---

<!--

=> Discussions, Feedback and other suggestions belong in the "Discussion" section and not on the issue tracker.

=> If you would like to submit a feature request please submit one under https://github.com/paperless-ngx/paperless-ngx/discussions/categories/feature-requests

=> If you encounter issues while installing of configuring Paperless-ngx, please post that in the "Support" section of the discussions. Remember that Paperless successfully runs on a variety of different systems. If paperless does not start, it's probably is an issue with your system, and not an issue of paperless.

=> Don't remove the [Other] prefix from the title.

-->
```
.github/PULL_REQUEST_TEMPLATE.md (vendored, 4 changed lines)

@@ -26,7 +26,7 @@ NOTE: Please check only one box!

```
- [ ] I have read & agree with the [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/main/CONTRIBUTING.md).
- [ ] If applicable, I have tested my code for new features & regressions on both mobile & desktop devices, using the latest version of major browsers.
- [ ] If applicable, I have checked that all tests pass, see [documentation](https://paperless-ngx.readthedocs.io/en/latest/extending.html#back-end-development).
- [ ] I have run all `pre-commit` hooks, see [documentation](https://paperless-ngx.readthedocs.io/en/latest/extending.html#code-formatting-with-pre-commit-hooks).
- [ ] If applicable, I have checked that all tests pass, see [documentation](https://docs.paperless-ngx.com/development/#back-end-development).
- [ ] I have run all `pre-commit` hooks, see [documentation](https://docs.paperless-ngx.com/development/#code-formatting-with-pre-commit-hooks).
- [ ] I have made corresponding changes to the documentation as needed.
- [ ] I have checked my modifications for any breaking changes.
```
.github/dependabot.yml (vendored, 11 changed lines)

@@ -6,11 +6,14 @@ updates:

```
  # Enable version updates for npm
  - package-ecosystem: "npm"
    target-branch: "dev"
    # Look for `package.json` and `lock` files in the `root` directory
    # Look for `package.json` and `lock` files in the `/src-ui` directory
    directory: "/src-ui"
    # Check the npm registry for updates every month
    schedule:
      interval: "monthly"
    labels:
      - "frontend"
      - "dependencies"
    # Add reviewers
    reviewers:
      - "paperless-ngx/frontend"
```

@@ -26,9 +29,13 @@ updates:

```
    labels:
      - "backend"
      - "dependencies"
    # Add reviewers
    reviewers:
      - "paperless-ngx/backend"

  # Enable updates for Github Actions
  - package-ecosystem: "github-actions"
    target-branch: "dev"
    directory: "/"
    schedule:
      # Check for updates to GitHub Actions every month
```

@@ -38,4 +45,4 @@ updates:

```
      - "dependencies"
    # Add reviewers
    reviewers:
      - "paperless-ngx/backend"
      - "paperless-ngx/ci-cd"
```
.github/release-drafter.yml (vendored, 45 changed lines)

@@ -1,4 +1,22 @@

```
autolabeler:
  - label: "bug"
    branch:
      - '/^fix/'
    title:
      - "/^fix/i"
      - "/^Bugfix/i"
  - label: "enhancement"
    branch:
      - '/^feature/'
    title:
      - "/^feature/i"
categories:
  - title: 'Breaking Changes'
    labels:
      - 'breaking-change'
  - title: 'Notable Changes'
    labels:
      - 'notable'
  - title: 'Features'
    labels:
      - 'enhancement'
```

@@ -6,15 +24,23 @@ categories:

```
    labels:
      - 'bug'
  - title: 'Documentation'
    label: 'documentation'
    labels:
      - 'documentation'
  - title: 'Maintenance'
    labels:
      - 'chore'
      - 'deployment'
      - 'translation'
      - 'ci-cd'
  - title: 'Dependencies'
    collapse-after: 3
    label: 'dependencies'
    labels:
      - 'dependencies'
  - title: 'All App Changes'
    labels:
      - 'frontend'
      - 'backend'
    collapse-after: 0
include-labels:
  - 'enhancement'
  - 'bug'
```

@@ -22,13 +48,16 @@ include-labels:

```
  - 'deployment'
  - 'translation'
  - 'dependencies'
replacers: # Changes "Feature: Update checker" to "Update checker"
  - search: '/Feature:|Feat:|\[feature\]/gi'
    replace: ''
change-template: '- $TITLE @$AUTHOR (#$NUMBER)'
  - 'documentation'
  - 'frontend'
  - 'backend'
  - 'ci-cd'
  - 'breaking-change'
  - 'notable'
category-template: '### $TITLE'
change-template: '- $TITLE @$AUTHOR ([#$NUMBER]($URL))'
change-title-escapes: '\<*_&#@'
tag-prefix: "ngx-"
template: |
  ## Changelog
  ## paperless-ngx $RESOLVED_VERSION

  $CHANGES
```
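The `replacers` rule above is what turns a PR title like "Feature: Update checker" into "Update checker" in the generated notes. A quick Python stand-in for that regex (release-drafter itself applies it in JavaScript; the trailing `strip()` is an extra cleanup assumed here):

```python
import re

# Mirrors the release-drafter pattern /Feature:|Feat:|\[feature\]/gi
# (case-insensitive, replace every match with the empty string).
prefix = re.compile(r"Feature:|Feat:|\[feature\]", re.IGNORECASE)

print(prefix.sub("", "Feature: Update checker").strip())  # Update checker
print(prefix.sub("", "[Feature] Dark mode").strip())      # Dark mode
```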
402  .github/scripts/cleanup-tags.py  vendored  Normal file
@@ -0,0 +1,402 @@
#!/usr/bin/env python3
import json
import logging
import os
import shutil
import subprocess
from argparse import ArgumentParser
from typing import Dict
from typing import Final
from typing import List
from typing import Optional

from common import get_log_level
from github import ContainerPackage
from github import GithubBranchApi
from github import GithubContainerRegistryApi

logger = logging.getLogger("cleanup-tags")


class DockerManifest2:
    """
    Data class wrapping the Docker Image Manifest Version 2.

    See https://docs.docker.com/registry/spec/manifest-v2-2/
    """

    def __init__(self, data: Dict) -> None:
        self._data = data
        # This is the sha256: digest string. Corresponds to GitHub API name
        # if the package is an untagged package
        self.digest = self._data["digest"]
        platform_data_os = self._data["platform"]["os"]
        platform_arch = self._data["platform"]["architecture"]
        platform_variant = self._data["platform"].get(
            "variant",
            "",
        )
        self.platform = f"{platform_data_os}/{platform_arch}{platform_variant}"


class RegistryTagsCleaner:
    """
    This is the base class for the image registry cleaning. Given a package
    name, it will keep all images which are tagged and all untagged images
    referred to by a manifest. This results in removing only those images
    which have been untagged and cannot be referenced except by their SHA.
    None of these images should be referenced, so it is fine to delete them.
    """

    def __init__(
        self,
        package_name: str,
        repo_owner: str,
        repo_name: str,
        package_api: GithubContainerRegistryApi,
        branch_api: Optional[GithubBranchApi],
    ):
        self.actually_delete = False
        self.package_api = package_api
        self.branch_api = branch_api
        self.package_name = package_name
        self.repo_owner = repo_owner
        self.repo_name = repo_name
        self.tags_to_delete: List[str] = []
        self.tags_to_keep: List[str] = []

        # Get the information about all versions of the given package
        # These are active, not deleted, the default returned from the API
        self.all_package_versions = self.package_api.get_active_package_versions(
            self.package_name,
        )

        # Get a mapping from a tag like "1.7.0" or "feature-xyz" to the ContainerPackage
        # tagged with it. It makes certain lookups easy
        self.all_pkgs_tags_to_version: Dict[str, ContainerPackage] = {}
        for pkg in self.all_package_versions:
            for tag in pkg.tags:
                self.all_pkgs_tags_to_version[tag] = pkg
        logger.info(
            f"Located {len(self.all_package_versions)} versions of package {self.package_name}",
        )

        self.decide_what_tags_to_keep()

    def clean(self):
        """
        This method will delete image versions, based on the selected tags to delete
        """
        if not self.tags_to_delete:
            logger.info("No tags to delete")
            return
        for tag_to_delete in self.tags_to_delete:
            package_version_info = self.all_pkgs_tags_to_version[tag_to_delete]

            if self.actually_delete:
                logger.info(
                    f"Deleting {tag_to_delete} (id {package_version_info.id})",
                )
                self.package_api.delete_package_version(
                    package_version_info,
                )

            else:
                logger.info(
                    f"Would delete {tag_to_delete} (id {package_version_info.id})",
                )

    def clean_untagged(self, is_manifest_image: bool):
        """
        This method will delete untagged images, that is those which are not named.
        It handles the case where the package is actually a manifest, which points
        to images that otherwise look untagged.
        """

        def _clean_untagged_manifest():
            """
            Handles the deletion of untagged images, but where the package is a
            manifest, i.e. a multi-arch image, which means some "untagged" images
            still need to exist.

            Ok, bear with me, these are annoying.

            Our images are multi-arch, so the manifest is more like a pointer to a sha256 digest.
            These images are untagged, but pointed to, and so should not be removed (or every pull fails).

            So for each image getting kept, parse the manifest to find the digest(s) it points to. Then
            remove those from the list of untagged images. The final result is the untagged, not pointed to
            versions, which should be safe to remove.

            Example:
                Tag: ghcr.io/paperless-ngx/paperless-ngx:1.7.1 refers to
                    amd64: sha256:b9ed4f8753bbf5146547671052d7e91f68cdfc9ef049d06690b2bc866fec2690
                    armv7: sha256:81605222df4ba4605a2ba4893276e5d08c511231ead1d5da061410e1bbec05c3
                    arm64: sha256:374cd68db40734b844705bfc38faae84cc4182371de4bebd533a9a365d5e8f3b
                each of which appears as an untagged image, but isn't really.

            So from the list of untagged packages, remove those digests. Once all tags which
            are being kept are checked, the remaining untagged packages are truly untagged,
            with no references to them from any manifest.
            """
            # Simplify the untagged data, mapping name (which is a digest) to the version
            # At the moment, these are the images which APPEAR untagged.
            untagged_versions = {}
            for x in self.all_package_versions:
                if x.untagged:
                    untagged_versions[x.name] = x

            skips = 0

            # Parse manifests to locate digests pointed to
            for tag in sorted(self.tags_to_keep):
                full_name = f"ghcr.io/{self.repo_owner}/{self.package_name}:{tag}"
                logger.info(f"Checking manifest for {full_name}")
                try:
                    proc = subprocess.run(
                        [
                            shutil.which("docker"),
                            "manifest",
                            "inspect",
                            full_name,
                        ],
                        capture_output=True,
                    )

                    manifest_list = json.loads(proc.stdout)
                    for manifest_data in manifest_list["manifests"]:
                        manifest = DockerManifest2(manifest_data)

                        if manifest.digest in untagged_versions:
                            logger.info(
                                f"Skipping deletion of {manifest.digest},"
                                f" referred to by {full_name}"
                                f" for {manifest.platform}",
                            )
                            del untagged_versions[manifest.digest]
                            skips += 1

                except Exception as err:
                    self.actually_delete = False
                    logger.exception(err)
                    return

            logger.info(
                f"Skipping deletion of {skips} packages referred to by a manifest",
            )

            # Delete the untagged and not pointed at packages
            logger.info(f"Deleting untagged packages of {self.package_name}")
            for to_delete_name in untagged_versions:
                to_delete_version = untagged_versions[to_delete_name]

                if self.actually_delete:
                    logger.info(
                        f"Deleting id {to_delete_version.id} named {to_delete_version.name}",
                    )
                    self.package_api.delete_package_version(
                        to_delete_version,
                    )
                else:
                    logger.info(
                        f"Would delete {to_delete_name} (id {to_delete_version.id})",
                    )

        def _clean_untagged_non_manifest():
            """
            If the package is not a multi-arch manifest, images without tags are safe to delete.
            """

            for package in self.all_package_versions:
                if package.untagged:
                    if self.actually_delete:
                        logger.info(
                            f"Deleting id {package.id} named {package.name}",
                        )
                        self.package_api.delete_package_version(
                            package,
                        )
                    else:
                        logger.info(
                            f"Would delete {package.name} (id {package.id})",
                        )
                else:
                    logger.info(
                        f"Not deleting tag {package.tags[0]} of package {self.package_name}",
                    )

        logger.info("Beginning untagged image cleaning")

        if is_manifest_image:
            _clean_untagged_manifest()
        else:
            _clean_untagged_non_manifest()

    def decide_what_tags_to_keep(self):
        """
        This method holds the logic to decide what tags to keep and therefore
        what tags to delete.

        By default, any image with at least 1 tag will be kept
        """
        # By default, keep anything which is tagged
        self.tags_to_keep = list(set(self.all_pkgs_tags_to_version.keys()))


class MainImageTagsCleaner(RegistryTagsCleaner):
    def decide_what_tags_to_keep(self):
        """
        Overrides the default logic for deciding what images to keep. Images tagged as "feature-"
        will be removed, if the corresponding branch no longer exists.
        """

        # Default to everything gets kept still
        super().decide_what_tags_to_keep()

        # Locate the feature branches
        feature_branches = {}
        for branch in self.branch_api.get_branches(
            repo=self.repo_name,
        ):
            if branch.name.startswith("feature-"):
                logger.debug(f"Found feature branch {branch.name}")
                feature_branches[branch.name] = branch

        logger.info(f"Located {len(feature_branches)} feature branches")

        if not len(feature_branches):
            # Our work here is done, delete nothing
            return

        # Filter to packages which are tagged with feature-*
        packages_tagged_feature: List[ContainerPackage] = []
        for package in self.all_package_versions:
            if package.tag_matches("feature-"):
                packages_tagged_feature.append(package)

        # Map tags like "feature-xyz" to a ContainerPackage
        feature_pkgs_tags_to_versions: Dict[str, ContainerPackage] = {}
        for pkg in packages_tagged_feature:
            for tag in pkg.tags:
                feature_pkgs_tags_to_versions[tag] = pkg

        logger.info(
            f'Located {len(feature_pkgs_tags_to_versions)} versions of package {self.package_name} tagged "feature-"',
        )

        # All the feature tags minus all the feature branches leaves us feature tags
        # with no corresponding branch
        self.tags_to_delete = list(
            set(feature_pkgs_tags_to_versions.keys()) - set(feature_branches.keys()),
        )

        # All the tags minus the set of going to be deleted tags leaves us the
        # tags which will be kept around
        self.tags_to_keep = list(
            set(self.all_pkgs_tags_to_version.keys()) - set(self.tags_to_delete),
        )
        logger.info(
            f"Located {len(self.tags_to_delete)} versions of package {self.package_name} to delete",
        )


class LibraryTagsCleaner(RegistryTagsCleaner):
    """
    Exists for the off chance that someday, the installer library images
    will need their own logic
    """

    pass


def _main():
    parser = ArgumentParser(
        description="Using the GitHub API locate and optionally delete container"
        " tags which no longer have an associated feature branch",
    )

    # Requires an affirmative command to actually do a delete
    parser.add_argument(
        "--delete",
        action="store_true",
        default=False,
        help="If provided, actually delete the container tags",
    )

    # When a tagged image is updated, the previous version remains, but it is no longer tagged
    # Add this option to remove them as well
    parser.add_argument(
        "--untagged",
        action="store_true",
        default=False,
        help="If provided, delete untagged containers as well",
    )

    # If given, the package is assumed to be a multi-arch manifest. Cache packages are
    # not multi-arch, all other types are
    parser.add_argument(
        "--is-manifest",
        action="store_true",
        default=False,
        help="If provided, the package is assumed to be a multi-arch manifest following schema v2",
    )

    # Allows configuration of log level for debugging
    parser.add_argument(
        "--loglevel",
        default="info",
        help="Configures the logging level",
    )

    # Get the name of the package being processed this round
    parser.add_argument(
        "package",
        help="The package to process",
    )

    args = parser.parse_args()

    logging.basicConfig(
        level=get_log_level(args),
        datefmt="%Y-%m-%d %H:%M:%S",
        format="%(asctime)s %(levelname)-8s %(message)s",
    )

    # Must be provided in the environment
    repo_owner: Final[str] = os.environ["GITHUB_REPOSITORY_OWNER"]
    repo: Final[str] = os.environ["GITHUB_REPOSITORY"]
    gh_token: Final[str] = os.environ["TOKEN"]

    # Find all branches named feature-*
    # Note: Only relevant to the main application, but simpler to
    # leave in for all packages
    with GithubBranchApi(gh_token) as branch_api:
        with GithubContainerRegistryApi(gh_token, repo_owner) as container_api:
            if args.package in {"paperless-ngx", "paperless-ngx/builder/cache/app"}:
                cleaner = MainImageTagsCleaner(
                    args.package,
                    repo_owner,
                    repo,
                    container_api,
                    branch_api,
                )
            else:
                cleaner = LibraryTagsCleaner(
                    args.package,
                    repo_owner,
                    repo,
                    container_api,
                    None,
                )

            # Set if actually doing a delete vs dry run
            cleaner.actually_delete = args.delete

            # Clean images with tags
            cleaner.clean()

            # Clean images which are untagged
            cleaner.clean_untagged(args.is_manifest)


if __name__ == "__main__":
    _main()
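For reference, the manifest JSON that DockerManifest2 consumes comes from docker manifest inspect; a small sketch with abbreviated, made-up digests showing the fields the class reads:

import json

# Abbreviated, illustrative output of:
#   docker manifest inspect ghcr.io/paperless-ngx/paperless-ngx:1.7.1
raw = """
{"manifests": [
  {"digest": "sha256:b9ed4f87...", "platform": {"os": "linux", "architecture": "amd64"}},
  {"digest": "sha256:81605222...", "platform": {"os": "linux", "architecture": "arm", "variant": "v7"}}
]}
"""
for entry in json.loads(raw)["manifests"]:
    platform = entry["platform"]
    # Mirrors DockerManifest2: the digest plus os/arch(+variant)
    print(entry["digest"], f'{platform["os"]}/{platform["architecture"]}{platform.get("variant", "")}')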
48  .github/scripts/common.py  vendored  Normal file
@@ -0,0 +1,48 @@
#!/usr/bin/env python3
import logging


def get_image_tag(
    repo_name: str,
    pkg_name: str,
    pkg_version: str,
) -> str:
    """
    Returns a string representing the normal image for a given package
    """
    return f"ghcr.io/{repo_name.lower()}/builder/{pkg_name}:{pkg_version}"


def get_cache_image_tag(
    repo_name: str,
    pkg_name: str,
    pkg_version: str,
    branch_name: str,
) -> str:
    """
    Returns a string representing the expected image cache tag for a given package

    Registry type caching is utilized for the builder images, to allow fast
    rebuilds, generally almost instant for the same version
    """
    # Note: branch_name is accepted but currently unused; the cache tag keys on version only
    return f"ghcr.io/{repo_name.lower()}/builder/cache/{pkg_name}:{pkg_version}"


def get_log_level(args) -> int:
    """
    Returns a logging level, based on the loglevel set in the parsed
    command line arguments. Defaults to INFO for unknown values.
    """
    levels = {
        "critical": logging.CRITICAL,
        "error": logging.ERROR,
        "warn": logging.WARNING,
        "warning": logging.WARNING,
        "info": logging.INFO,
        "debug": logging.DEBUG,
    }
    level = levels.get(args.loglevel.lower())
    if level is None:
        level = logging.INFO
    return level
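A quick illustration of what the two tag helpers return (package and version values are hypothetical):

from common import get_cache_image_tag, get_image_tag

# Hypothetical inputs mirroring what the CI workflows pass in
print(get_image_tag("paperless-ngx/paperless-ngx", "qpdf", "11.1.1"))
# ghcr.io/paperless-ngx/paperless-ngx/builder/qpdf:11.1.1
print(get_cache_image_tag("paperless-ngx/paperless-ngx", "qpdf", "11.1.1", "dev"))
# ghcr.io/paperless-ngx/paperless-ngx/builder/cache/qpdf:11.1.1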
92  .github/scripts/get-build-json.py  vendored  Executable file
@@ -0,0 +1,92 @@
#!/usr/bin/env python3
"""
This is a helper script for the multi-stage Docker image builder.
It provides a single point of configuration for package version control.
The output JSON object is used by the CI workflow to determine what versions
to build and pull into the final Docker image.

Python package information is obtained from the Pipfile.lock. As this is
kept updated by dependabot, it usually will need no further configuration.
The sole exception currently is pikepdf, which has a dependency on qpdf,
and is configured here to use the latest version of qpdf built by the workflow.

Other package version information is configured directly below, generally by
setting the version and Git information, if any.
"""
import argparse
import json
import os
from pathlib import Path
from typing import Final

from common import get_cache_image_tag
from common import get_image_tag


def _main():
    parser = argparse.ArgumentParser(
        description="Generate a JSON object of information required to build the given package, based on the Pipfile.lock",
    )
    parser.add_argument(
        "package",
        help="The name of the package to generate JSON for",
    )

    PIPFILE_LOCK_PATH: Final[Path] = Path("Pipfile.lock")
    BUILD_CONFIG_PATH: Final[Path] = Path(".build-config.json")

    # Read the main config file
    build_json: Final = json.loads(BUILD_CONFIG_PATH.read_text())

    # Read Pipfile.lock file
    pipfile_data: Final = json.loads(PIPFILE_LOCK_PATH.read_text())

    args: Final = parser.parse_args()

    # Read from environment variables set by GitHub Actions
    repo_name: Final[str] = os.environ["GITHUB_REPOSITORY"]
    branch_name: Final[str] = os.environ["GITHUB_REF_NAME"]

    # Default output values
    version = None
    extra_config = {}

    if args.package in pipfile_data["default"]:
        # Read the version from Pipfile.lock
        pkg_data = pipfile_data["default"][args.package]
        pkg_version = pkg_data["version"].split("==")[-1]
        version = pkg_version

        # Any extra/special values needed
        if args.package == "pikepdf":
            extra_config["qpdf_version"] = build_json["qpdf"]["version"]

    elif args.package in build_json:
        version = build_json[args.package]["version"]

    else:
        raise NotImplementedError(args.package)

    # The JSON object we'll output
    output = {
        "name": args.package,
        "version": version,
        "image_tag": get_image_tag(repo_name, args.package, version),
        "cache_tag": get_cache_image_tag(
            repo_name,
            args.package,
            version,
            branch_name,
        ),
    }

    # Add anything special a package may need
    output.update(extra_config)

    # Output the JSON info to stdout
    print(json.dumps(output))


if __name__ == "__main__":
    _main()
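A hypothetical local dry run of the script (run from a repo checkout; the versions it prints depend on the current Pipfile.lock and .build-config.json):

import json
import os
import subprocess

# Environment values are placeholders for what GitHub Actions provides
env = {**os.environ, "GITHUB_REPOSITORY": "paperless-ngx/paperless-ngx", "GITHUB_REF_NAME": "dev"}
out = subprocess.run(
    ["python3", ".github/scripts/get-build-json.py", "pikepdf"],
    capture_output=True, text=True, env=env, check=True,
)
info = json.loads(out.stdout)
print(info["name"], info["version"], info.get("qpdf_version"))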
274  .github/scripts/github.py  vendored  Normal file
@@ -0,0 +1,274 @@
#!/usr/bin/env python3
"""
This module contains some useful classes for interacting with the Github API.
The full documentation for the API can be found here: https://docs.github.com/en/rest

Mostly, this focuses on two areas, repo branches and repo packages, as the use case
is cleaning up container images which are no longer referred to.
"""
import functools
import logging
import re
import urllib.parse
from typing import Dict
from typing import List
from typing import Optional

import httpx

logger = logging.getLogger("github-api")


class _GithubApiBase:
    """
    A base class for interacting with the Github API. It
    will handle the session and setting authorization headers.
    """

    def __init__(self, token: str) -> None:
        self._token = token
        self._client: Optional[httpx.Client] = None

    def __enter__(self) -> "_GithubApiBase":
        """
        Sets up the required headers for auth and response
        type from the API
        """
        self._client = httpx.Client()
        self._client.headers.update(
            {
                "Accept": "application/vnd.github.v3+json",
                "Authorization": f"token {self._token}",
            },
        )
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """
        Ensures the authorization token is cleaned up no matter
        the reason for the exit
        """
        if "Accept" in self._client.headers:
            del self._client.headers["Accept"]
        if "Authorization" in self._client.headers:
            del self._client.headers["Authorization"]

        # Close the session as well
        self._client.close()
        self._client = None

    def _read_all_pages(self, endpoint):
        """
        Helper function to read all pages of an endpoint, utilizing the
        next.url until exhausted. Assumes the endpoint returns a list
        """
        internal_data = []

        while True:
            resp = self._client.get(endpoint)
            if resp.status_code == 200:
                internal_data += resp.json()
                if "next" in resp.links:
                    endpoint = resp.links["next"]["url"]
                else:
                    logger.debug("Exiting pagination loop")
                    break
            else:
                logger.warning(f"Request to {endpoint} returned HTTP {resp.status_code}")
                resp.raise_for_status()

        return internal_data


class _EndpointResponse:
    """
    For all endpoint JSON responses, store the full
    response data, for ease of extending later, if need be.
    """

    def __init__(self, data: Dict) -> None:
        self._data = data


class GithubBranch(_EndpointResponse):
    """
    Simple wrapper for a repository branch, only extracts name information
    for now.
    """

    def __init__(self, data: Dict) -> None:
        super().__init__(data)
        self.name = self._data["name"]


class GithubBranchApi(_GithubApiBase):
    """
    Wrapper around branch API.

    See https://docs.github.com/en/rest/branches/branches
    """

    def __init__(self, token: str) -> None:
        super().__init__(token)

        self._ENDPOINT = "https://api.github.com/repos/{REPO}/branches"

    def get_branches(self, repo: str) -> List[GithubBranch]:
        """
        Returns all current branches of the given repository owned by the given
        owner or organization.
        """
        # The environment GITHUB_REPOSITORY already contains the owner in the correct location
        endpoint = self._ENDPOINT.format(REPO=repo)
        internal_data = self._read_all_pages(endpoint)
        return [GithubBranch(branch) for branch in internal_data]


class ContainerPackage(_EndpointResponse):
    """
    Data class wrapping the JSON response from the package related
    endpoints
    """

    def __init__(self, data: Dict):
        super().__init__(data)
        # This is a numerical ID, required for interactions with this
        # specific package, including deletion of it or restoration
        self.id: int = self._data["id"]

        # A string name. This might be an actual name or it could be a
        # digest string like "sha256:"
        self.name: str = self._data["name"]

        # URL to the package, including its ID, can be used for deletion
        # or restoration without needing to build up a URL ourselves
        self.url: str = self._data["url"]

        # The list of tags applied to this image. May be an empty list
        self.tags: List[str] = self._data["metadata"]["container"]["tags"]

    @functools.cached_property
    def untagged(self) -> bool:
        """
        Returns True if the image has no tags applied to it, False otherwise
        """
        return len(self.tags) == 0

    @functools.cache
    def tag_matches(self, pattern: str) -> bool:
        """
        Returns True if the image has at least one tag which matches the given regex,
        False otherwise
        """
        for tag in self.tags:
            if re.match(pattern, tag) is not None:
                return True
        return False

    def __repr__(self):
        return f"Package {self.name}"


class GithubContainerRegistryApi(_GithubApiBase):
    """
    Class wrapper to deal with the Github packages API. This class only deals with
    container type packages, the only type published by paperless-ngx.
    """

    def __init__(self, token: str, owner_or_org: str) -> None:
        super().__init__(token)
        self._owner_or_org = owner_or_org
        if self._owner_or_org == "paperless-ngx":
            # https://docs.github.com/en/rest/packages#get-all-package-versions-for-a-package-owned-by-an-organization
            self._PACKAGES_VERSIONS_ENDPOINT = "https://api.github.com/orgs/{ORG}/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions"
            # https://docs.github.com/en/rest/packages#delete-package-version-for-an-organization
            self._PACKAGE_VERSION_DELETE_ENDPOINT = "https://api.github.com/orgs/{ORG}/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions/{PACKAGE_VERSION_ID}"
        else:
            # https://docs.github.com/en/rest/packages#get-all-package-versions-for-a-package-owned-by-the-authenticated-user
            self._PACKAGES_VERSIONS_ENDPOINT = "https://api.github.com/user/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions"
            # https://docs.github.com/en/rest/packages#delete-a-package-version-for-the-authenticated-user
            self._PACKAGE_VERSION_DELETE_ENDPOINT = "https://api.github.com/user/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions/{PACKAGE_VERSION_ID}"
        self._PACKAGE_VERSION_RESTORE_ENDPOINT = (
            f"{self._PACKAGE_VERSION_DELETE_ENDPOINT}/restore"
        )

    def get_active_package_versions(
        self,
        package_name: str,
    ) -> List[ContainerPackage]:
        """
        Returns all the versions of a given package (container images) from
        the API
        """

        package_type: str = "container"
        # Need to quote this for slashes in the name
        package_name = urllib.parse.quote(package_name, safe="")

        endpoint = self._PACKAGES_VERSIONS_ENDPOINT.format(
            ORG=self._owner_or_org,
            PACKAGE_TYPE=package_type,
            PACKAGE_NAME=package_name,
        )

        pkgs = []

        for data in self._read_all_pages(endpoint):
            pkgs.append(ContainerPackage(data))

        return pkgs

    def get_deleted_package_versions(
        self,
        package_name: str,
    ) -> List[ContainerPackage]:
        package_type: str = "container"
        # Need to quote this for slashes in the name
        package_name = urllib.parse.quote(package_name, safe="")

        endpoint = (
            self._PACKAGES_VERSIONS_ENDPOINT.format(
                ORG=self._owner_or_org,
                PACKAGE_TYPE=package_type,
                PACKAGE_NAME=package_name,
            )
            + "?state=deleted"
        )

        pkgs = []

        for data in self._read_all_pages(endpoint):
            pkgs.append(ContainerPackage(data))

        return pkgs

    def delete_package_version(self, package_data: ContainerPackage):
        """
        Deletes the given package version from the GHCR
        """
        resp = self._client.delete(package_data.url)
        if resp.status_code != 204:
            logger.warning(
                f"Request to delete {package_data.url} returned HTTP {resp.status_code}",
            )

    def restore_package_version(
        self,
        package_name: str,
        package_data: ContainerPackage,
    ):
        package_type: str = "container"
        endpoint = self._PACKAGE_VERSION_RESTORE_ENDPOINT.format(
            ORG=self._owner_or_org,
            PACKAGE_TYPE=package_type,
            PACKAGE_NAME=package_name,
            PACKAGE_VERSION_ID=package_data.id,
        )

        resp = self._client.post(endpoint)
        if resp.status_code != 204:
            logger.warning(
                f"Request to restore {endpoint} returned HTTP {resp.status_code}",
            )
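A minimal sketch of a read-only session using these wrappers (the token is a placeholder; run from .github/scripts so the module imports resolve):

import os

from github import GithubContainerRegistryApi

# TOKEN is a placeholder PAT with at least read:packages scope
with GithubContainerRegistryApi(os.environ["TOKEN"], "paperless-ngx") as api:
    for version in api.get_active_package_versions("paperless-ngx"):
        state = "untagged" if version.untagged else ", ".join(version.tags)
        print(version.id, state)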
15  .github/stale.yml  vendored
@@ -1,18 +1,23 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 30

# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Issues with these labels will never be considered stale
exemptLabels:
  - pinned
  - security
  - fixpending

# Only issues or pull requests with all of these labels are checked if stale. Defaults to `[]` (disabled)
onlyLabels: [cant-reproduce]

# Label to use when marking an issue as stale
staleLabel: stale

# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity. It will be closed if no further activity occurs. Thank you
  for your contributions.

# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false

# See https://github.com/marketplace/stale for more info on the app
# and https://github.com/probot/stale for the configuration docs
466  .github/workflows/ci.yml  vendored
@@ -3,8 +3,10 @@ name: ci
on:
  push:
    tags:
      - ngx-*
      - beta-*
      # https://semver.org/#spec-item-2
      - 'v[0-9]+.[0-9]+.[0-9]+'
      # https://semver.org/#spec-item-9
      - 'v[0-9]+.[0-9]+.[0-9]+-beta.rc[0-9]+'
    branches-ignore:
      - 'translations**'
  pull_request:
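The new tag filters swap the loose ngx-*/beta-* globs for semver-shaped patterns. A rough Python approximation of what they accept (an assumption: GitHub's filter glob [0-9]+ is treated here like the equivalent regex):

import re

release = re.compile(r"^v[0-9]+\.[0-9]+\.[0-9]+$")
prerelease = re.compile(r"^v[0-9]+\.[0-9]+\.[0-9]+-beta\.rc[0-9]+$")

for tag in ("v1.11.0", "v1.11.0-beta.rc1", "ngx-1.10.2"):
    # Only the first two should now trigger the workflow
    print(tag, bool(release.match(tag) or prerelease.match(tag)))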
@@ -12,118 +14,120 @@ on:
      - 'translations**'

jobs:
  pre-commit:
    name: Linting Checks
    runs-on: ubuntu-22.04
    steps:
      -
        name: Checkout repository
        uses: actions/checkout@v3

      -
        name: Install tools
        uses: actions/setup-python@v4
        with:
          python-version: "3.9"

      -
        name: Check files
        uses: pre-commit/action@v3.0.0

  documentation:
    name: "Build Documentation"
    runs-on: ubuntu-20.04
    runs-on: ubuntu-22.04
    needs:
      - pre-commit
    steps:
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Install pipenv
        run: pipx install pipenv
        run: |
          pipx install pipenv==2022.11.30
      -
        name: Set up Python
        uses: actions/setup-python@v3
        uses: actions/setup-python@v4
        with:
          python-version: 3.9
          python-version: 3.8
          cache: "pipenv"
          cache-dependency-path: 'Pipfile.lock'
      -
        name: Install dependencies
        run: |
          pipenv sync --dev
      -
        name: List installed Python dependencies
        run: |
          pipenv run pip list
      -
        name: Make documentation
        run: |
          cd docs/
          pipenv run make html
          pipenv run mkdocs build --config-file ./mkdocs.yml
      -
        name: Upload artifact
        uses: actions/upload-artifact@v3
        with:
          name: documentation
          path: docs/_build/html/
          path: site/

  code-checks-backend:
    name: "Backend Code Checks"
    runs-on: ubuntu-20.04
  documentation-deploy:
    name: "Deploy Documentation"
    runs-on: ubuntu-22.04
    if: github.event_name == 'push' && github.ref == 'refs/heads/main'
    needs:
      - documentation
    steps:
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Install checkers
        run: |
          pipx install reorder-python-imports
          pipx install yesqa
          pipx install add-trailing-comma
          pipx install flake8
      -
        name: Run reorder-python-imports
        run: |
          find src/ -type f -name '*.py' ! -path "*/migrations/*" | xargs reorder-python-imports
      -
        name: Run yesqa
        run: |
          find src/ -type f -name '*.py' ! -path "*/migrations/*" | xargs yesqa
      -
        name: Run add-trailing-comma
        run: |
          find src/ -type f -name '*.py' ! -path "*/migrations/*" | xargs add-trailing-comma
      # black is placed after add-trailing-comma because it may format differently
      # if a trailing comma is added
      -
        name: Run black
        uses: psf/black@stable
        with:
          options: "--check --diff"
          version: "22.3.0"
      -
        name: Run flake8 checks
        run: |
          cd src/
          flake8 --max-line-length=88 --ignore=E203,W503

  code-checks-frontend:
    name: "Frontend Code Checks"
    runs-on: ubuntu-20.04
    steps:
      -
        name: Checkout
        uses: actions/checkout@v3
      - uses: actions/setup-node@v3
        with:
          node-version: '16'
      -
        name: Install prettier
        run: |
          npm install prettier
      -
        name: Run prettier
        run:
          npx prettier --check --ignore-path Pipfile.lock **/*.js **/*.ts *.md **/*.md
        name: Deploy docs
        uses: mhausenblas/mkdocs-deploy-gh-pages@master
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          CUSTOM_DOMAIN: docs.paperless-ngx.com
          CONFIG_FILE: mkdocs.yml
          EXTRA_PACKAGES: build-base

  tests-backend:
    needs: [code-checks-backend]
    name: "Backend Tests (${{ matrix.python-version }})"
    runs-on: ubuntu-20.04
    name: "Tests (${{ matrix.python-version }})"
    runs-on: ubuntu-22.04
    needs:
      - pre-commit
    strategy:
      matrix:
        python-version: ['3.8', '3.9']
        python-version: ['3.8', '3.9', '3.10']
      fail-fast: false
    env:
      # Enable Tika end to end testing
      TIKA_LIVE: 1
      # Enable paperless_mail testing against real server
      PAPERLESS_MAIL_TEST_HOST: ${{ secrets.TEST_MAIL_HOST }}
      PAPERLESS_MAIL_TEST_USER: ${{ secrets.TEST_MAIL_USER }}
      PAPERLESS_MAIL_TEST_PASSWD: ${{ secrets.TEST_MAIL_PASSWD }}
      # Skip Tests which require convert
      PAPERLESS_TEST_SKIP_CONVERT: 1
      # Enable Gotenberg end to end testing
      GOTENBERG_LIVE: 1
    steps:
      -
        name: Checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 2
          fetch-depth: 0
      -
        name: Start containers
        run: |
          docker compose --file ${GITHUB_WORKSPACE}/docker/compose/docker-compose.ci-test.yml pull --quiet
          docker compose --file ${GITHUB_WORKSPACE}/docker/compose/docker-compose.ci-test.yml up --detach
      -
        name: Install pipenv
        run: pipx install pipenv
        run: |
          pipx install pipenv==2022.11.30
      -
        name: Set up Python
        uses: actions/setup-python@v3
        uses: actions/setup-python@v4
        with:
          python-version: "${{ matrix.python-version }}"
          cache: "pipenv"
@@ -132,20 +136,24 @@ jobs:
        name: Install system dependencies
        run: |
          sudo apt-get update -qq
          sudo apt-get install -qq --no-install-recommends unpaper tesseract-ocr imagemagick ghostscript optipng libzbar0 poppler-utils
          sudo apt-get install -qq --no-install-recommends unpaper tesseract-ocr imagemagick ghostscript libzbar0 poppler-utils
      -
        name: Install Python dependencies
        run: |
          pipenv sync --dev
      -
        name: List installed Python dependencies
        run: |
          pipenv run pip list
      -
        name: Tests
        run: |
          cd src/
          pipenv run pytest
          pipenv run pytest -rfEp
      -
        name: Get changed files
        id: changed-files-specific
        uses: tj-actions/changed-files@v18.1
        uses: tj-actions/changed-files@v34
        with:
          files: |
            src/**
@@ -164,59 +172,179 @@ jobs:
        run: |
          cd src/
          pipenv run coveralls --service=github
      -
        name: Stop containers
        if: always()
        run: |
          docker compose --file ${GITHUB_WORKSPACE}/docker/compose/docker-compose.ci-test.yml logs
          docker compose --file ${GITHUB_WORKSPACE}/docker/compose/docker-compose.ci-test.yml down

  tests-frontend:
    needs: [code-checks-frontend]
    name: "Frontend Tests"
    runs-on: ubuntu-20.04
    name: "Tests Frontend"
    runs-on: ubuntu-22.04
    needs:
      - pre-commit
    strategy:
      matrix:
        node-version: [16.x]
    steps:
      - uses: actions/checkout@v3
      - name: Use Node.js ${{ matrix.node-version }}
      -
        name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v3
        with:
          node-version: ${{ matrix.node-version }}
      - run: cd src-ui && npm ci
      - run: cd src-ui && npm run lint
      - run: cd src-ui && npm run test
      - run: cd src-ui && npm run e2e:ci

  prepare-docker-build:
    name: Prepare Docker Pipeline Data
    if: github.event_name == 'push' && (startsWith(github.ref, 'refs/heads/feature-') || github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/beta' || contains(github.ref, 'beta.rc') || startsWith(github.ref, 'refs/tags/v'))
    runs-on: ubuntu-22.04
    # If the push triggered the installer library workflow, wait for it to
    # complete here. This ensures the required versions for the final
    # image have been built, while not waiting at all if the versions haven't changed
    concurrency:
      group: build-installer-library
      cancel-in-progress: false
    needs:
      - documentation
      - tests-backend
      - tests-frontend
    steps:
      -
        name: Set ghcr repository name
        id: set-ghcr-repository
        run: |
          ghcr_name=$(echo "${GITHUB_REPOSITORY}" | awk '{ print tolower($0) }')
          echo "repository=${ghcr_name}" >> $GITHUB_OUTPUT
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.9"
      -
        name: Setup qpdf image
        id: qpdf-setup
        run: |
          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py qpdf)

          echo ${build_json}

          echo "qpdf-json=${build_json}" >> $GITHUB_OUTPUT
      -
        name: Setup psycopg2 image
        id: psycopg2-setup
        run: |
          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py psycopg2)

          echo ${build_json}

          echo "psycopg2-json=${build_json}" >> $GITHUB_OUTPUT
      -
        name: Setup pikepdf image
        id: pikepdf-setup
        run: |
          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py pikepdf)

          echo ${build_json}

          echo "pikepdf-json=${build_json}" >> $GITHUB_OUTPUT
      -
        name: Setup jbig2enc image
        id: jbig2enc-setup
        run: |
          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py jbig2enc)

          echo ${build_json}

          echo "jbig2enc-json=${build_json}" >> $GITHUB_OUTPUT

    outputs:
      ghcr-repository: ${{ steps.set-ghcr-repository.outputs.repository }}
      qpdf-json: ${{ steps.qpdf-setup.outputs.qpdf-json }}
      pikepdf-json: ${{ steps.pikepdf-setup.outputs.pikepdf-json }}
      psycopg2-json: ${{ steps.psycopg2-setup.outputs.psycopg2-json }}
      jbig2enc-json: ${{ steps.jbig2enc-setup.outputs.jbig2enc-json}}

  # build and push image to docker hub.
  build-docker-image:
    if: github.event_name == 'push' && (startsWith(github.ref, 'refs/heads/feature-') || github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/beta' || startsWith(github.ref, 'refs/tags/ngx-') || startsWith(github.ref, 'refs/tags/beta-'))
    runs-on: ubuntu-20.04
    needs: [tests-backend, tests-frontend]
    runs-on: ubuntu-22.04
    concurrency:
      group: ${{ github.workflow }}-build-docker-image-${{ github.ref_name }}
      cancel-in-progress: true
    needs:
      - prepare-docker-build
    steps:
      -
        name: Check pushing to Docker Hub
        id: docker-hub
        # Only push to Dockerhub from the main repo AND when the ref is either:
        #  main
        #  dev
        #  beta
        #  a tag
        # Otherwise forks would require a Docker Hub account and secrets setup
        run: |
          if [[ ${{ needs.prepare-docker-build.outputs.ghcr-repository }} == "paperless-ngx/paperless-ngx" && ( ${{ github.ref_name }} == "main" || ${{ github.ref_name }} == "dev" || ${{ github.ref_name }} == "beta" || ${{ startsWith(github.ref, 'refs/tags/v') }} == "true" ) ]] ; then
            echo "Enabling DockerHub image push"
            echo "enable=true" >> $GITHUB_OUTPUT
          else
            echo "Not pushing to DockerHub"
            echo "enable=false" >> $GITHUB_OUTPUT
          fi
      -
        name: Gather Docker metadata
        id: docker-meta
        uses: docker/metadata-action@v3
        uses: docker/metadata-action@v4
        with:
          images: ghcr.io/${{ github.repository }}
          images: |
            ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}
            name=paperlessngx/paperless-ngx,enable=${{ steps.docker-hub.outputs.enable }}
          tags: |
            type=match,pattern=ngx-(\d.\d.\d),group=1
            # Tag branches with branch name
            type=ref,event=branch
            type=ref,event=tag
            # Process semver tags
            # For a tag x.y.z or vX.Y.Z, output an x.y.z and x.y image tag
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
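With those rules, pushing a tag such as v1.11.0 should produce image tags along these lines (a sketch of the semver handling, not docker/metadata-action's actual implementation):

ref = "v1.11.0"

# type=ref,event=tag keeps the raw tag; type=semver strips the leading "v"
version = ref.removeprefix("v")        # 1.11.0
major, minor, _patch = version.split(".")

print([
    ref,                   # type=ref,event=tag
    version,               # type=semver,pattern={{version}}
    f"{major}.{minor}",    # type=semver,pattern={{major}}.{{minor}}
])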
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
        uses: docker/setup-buildx-action@v2
      -
        name: Set up QEMU
        uses: docker/setup-qemu-action@v1
        uses: docker/setup-qemu-action@v2
      -
        name: Login to Github Container Registry
        uses: docker/login-action@v1
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      -
        name: Login to Docker Hub
        uses: docker/login-action@v2
        # Don't attempt to login if not pushing to Docker Hub
        if: steps.docker-hub.outputs.enable == 'true'
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      -
        name: Build and push
        uses: docker/build-push-action@v2
        uses: docker/build-push-action@v3
        with:
          context: .
          file: ./Dockerfile
@@ -224,8 +352,19 @@ jobs:
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.docker-meta.outputs.tags }}
          labels: ${{ steps.docker-meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          build-args: |
            JBIG2ENC_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.jbig2enc-json).version }}
            QPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.qpdf-json).version }}
            PIKEPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.pikepdf-json).version }}
            PSYCOPG2_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.psycopg2-json).version }}
          # Get cache layers from this branch, then dev, then main
          # This allows new branches to get at least some cache benefits, generally from dev
          cache-from: |
            type=registry,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
            type=registry,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:dev
            type=registry,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:main
          cache-to: |
            type=registry,mode=max,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
      -
        name: Inspect image
        run: |
@@ -243,24 +382,34 @@ jobs:
          path: src/documents/static/frontend/

  build-release:
    needs: [build-docker-image, documentation]
    runs-on: ubuntu-20.04
    needs:
      - build-docker-image
    runs-on: ubuntu-22.04
    steps:
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Install pipenv
        run: |
          pip3 install --upgrade pip setuptools wheel pipx
          pipx install pipenv
      -
        name: Set up Python
        uses: actions/setup-python@v3
        uses: actions/setup-python@v4
        with:
          python-version: 3.9
          cache: "pipenv"
          cache-dependency-path: 'Pipfile.lock'
      -
        name: Install dependencies
        name: Install Python dependencies
        run: |
          pipenv sync --dev
      -
        name: Install system dependencies
        run: |
          sudo apt-get update -qq
          sudo apt-get install -qq --no-install-recommends gettext liblept5
          pip3 install --upgrade pip setuptools wheel
          pip3 install -r requirements.txt
      -
        name: Download frontend artifact
        uses: actions/download-artifact@v3
@@ -273,34 +422,38 @@ jobs:
        with:
          name: documentation
          path: docs/_build/html/
      -
        name: Generate requirements file
        run: |
          pipenv requirements > requirements.txt
      -
        name: Compile messages
        run: |
          cd src/
          pipenv run python3 manage.py compilemessages
      -
        name: Collect static files
        run: |
          cd src/
          pipenv run python3 manage.py collectstatic --no-input
      -
        name: Move files
        run: |
          mkdir dist
          mkdir dist/paperless-ngx
          mkdir dist/paperless-ngx/scripts
          cp .dockerignore .env Dockerfile Pipfile Pipfile.lock LICENSE README.md requirements.txt dist/paperless-ngx/
          cp .dockerignore .env Dockerfile Pipfile Pipfile.lock requirements.txt LICENSE README.md dist/paperless-ngx/
          cp paperless.conf.example dist/paperless-ngx/paperless.conf
          cp gunicorn.conf.py dist/paperless-ngx/gunicorn.conf.py
          cp docker/ dist/paperless-ngx/docker -r
          cp -r docker/ dist/paperless-ngx/docker
          cp scripts/*.service scripts/*.sh dist/paperless-ngx/scripts/
          cp src/ dist/paperless-ngx/src -r
          cp docs/_build/html/ dist/paperless-ngx/docs -r
      -
        name: Compile messages
        run: |
          cd dist/paperless-ngx/src
          python3 manage.py compilemessages
      -
        name: Collect static files
        run: |
          cd dist/paperless-ngx/src
          python3 manage.py collectstatic --no-input
          cp -r src/ dist/paperless-ngx/src
          cp -r docs/_build/html/ dist/paperless-ngx/docs
          mv static dist/paperless-ngx
      -
        name: Make release package
        run: |
          cd dist
          find . -name __pycache__ | xargs rm -r
          tar -cJf paperless-ngx.tar.xz paperless-ngx/
      -
        name: Upload release artifact
@@ -310,9 +463,14 @@ jobs:
          path: dist/paperless-ngx.tar.xz

  publish-release:
    runs-on: ubuntu-20.04
    needs: build-release
    if: contains(github.ref, 'refs/tags/ngx-') || contains(github.ref, 'refs/tags/beta-')
    runs-on: ubuntu-22.04
    outputs:
      prerelease: ${{ steps.get_version.outputs.prerelease }}
      changelog: ${{ steps.create-release.outputs.body }}
      version: ${{ steps.get_version.outputs.version }}
    needs:
      - build-release
    if: github.ref_type == 'tag' && (startsWith(github.ref_name, 'v') || contains(github.ref_name, '-beta.rc'))
    steps:
      -
        name: Download release artifact
@@ -324,20 +482,19 @@ jobs:
        name: Get version
        id: get_version
        run: |
          if [[ $GITHUB_REF == refs/tags/ngx-* ]]; then
            echo ::set-output name=version::${GITHUB_REF#refs/tags/ngx-}
            echo ::set-output name=prerelease::false
          elif [[ $GITHUB_REF == refs/tags/beta-* ]]; then
            echo ::set-output name=version::${GITHUB_REF#refs/tags/beta-}
            echo ::set-output name=prerelease::true
          echo "version=${{ github.ref_name }}" >> $GITHUB_OUTPUT
          if [[ ${{ contains(github.ref_name, '-beta.rc') }} == 'true' ]]; then
            echo "prerelease=true" >> $GITHUB_OUTPUT
          else
            echo "prerelease=false" >> $GITHUB_OUTPUT
          fi
      -
        name: Create Release and Changelog
        id: create-release
        uses: release-drafter/release-drafter@v5
        uses: paperless-ngx/release-drafter@master
        with:
          name: Paperless-ngx ${{ steps.get_version.outputs.version }}
          tag: ngx-${{ steps.get_version.outputs.version }}
          tag: ${{ steps.get_version.outputs.version }}
          version: ${{ steps.get_version.outputs.version }}
          prerelease: ${{ steps.get_version.outputs.prerelease }}
          publish: true # ensures release is not marked as draft
@@ -346,11 +503,72 @@ jobs:
      -
        name: Upload release archive
        id: upload-release-asset
        uses: actions/upload-release-asset@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        uses: shogo82148/actions-upload-release-asset@v1
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          upload_url: ${{ steps.create-release.outputs.upload_url }}
          asset_path: ./paperless-ngx.tar.xz
          asset_name: paperless-ngx-${{ steps.get_version.outputs.version }}.tar.xz
          asset_content_type: application/x-xz

  append-changelog:
    runs-on: ubuntu-22.04
    needs:
      - publish-release
    if: needs.publish-release.outputs.prerelease == 'false'
    steps:
      -
        name: Checkout
        uses: actions/checkout@v3
        with:
          ref: main
      -
        name: Install pipenv
        run: |
          pip3 install --upgrade pip setuptools wheel pipx
          pipx install pipenv
      -
        name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.9
          cache: "pipenv"
          cache-dependency-path: 'Pipfile.lock'
      -
        name: Append Changelog to docs
        id: append-Changelog
        working-directory: docs
        run: |
          git branch ${{ needs.publish-release.outputs.version }}-changelog
          git checkout ${{ needs.publish-release.outputs.version }}-changelog
          echo -e "# Changelog\n\n${{ needs.publish-release.outputs.changelog }}\n" > changelog-new.md
          echo "Manually linking usernames"
          sed -i -r 's|@(.+?) \(\[#|[@\1](https://github.com/\1) ([#|ig' changelog-new.md
          CURRENT_CHANGELOG=`tail --lines +2 changelog.md`
          echo -e "$CURRENT_CHANGELOG" >> changelog-new.md
          mv changelog-new.md changelog.md
          pipenv run pre-commit run --files changelog.md || true
          git config --global user.name "github-actions"
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git commit -am "Changelog ${{ needs.publish-release.outputs.version }} - GHA"
          git push origin ${{ needs.publish-release.outputs.version }}-changelog
      -
        name: Create Pull Request
        uses: actions/github-script@v6
        with:
          script: |
            const { repo, owner } = context.repo;
            const result = await github.rest.pulls.create({
              title: '[Documentation] Add ${{ needs.publish-release.outputs.version }} changelog',
              owner,
              repo,
              head: '${{ needs.publish-release.outputs.version }}-changelog',
              base: 'main',
              body: 'This PR is auto-generated by CI.'
            });
            github.rest.issues.addLabels({
              owner,
              repo,
              issue_number: result.data.number,
              labels: ['documentation']
            });
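The sed call in the append-changelog job rewrites release-drafter's "@user ([#123](url))" change lines into Markdown profile links. A rough Python equivalent of that substitution (the sample line is invented):

import re

line = "- Fix the thing @someuser ([#1234](https://github.com/paperless-ngx/paperless-ngx/pull/1234))"
linked = re.sub(
    r"@(.+?) \(\[#",
    r"[@\1](https://github.com/\1) ([#",
    line,
    flags=re.IGNORECASE,
)
print(linked)
# - Fix the thing [@someuser](https://github.com/someuser) ([#1234](https://github.com/paperless-ngx/paperless-ngx/pull/1234))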
93  .github/workflows/cleanup-tags.yml  vendored  Normal file
@@ -0,0 +1,93 @@
# This workflow runs on certain conditions to check for and potentially
# delete container images from the GHCR which no longer have an associated
# code branch.
# Requires a PAT with the correct scope set in the secrets.
#
# This workflow will not trigger runs on forked repos.

name: Cleanup Image Tags

on:
  delete:
  push:
    paths:
      - ".github/workflows/cleanup-tags.yml"
      - ".github/scripts/cleanup-tags.py"
      - ".github/scripts/github.py"
      - ".github/scripts/common.py"

concurrency:
  group: registry-tags-cleanup
  cancel-in-progress: false

jobs:
  cleanup-images:
    name: Cleanup Image Tags for ${{ matrix.primary-name }}
    if: github.repository_owner == 'paperless-ngx'
    runs-on: ubuntu-22.04
    strategy:
      matrix:
        include:
          - primary-name: "paperless-ngx"
            cache-name: "paperless-ngx/builder/cache/app"

          - primary-name: "paperless-ngx/builder/qpdf"
            cache-name: "paperless-ngx/builder/cache/qpdf"

          - primary-name: "paperless-ngx/builder/pikepdf"
            cache-name: "paperless-ngx/builder/cache/pikepdf"

          - primary-name: "paperless-ngx/builder/jbig2enc"
            cache-name: "paperless-ngx/builder/cache/jbig2enc"

          - primary-name: "paperless-ngx/builder/psycopg2"
            cache-name: "paperless-ngx/builder/cache/psycopg2"
    env:
      # Requires a personal access token with the OAuth scope delete:packages
      TOKEN: ${{ secrets.GHA_CONTAINER_DELETE_TOKEN }}
    steps:
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Login to Github Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      -
        name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"
      -
        name: Install httpx
        run: |
          python -m pip install httpx
      #
      # Clean up primary package
      #
      -
        name: Cleanup for package "${{ matrix.primary-name }}"
        if: "${{ env.TOKEN != '' }}"
        run: |
          python ${GITHUB_WORKSPACE}/.github/scripts/cleanup-tags.py --untagged --is-manifest --delete "${{ matrix.primary-name }}"
      #
      # Clean up registry cache package
      #
      -
        name: Cleanup for package "${{ matrix.cache-name }}"
        if: "${{ env.TOKEN != '' }}"
        run: |
          python ${GITHUB_WORKSPACE}/.github/scripts/cleanup-tags.py --untagged --delete "${{ matrix.cache-name }}"
      #
      # Verify tags which are left still pull
      #
      -
        name: Check all tags still pull
        run: |
          ghcr_name=$(echo "ghcr.io/${GITHUB_REPOSITORY_OWNER}/${{ matrix.primary-name }}" | awk '{ print tolower($0) }')
          echo "Pulling all tags of ${ghcr_name}"
          docker pull --quiet --all-tags ${ghcr_name}
          docker image list
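Omitting --delete leaves the script in dry-run mode, logging what it would remove without touching the registry; a hypothetical local dry run (token and repository values are placeholders, and docker must be available for the manifest inspection):

import os
import subprocess

os.environ.update(
    GITHUB_REPOSITORY_OWNER="paperless-ngx",
    GITHUB_REPOSITORY="paperless-ngx/paperless-ngx",
    TOKEN="ghp_example",  # placeholder PAT with read/delete:packages scope
)
subprocess.run(
    ["python3", ".github/scripts/cleanup-tags.py", "--untagged", "--is-manifest", "paperless-ngx"],
    check=True,
)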
8  .github/workflows/codeql-analysis.yml  vendored
@@ -23,7 +23,7 @@ on:
jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    runs-on: ubuntu-22.04
    permissions:
      actions: read
      contents: read
@@ -38,11 +38,11 @@ jobs:

    steps:
    - name: Checkout repository
      uses: actions/checkout@v2
      uses: actions/checkout@v3

    # Initializes the CodeQL tools for scanning.
    - name: Initialize CodeQL
      uses: github/codeql-action/init@v1
      uses: github/codeql-action/init@v2
      with:
        languages: ${{ matrix.language }}
        # If you wish to specify custom queries, you can do so here or in a config file.
@@ -51,4 +51,4 @@ jobs:
        # queries: ./path/to/local/query, your-org/your-repo/queries@main

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v1
      uses: github/codeql-action/analyze@v2
171
.github/workflows/installer-library.yml
vendored
Normal file
@@ -0,0 +1,171 @@
# This workflow will run to update the installer library of
# Docker images. These are the images which provide updated wheels,
# .deb installation packages, or maybe just some compiled library

name: Build Image Library

on:
  push:
    # Must match one of these branches AND one of the paths
    # to be triggered
    branches:
      - "main"
      - "dev"
      - "library-*"
      - "feature-*"
    paths:
      # Trigger the workflow if a Dockerfile changed
      - "docker-builders/**"
      # Trigger if a package was updated
      - ".build-config.json"
      - "Pipfile.lock"
      # Also trigger on workflow changes related to the library
      - ".github/workflows/installer-library.yml"
      - ".github/workflows/reusable-workflow-builder.yml"
      - ".github/scripts/**"

# Set a workflow level concurrency group so primary workflow
# can wait for this to complete if needed
# DO NOT CHANGE without updating main workflow group
concurrency:
  group: build-installer-library
  cancel-in-progress: false

jobs:
  prepare-docker-build:
    name: Prepare Docker Image Version Data
    runs-on: ubuntu-22.04
    steps:
      -
        name: Set ghcr repository name
        id: set-ghcr-repository
        run: |
          ghcr_name=$(echo "${GITHUB_REPOSITORY}" | awk '{ print tolower($0) }')
          echo "repository=${ghcr_name}" >> $GITHUB_OUTPUT
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.9"
      -
        name: Install jq
        run: |
          sudo apt-get update
          sudo apt-get install jq
      -
        name: Setup qpdf image
        id: qpdf-setup
        run: |
          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py qpdf)

          echo ${build_json}

          echo "qpdf-json=${build_json}" >> $GITHUB_OUTPUT
      -
        name: Setup psycopg2 image
        id: psycopg2-setup
        run: |
          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py psycopg2)

          echo ${build_json}

          echo "psycopg2-json=${build_json}" >> $GITHUB_OUTPUT
      -
        name: Setup pikepdf image
        id: pikepdf-setup
        run: |
          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py pikepdf)

          echo ${build_json}

          echo "pikepdf-json=${build_json}" >> $GITHUB_OUTPUT
      -
        name: Setup jbig2enc image
        id: jbig2enc-setup
        run: |
          build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py jbig2enc)

          echo ${build_json}

          echo "jbig2enc-json=${build_json}" >> $GITHUB_OUTPUT
      -
        name: Setup other versions
        id: cache-bust-setup
        run: |
          pillow_version=$(jq ".default.pillow.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')
          lxml_version=$(jq ".default.lxml.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')

          echo "Pillow is ${pillow_version}"
          echo "lxml is ${lxml_version}"

          echo "pillow-version=${pillow_version}" >> $GITHUB_OUTPUT
          echo "lxml-version=${lxml_version}" >> $GITHUB_OUTPUT
    outputs:
      ghcr-repository: ${{ steps.set-ghcr-repository.outputs.repository }}
      qpdf-json: ${{ steps.qpdf-setup.outputs.qpdf-json }}
      pikepdf-json: ${{ steps.pikepdf-setup.outputs.pikepdf-json }}
      psycopg2-json: ${{ steps.psycopg2-setup.outputs.psycopg2-json }}
      jbig2enc-json: ${{ steps.jbig2enc-setup.outputs.jbig2enc-json }}
      pillow-version: ${{ steps.cache-bust-setup.outputs.pillow-version }}
      lxml-version: ${{ steps.cache-bust-setup.outputs.lxml-version }}

  build-qpdf-debs:
    name: qpdf
    needs:
      - prepare-docker-build
    uses: ./.github/workflows/reusable-workflow-builder.yml
    with:
      dockerfile: ./docker-builders/Dockerfile.qpdf
      build-platforms: linux/amd64
      build-json: ${{ needs.prepare-docker-build.outputs.qpdf-json }}
      build-args: |
        QPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.qpdf-json).version }}

  build-jbig2enc:
    name: jbig2enc
    needs:
      - prepare-docker-build
    uses: ./.github/workflows/reusable-workflow-builder.yml
    with:
      dockerfile: ./docker-builders/Dockerfile.jbig2enc
      build-json: ${{ needs.prepare-docker-build.outputs.jbig2enc-json }}
      build-args: |
        JBIG2ENC_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.jbig2enc-json).version }}

  build-psycopg2-wheel:
    name: psycopg2
    needs:
      - prepare-docker-build
    uses: ./.github/workflows/reusable-workflow-builder.yml
    with:
      dockerfile: ./docker-builders/Dockerfile.psycopg2
      build-json: ${{ needs.prepare-docker-build.outputs.psycopg2-json }}
      build-args: |
        PSYCOPG2_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.psycopg2-json).version }}

  build-pikepdf-wheel:
    name: pikepdf
    needs:
      - prepare-docker-build
      - build-qpdf-debs
    uses: ./.github/workflows/reusable-workflow-builder.yml
    with:
      dockerfile: ./docker-builders/Dockerfile.pikepdf
      build-json: ${{ needs.prepare-docker-build.outputs.pikepdf-json }}
      build-args: |
        REPO=${{ needs.prepare-docker-build.outputs.ghcr-repository }}
        QPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.qpdf-json).version }}
        PIKEPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.pikepdf-json).version }}
        PILLOW_VERSION=${{ needs.prepare-docker-build.outputs.pillow-version }}
        LXML_VERSION=${{ needs.prepare-docker-build.outputs.lxml-version }}
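Each `get-build-json.py` step above prints a single JSON document that later jobs consume via `fromJSON`. The script itself is not included in this diff; the sketch below is a plausible reconstruction only, assuming it reads the pinned version from `.build-config.json` and derives the `image_tag`/`cache_tag` fields that the reusable workflow dereferences.

```python
#!/usr/bin/env python3
# Hypothetical sketch of .github/scripts/get-build-json.py; the real
# script is not shown in this diff. The field names (name, version,
# image_tag, cache_tag) are taken from how the workflows use the JSON.
import json
import os
import sys


def main() -> None:
    package = sys.argv[1]  # e.g. "qpdf", "psycopg2", "pikepdf", "jbig2enc"

    # Compiled tools are pinned in .build-config.json; Python packages
    # could equally be read from Pipfile.lock's "default" section.
    with open(".build-config.json") as fp:
        config = json.load(fp)
    version = config[package]["version"]

    repo = os.environ.get("GITHUB_REPOSITORY", "paperless-ngx/paperless-ngx").lower()
    build_info = {
        "name": package,
        "version": version,
        "image_tag": f"ghcr.io/{repo}/builder/{package}:{version}",
        "cache_tag": f"ghcr.io/{repo}/builder/cache/{package}:{version}",
    }
    # One line of output so the workflow can append it to $GITHUB_OUTPUT.
    print(json.dumps(build_info))


if __name__ == "__main__":
    main()
```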
26
.github/workflows/project-actions.yml
vendored
@@ -13,6 +13,9 @@ on:
    - main
    - dev

permissions:
  contents: read

env:
  todo: Todo
  done: Done
@@ -21,11 +24,11 @@ env:
jobs:
  issue_opened_or_reopened:
    name: issue_opened_or_reopened
    runs-on: ubuntu-latest
    runs-on: ubuntu-22.04
    if: github.event_name == 'issues' && (github.event.action == 'opened' || github.event.action == 'reopened')
    steps:
      - name: Set issue status to ${{ env.todo }}
        uses: leonsteinhaeuser/project-beta-automations@v1.2.1
      - name: Add issue to project and set status to ${{ env.todo }}
        uses: leonsteinhaeuser/project-beta-automations@v2.0.1
        with:
          gh_token: ${{ secrets.GH_TOKEN }}
          organization: paperless-ngx
@@ -34,14 +37,21 @@ jobs:
          status_value: ${{ env.todo }} # Target status
  pr_opened_or_reopened:
    name: pr_opened_or_reopened
    runs-on: ubuntu-latest
    if: github.event_name == 'pull_request_target' && (github.event.action == 'opened' || github.event.action == 'reopened')
    runs-on: ubuntu-22.04
    permissions:
      # write permission is required for autolabeler
      pull-requests: write
    if: github.event_name == 'pull_request_target' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request.user.login != 'dependabot'
    steps:
      - name: Set PR status to ${{ env.in_progress }}
        uses: leonsteinhaeuser/project-beta-automations@v1.2.1
      - name: Add PR to project and set status to "Needs Review"
        uses: leonsteinhaeuser/project-beta-automations@v2.0.1
        with:
          gh_token: ${{ secrets.GH_TOKEN }}
          organization: paperless-ngx
          project_id: 2
          resource_node_id: ${{ github.event.pull_request.node_id }}
          status_value: ${{ env.in_progress }} # Target status
          status_value: "Needs Review" # Target status
      - name: Label PR with release-drafter
        uses: release-drafter/release-drafter@v5
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
31
.github/workflows/release-chart.yml
vendored
Normal file
@@ -0,0 +1,31 @@
---
name: Release Charts

on:
  push:
    tags:
      - v*

jobs:
  release_chart:
    name: "Release Chart"
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Configure Git
        run: |
          git config user.name "$GITHUB_ACTOR"
          git config user.email "$GITHUB_ACTOR@users.noreply.github.com"
      - name: Install Helm
        uses: azure/setup-helm@v3
        with:
          version: v3.10.0

      - name: Run chart-releaser
        uses: helm/chart-releaser-action@v1.4.1
        env:
          CR_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
57
.github/workflows/reusable-workflow-builder.yml
vendored
Normal file
@@ -0,0 +1,57 @@
name: Reusable Image Builder

on:
  workflow_call:
    inputs:
      dockerfile:
        required: true
        type: string
      build-json:
        required: true
        type: string
      build-args:
        required: false
        default: ""
        type: string
      build-platforms:
        required: false
        default: linux/amd64,linux/arm64,linux/arm/v7
        type: string

concurrency:
  group: ${{ github.workflow }}-${{ fromJSON(inputs.build-json).name }}-${{ fromJSON(inputs.build-json).version }}
  cancel-in-progress: false

jobs:
  build-image:
    name: Build ${{ fromJSON(inputs.build-json).name }} @ ${{ fromJSON(inputs.build-json).version }}
    runs-on: ubuntu-22.04
    steps:
      -
        name: Checkout
        uses: actions/checkout@v3
      -
        name: Login to Github Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      -
        name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      -
        name: Set up QEMU
        uses: docker/setup-qemu-action@v2
      -
        name: Build ${{ fromJSON(inputs.build-json).name }}
        uses: docker/build-push-action@v3
        with:
          context: .
          file: ${{ inputs.dockerfile }}
          tags: ${{ fromJSON(inputs.build-json).image_tag }}
          platforms: ${{ inputs.build-platforms }}
          build-args: ${{ inputs.build-args }}
          push: true
          cache-from: type=registry,ref=${{ fromJSON(inputs.build-json).cache_tag }}
          cache-to: type=registry,mode=max,ref=${{ fromJSON(inputs.build-json).cache_tag }}
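The reusable workflow treats its `build-json` input as a contract: the expressions above dereference `name`, `version`, `image_tag`, and `cache_tag`. A minimal sketch of a validator for that contract (the required field names come from the workflow itself; the example values are illustrative only):

```python
import json

# Fields the reusable workflow dereferences via fromJSON(inputs.build-json).
REQUIRED_FIELDS = ("name", "version", "image_tag", "cache_tag")


def validate_build_json(raw: str) -> dict:
    """Parse a build-json input and check the fields the workflow uses."""
    data = json.loads(raw)
    missing = [field for field in REQUIRED_FIELDS if field not in data]
    if missing:
        raise ValueError(f"build-json is missing fields: {missing}")
    return data


if __name__ == "__main__":
    # Illustrative payload; real values are produced by get-build-json.py.
    example = (
        '{"name": "qpdf", "version": "11.1.1", '
        '"image_tag": "ghcr.io/owner/repo/builder/qpdf:11.1.1", '
        '"cache_tag": "ghcr.io/owner/repo/builder/cache/qpdf:11.1.1"}'
    )
    print(validate_build_json(example)["image_tag"])
```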
15
.gitignore
vendored
@@ -51,8 +51,8 @@ coverage.xml
# Django stuff:
*.log

# Sphinx documentation
docs/_build/
# MkDocs documentation
site/

# PyBuilder
target/
@@ -63,11 +63,14 @@ target/

# VS Code
.vscode
/src-ui/.vscode
/docs/.vscode

# Other stuff that doesn't belong
.virtualenv
virtualenv
/venv
.venv/
/docker-compose.env
/docker-compose.yml

@@ -84,8 +87,12 @@ scripts/nuke
/paperless.conf
/consume/
/export/
/src-ui/.vscode

# this is where the compiled frontend is moved to.
/src/documents/static/frontend/
/docs/.vscode/settings.json

# mac os
.DS_Store

# celery schedule file
celerybeat-schedule*
8
.hadolint.yml
Normal file
@@ -0,0 +1,8 @@
failure-threshold: warning
ignored:
  # https://github.com/hadolint/hadolint/wiki/DL3008
  - DL3008
  # https://github.com/hadolint/hadolint/wiki/DL3013
  - DL3013
  # https://github.com/hadolint/hadolint/wiki/DL3003
  - DL3003
.pre-commit-config.yaml
@@ -5,19 +5,20 @@
repos:
  # General hooks
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.2.0
    rev: v4.4.0
    hooks:
      - id: check-docstring-first
      - id: check-json
        exclude: "tsconfig.*json"
      - id: check-yaml
        exclude: "charts/paperless-ngx/templates/common.yaml"
      - id: check-toml
      - id: check-executables-have-shebangs
      - id: end-of-file-fixer
        exclude_types:
          - svg
          - pofile
        exclude: "(^LICENSE$)"
        exclude: "^(LICENSE|charts/paperless-ngx/README.md)$"
      - id: mixed-line-ending
        args:
          - "--fix=lf"
@@ -27,46 +28,53 @@ repos:
      - id: check-case-conflict
      - id: detect-private-key
  - repo: https://github.com/pre-commit/mirrors-prettier
    rev: "v2.6.2"
    rev: "v2.7.1"
    hooks:
      - id: prettier
        types_or:
          - javascript
          - ts
          - markdown
        exclude: "(^Pipfile\\.lock$)"
        exclude: "(^Pipfile\\.lock$)|(^charts/paperless-ngx/README.md$)"
  # Python hooks
  - repo: https://github.com/asottile/reorder_python_imports
    rev: v3.0.1
    rev: v3.9.0
    hooks:
      - id: reorder-python-imports
        exclude: "(migrations)"
  - repo: https://github.com/asottile/yesqa
    rev: "v1.3.0"
    rev: "v1.4.0"
    hooks:
      - id: yesqa
        exclude: "(migrations)"
  - repo: https://github.com/asottile/add-trailing-comma
    rev: "v2.2.2"
    rev: "v2.4.0"
    hooks:
      - id: add-trailing-comma
        exclude: "(migrations)"
  - repo: https://gitlab.com/pycqa/flake8
    rev: 3.9.2
  - repo: https://github.com/PyCQA/flake8
    rev: 6.0.0
    hooks:
      - id: flake8
        files: ^src/
        args:
          - "--config=./src/setup.cfg"
  - repo: https://github.com/psf/black
    rev: 22.3.0
    rev: 22.12.0
    hooks:
      - id: black
  # Dockerfile hooks
  - repo: https://github.com/pryorda/dockerfilelint-precommit-hooks
    rev: "v0.1.0"
  - repo: https://github.com/asottile/pyupgrade
    rev: v3.3.1
    hooks:
      - id: dockerfilelint
      - id: pyupgrade
        exclude: "(migrations)"
        args:
          - "--py38-plus"
  # Dockerfile hooks
  - repo: https://github.com/AleksaC/hadolint-py
    rev: v2.10.0
    hooks:
      - id: hadolint
  # Shell script hooks
  - repo: https://github.com/lovesegfault/beautysh
    rev: v6.2.1
@@ -75,6 +83,6 @@ repos:
        args:
          - "--tab"
  - repo: https://github.com/shellcheck-py/shellcheck-py
    rev: "v0.8.0.4"
    rev: "v0.9.0.2"
    hooks:
      - id: shellcheck
.readthedocs.yml
@@ -1,16 +0,0 @@
# .readthedocs.yml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

# Required
version: 2

# Build documentation in the docs/ directory with Sphinx
sphinx:
  configuration: docs/conf.py

# Optionally set the version of Python and requirements required to build your docs
python:
  version: "3.8"
  install:
    - requirements: docs/requirements.txt
.github/CODEOWNERS
@@ -7,4 +7,3 @@
/src/ @paperless-ngx/backend
Pipfile* @paperless-ngx/backend
*.py @paperless-ngx/backend
requirements.txt @paperless-ngx/backend
CONTRIBUTING.md
@@ -27,11 +27,11 @@ Please format and test your code! I know it's a hassle, but it makes sure that y

To test your code, execute `pytest` in the src/ directory. This also generates a html coverage report, which you can use to see if you missed anything important during testing.

Before you can run `pytest`, ensure to [properly set up your local environment](https://paperless-ngx.readthedocs.io/en/latest/extending.html#initial-setup-and-first-start).
Before you can run `pytest`, ensure to [properly set up your local environment](https://docs.paperless-ngx.com/development/#initial-setup-and-first-start).

## More info:

... is available in the documentation. https://paperless-ngx.readthedocs.io/en/latest/extending.html
... is available [in the documentation](https://docs.paperless-ngx.com/development).

# Merging PRs
274
Dockerfile
@@ -1,58 +1,258 @@
FROM node:16 AS compile-frontend
# syntax=docker/dockerfile:1.4

COPY . /src
# Pull the installer images from the library
# These are all built previously
# They provide either a .deb or .whl

ARG JBIG2ENC_VERSION
ARG QPDF_VERSION
ARG PIKEPDF_VERSION
ARG PSYCOPG2_VERSION

FROM ghcr.io/paperless-ngx/paperless-ngx/builder/jbig2enc:${JBIG2ENC_VERSION} as jbig2enc-builder
FROM ghcr.io/paperless-ngx/paperless-ngx/builder/qpdf:${QPDF_VERSION} as qpdf-builder
FROM ghcr.io/paperless-ngx/paperless-ngx/builder/pikepdf:${PIKEPDF_VERSION} as pikepdf-builder
FROM ghcr.io/paperless-ngx/paperless-ngx/builder/psycopg2:${PSYCOPG2_VERSION} as psycopg2-builder

FROM --platform=$BUILDPLATFORM node:16-bullseye-slim AS compile-frontend

# This stage compiles the frontend
# This stage runs once for the native platform, as the outputs are not
# dependent on target arch
# Inputs: None

COPY ./src-ui /src/src-ui

WORKDIR /src/src-ui
RUN npm update npm -g && npm ci --no-optional
RUN ./node_modules/.bin/ng build --configuration production
RUN set -eux \
  && npm update npm -g \
  && npm ci --omit=optional
RUN set -eux \
  && ./node_modules/.bin/ng build --configuration production

FROM ghcr.io/paperless-ngx/builder/ngx-base:1.7.0 as main-app
FROM --platform=$BUILDPLATFORM python:3.9-slim-bullseye as pipenv-base

# This stage generates the requirements.txt file using pipenv
# This stage runs once for the native platform, as the outputs are not
# dependent on target arch
# This way, pipenv dependencies are not left in the final image
# nor can pipenv mess up the final image somehow
# Inputs: None

WORKDIR /usr/src/pipenv

COPY Pipfile* ./

RUN set -eux \
  && echo "Installing pipenv" \
  && python3 -m pip install --no-cache-dir --upgrade pipenv==2022.11.30 \
  && echo "Generating requirement.txt" \
  && pipenv requirements > requirements.txt

FROM python:3.9-slim-bullseye as main-app

LABEL org.opencontainers.image.authors="paperless-ngx team <hello@paperless-ngx.com>"
LABEL org.opencontainers.image.documentation="https://paperless-ngx.readthedocs.io/en/latest/"
LABEL org.opencontainers.image.documentation="https://docs.paperless-ngx.com/"
LABEL org.opencontainers.image.source="https://github.com/paperless-ngx/paperless-ngx"
LABEL org.opencontainers.image.url="https://github.com/paperless-ngx/paperless-ngx"
LABEL org.opencontainers.image.licenses="GPL-3.0-only"

WORKDIR /usr/src/paperless/src/
ARG DEBIAN_FRONTEND=noninteractive
# Buildx provided
ARG TARGETARCH
ARG TARGETVARIANT

COPY requirements.txt ../
# Workflow provided
ARG QPDF_VERSION

# Python dependencies
RUN apt-get update \
  # python-Levenshtein still needs to be compiled here
  && apt-get -y --no-install-recommends install \
    build-essential \
  && python3 -m pip install --upgrade --no-cache-dir pip wheel \
  && python3 -m pip install --default-timeout=1000 --upgrade --no-cache-dir supervisor \
  && python3 -m pip install --default-timeout=1000 --no-cache-dir -r ../requirements.txt \
  && apt-get -y purge build-essential \
  && apt-get -y autoremove --purge \
  && rm -rf /var/lib/apt/lists/*
#
# Begin installation and configuration
# Order the steps below from least often changed to most
#

# copy jbig2enc
# Basically will never change again
COPY --from=jbig2enc-builder /usr/src/jbig2enc/src/.libs/libjbig2enc* /usr/local/lib/
COPY --from=jbig2enc-builder /usr/src/jbig2enc/src/jbig2 /usr/local/bin/
COPY --from=jbig2enc-builder /usr/src/jbig2enc/src/*.h /usr/local/include/

# Packages needed for running
ARG RUNTIME_PACKAGES="\
  # Python
  python3 \
  python3-pip \
  python3-setuptools \
  # General utils
  curl \
  # Docker specific
  gosu \
  # Timezones support
  tzdata \
  # fonts for text file thumbnail generation
  fonts-liberation \
  gettext \
  ghostscript \
  gnupg \
  icc-profiles-free \
  imagemagick \
  # Image processing
  liblept5 \
  liblcms2-2 \
  libtiff5 \
  libfreetype6 \
  libwebp6 \
  libopenjp2-7 \
  libimagequant0 \
  libraqm0 \
  libjpeg62-turbo \
  # PostgreSQL
  libpq5 \
  postgresql-client \
  # MySQL / MariaDB
  mariadb-client \
  # For Numpy
  libatlas3-base \
  # OCRmyPDF dependencies
  tesseract-ocr \
  tesseract-ocr-eng \
  tesseract-ocr-deu \
  tesseract-ocr-fra \
  tesseract-ocr-ita \
  tesseract-ocr-spa \
  unpaper \
  pngquant \
  # pikepdf / qpdf
  jbig2dec \
  libxml2 \
  libxslt1.1 \
  libgnutls30 \
  # Mime type detection
  file \
  libmagic1 \
  media-types \
  zlib1g \
  # Barcode splitter
  libzbar0 \
  poppler-utils \
  # RapidFuzz on armv7
  libatomic1"

# Install basic runtime packages.
# These change very infrequently
RUN set -eux \
  echo "Installing system packages" \
  && apt-get update \
  && apt-get install --yes --quiet --no-install-recommends ${RUNTIME_PACKAGES} \
  && rm -rf /var/lib/apt/lists/* \
  && echo "Installing supervisor" \
  && python3 -m pip install --default-timeout=1000 --upgrade --no-cache-dir supervisor==4.2.4

# Copy gunicorn config
# Changes very infrequently
WORKDIR /usr/src/paperless/

COPY gunicorn.conf.py .

# setup docker-specific things
COPY docker/ ./docker/
# Use mounts to avoid copying installer files into the image
# These change sometimes, but rarely
WORKDIR /usr/src/paperless/src/docker/

RUN cd docker \
  && cp imagemagick-policy.xml /etc/ImageMagick-6/policy.xml \
  && mkdir /var/log/supervisord /var/run/supervisord \
  && cp supervisord.conf /etc/supervisord.conf \
  && cp docker-entrypoint.sh /sbin/docker-entrypoint.sh \
  && chmod 755 /sbin/docker-entrypoint.sh \
  && cp docker-prepare.sh /sbin/docker-prepare.sh \
  && chmod 755 /sbin/docker-prepare.sh \
  && chmod +x install_management_commands.sh \
  && ./install_management_commands.sh \
  && cd .. \
  && rm -rf docker/
COPY [ \
  "docker/imagemagick-policy.xml", \
  "docker/supervisord.conf", \
  "docker/docker-entrypoint.sh", \
  "docker/docker-prepare.sh", \
  "docker/paperless_cmd.sh", \
  "docker/wait-for-redis.py", \
  "docker/management_script.sh", \
  "docker/flower-conditional.sh", \
  "docker/install_management_commands.sh", \
  "/usr/src/paperless/src/docker/" \
]

COPY gunicorn.conf.py ../
RUN set -eux \
  && echo "Configuring ImageMagick" \
  && mv imagemagick-policy.xml /etc/ImageMagick-6/policy.xml \
  && echo "Configuring supervisord" \
  && mkdir /var/log/supervisord /var/run/supervisord \
  && mv supervisord.conf /etc/supervisord.conf \
  && echo "Setting up Docker scripts" \
  && mv docker-entrypoint.sh /sbin/docker-entrypoint.sh \
  && chmod 755 /sbin/docker-entrypoint.sh \
  && mv docker-prepare.sh /sbin/docker-prepare.sh \
  && chmod 755 /sbin/docker-prepare.sh \
  && mv wait-for-redis.py /sbin/wait-for-redis.py \
  && chmod 755 /sbin/wait-for-redis.py \
  && mv paperless_cmd.sh /usr/local/bin/paperless_cmd.sh \
  && chmod 755 /usr/local/bin/paperless_cmd.sh \
  && mv flower-conditional.sh /usr/local/bin/flower-conditional.sh \
  && chmod 755 /usr/local/bin/flower-conditional.sh \
  && echo "Installing management commands" \
  && chmod +x install_management_commands.sh \
  && ./install_management_commands.sh

# copy app
COPY --from=compile-frontend /src/src/ ./
# Install the built packages from the installer library images
# Use mounts to avoid copying installer files into the image
# These change sometimes
RUN --mount=type=bind,from=qpdf-builder,target=/qpdf \
  --mount=type=bind,from=psycopg2-builder,target=/psycopg2 \
  --mount=type=bind,from=pikepdf-builder,target=/pikepdf \
  set -eux \
  && echo "Installing qpdf" \
  && apt-get install --yes --no-install-recommends /qpdf/usr/src/qpdf/${QPDF_VERSION}/${TARGETARCH}${TARGETVARIANT}/libqpdf29_*.deb \
  && apt-get install --yes --no-install-recommends /qpdf/usr/src/qpdf/${QPDF_VERSION}/${TARGETARCH}${TARGETVARIANT}/qpdf_*.deb \
  && echo "Installing pikepdf and dependencies" \
  && python3 -m pip install --no-cache-dir /pikepdf/usr/src/wheels/*.whl \
  && python3 -m pip list \
  && echo "Installing psycopg2" \
  && python3 -m pip install --no-cache-dir /psycopg2/usr/src/wheels/psycopg2*.whl \
  && python3 -m pip list

WORKDIR /usr/src/paperless/src/

# Python dependencies
# Change pretty frequently
COPY --from=pipenv-base /usr/src/pipenv/requirements.txt ./

# Packages needed only for building a few quick Python
# dependencies
ARG BUILD_PACKAGES="\
  build-essential \
  git \
  default-libmysqlclient-dev \
  python3-dev"

RUN set -eux \
  && echo "Installing build system packages" \
  && apt-get update \
  && apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
  && python3 -m pip install --no-cache-dir --upgrade wheel \
  && echo "Installing Python requirements" \
  && python3 -m pip install --default-timeout=1000 --no-cache-dir --requirement requirements.txt \
  && echo "Installing NLTK data" \
  && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/local/share/nltk_data" snowball_data \
  && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/local/share/nltk_data" stopwords \
  && python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/local/share/nltk_data" punkt \
  && echo "Cleaning up image" \
  && apt-get -y purge ${BUILD_PACKAGES} \
  && apt-get -y autoremove --purge \
  && apt-get clean --yes \
  && rm -rf /var/lib/apt/lists/* \
  && rm -rf /tmp/* \
  && rm -rf /var/tmp/* \
  && rm -rf /var/cache/apt/archives/* \
  && truncate -s 0 /var/log/*log

# copy backend
COPY ./src ./

# copy frontend
COPY --from=compile-frontend /src/src/documents/static/frontend/ ./documents/static/frontend/

# add users, setup scripts
RUN addgroup --gid 1000 paperless \
RUN set -eux \
  && addgroup --gid 1000 paperless \
  && useradd --uid 1000 --gid paperless --home-dir /usr/src/paperless paperless \
  && chown -R paperless:paperless ../ \
  && gosu paperless python3 manage.py collectstatic --clear --no-input \
@@ -67,4 +267,4 @@ ENTRYPOINT ["/sbin/docker-entrypoint.sh"]

EXPOSE 8000

CMD ["/usr/local/bin/supervisord", "-c", "/etc/supervisord.conf"]
CMD ["/usr/local/bin/paperless_cmd.sh"]
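The new Dockerfile installs `docker/wait-for-redis.py` to `/sbin` so startup can block until the Redis broker is reachable. The script's contents are not part of this diff; the following is only a sketch of what such a gate typically looks like, assuming the `redis` Python client is present in the image and that `PAPERLESS_REDIS` holds the broker URL.

```python
#!/usr/bin/env python3
# Hypothetical sketch of a wait-for-redis gate; the real
# docker/wait-for-redis.py is not shown in this diff.
import os
import sys
import time

from redis import Redis

MAX_RETRY_COUNT = 5
RETRY_SLEEP_SECONDS = 5


def main() -> int:
    # PAPERLESS_REDIS is paperless-ngx's documented broker URL setting.
    redis_url = os.getenv("PAPERLESS_REDIS", "redis://localhost:6379")
    client = Redis.from_url(redis_url)
    for attempt in range(MAX_RETRY_COUNT):
        try:
            client.ping()
            print("Connected to Redis broker")
            return 0
        except Exception as exc:  # broad on purpose: any failure means "not ready yet"
            print(f"Redis not ready (attempt {attempt + 1}): {exc}")
            time.sleep(RETRY_SLEEP_SECONDS)
    print("Failed to connect to Redis, giving up")
    return 1


if __name__ == "__main__":
    sys.exit(main())
```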
57
Pipfile
@@ -10,41 +10,37 @@ name = "piwheels"

[packages]
dateparser = "~=1.1"
django = "~=4.0"
django = "~=4.1"
django-cors-headers = "*"
django-extensions = "*"
django-filter = "~=21.1"
django-q = "~=1.3"
djangorestframework = "~=3.13"
django-filter = "~=22.1"
djangorestframework = "~=3.14"
filelock = "*"
fuzzywuzzy = {extras = ["speedup"], version = "*"}
gunicorn = "*"
imap-tools = "*"
langdetect = "*"
pathvalidate = "*"
pillow = "~=9.1"
# Any version update to pikepdf requires a base image update
pikepdf = "~=5.1"
pillow = "~=9.3"
pikepdf = "*"
python-gnupg = "*"
python-dotenv = "*"
python-dateutil = "*"
python-magic = "*"
# Any version update to psycopg2 requires a base image update
psycopg2 = "*"
redis = "*"
# Pinned because aarch64 wheels and updates cause warnings when loading the classifier model.
scikit-learn="==1.0.2"
whitenoise = "~=6.0.0"
watchdog = "~=2.1.0"
whoosh="~=2.7.4"
rapidfuzz = "*"
redis = {extras = ["hiredis"], version = "*"}
scikit-learn = "~=1.1"
numpy = "*"
whitenoise = "~=6.2"
watchdog = "~=2.1"
whoosh="~=2.7"
inotifyrecursive = "~=0.3"
ocrmypdf = "~=13.4"
ocrmypdf = "~=14.0"
tqdm = "*"
tika = "*"
# TODO: This will sadly also install daphne+dependencies,
# which is an ASGI server we don't need. Adds about 15MB image size.
channels = "~=3.0"
channels-redis = "*"
uvicorn = {extras = ["standard"], version = "*"}
concurrent-log-handler = "*"
"pdfminer.six" = "*"
@@ -52,7 +48,28 @@ concurrent-log-handler = "*"
"importlib-resources" = {version = "*", markers = "python_version < '3.9'"}
zipp = {version = "*", markers = "python_version < '3.9'"}
pyzbar = "*"
mysqlclient = "*"
celery = {extras = ["redis"], version = "*"}
django-celery-results = "*"
setproctitle = "*"
nltk = "*"
pdf2image = "*"
flower = "*"
bleach = "*"

#
# Packages locked due to issues (try to check if these are fixed in a release every so often)
#

# Pin this until piwheels is building 1.9 (see https://www.piwheels.org/project/scipy/)
scipy = "==1.8.1"

# Newer versions aren't building yet (see https://www.piwheels.org/project/cryptography/)
cryptography = "==38.0.1"

# Locked version until https://github.com/django/channels_redis/issues/332
# is resolved
channels-redis = "==3.4.1"

[dev-packages]
coveralls = "*"
@@ -64,8 +81,10 @@ pytest-django = "*"
pytest-env = "*"
pytest-sugar = "*"
pytest-xdist = "*"
sphinx = "~=4.5.0"
sphinx_rtd_theme = "*"
tox = "*"
black = "*"
pre-commit = "*"
sphinx-autobuild = "*"
myst-parser = "*"
imagehash = "*"
mkdocs-material = "*"
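Many of the pins above use the compatible-release operator `~=`, so e.g. `django = "~=4.1"` accepts any 4.x release at or above 4.1 but not 5.0. A quick reference for how those specifiers resolve, sketched with the standard `packaging` library (independent of this repo):

```python
from packaging.specifiers import SpecifierSet
from packaging.version import Version

# "~=4.1" is equivalent to ">=4.1, ==4.*": minor and patch updates
# within the 4.x series are allowed, a jump to 5.0 is not.
django_spec = SpecifierSet("~=4.1")
print(Version("4.1.4") in django_spec)  # True
print(Version("4.2") in django_spec)    # True
print(Version("5.0") in django_spec)    # False

# "~=6.2" on whitenoise similarly allows only 6.x releases >= 6.2.
print(Version("6.2.1") in SpecifierSet("~=6.2"))  # True
print(Version("7.0") in SpecifierSet("~=6.2"))    # False
```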
2871
Pipfile.lock
generated
24
README.md
@@ -1,8 +1,9 @@
[![ci](https://img.shields.io/github/workflow/status/paperless-ngx/paperless-ngx/ci?label=ci&logo=github&style=flat-square)](https://github.com/paperless-ngx/paperless-ngx/actions)
[![Crowdin](https://badges.crowdin.net/paperless-ngx/localized.svg)](https://crowdin.com/project/paperless-ngx)
[![Documentation Status](https://readthedocs.org/projects/paperless-ngx/badge/?version=latest)](https://paperless-ngx.readthedocs.io/en/latest/?badge=latest)
[![Documentation Status](https://img.shields.io/github/deployments/paperless-ngx/paperless-ngx/github-pages?label=docs)](https://docs.paperless-ngx.com)
[![Coverage Status](https://coveralls.io/repos/github/paperless-ngx/paperless-ngx/badge.svg?branch=master)](https://coveralls.io/github/paperless-ngx/paperless-ngx?branch=master)
[![Chat on Matrix](https://matrix.to/img/matrix-badge.svg)](https://matrix.to/#/#paperless:adnidor.de)
[![Chat on Matrix](https://img.shields.io/matrix/paperlessngx%3Amatrix.org?logo=matrix)](https://matrix.to/#/%23paperlessngx%3Amatrix.org)
[![demo](https://img.shields.io/website?url=https%3A%2F%2Fdemo.paperless-ngx.com&label=demo)](https://demo.paperless-ngx.com)

<p align="center">
<img src="https://github.com/paperless-ngx/paperless-ngx/raw/main/resources/logo/web/png/Black%20logo%20-%20no%20background.png#gh-light-mode-only" width="50%" />
@@ -32,13 +33,13 @@ A demo is available at [demo.paperless-ngx.com](https://demo.paperless-ngx.com)

# Features

![Dashboard](https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/docs/_static/screenshots/documents-smallcards.png)
![Dashboard](https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/docs/assets/screenshots/documents-smallcards.png#gh-light-mode-only)
![Dashboard](https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/docs/assets/screenshots/documents-smallcards-dark.png#gh-dark-mode-only)

- Organize and index your scanned documents with tags, correspondents, types, and more.
- Performs OCR on your documents, adds selectable text to image only documents and adds tags, correspondents and document types to your documents.
- Supports PDF documents, images, plain text files, and Office documents (Word, Excel, Powerpoint, and LibreOffice equivalents).
  - Office document support is optional and provided by Apache Tika (see [configuration](https://paperless-ngx.readthedocs.io/en/latest/configuration.html#tika-settings))
  - Office document support is optional and provided by Apache Tika (see [configuration](https://docs.paperless-ngx.com/configuration/#tika))
- Paperless stores your documents plain on disk. Filenames and folders are managed by paperless and their format can be configured freely.
- Single page application front end.
  - Includes a dashboard that shows basic statistics and has document upload.
@@ -56,7 +57,7 @@ A demo is available at [demo.paperless-ngx.com](https://demo.paperless-ngx.com)
- Paperless-ngx learns from your documents and will be able to automatically assign tags, correspondents and types to documents once you've stored a few documents in paperless.
- Optimized for multi core systems: Paperless-ngx consumes multiple documents in parallel.
- The integrated sanity checker makes sure that your document archive is in good health.
- [More screenshots are available in the documentation](https://paperless-ngx.readthedocs.io/en/latest/screenshots.html).
- [More screenshots are available in the documentation](https://docs.paperless-ngx.com/#screenshots).

# Getting started

@@ -68,19 +69,19 @@ If you'd like to jump right in, you can configure a docker-compose environment w
bash -c "$(curl -L https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/install-paperless-ngx.sh)"
```

Alternatively, you can install the dependencies and setup apache and a database server yourself. The [documentation](https://paperless-ngx.readthedocs.io/en/latest/setup.html#installation) has a step by step guide on how to do it.
Alternatively, you can install the dependencies and setup apache and a database server yourself. The [documentation](https://docs.paperless-ngx.com/setup/#installation) has a step by step guide on how to do it.

Migrating from Paperless-ng is easy, just drop in the new docker image! See the [documentation on migrating](https://paperless-ngx.readthedocs.io/en/latest/setup.html#migrating-from-paperless-ng) for more details.
Migrating from Paperless-ng is easy, just drop in the new docker image! See the [documentation on migrating](https://docs.paperless-ngx.com/setup/#migrating-to-paperless-ngx) for more details.

<!-- omit in toc -->

### Documentation

The documentation for Paperless-ngx is available on [ReadTheDocs](https://paperless-ngx.readthedocs.io/).
The documentation for Paperless-ngx is available at [https://docs.paperless-ngx.com](https://docs.paperless-ngx.com/).

# Contributing

If you feel like contributing to the project, please do! Bug fixes, enhancements, visual fixes etc. are always welcome. If you want to implement something big: Please start a discussion about that! The [documentation](https://paperless-ngx.readthedocs.io/en/latest/extending.html) has some basic information on how to get started.
If you feel like contributing to the project, please do! Bug fixes, enhancements, visual fixes etc. are always welcome. If you want to implement something big: Please start a discussion about that! The [documentation](https://docs.paperless-ngx.com/development/) has some basic information on how to get started.

## Community Support

@@ -102,9 +103,10 @@ For bugs please [open an issue](https://github.com/paperless-ngx/paperless-ngx/i

Paperless has been around a while now, and people are starting to build stuff on top of it. If you're one of those people, we can add your project to this list:

- [Paperless App](https://github.com/bauerj/paperless_app): An Android/iOS app for Paperless-ngx. Also works with the original Paperless and Paperless-ngx.
- [Paperless App](https://github.com/bauerj/paperless_app): An Android/iOS app for Paperless-ngx. Also works with the original Paperless and Paperless-ng.
- [Paperless Share](https://github.com/qcasey/paperless_share). Share any files from your Android application with paperless. Very simple, but works with all of the mobile scanning apps out there that allow you to share scanned documents.
- [Scan to Paperless](https://github.com/sbrunner/scan-to-paperless): Scan and prepare (crop, deskew, OCR, ...) your documents for Paperless.
- [Paperless Mobile](https://github.com/astubenbord/paperless-mobile): A modern, feature rich mobile application for Paperless.

These projects also exist, but their status and compatibility with paperless-ngx are unknown.

81
build-docker-image.sh
Executable file
@@ -0,0 +1,81 @@
#!/usr/bin/env bash

# Helper script for building the Docker image locally.
# Parses and provides the necessary versions of other images to Docker
# before passing in the rest of script args.

# First Argument: The Dockerfile to build
# Other Arguments: Additional arguments to docker build

# Example Usage:
# ./build-docker-image.sh Dockerfile -t paperless-ngx:my-awesome-feature

set -eu

if ! command -v jq &> /dev/null ; then
	echo "jq required"
	exit 1
elif [ ! -f "$1" ]; then
	echo "$1 is not a file, please provide the Dockerfile"
	exit 1
fi

# Get the branch name (used for caching)
branch_name=$(git rev-parse --abbrev-ref HEAD)

# Parse either Pipfile.lock or the .build-config.json
jbig2enc_version=$(jq ".jbig2enc.version" .build-config.json | sed 's/"//g')
qpdf_version=$(jq ".qpdf.version" .build-config.json | sed 's/"//g')
psycopg2_version=$(jq ".default.psycopg2.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')
pikepdf_version=$(jq ".default.pikepdf.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')
pillow_version=$(jq ".default.pillow.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')
lxml_version=$(jq ".default.lxml.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')

base_filename="$(basename -- "${1}")"
build_args_str=""
cache_from_str=""

case "${base_filename}" in

	*.jbig2enc)
		build_args_str="--build-arg JBIG2ENC_VERSION=${jbig2enc_version}"
		cache_from_str="--cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/jbig2enc:${jbig2enc_version}"
		;;

	*.psycopg2)
		build_args_str="--build-arg PSYCOPG2_VERSION=${psycopg2_version}"
		cache_from_str="--cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/psycopg2:${psycopg2_version}"
		;;

	*.qpdf)
		build_args_str="--build-arg QPDF_VERSION=${qpdf_version}"
		cache_from_str="--cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/qpdf:${qpdf_version}"
		;;

	*.pikepdf)
		build_args_str="--build-arg QPDF_VERSION=${qpdf_version} --build-arg PIKEPDF_VERSION=${pikepdf_version} --build-arg PILLOW_VERSION=${pillow_version} --build-arg LXML_VERSION=${lxml_version}"
		cache_from_str="--cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/pikepdf:${pikepdf_version}"
		;;

	Dockerfile)
		build_args_str="--build-arg QPDF_VERSION=${qpdf_version} --build-arg PIKEPDF_VERSION=${pikepdf_version} --build-arg PSYCOPG2_VERSION=${psycopg2_version} --build-arg JBIG2ENC_VERSION=${jbig2enc_version}"
		cache_from_str="--cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/app:${branch_name} --cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/app:dev"
		;;

	*)
		echo "Unable to match ${base_filename}"
		exit 1
		;;
esac

read -r -a build_args_arr <<< "${build_args_str}"
read -r -a cache_from_arr <<< "${cache_from_str}"

set -eux

docker buildx build --file "${1}" \
	--progress=plain \
	--output=type=docker \
	"${cache_from_arr[@]}" \
	"${build_args_arr[@]}" \
	"${@:2}" .
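The jq/sed pipeline above extracts pinned versions from Pipfile.lock by stripping the `==` prefix and the surrounding quotes. For reference, the same extraction in Python (a sketch that relies only on the `default.<package>.version` layout the script itself depends on):

```python
import json


def locked_version(pipfile_lock_path: str, package: str) -> str:
    """Return the pinned version of a package from Pipfile.lock.

    Mirrors: jq ".default.<package>.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g'
    """
    with open(pipfile_lock_path) as fp:
        lock = json.load(fp)
    # Entries look like {"version": "==9.3.0"}; drop the "==" pin prefix.
    return lock["default"][package]["version"].lstrip("=")


print(locked_version("Pipfile.lock", "pikepdf"))
print(locked_version("Pipfile.lock", "pillow"))
```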
26
charts/paperless-ngx/.helmignore
Normal file
@@ -0,0 +1,26 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/
# OWNERS file for Kubernetes
OWNERS
# helm-docs templates
*.gotmpl
35
charts/paperless-ngx/Chart.yaml
Normal file
@@ -0,0 +1,35 @@
---
apiVersion: v2
appVersion: "1.9.2"
description: Paperless-ngx - Index and archive all of your scanned paper documents
name: paperless
version: 10.0.1
kubeVersion: ">=1.16.0-0"
keywords:
  - paperless
  - paperless-ngx
  - dms
  - document
home: https://github.com/paperless-ngx/paperless-ngx/tree/main/charts/paperless-ngx
icon: https://github.com/paperless-ngx/paperless-ngx/raw/main/resources/logo/web/svg/square.svg
sources:
  - https://github.com/paperless-ngx/paperless-ngx
maintainers:
  - name: Paperless-ngx maintainers
dependencies:
  - name: common
    repository: https://library-charts.k8s-at-home.com
    version: 4.5.2
  - name: postgresql
    version: 11.6.12
    repository: https://charts.bitnami.com/bitnami
    condition: postgresql.enabled
  - name: redis
    version: 16.13.1
    repository: https://charts.bitnami.com/bitnami
    condition: redis.enabled
deprecated: false
annotations:
  artifacthub.io/changes: |
    - kind: changed
      description: Moved to Paperless-ngx ownership
201
charts/paperless-ngx/LICENSE
Normal file
@@ -0,0 +1,201 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright 2020 k8s@Home

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
50
charts/paperless-ngx/README.md
Normal file
@@ -0,0 +1,50 @@
# paperless

![Version: 10.0.1](https://img.shields.io/badge/Version-10.0.1-informational?style=flat-square) ![AppVersion: 1.9.2](https://img.shields.io/badge/AppVersion-1.9.2-informational?style=flat-square)

Paperless-ngx - Index and archive all of your scanned paper documents

**Homepage:** <https://github.com/paperless-ngx/paperless-ngx/tree/main/charts/paperless-ngx>

## Maintainers

| Name | Email | Url |
| ---- | ------ | --- |
| Paperless-ngx maintainers | | |

## Source Code

* <https://github.com/paperless-ngx/paperless-ngx>

## Requirements

Kubernetes: `>=1.16.0-0`

| Repository | Name | Version |
|------------|------|---------|
| https://charts.bitnami.com/bitnami | postgresql | 11.6.12 |
| https://charts.bitnami.com/bitnami | redis | 16.13.1 |
| https://library-charts.k8s-at-home.com | common | 4.5.2 |

## Values

| Key | Type | Default | Description |
|-----|------|---------|-------------|
| env | object | See below | See the following files for additional environment variables: https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose/ https://github.com/paperless-ngx/paperless-ngx/blob/main/paperless.conf.example |
| env.COMPOSE_PROJECT_NAME | string | `"paperless"` | Project name |
| env.PAPERLESS_DBHOST | string | `nil` | Database host to use |
| env.PAPERLESS_OCR_LANGUAGE | string | `"eng"` | OCR languages to install |
| env.PAPERLESS_PORT | int | `8000` | Port to use |
| env.PAPERLESS_REDIS | string | `nil` | Redis to use |
| image.pullPolicy | string | `"IfNotPresent"` | image pull policy |
| image.repository | string | `"ghcr.io/paperless-ngx/paperless-ngx"` | image repository |
| image.tag | string | chart.appVersion | image tag |
| ingress.main | object | See values.yaml | Enable and configure ingress settings for the chart under this key. |
| persistence.consume | object | See values.yaml | Configure volume to monitor for new documents. |
| persistence.data | object | See values.yaml | Configure persistence for data. |
| persistence.export | object | See values.yaml | Configure export volume. |
| persistence.media | object | See values.yaml | Configure persistence for media. |
| postgresql | object | See values.yaml | Enable and configure postgresql database subchart under this key. For more options see [postgresql chart documentation](https://github.com/bitnami/charts/tree/master/bitnami/postgresql) |
| redis | object | See values.yaml | Enable and configure redis subchart under this key. For more options see [redis chart documentation](https://github.com/bitnami/charts/tree/master/bitnami/redis) |
| service | object | See values.yaml | Configures service settings for the chart. |

8
charts/paperless-ngx/README_CONFIG.md.gotmpl
Normal file
@@ -0,0 +1,8 @@
{{- define "custom.custom.configuration.header" -}}
## Custom configuration
{{- end -}}

{{- define "custom.custom.configuration" -}}
{{ template "custom.custom.configuration.header" . }}
N/A
{{- end -}}
26
charts/paperless-ngx/ci/ct-values.yaml
Normal file
@@ -0,0 +1,26 @@
env:
  PAPERLESS_REDIS: redis://paperless-redis-headless:6379

persistence:
  data:
    enabled: true
    type: emptyDir
  media:
    enabled: true
    type: emptyDir
  consume:
    enabled: true
    type: emptyDir
  export:
    enabled: true
    type: emptyDir

redis:
  enabled: true
  architecture: standalone
  auth:
    enabled: false
  master:
    persistence:
      enabled: false
  fullnameOverride: paperless-redis
4
charts/paperless-ngx/templates/NOTES.txt
Normal file
@@ -0,0 +1,4 @@
{{- include "common.notes.defaultNotes" . }}
2. Create a super user by running the command:
   export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "common.names.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
   kubectl exec -it --namespace {{ .Release.Namespace }} $POD_NAME -- bash -c "python manage.py createsuperuser"
11
charts/paperless-ngx/templates/common.yaml
Normal file
@@ -0,0 +1,11 @@
{{/* Make sure all variables are set properly */}}
{{- include "common.values.setup" . }}

{{/* Append the hardcoded settings */}}
{{- define "paperless.harcodedValues" -}}
env:
  PAPERLESS_URL: http{{if ne ( len .Values.ingress.main.tls ) 0 }}s{{end}}://{{ (first .Values.ingress.main.hosts).host }}
{{- end -}}
{{- $_ := merge .Values (include "paperless.harcodedValues" . | fromYaml) -}}

{{ include "common.all" . }}
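The template above derives `PAPERLESS_URL` from the first ingress host, switching the scheme to https whenever any TLS entry is configured. The same logic restated as a small Python sketch for readability (the field names mirror the chart values; this is illustrative, not chart code):

```python
def derive_paperless_url(ingress_main: dict) -> str:
    """Mirror of the Helm expression:
    http{{if ne (len .Values.ingress.main.tls) 0}}s{{end}}://{{ (first .Values.ingress.main.hosts).host }}
    """
    scheme = "https" if len(ingress_main.get("tls", [])) != 0 else "http"
    host = ingress_main["hosts"][0]["host"]
    return f"{scheme}://{host}"


# Example: an ingress with one host and a TLS section yields an https URL.
print(derive_paperless_url({
    "hosts": [{"host": "paperless.example.com"}],
    "tls": [{"secretName": "paperless-tls"}],
}))  # -> https://paperless.example.com
```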
107
charts/paperless-ngx/values.yaml
Normal file
@@ -0,0 +1,107 @@
#
# IMPORTANT NOTE
#
# This chart inherits from our common library chart. You can check the default values/options here:
# https://github.com/k8s-at-home/library-charts/tree/main/charts/stable/common/values.yaml
#

image:
  # -- image repository
  repository: ghcr.io/paperless-ngx/paperless-ngx
  # -- image pull policy
  pullPolicy: IfNotPresent
  # -- image tag
  # @default -- chart.appVersion
  tag:

# -- See the following files for additional environment variables:
# https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose/
# https://github.com/paperless-ngx/paperless-ngx/blob/main/paperless.conf.example
# @default -- See below
env:
  # -- Project name
  COMPOSE_PROJECT_NAME: paperless
  # -- Redis to use
  PAPERLESS_REDIS:
  # -- OCR languages to install
  PAPERLESS_OCR_LANGUAGE: eng
  # USERMAP_UID: 1000
  # USERMAP_GID: 1000
  # PAPERLESS_TIME_ZONE: Europe/London
  # -- Database host to use
  PAPERLESS_DBHOST:
  # -- Port to use
  PAPERLESS_PORT: 8000
  # -- Username for the root user
  # PAPERLESS_ADMIN_USER: admin
  # -- Password for the root user
  # PAPERLESS_ADMIN_PASSWORD: admin
  # PAPERLESS_URL: <set to main ingress by default>

# -- Configures service settings for the chart.
# @default -- See values.yaml
service:
  main:
    ports:
      http:
        port: 8000

ingress:
  # -- Enable and configure ingress settings for the chart under this key.
  # @default -- See values.yaml
  main:
    enabled: false

persistence:
  # -- Configure persistence for data.
  # @default -- See values.yaml
  data:
    enabled: false
    mountPath: /usr/src/paperless/data
    accessMode: ReadWriteOnce
    emptyDir:
      enabled: false
  # -- Configure persistence for media.
  # @default -- See values.yaml
  media:
    enabled: false
    mountPath: /usr/src/paperless/media
    accessMode: ReadWriteOnce
    emptyDir:
      enabled: false
  # -- Configure volume to monitor for new documents.
  # @default -- See values.yaml
  consume:
    enabled: false
    mountPath: /usr/src/paperless/consume
    accessMode: ReadWriteOnce
    emptyDir:
      enabled: false
  # -- Configure export volume.
  # @default -- See values.yaml
  export:
    enabled: false
    mountPath: /usr/src/paperless/export
    accessMode: ReadWriteOnce
    emptyDir:
      enabled: false

# -- Enable and configure postgresql database subchart under this key.
# For more options see [postgresql chart documentation](https://github.com/bitnami/charts/tree/master/bitnami/postgresql)
# @default -- See values.yaml
postgresql:
  enabled: false
  postgresqlUsername: paperless
  postgresqlPassword: paperless
  postgresqlDatabase: paperless
  persistence:
    enabled: false
    # storageClass: ""

# -- Enable and configure redis subchart under this key.
# For more options see [redis chart documentation](https://github.com/bitnami/charts/tree/master/bitnami/redis)
# @default -- See values.yaml
redis:
  enabled: false
  auth:
    enabled: false
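The flat keys documented in the table above can all be overridden at install time; a sketch, assuming the chart is available from a repo named `paperless-ngx` (the repo name, release name, and Redis service name are illustrative):

```bash
# Enable the bundled Redis subchart and persistent data/media volumes;
# all names here are illustrative.
helm install paperless paperless-ngx/paperless-ngx \
  --set redis.enabled=true \
  --set env.PAPERLESS_REDIS=redis://paperless-redis-master:6379 \
  --set persistence.data.enabled=true \
  --set persistence.media.enabled=true
```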
35
docker-builders/Dockerfile.jbig2enc
Normal file
@@ -0,0 +1,35 @@
# This Dockerfile compiles the jbig2enc library
# Inputs:
#  - JBIG2ENC_VERSION - the Git tag to checkout and build

FROM debian:bullseye-slim as main

LABEL org.opencontainers.image.description="An intermediate image with jbig2enc built"

ARG DEBIAN_FRONTEND=noninteractive
ARG JBIG2ENC_VERSION

ARG BUILD_PACKAGES="\
  build-essential \
  automake \
  libtool \
  libleptonica-dev \
  zlib1g-dev \
  git \
  ca-certificates"

WORKDIR /usr/src/jbig2enc

RUN set -eux \
  && echo "Installing build tools" \
  && apt-get update --quiet \
  && apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
  && echo "Building jbig2enc" \
  && git clone --quiet --branch $JBIG2ENC_VERSION https://github.com/agl/jbig2enc . \
  && ./autogen.sh \
  && ./configure \
  && make \
  && echo "Cleaning up image" \
  && apt-get -y purge ${BUILD_PACKAGES} \
  && apt-get -y autoremove --purge \
  && rm -rf /var/lib/apt/lists/*
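Built standalone, the only required input is the tag to compile; a sketch, assuming jbig2enc's upstream `0.29` tag (the tag and image name are illustrative):

```bash
# Build the intermediate image, pinning the jbig2enc tag; names are illustrative.
docker build \
  --file docker-builders/Dockerfile.jbig2enc \
  --build-arg JBIG2ENC_VERSION=0.29 \
  --tag jbig2enc-builder:0.29 \
  .
```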
102
docker-builders/Dockerfile.pikepdf
Normal file
@@ -0,0 +1,102 @@
# This Dockerfile builds the pikepdf wheel
# Inputs:
#  - REPO - Docker repository to pull qpdf from
#  - QPDF_VERSION - The image qpdf version to copy .deb files from
#  - PIKEPDF_VERSION - Version of pikepdf to build wheel for

# Default to pulling from the main repo registry when manually building
ARG REPO="paperless-ngx/paperless-ngx"

ARG QPDF_VERSION
FROM ghcr.io/${REPO}/builder/qpdf:${QPDF_VERSION} as qpdf-builder

# This does nothing, except provide a name for a copy below

FROM python:3.9-slim-bullseye as main

LABEL org.opencontainers.image.description="An intermediate image with the pikepdf wheel built"

# Buildx provided
ARG TARGETARCH
ARG TARGETVARIANT

ARG DEBIAN_FRONTEND=noninteractive
# Workflow provided
ARG QPDF_VERSION
ARG PIKEPDF_VERSION
# These are not used, but will still bust the cache if one changes
# Otherwise, the main image will try to build them (and fail)
ARG PILLOW_VERSION
ARG LXML_VERSION

ARG BUILD_PACKAGES="\
  build-essential \
  python3-dev \
  python3-pip \
  # qpdf requirement - https://github.com/qpdf/qpdf#crypto-providers
  libgnutls28-dev \
  # lxml requirements - https://lxml.de/installation.html
  libxml2-dev \
  libxslt1-dev \
  # Pillow requirements - https://pillow.readthedocs.io/en/stable/installation.html#external-libraries
  # JPEG functionality
  libjpeg62-turbo-dev \
  # compressed PNG
  zlib1g-dev \
  # compressed TIFF
  libtiff-dev \
  # type related services
  libfreetype-dev \
  # color management
  liblcms2-dev \
  # WebP format
  libwebp-dev \
  # JPEG 2000
  libopenjp2-7-dev \
  # improved color quantization
  libimagequant-dev \
  # complex text layout support
  libraqm-dev"

WORKDIR /usr/src

COPY --from=qpdf-builder /usr/src/qpdf/${QPDF_VERSION}/${TARGETARCH}${TARGETVARIANT}/*.deb ./

# As this is a base image for a multi-stage final image
# the added size of the install is basically irrelevant

RUN set -eux \
  && echo "Installing build tools" \
  && apt-get update --quiet \
  && apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
  && echo "Installing qpdf" \
  && dpkg --install libqpdf29_*.deb \
  && dpkg --install libqpdf-dev_*.deb \
  && echo "Installing Python tools" \
  && python3 -m pip install --no-cache-dir --upgrade \
    pip \
    wheel \
    # https://pikepdf.readthedocs.io/en/latest/installation.html#requirements
    pybind11 \
  && echo "Building pikepdf wheel ${PIKEPDF_VERSION}" \
  && mkdir wheels \
  && python3 -m pip wheel \
    # Build the package at the required version
    pikepdf==${PIKEPDF_VERSION} \
    # Look to piwheels for additional pre-built wheels
    --extra-index-url https://www.piwheels.org/simple \
    # Output the *.whl into this directory
    --wheel-dir wheels \
    # Do not use a binary package for the package being built
    --no-binary=pikepdf \
    # Do use binary packages for dependencies
    --prefer-binary \
    # Don't cache build files
    --no-cache-dir \
  && ls -ahl wheels \
  && echo "Gathering package data" \
  && dpkg-query -f '${Package;-40}${Version}\n' -W > ./wheels/pkg-list.txt \
  && echo "Cleaning up image" \
  && apt-get -y purge ${BUILD_PACKAGES} \
  && apt-get -y autoremove --purge \
  && rm -rf /var/lib/apt/lists/*
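Because the wheel stage keys off the buildx-provided TARGETARCH/TARGETVARIANT, cross building is just a matter of passing `--platform`; a sketch (all version numbers and tag names here are illustrative):

```bash
# Cross-build the pikepdf wheel image for armv7; versions are illustrative.
docker buildx build \
  --file docker-builders/Dockerfile.pikepdf \
  --platform linux/arm/v7 \
  --build-arg REPO=paperless-ngx/paperless-ngx \
  --build-arg QPDF_VERSION=11.1.1 \
  --build-arg PIKEPDF_VERSION=6.0.2 \
  --tag pikepdf-builder:6.0.2 \
  --load \
  .
```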
50
docker-builders/Dockerfile.psycopg2
Normal file
@@ -0,0 +1,50 @@
# This Dockerfile builds the psycopg2 wheel
# Inputs:
#  - PSYCOPG2_VERSION - Version to build

FROM python:3.9-slim-bullseye as main

LABEL org.opencontainers.image.description="An intermediate image with the psycopg2 wheel built"

ARG PSYCOPG2_VERSION
ARG DEBIAN_FRONTEND=noninteractive

ARG BUILD_PACKAGES="\
  build-essential \
  python3-dev \
  python3-pip \
  # https://www.psycopg.org/docs/install.html#prerequisites
  libpq-dev"

WORKDIR /usr/src

# As this is a base image for a multi-stage final image
# the added size of the install is basically irrelevant

RUN set -eux \
  && echo "Installing build tools" \
  && apt-get update --quiet \
  && apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
  && echo "Installing Python tools" \
  && python3 -m pip install --no-cache-dir --upgrade pip wheel \
  && echo "Building psycopg2 wheel ${PSYCOPG2_VERSION}" \
  && cd /usr/src \
  && mkdir wheels \
  && python3 -m pip wheel \
    # Build the package at the required version
    psycopg2==${PSYCOPG2_VERSION} \
    # Output the *.whl into this directory
    --wheel-dir wheels \
    # Do not use a binary package for the package being built
    --no-binary=psycopg2 \
    # Do use binary packages for dependencies
    --prefer-binary \
    # Don't cache build files
    --no-cache-dir \
  && ls -ahl wheels/ \
  && echo "Gathering package data" \
  && dpkg-query -f '${Package;-40}${Version}\n' -W > ./wheels/pkg-list.txt \
  && echo "Cleaning up image" \
  && apt-get -y purge ${BUILD_PACKAGES} \
  && apt-get -y autoremove --purge \
  && rm -rf /var/lib/apt/lists/*
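Downstream stages only need the `wheels` directory, so the final image can install without a compiler; a sketch of how a consumer might use it, with the stage name `psycopg2-builder` being an illustrative assumption:

```bash
# Inside a downstream image build, after something like
#   COPY --from=psycopg2-builder /usr/src/wheels /usr/src/wheels
# in the Dockerfile, install from the pre-built wheel instead of compiling:
python3 -m pip install --no-cache-dir --find-links /usr/src/wheels psycopg2
```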
156
docker-builders/Dockerfile.qpdf
Normal file
@@ -0,0 +1,156 @@
#
# Stage: pre-build
# Purpose:
#  - Installs common packages
#  - Sets common environment variables related to dpkg
#  - Acquires the qpdf source from bookworm
# Useful Links:
#  - https://qpdf.readthedocs.io/en/stable/installation.html#system-requirements
#  - https://wiki.debian.org/Multiarch/HOWTO
#  - https://wiki.debian.org/CrossCompiling
#

FROM debian:bullseye-slim as pre-build

ARG QPDF_VERSION

ARG COMMON_BUILD_PACKAGES="\
  cmake \
  debhelper \
  debian-keyring \
  devscripts \
  dpkg-dev \
  equivs \
  packaging-dev \
  libtool"

ENV DEB_BUILD_OPTIONS="terse nocheck nodoc parallel=2"

WORKDIR /usr/src

RUN set -eux \
  && echo "Installing common packages" \
  && apt-get update --quiet \
  && apt-get install --yes --quiet --no-install-recommends ${COMMON_BUILD_PACKAGES} \
  && echo "Getting qpdf source" \
  && echo "deb-src http://deb.debian.org/debian/ bookworm main" > /etc/apt/sources.list.d/bookworm-src.list \
  && apt-get update --quiet \
  && apt-get source --yes --quiet qpdf=${QPDF_VERSION}-1/bookworm

#
# Stage: amd64-builder
# Purpose: Builds qpdf for x86_64 (native build)
#
FROM pre-build as amd64-builder

ARG AMD64_BUILD_PACKAGES="\
  build-essential \
  libjpeg62-turbo-dev:amd64 \
  libgnutls28-dev:amd64 \
  zlib1g-dev:amd64"

WORKDIR /usr/src/qpdf-${QPDF_VERSION}

RUN set -eux \
  && echo "Beginning amd64" \
  && echo "Install amd64 packages" \
  && apt-get update --quiet \
  && apt-get install --yes --quiet --no-install-recommends ${AMD64_BUILD_PACKAGES} \
  && echo "Building amd64" \
  && dpkg-buildpackage --build=binary --unsigned-source --unsigned-changes --post-clean \
  && echo "Removing debug files" \
  && rm -f ../libqpdf29-dbgsym* \
  && rm -f ../qpdf-dbgsym* \
  && echo "Gathering package data" \
  && dpkg-query -f '${Package;-40}${Version}\n' -W > ../pkg-list.txt

#
# Stage: armhf-builder
# Purpose:
#  - Sets armhf specific environment
#  - Builds qpdf for armhf (cross compile)
#
FROM pre-build as armhf-builder

ARG ARMHF_PACKAGES="\
  crossbuild-essential-armhf \
  libjpeg62-turbo-dev:armhf \
  libgnutls28-dev:armhf \
  zlib1g-dev:armhf"

WORKDIR /usr/src/qpdf-${QPDF_VERSION}

ENV CXX="/usr/bin/arm-linux-gnueabihf-g++" \
  CC="/usr/bin/arm-linux-gnueabihf-gcc"

RUN set -eux \
  && echo "Beginning armhf" \
  && echo "Install armhf packages" \
  && dpkg --add-architecture armhf \
  && apt-get update --quiet \
  && apt-get install --yes --quiet --no-install-recommends ${ARMHF_PACKAGES} \
  && echo "Building armhf" \
  && dpkg-buildpackage --build=binary --unsigned-source --unsigned-changes --post-clean --host-arch armhf \
  && echo "Removing debug files" \
  && rm -f ../libqpdf29-dbgsym* \
  && rm -f ../qpdf-dbgsym* \
  && echo "Gathering package data" \
  && dpkg-query -f '${Package;-40}${Version}\n' -W > ../pkg-list.txt

#
# Stage: aarch64-builder
# Purpose:
#  - Sets aarch64 specific environment
#  - Builds qpdf for aarch64 (cross compile)
#
FROM pre-build as aarch64-builder

ARG ARM64_PACKAGES="\
  crossbuild-essential-arm64 \
  libjpeg62-turbo-dev:arm64 \
  libgnutls28-dev:arm64 \
  zlib1g-dev:arm64"

ENV CXX="/usr/bin/aarch64-linux-gnu-g++" \
  CC="/usr/bin/aarch64-linux-gnu-gcc"

WORKDIR /usr/src/qpdf-${QPDF_VERSION}

RUN set -eux \
  && echo "Beginning arm64" \
  && echo "Install arm64 packages" \
  && dpkg --add-architecture arm64 \
  && apt-get update --quiet \
  && apt-get install --yes --quiet --no-install-recommends ${ARM64_PACKAGES} \
  && echo "Building arm64" \
  && dpkg-buildpackage --build=binary --unsigned-source --unsigned-changes --post-clean --host-arch arm64 \
  && echo "Removing debug files" \
  && rm -f ../libqpdf29-dbgsym* \
  && rm -f ../qpdf-dbgsym* \
  && echo "Gathering package data" \
  && dpkg-query -f '${Package;-40}${Version}\n' -W > ../pkg-list.txt

#
# Stage: package
# Purpose: Holds the compiled .deb files in arch/variant specific folders
#
FROM alpine:3.17 as package

LABEL org.opencontainers.image.description="An image with qpdf installers stored in architecture & version specific folders"

ARG QPDF_VERSION

WORKDIR /usr/src/qpdf/${QPDF_VERSION}/amd64

COPY --from=amd64-builder /usr/src/*.deb ./
COPY --from=amd64-builder /usr/src/pkg-list.txt ./

# Note this is ${TARGETARCH}${TARGETVARIANT} for armv7
WORKDIR /usr/src/qpdf/${QPDF_VERSION}/armv7

COPY --from=armhf-builder /usr/src/*.deb ./
COPY --from=armhf-builder /usr/src/pkg-list.txt ./

WORKDIR /usr/src/qpdf/${QPDF_VERSION}/arm64

COPY --from=aarch64-builder /usr/src/*.deb ./
COPY --from=aarch64-builder /usr/src/pkg-list.txt ./
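Since every architecture is cross compiled inside the one Dockerfile, a single native build produces all three .deb sets; a sketch of building it and pulling the artifacts back out (the qpdf version and container names are illustrative):

```bash
# Build the packaging stage and extract the per-arch .deb files; names are illustrative.
docker build \
  --file docker-builders/Dockerfile.qpdf \
  --build-arg QPDF_VERSION=11.1.1 \
  --tag qpdf-builder:11.1.1 \
  .
# Copy the artifacts out of a temporary container for inspection
docker create --name qpdf-tmp qpdf-builder:11.1.1
docker cp qpdf-tmp:/usr/src/qpdf/11.1.1 ./qpdf-debs
docker rm qpdf-tmp
```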
25
docker/compose/docker-compose.ci-test.yml
Normal file
@@ -0,0 +1,25 @@
# docker-compose file for running paperless testing with actual gotenberg
# and Tika containers for a more end-to-end test of the Tika related functionality
# Can be used locally or by the CI to start the necessary containers with the
# correct networking for the tests

version: "3.7"
services:
  gotenberg:
    image: docker.io/gotenberg/gotenberg:7.6
    hostname: gotenberg
    container_name: gotenberg
    network_mode: host
    restart: unless-stopped
    # The gotenberg chromium route is used to convert .eml files. We do not
    # want to allow external content like tracking pixels or even javascript.
    command:
      - "gotenberg"
      - "--chromium-disable-javascript=true"
      - "--chromium-allow-list=file:///tmp/.*"
  tika:
    image: ghcr.io/paperless-ngx/tika:latest
    hostname: tika
    container_name: tika
    network_mode: host
    restart: unless-stopped
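With `network_mode: host`, both services bind directly to the host, so a local test run can reach them on localhost; a sketch (the pytest target and endpoint values are illustrative):

```bash
# Start the helper containers, point the Tika tests at them, then tear down;
# the pytest path and endpoints are illustrative.
docker compose --file docker/compose/docker-compose.ci-test.yml up --detach
PAPERLESS_TIKA_ENDPOINT=http://localhost:9998 \
PAPERLESS_TIKA_GOTENBERG_ENDPOINT=http://localhost:3000 \
pytest src/paperless_tika/
docker compose --file docker/compose/docker-compose.ci-test.yml down
```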
@@ -36,3 +36,7 @@
 # The default language to use for OCR. Set this to the language most of your
 # documents are written in.
 #PAPERLESS_OCR_LANGUAGE=eng
+
+# Set if accessing paperless via a domain subpath e.g. https://domain.com/PATHPREFIX and using a reverse-proxy like traefik or nginx
+#PAPERLESS_FORCE_SCRIPT_NAME=/PATHPREFIX
+#PAPERLESS_STATIC_URL=/PATHPREFIX/static/ # trailing slash required
@@ -1,4 +1,4 @@
-# docker-compose file for running paperless from the docker container registry.
+# docker-compose file for running paperless from the Docker Hub.
 # This file contains everything paperless needs to run.
 # Paperless supports amd64, arm and arm64 hardware.
 #
@@ -10,14 +10,10 @@
 #   as this file and mounted to the correct folders inside the container.
 # - Paperless listens on port 8000.
 #
-# SQLite is used as the database. The SQLite file is stored in the data volume.
-#
-# iwishiwasaneagle/apache-tika-arm docker image is used to enable arm64 arch
-# which apache/tika does not currently support.
-#
 # In addition to that, this docker-compose file adds the following optional
 # configurations:
 #
+# - Instead of SQLite (default), MariaDB is used as the database server.
 # - Apache Tika and Gotenberg servers are started with paperless and paperless
 #   is configured to use these services. These provide support for consuming
 #   Office documents (Word, Excel, Power Point and their LibreOffice counter-
@@ -37,15 +33,28 @@
 version: "3.4"
 services:
   broker:
-    image: redis:6.0
+    image: docker.io/library/redis:7
     restart: unless-stopped
     volumes:
       - redisdata:/data
 
+  db:
+    image: docker.io/library/mariadb:10
+    restart: unless-stopped
+    volumes:
+      - dbdata:/var/lib/mysql
+    environment:
+      MARIADB_HOST: paperless
+      MARIADB_DATABASE: paperless
+      MARIADB_USER: paperless
+      MARIADB_PASSWORD: paperless
+      MARIADB_ROOT_PASSWORD: paperless
+
   webserver:
     image: ghcr.io/paperless-ngx/paperless-ngx:latest
     restart: unless-stopped
     depends_on:
+      - db
       - broker
       - gotenberg
       - tika
@@ -64,21 +73,31 @@ services:
     env_file: docker-compose.env
     environment:
       PAPERLESS_REDIS: redis://broker:6379
+      PAPERLESS_DBENGINE: mariadb
+      PAPERLESS_DBHOST: db
+      PAPERLESS_DBUSER: paperless # only needed if non-default username
+      PAPERLESS_DBPASS: paperless # only needed if non-default password
+      PAPERLESS_DBPORT: 3306
       PAPERLESS_TIKA_ENABLED: 1
       PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
       PAPERLESS_TIKA_ENDPOINT: http://tika:9998
 
   gotenberg:
-    image: thecodingmachine/gotenberg
+    image: docker.io/gotenberg/gotenberg:7.6
    restart: unless-stopped
-    environment:
-      DISABLE_GOOGLE_CHROME: 1
+    # The gotenberg chromium route is used to convert .eml files. We do not
+    # want to allow external content like tracking pixels or even javascript.
+    command:
+      - "gotenberg"
+      - "--chromium-disable-javascript=true"
+      - "--chromium-allow-list=file:///tmp/.*"
 
   tika:
-    image: iwishiwasaneagle/apache-tika-arm@sha256:a78c25ffe57ecb1a194b2859d42a61af46e9e845191512b8f1a4bf90578ffdfd
+    image: ghcr.io/paperless-ngx/tika:latest
     restart: unless-stopped
 
 volumes:
   data:
   media:
+  dbdata:
   redisdata:
81
docker/compose/docker-compose.mariadb.yml
Normal file
@@ -0,0 +1,81 @@
# docker-compose file for running paperless from the Docker Hub.
# This file contains everything paperless needs to run.
# Paperless supports amd64, arm and arm64 hardware.
#
# All compose files of paperless configure paperless in the following way:
#
# - Paperless is (re)started on system boot, if it was running before shutdown.
# - Docker volumes for storing data are managed by Docker.
# - Folders for importing and exporting files are created in the same directory
#   as this file and mounted to the correct folders inside the container.
# - Paperless listens on port 8000.
#
# In addition to that, this docker-compose file adds the following optional
# configurations:
#
# - Instead of SQLite (default), MariaDB is used as the database server.
#
# To install and update paperless with this file, do the following:
#
# - Copy this file as 'docker-compose.yml' and the files 'docker-compose.env'
#   and '.env' into a folder.
# - Run 'docker-compose pull'.
# - Run 'docker-compose run --rm webserver createsuperuser' to create a user.
# - Run 'docker-compose up -d'.
#
# For more extensive installation and update instructions, refer to the
# documentation.

version: "3.4"
services:
  broker:
    image: docker.io/library/redis:7
    restart: unless-stopped
    volumes:
      - redisdata:/data

  db:
    image: docker.io/library/mariadb:10
    restart: unless-stopped
    volumes:
      - dbdata:/var/lib/mysql
    environment:
      MARIADB_HOST: paperless
      MARIADB_DATABASE: paperless
      MARIADB_USER: paperless
      MARIADB_PASSWORD: paperless
      MARIADB_ROOT_PASSWORD: paperless

  webserver:
    image: ghcr.io/paperless-ngx/paperless-ngx:latest
    restart: unless-stopped
    depends_on:
      - db
      - broker
    ports:
      - 8000:8000
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000"]
      interval: 30s
      timeout: 10s
      retries: 5
    volumes:
      - data:/usr/src/paperless/data
      - media:/usr/src/paperless/media
      - ./export:/usr/src/paperless/export
      - ./consume:/usr/src/paperless/consume
    env_file: docker-compose.env
    environment:
      PAPERLESS_REDIS: redis://broker:6379
      PAPERLESS_DBENGINE: mariadb
      PAPERLESS_DBHOST: db
      PAPERLESS_DBUSER: paperless # only needed if non-default username
      PAPERLESS_DBPASS: paperless # only needed if non-default password
      PAPERLESS_DBPORT: 3306


volumes:
  data:
  media:
  dbdata:
  redisdata:
@@ -31,13 +31,13 @@
 version: "3.4"
 services:
   broker:
-    image: redis:6.0
+    image: docker.io/library/redis:7
     restart: unless-stopped
     volumes:
       - redisdata:/data
 
   db:
-    image: postgres:13
+    image: docker.io/library/postgres:13
     restart: unless-stopped
     volumes:
       - pgdata:/var/lib/postgresql/data
@@ -55,7 +55,7 @@ services:
     ports:
       - 8010:8000
     healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:8000"]
+      test: ["CMD", "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000"]
       interval: 30s
       timeout: 10s
       retries: 5
@@ -1,7 +1,6 @@
 # docker-compose file for running paperless from the docker container registry.
 # This file contains everything paperless needs to run.
-# Paperless supports amd64, arm and arm64 hardware. The apache/tika image
-# does not support arm or arm64, however.
+# Paperless supports amd64, arm and arm64 hardware.
 #
 # All compose files of paperless configure paperless in the following way:
 #
@@ -34,13 +33,13 @@
 version: "3.4"
 services:
   broker:
-    image: redis:6.0
+    image: docker.io/library/redis:7
     restart: unless-stopped
     volumes:
       - redisdata:/data
 
   db:
-    image: postgres:13
+    image: docker.io/library/postgres:13
     restart: unless-stopped
     volumes:
       - pgdata:/var/lib/postgresql/data
@@ -60,7 +59,7 @@ services:
     ports:
       - 8000:8000
     healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:8000"]
+      test: ["CMD", "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000"]
       interval: 30s
       timeout: 10s
       retries: 5
@@ -78,14 +77,18 @@ services:
       PAPERLESS_TIKA_ENDPOINT: http://tika:9998
 
   gotenberg:
-    image: gotenberg/gotenberg:7
+    image: docker.io/gotenberg/gotenberg:7.6
     restart: unless-stopped
+
+    # The gotenberg chromium route is used to convert .eml files. We do not
+    # want to allow external content like tracking pixels or even javascript.
     command:
       - "gotenberg"
-      - "--chromium-disable-routes=true"
+      - "--chromium-disable-javascript=true"
+      - "--chromium-allow-list=file:///tmp/.*"
 
   tika:
-    image: apache/tika
+    image: ghcr.io/paperless-ngx/tika:latest
     restart: unless-stopped
 
 volumes:
@@ -29,13 +29,13 @@
 version: "3.4"
 services:
   broker:
-    image: redis:6.0
+    image: docker.io/library/redis:7
     restart: unless-stopped
     volumes:
       - redisdata:/data
 
   db:
-    image: postgres:13
+    image: docker.io/library/postgres:13
     restart: unless-stopped
     volumes:
       - pgdata:/var/lib/postgresql/data
@@ -53,7 +53,7 @@ services:
     ports:
       - 8000:8000
     healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:8000"]
+      test: ["CMD", "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000"]
       interval: 30s
       timeout: 10s
       retries: 5
@@ -1,8 +1,6 @@
 # docker-compose file for running paperless from the docker container registry.
 # This file contains everything paperless needs to run.
-# Paperless supports amd64, arm and arm64 hardware. The apache/tika image
-# does not support arm or arm64, however.
-#
+# Paperless supports amd64, arm and arm64 hardware.
 # All compose files of paperless configure paperless in the following way:
 #
@@ -35,7 +33,7 @@
 version: "3.4"
 services:
   broker:
-    image: redis:6.0
+    image: docker.io/library/redis:7
     restart: unless-stopped
     volumes:
       - redisdata:/data
@@ -50,7 +48,7 @@ services:
     ports:
       - 8000:8000
     healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:8000"]
+      test: ["CMD", "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000"]
       interval: 30s
       timeout: 10s
       retries: 5
@@ -67,14 +65,18 @@ services:
       PAPERLESS_TIKA_ENDPOINT: http://tika:9998
 
   gotenberg:
-    image: gotenberg/gotenberg:7
+    image: docker.io/gotenberg/gotenberg:7.6
     restart: unless-stopped
+
+    # The gotenberg chromium route is used to convert .eml files. We do not
+    # want to allow external content like tracking pixels or even javascript.
     command:
       - "gotenberg"
-      - "--chromium-disable-routes=true"
+      - "--chromium-disable-javascript=true"
+      - "--chromium-allow-list=file:///tmp/.*"
 
   tika:
-    image: apache/tika
+    image: ghcr.io/paperless-ngx/tika:latest
     restart: unless-stopped
 
 volumes:
@@ -26,7 +26,7 @@
 version: "3.4"
 services:
   broker:
-    image: redis:6.0
+    image: docker.io/library/redis:7
     restart: unless-stopped
     volumes:
       - redisdata:/data
@@ -39,7 +39,7 @@ services:
     ports:
       - 8000:8000
     healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:8000"]
+      test: ["CMD", "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000"]
       interval: 30s
       timeout: 10s
       retries: 5
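The tightened curl flags (`--max-time 2` in particular) keep a hung webserver from stalling the health probe. Once the stack is up, the verdict Docker derives from that probe can be read directly; a sketch (the container name is illustrative and depends on your compose project name):

```bash
# Inspect the healthcheck status derived from the curl probe;
# the container name is illustrative.
docker inspect --format '{{.State.Health.Status}}' paperless_webserver_1
```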
@@ -2,46 +2,160 @@
 
 set -e
 
+# Adapted from:
+# https://github.com/docker-library/postgres/blob/master/docker-entrypoint.sh
+# usage: file_env VAR
+# ie: file_env 'XYZ_DB_PASSWORD' will allow for "$XYZ_DB_PASSWORD_FILE" to
+# fill in the value of "$XYZ_DB_PASSWORD" from a file, especially for Docker's
+# secrets feature
+file_env() {
+    local -r var="$1"
+    local -r fileVar="${var}_FILE"
+
+    # Basic validation
+    if [ "${!var:-}" ] && [ "${!fileVar:-}" ]; then
+        echo >&2 "error: both $var and $fileVar are set (but are exclusive)"
+        exit 1
+    fi
+
+    # Only export var if the _FILE exists
+    if [ "${!fileVar:-}" ]; then
+        # And the file exists
+        if [[ -f ${!fileVar} ]]; then
+            echo "Setting ${var} from file"
+            val="$(< "${!fileVar}")"
+            export "$var"="$val"
+        else
+            echo "File ${!fileVar} doesn't exist"
+            exit 1
+        fi
+    fi
+
+}
+
 # Source: https://github.com/sameersbn/docker-gitlab/
 map_uidgid() {
-    USERMAP_ORIG_UID=$(id -u paperless)
-    USERMAP_ORIG_GID=$(id -g paperless)
-    USERMAP_NEW_UID=${USERMAP_UID:-$USERMAP_ORIG_UID}
-    USERMAP_NEW_GID=${USERMAP_GID:-${USERMAP_ORIG_GID:-$USERMAP_NEW_UID}}
-    if [[ ${USERMAP_NEW_UID} != "${USERMAP_ORIG_UID}" || ${USERMAP_NEW_GID} != "${USERMAP_ORIG_GID}" ]]; then
-        echo "Mapping UID and GID for paperless:paperless to $USERMAP_NEW_UID:$USERMAP_NEW_GID"
-        usermod -o -u "${USERMAP_NEW_UID}" paperless
-        groupmod -o -g "${USERMAP_NEW_GID}" paperless
+    local -r usermap_original_uid=$(id -u paperless)
+    local -r usermap_original_gid=$(id -g paperless)
+    local -r usermap_new_uid=${USERMAP_UID:-$usermap_original_uid}
+    local -r usermap_new_gid=${USERMAP_GID:-${usermap_original_gid:-$usermap_new_uid}}
+    if [[ ${usermap_new_uid} != "${usermap_original_uid}" || ${usermap_new_gid} != "${usermap_original_gid}" ]]; then
+        echo "Mapping UID and GID for paperless:paperless to $usermap_new_uid:$usermap_new_gid"
+        usermod -o -u "${usermap_new_uid}" paperless
+        groupmod -o -g "${usermap_new_gid}" paperless
     fi
 }
 
+map_folders() {
+    # Export these so they can be used in docker-prepare.sh
+    export DATA_DIR="${PAPERLESS_DATA_DIR:-/usr/src/paperless/data}"
+    export MEDIA_ROOT_DIR="${PAPERLESS_MEDIA_ROOT:-/usr/src/paperless/media}"
+    export CONSUME_DIR="${PAPERLESS_CONSUMPTION_DIR:-/usr/src/paperless/consume}"
+}
+
+custom_container_init() {
+    # Mostly borrowed from the LinuxServer.io base image
+    # https://github.com/linuxserver/docker-baseimage-ubuntu/tree/bionic/root/etc/cont-init.d
+    local -r custom_script_dir="/custom-cont-init.d"
+    # Tamper checking.
+    # Don't run files which are owned by anyone except root
+    # Don't run files which are writeable by others
+    if [ -d "${custom_script_dir}" ]; then
+        if [ -n "$(/usr/bin/find "${custom_script_dir}" -maxdepth 1 ! -user root)" ]; then
+            echo "**** Potential tampering with custom scripts detected ****"
+            echo "**** The folder '${custom_script_dir}' must be owned by root ****"
+            return 0
+        fi
+        if [ -n "$(/usr/bin/find "${custom_script_dir}" -maxdepth 1 -perm -o+w)" ]; then
+            echo "**** The folder '${custom_script_dir}' or some of contents have write permissions for others, which is a security risk. ****"
+            echo "**** Please review the permissions and their contents to make sure they are owned by root, and can only be modified by root. ****"
+            return 0
+        fi
+
+        # Make sure custom init directory has files in it
+        if [ -n "$(/bin/ls -A "${custom_script_dir}" 2>/dev/null)" ]; then
+            echo "[custom-init] files found in ${custom_script_dir} executing"
+            # Loop over files in the directory
+            for SCRIPT in "${custom_script_dir}"/*; do
+                NAME="$(basename "${SCRIPT}")"
+                if [ -f "${SCRIPT}" ]; then
+                    echo "[custom-init] ${NAME}: executing..."
+                    /bin/bash "${SCRIPT}"
+                    echo "[custom-init] ${NAME}: exited $?"
+                elif [ ! -f "${SCRIPT}" ]; then
+                    echo "[custom-init] ${NAME}: is not a file"
+                fi
+            done
+        else
+            echo "[custom-init] no custom files found exiting..."
+        fi
+
+    fi
+}
+
 initialize() {
+
+    # Setup environment from secrets before anything else
+    for env_var in \
+        PAPERLESS_DBUSER \
+        PAPERLESS_DBPASS \
+        PAPERLESS_SECRET_KEY \
+        PAPERLESS_AUTO_LOGIN_USERNAME \
+        PAPERLESS_ADMIN_USER \
+        PAPERLESS_ADMIN_MAIL \
+        PAPERLESS_ADMIN_PASSWORD \
+        PAPERLESS_REDIS; do
+        # Check for a version of this var with _FILE appended
+        # and convert the contents to the env var value
+        file_env ${env_var}
+    done
+
+    # Change the user and group IDs if needed
     map_uidgid
 
-    for dir in export data data/index media media/documents media/documents/originals media/documents/thumbnails; do
-        if [[ ! -d "../$dir" ]]; then
-            echo "Creating directory ../$dir"
-            mkdir ../$dir
+    # Check for overrides of certain folders
+    map_folders
+
+    local -r export_dir="/usr/src/paperless/export"
+
+    for dir in \
+        "${export_dir}" \
+        "${DATA_DIR}" "${DATA_DIR}/index" \
+        "${MEDIA_ROOT_DIR}" "${MEDIA_ROOT_DIR}/documents" "${MEDIA_ROOT_DIR}/documents/originals" "${MEDIA_ROOT_DIR}/documents/thumbnails" \
+        "${CONSUME_DIR}"; do
+        if [[ ! -d "${dir}" ]]; then
+            echo "Creating directory ${dir}"
+            mkdir "${dir}"
         fi
     done
 
-    echo "Creating directory /tmp/paperless"
-    mkdir -p /tmp/paperless
+    local -r tmp_dir="/tmp/paperless"
+    echo "Creating directory ${tmp_dir}"
+    mkdir -p "${tmp_dir}"
 
     set +e
     echo "Adjusting permissions of paperless files. This may take a while."
-    chown -R paperless:paperless /tmp/paperless
-    find .. -not \( -user paperless -and -group paperless \) -exec chown paperless:paperless {} +
+    chown -R paperless:paperless ${tmp_dir}
+    for dir in \
+        "${export_dir}" \
+        "${DATA_DIR}" \
+        "${MEDIA_ROOT_DIR}" \
+        "${CONSUME_DIR}"; do
+        find "${dir}" -not \( -user paperless -and -group paperless \) -exec chown paperless:paperless {} +
+    done
     set -e
 
-    gosu paperless /sbin/docker-prepare.sh
+    "${gosu_cmd[@]}" /sbin/docker-prepare.sh
+
+    # Leave this last thing
+    custom_container_init
+
 }
 
 install_languages() {
     echo "Installing languages..."
 
-    local langs="$1"
-    read -ra langs <<<"$langs"
+    read -ra langs <<<"$1"
 
     # Check that it is not empty
     if [ ${#langs[@]} -eq 0 ]; then
@@ -51,10 +165,6 @@ install_languages() {
 
     for lang in "${langs[@]}"; do
         pkg="tesseract-ocr-$lang"
-        # English is installed by default
-        #if [[ "$lang" == "eng" ]]; then
-        #    continue
-        #fi
 
         if dpkg -s "$pkg" &>/dev/null; then
             echo "Package $pkg already installed!"
@@ -76,6 +186,11 @@ install_languages() {
 
 echo "Paperless-ngx docker container starting..."
 
+gosu_cmd=(gosu paperless)
+if [ "$(id -u)" == "$(id -u paperless)" ]; then
+    gosu_cmd=()
+fi
+
 # Install additional languages if specified
 if [[ -n "$PAPERLESS_OCR_LANGUAGES" ]]; then
     install_languages "$PAPERLESS_OCR_LANGUAGES"
@@ -85,7 +200,7 @@ initialize
 
 if [[ "$1" != "/"* ]]; then
     echo Executing management command "$@"
-    exec gosu paperless python3 manage.py "$@"
+    exec "${gosu_cmd[@]}" python3 manage.py "$@"
 else
     echo Executing "$@"
     exec "$@"
@@ -1,16 +1,44 @@
 #!/usr/bin/env bash
 
 set -e
 
 wait_for_postgres() {
-    attempt_num=1
-    max_attempts=5
+    local attempt_num=1
+    local -r max_attempts=5
 
     echo "Waiting for PostgreSQL to start..."
 
-    host="${PAPERLESS_DBHOST:=localhost}"
-    port="${PAPERLESS_DBPORT:=5342}"
+    local -r host="${PAPERLESS_DBHOST:-localhost}"
+    local -r port="${PAPERLESS_DBPORT:-5432}"
+
+    # Disable warning, host and port can't have spaces
+    # shellcheck disable=SC2086
+    while [ ! "$(pg_isready -h ${host} -p ${port})" ]; do
 
-    while [ ! "$(pg_isready -h $host -p $port)" ]; do
         if [ $attempt_num -eq $max_attempts ]; then
             echo "Unable to connect to database."
             exit 1
         else
             echo "Attempt $attempt_num failed! Trying again in 5 seconds..."
         fi
 
         attempt_num=$(("$attempt_num" + 1))
         sleep 5
     done
 }
 
+wait_for_mariadb() {
+    echo "Waiting for MariaDB to start..."
+
+    local -r host="${PAPERLESS_DBHOST:=localhost}"
+    local -r port="${PAPERLESS_DBPORT:=3306}"
+
+    local attempt_num=1
+    local -r max_attempts=5
+
+    # Disable warning, host and port can't have spaces
+    # shellcheck disable=SC2086
+    while ! true > /dev/tcp/$host/$port; do
+
+        if [ $attempt_num -eq $max_attempts ]; then
+            echo "Unable to connect to database."
@@ -25,6 +53,14 @@ wait_for_postgres() {
     done
 }
 
+wait_for_redis() {
+    # We use a Python script to send the Redis ping
+    # instead of installing redis-tools just for 1 thing
+    if ! python3 /sbin/wait-for-redis.py; then
+        exit 1
+    fi
+}
+
 migrations() {
     (
         # flock is in place to prevent multiple containers from doing migrations
@@ -32,18 +68,25 @@ migrations() {
         # of the current container starts.
         flock 200
         echo "Apply database migrations..."
-        python3 manage.py migrate
-    ) 200>/usr/src/paperless/data/migration_lock
+        python3 manage.py migrate --skip-checks --no-input
+    ) 200>"${DATA_DIR}/migration_lock"
 }
 
+django_checks() {
+    # Explicitly run the Django system checks
+    echo "Running Django checks"
+    python3 manage.py check
+}
+
 search_index() {
-    index_version=1
-    index_version_file=/usr/src/paperless/data/.index_version
+    local -r index_version=1
+    local -r index_version_file=${DATA_DIR}/.index_version
 
-    if [[ (! -f "$index_version_file") || $(<$index_version_file) != "$index_version" ]]; then
+    if [[ (! -f "${index_version_file}") || $(<"${index_version_file}") != "$index_version" ]]; then
         echo "Search index out of date. Updating..."
-        python3 manage.py document_index reindex
-        echo $index_version | tee $index_version_file >/dev/null
+        python3 manage.py document_index reindex --no-progress-bar
+        echo ${index_version} | tee "${index_version_file}" >/dev/null
     fi
 }
 
@@ -54,12 +97,18 @@ superuser() {
 }
 
 do_work() {
-    if [[ -n "${PAPERLESS_DBHOST}" ]]; then
+    if [[ "${PAPERLESS_DBENGINE}" == "mariadb" ]]; then
+        wait_for_mariadb
+    elif [[ -n "${PAPERLESS_DBHOST}" ]]; then
         wait_for_postgres
     fi
 
+    wait_for_redis
+
     migrations
 
+    django_checks
+
    search_index
 
     superuser
 
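The new file_env helper means any of the listed variables can be supplied as a `_FILE` pointer instead of a literal value, which pairs naturally with Docker secrets; a sketch (the secret file path and password are illustrative):

```bash
# Supply the database password via a mounted secret file instead of a
# plain environment variable; the path and value are illustrative.
echo 'supersecret' > ./dbpass.txt
docker run -d \
  --env PAPERLESS_DBPASS_FILE=/run/secrets/dbpass \
  --volume "$(pwd)/dbpass.txt:/run/secrets/dbpass:ro" \
  ghcr.io/paperless-ngx/paperless-ngx:latest
```

At startup, `file_env PAPERLESS_DBPASS` reads the file and exports its contents as `PAPERLESS_DBPASS`; setting both the plain and `_FILE` variants is rejected.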
7
docker/flower-conditional.sh
Normal file
@@ -0,0 +1,7 @@
#!/usr/bin/env bash

echo "Checking if we should start flower..."

if [[ -n "${PAPERLESS_ENABLE_FLOWER}" ]]; then
    celery --app paperless flower
fi
@@ -1,6 +1,19 @@
 #!/usr/bin/env bash
 
-for command in document_archiver document_exporter document_importer mail_fetcher document_create_classifier document_index document_renamer document_retagger document_thumbnails document_sanity_checker manage_superuser;
+set -eu
+
+for command in decrypt_documents \
+    document_archiver \
+    document_exporter \
+    document_importer \
+    mail_fetcher \
+    document_create_classifier \
+    document_index \
+    document_renamer \
+    document_retagger \
+    document_thumbnails \
+    document_sanity_checker \
+    manage_superuser;
 do
     echo "installing $command..."
     sed "s/management_command/$command/g" management_script.sh > /usr/local/bin/$command
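Each generated wrapper simply substitutes the command name into management_script.sh, so inside the container they behave like thin aliases for manage.py; for example (the compose service name is illustrative):

```bash
# Run the exporter through its generated wrapper; the service name is illustrative.
docker compose exec webserver document_exporter ../export
```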
15
docker/paperless_cmd.sh
Executable file
@@ -0,0 +1,15 @@
#!/usr/bin/env bash

rootless_args=()
if [ "$(id -u)" == "$(id -u paperless)" ]; then
    rootless_args=(
        --user
        paperless
        --logfile
        supervisord.log
        --pidfile
        supervisord.pid
    )
fi

exec /usr/local/bin/supervisord -c /etc/supervisord.conf "${rootless_args[@]}"
@@ -10,7 +10,7 @@ user=root
 [program:gunicorn]
 command=gunicorn -c /usr/src/paperless/gunicorn.conf.py paperless.asgi:application
 user=paperless
 
+priority = 1
 stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0
 stderr_logfile=/dev/stderr
@@ -19,16 +19,41 @@ stderr_logfile_maxbytes=0
 [program:consumer]
 command=python3 manage.py document_consumer
 user=paperless
 
 stopsignal=INT
+priority = 20
 stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0
 stderr_logfile=/dev/stderr
 stderr_logfile_maxbytes=0
 
-[program:scheduler]
-command=python3 manage.py qcluster
+[program:celery]
+
+command = celery --app paperless worker --loglevel INFO
 user=paperless
 
+stopasgroup = true
+stopwaitsecs = 60
+priority = 5
 stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0
 stderr_logfile=/dev/stderr
 stderr_logfile_maxbytes=0
+
+[program:celery-beat]
+
+command = celery --app paperless beat --loglevel INFO
+user=paperless
+stopasgroup = true
+priority = 10
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+stderr_logfile=/dev/stderr
+stderr_logfile_maxbytes=0
+
+[program:celery-flower]
+command = /usr/local/bin/flower-conditional.sh
+user = paperless
+startsecs = 0
+priority = 40
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+stderr_logfile=/dev/stderr
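Because flower-conditional.sh only starts Flower when PAPERLESS_ENABLE_FLOWER is set, the monitor stays opt-in; a sketch of enabling it (Flower's default port 5555 is an assumption here, as is exposing it this way):

```bash
# Opt in to the Flower Celery monitor; port 5555 is Flower's default
# and is assumed here.
docker run -d \
  --env PAPERLESS_ENABLE_FLOWER=1 \
  --publish 5555:5555 \
  ghcr.io/paperless-ngx/paperless-ngx:latest
```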
44
docker/wait-for-redis.py
Executable file
@@ -0,0 +1,44 @@
#!/usr/bin/env python3
"""
Simple script which attempts to ping the Redis broker as set in the environment for
a certain number of times, waiting a little bit in between

"""
import os
import sys
import time
from typing import Final

from redis import Redis

if __name__ == "__main__":

    MAX_RETRY_COUNT: Final[int] = 5
    RETRY_SLEEP_SECONDS: Final[int] = 5

    REDIS_URL: Final[str] = os.getenv("PAPERLESS_REDIS", "redis://localhost:6379")

    print(f"Waiting for Redis...", flush=True)

    attempt = 0
    with Redis.from_url(url=REDIS_URL) as client:
        while attempt < MAX_RETRY_COUNT:
            try:
                client.ping()
                break
            except Exception as e:
                print(
                    f"Redis ping #{attempt} failed.\n"
                    f"Error: {str(e)}.\n"
                    f"Waiting {RETRY_SLEEP_SECONDS}s",
                    flush=True,
                )
                time.sleep(RETRY_SLEEP_SECONDS)
                attempt += 1

    if attempt >= MAX_RETRY_COUNT:
        print(f"Failed to connect to redis using environment variable PAPERLESS_REDIS.")
        sys.exit(os.EX_UNAVAILABLE)
    else:
        print(f"Connected to Redis broker.")
        sys.exit(os.EX_OK)
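The script reads only PAPERLESS_REDIS, so it can be exercised outside the container as well; a sketch, assuming a local Redis and the `redis` Python package installed (the URL is illustrative):

```bash
# Probe a local broker with the same script the entrypoint uses;
# the URL is illustrative.
PAPERLESS_REDIS=redis://localhost:6379 python3 docker/wait-for-redis.py
echo $?  # 0 once the ping succeeds, non-zero after five failed attempts
```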
@@ -1,17 +0,0 @@
FROM python:3.5.1

# Install Sphinx and Pygments
RUN pip install Sphinx Pygments

# Setup directories, copy data
RUN mkdir /build
COPY . /build
WORKDIR /build/docs

# Build documentation
RUN make html

# Start webserver
WORKDIR /build/docs/_build/html
EXPOSE 8000/tcp
CMD ["python3", "-m", "http.server"]
177
docs/Makefile
@@ -1,177 +0,0 @@
# Makefile for Sphinx documentation
#

# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build

# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif

# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .

.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext

help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  singlehtml to make a single large HTML file"
	@echo "  pickle     to make pickle files"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  devhelp    to make HTML files and a Devhelp project"
	@echo "  epub       to make an epub"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
	@echo "  text       to make text files"
	@echo "  man        to make manual pages"
	@echo "  texinfo    to make Texinfo files"
	@echo "  info       to make Texinfo files and run them through makeinfo"
	@echo "  gettext    to make PO message catalogs"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  xml        to make Docutils-native XML files"
	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
	@echo "  linkcheck  to check all external links for integrity"
	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"

clean:
	rm -rf $(BUILDDIR)/*

html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

singlehtml:
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."

pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."

json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."

htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	".hhp project file in $(BUILDDIR)/htmlhelp."

qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/RIPEAtlasToolsMagellan.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/RIPEAtlasToolsMagellan.qhc"

devhelp:
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/RIPEAtlasToolsMagellan"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/RIPEAtlasToolsMagellan"
	@echo "# devhelp"

epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."

latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	"(use \`make latexpdf' here to do that automatically)."

latexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

latexpdfja:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through platex and dvipdfmx..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

text:
	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
	@echo
	@echo "Build finished. The text files are in $(BUILDDIR)/text."

man:
	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
	@echo
	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."

texinfo:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo
	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
	@echo "Run \`make' in that directory to run these through makeinfo" \
	"(use \`make info' here to do that automatically)."

info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	make -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."

gettext:
	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
	@echo
	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."

changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."

linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	"or in $(BUILDDIR)/linkcheck/output.txt."

doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	"results in $(BUILDDIR)/doctest/output.txt."

xml:
	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
	@echo
	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."

pseudoxml:
	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
	@echo
	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
14
docs/_static/custom.css
vendored
@@ -1,14 +0,0 @@
/* override table width restrictions */
@media screen and (min-width: 767px) {

	.wy-table-responsive table td {
		/* !important prevents the common CSS stylesheets from
		   overriding this as on RTD they are loaded after this stylesheet */
		white-space: normal !important;
	}

	.wy-table-responsive {
		overflow: visible !important;
	}

}
BIN
docs/_static/screenshot.png
vendored
Before Width: | Height: | Size: 445 KiB |
BIN
docs/_static/screenshots/correspondents.png
vendored
Before Width: | Height: | Size: 106 KiB |
BIN
docs/_static/screenshots/dashboard.png
vendored
Before Width: | Height: | Size: 167 KiB |
BIN
docs/_static/screenshots/documents-filter.png
vendored
Before Width: | Height: | Size: 28 KiB |
BIN
docs/_static/screenshots/documents-largecards.png
vendored
Before Width: | Height: | Size: 306 KiB |
BIN
docs/_static/screenshots/documents-smallcards.png
vendored
Before Width: | Height: | Size: 410 KiB |
BIN
docs/_static/screenshots/documents-table.png
vendored
Before Width: | Height: | Size: 137 KiB |
BIN
docs/_static/screenshots/editing.png
vendored
Before Width: | Height: | Size: 293 KiB |
BIN
docs/_static/screenshots/logs.png
vendored
Before Width: | Height: | Size: 260 KiB |
BIN
docs/_static/screenshots/mail-rules-edited.png
vendored
Before Width: | Height: | Size: 96 KiB |
BIN
docs/_static/screenshots/mobile.png
vendored
Before Width: | Height: | Size: 158 KiB |
BIN
docs/_static/screenshots/new-tag.png
vendored
Before Width: | Height: | Size: 32 KiB |
BIN
docs/_static/screenshots/search-preview.png
vendored
Before Width: | Height: | Size: 61 KiB |
BIN
docs/_static/screenshots/search-results.png
vendored
Before Width: | Height: | Size: 261 KiB |
504
docs/administration.md
Normal file
@@ -0,0 +1,504 @@
|
||||
# Administration
|
||||
|
||||
## Making backups {#backup}
|
||||
|
||||
Multiple options exist for making backups of your paperless instance,
|
||||
depending on how you installed paperless.
|
||||
|
||||
Before making backups, make sure that paperless is not running.
|
||||
|
||||
Options available to any installation of paperless:
|
||||
|
||||
- Use the [document exporter](#exporter). The document exporter exports all your documents,
|
||||
thumbnails and metadata to a specific folder. You may import your
|
||||
documents into a fresh instance of paperless again or store your
|
||||
documents in another DMS with this export.
|
||||
- The document exporter is also able to update an already existing
|
||||
export. Therefore, incremental backups with `rsync` are entirely
|
||||
possible.
|
||||
|
||||
!!! caution
|
||||
|
||||
You cannot import the export generated with one version of paperless in
|
||||
a different version of paperless. The export contains an exact image of
|
||||
the database, and migrations may change the database layout.
|
||||
|
||||

Options available to docker installations:

- Backup the docker volumes (see the sketch below). These usually reside within
  `/var/lib/docker/volumes` on the host and you need to be root in
  order to access them.

  Paperless uses 4 volumes:

  - `paperless_media`: This is where your documents are stored.
  - `paperless_data`: This is where auxiliary data is stored. This
    folder also contains the SQLite database, if you use it.
  - `paperless_pgdata`: Exists only if you use PostgreSQL and
    contains the database.
  - `paperless_dbdata`: Exists only if you use MariaDB and contains
    the database.
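
One way to back up a volume is to mount it read-only into a temporary
container and tar its contents. A sketch for the media volume, assuming a
`/backup` directory on the host (repeat for the other volumes):

```shell-session
$ docker run --rm -v paperless_media:/data:ro -v /backup:/backup alpine \
    tar czf /backup/paperless_media.tar.gz -C /data .
```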

Options available to bare-metal and non-docker installations:

- Backup the entire paperless folder. This ensures that if your
  paperless instance crashes at some point or your disk fails, you can
  simply copy the folder back into place and it works.

  When using PostgreSQL or MariaDB, you'll also have to backup the
  database (see the sketch below).
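
For the database itself, the standard dump tools apply. A sketch, where the
database name and user `paperless` are assumptions:

```shell-session
$ pg_dump -U paperless paperless > paperless-db.sql     # PostgreSQL
$ mysqldump -u paperless -p paperless > paperless-db.sql    # MariaDB
```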

### Restoring {#migrating-restoring}

## Updating Paperless {#updating}

### Docker Route {#docker-updating}

If a new release of paperless-ngx is available, upgrading depends on how
you installed paperless-ngx in the first place. The releases are
available at the [release
page](https://github.com/paperless-ngx/paperless-ngx/releases).

First of all, ensure that paperless is stopped.

```shell-session
$ cd /path/to/paperless
$ docker-compose down
```

After that, [make a backup](#backup).

1. If you pull the image from Docker Hub, all you need to do is:

   ```shell-session
   $ docker-compose pull
   $ docker-compose up
   ```

   The docker-compose files refer to the `latest` version, which is
   always the latest stable release.

2. If you built the image yourself, do the following:

   ```shell-session
   $ git pull
   $ docker-compose build
   $ docker-compose up
   ```

Running `docker-compose up` will also apply any new database migrations.
If you see everything working, press CTRL+C once to gracefully stop
paperless. Then you can start paperless-ngx with `-d` to have it run in
the background.

!!! note

    In version 0.9.14, the update process was changed. In 0.9.13 and
    earlier, the docker-compose files specified exact versions and pull
    won't automatically update to newer versions. In order to enable
    updates as described above, either get the new `docker-compose.yml`
    file from
    [here](https://github.com/paperless-ngx/paperless-ngx/tree/master/docker/compose)
    or edit the `docker-compose.yml` file, find the line that says

    ```
    image: ghcr.io/paperless-ngx/paperless-ngx:0.9.x
    ```

    and replace the version with `latest`:

    ```
    image: ghcr.io/paperless-ngx/paperless-ngx:latest
    ```

!!! note

    In version 1.7.1 and onwards, the Docker image can be pinned to a
    release series. This is often combined with automatic updaters such as
    Watchtower to allow safer unattended upgrading to new bugfix releases
    only. It is still recommended to always review release notes before
    upgrading. To pin your install to a release series, edit the
    `docker-compose.yml`, find the line that says

    ```
    image: ghcr.io/paperless-ngx/paperless-ngx:latest
    ```

    and replace the version with the series you want to track, for
    example:

    ```
    image: ghcr.io/paperless-ngx/paperless-ngx:1.7
    ```

### Bare Metal Route {#bare-metal-updating}

After grabbing the new release and unpacking the contents, do the
following:

1. Update dependencies. New paperless versions may require additional
   dependencies. The dependencies required are listed in the section
   about [bare metal installations](/setup#bare_metal).

2. Update python requirements. Keep in mind to activate your virtual
   environment before that, if you use one.

   ```shell-session
   $ pip install -r requirements.txt
   ```

3. Migrate the database.

   ```shell-session
   $ cd src
   $ python3 manage.py migrate
   ```

   This might not actually do anything. Not every new paperless version
   comes with new database migrations.

## Downgrading Paperless {#downgrade-paperless}

Downgrades are possible. However, some updates also contain database
migrations (these change the layout of the database and may move data).
In order to move back from a version that applied database migrations,
you'll have to revert the database migration _before_ downgrading, and
then downgrade paperless.

This table lists the compatible versions for each database migration
number.

| Migration number | Version range   |
| ---------------- | --------------- |
| 1011             | 1.0.0           |
| 1012             | 1.1.0 - 1.2.1   |
| 1014             | 1.3.0 - 1.3.1   |
| 1016             | 1.3.2 - current |

Execute the following management command to migrate your database:

```shell-session
$ python3 manage.py migrate documents <migration number>
```

!!! note

    Some migrations cannot be undone. The command will issue errors if that
    happens.
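
To check which migration your database is currently at before picking a
target from the table, Django's standard `showmigrations` command can be
used, invoked the same way as `migrate`:

```shell-session
$ python3 manage.py showmigrations documents
```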

## Management utilities {#management-commands}

Paperless comes with some management commands that perform various
maintenance tasks on your paperless instance. You can invoke these
commands in the following way:

With docker-compose, while paperless is running:

```shell-session
$ cd /path/to/paperless
$ docker-compose exec webserver <command> <arguments>
```

With docker, while paperless is running:

```shell-session
$ docker exec -it <container-name> <command> <arguments>
```

Bare metal:

```shell-session
$ cd /path/to/paperless/src
$ python3 manage.py <command> <arguments>
```

All commands have built-in help, which can be accessed by executing them
with the argument `--help`.

### Document exporter {#exporter}

The document exporter exports all your data from paperless into a folder
for backup or migration to another DMS.

If you use the document exporter within a cronjob to backup your data,
you might use the `-T` flag behind exec to suppress "The input device
is not a TTY" errors. For example:
`docker-compose exec -T webserver document_exporter ../export`
(see the example at the end of this section).

```
document_exporter target [-c] [-f] [-d] [-z]

optional arguments:
-c, --compare-checksums
-f, --use-filename-format
-d, --delete
-z, --zip
```

`target` is a folder to which the data gets written. This includes
documents, thumbnails and a `manifest.json` file. The manifest contains
all metadata from the database (correspondents, tags, etc).

When you use the provided docker compose script, specify `../export` as
the target. This path inside the container is automatically mounted on
your host on the folder `export`.

If the target directory already exists and contains files, paperless
will assume that the contents of the export directory are a previous
export and will attempt to update the previous export. Paperless will
only export changed and added files. Paperless determines whether a file
has changed by inspecting the file attributes "date/time modified" and
"size". If that does not work out for you, specify
`--compare-checksums` and paperless will attempt to compare file
checksums instead. This is slower.

Paperless will not remove any existing files in the export directory. If
you want paperless to also remove files that do not belong to the
current export, such as files from deleted documents, specify `--delete`.
Be careful when pointing paperless to a directory that already contains
other files.

If `-z` or `--zip` is provided, the export will be a zipfile
in the target directory, named according to the current date.

The filenames generated by this command follow the format
`[date created] [correspondent] [title].[extension]`. If you want
paperless to use `PAPERLESS_FILENAME_FORMAT` for exported filenames
instead, specify `--use-filename-format`.
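
As a sketch of such a cronjob (the schedule and the paperless path are
assumptions):

```
# m h dom mon dow command
0 2 * * * cd /path/to/paperless && docker-compose exec -T webserver document_exporter ../export
```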

### Document importer {#importer}

The document importer takes the export produced by the [Document
exporter](#exporter) and imports it into paperless.

The importer works just like the exporter. You point it at a directory,
and the script does the rest of the work:

```
document_importer source
```

When you use the provided docker compose script, put the export inside
the `export` folder in your paperless source directory. Specify
`../export` as the `source`.

!!! note

    Importing from a previous version of Paperless may work, but for best
    results it is suggested to match the versions.

### Document retagger {#retagger}

Say you've imported a few hundred documents and now want to introduce a
tag or set up a new correspondent, and apply its matching to all of the
currently-imported docs. This problem is common enough that there are
tools for it.

```
document_retagger [-h] [-c] [-T] [-t] [-s] [-i] [--use-first] [-f]

optional arguments:
-c, --correspondent
-T, --tags
-t, --document_type
-s, --storage_path
-i, --inbox-only
--use-first
-f, --overwrite
```

Run this after changing or adding matching rules. It'll loop over all
of the documents in your database and attempt to match documents
according to the new rules.

Specify any combination of `-c`, `-T`, `-t` and `-s` to have the
retagger perform matching of the specified metadata type. If you don't
specify any of these options, the document retagger won't do anything.

Specify `-i` to have the document retagger work on documents tagged with
inbox tags only. This is useful when you don't want to mess with your
already processed documents.

When multiple document types or correspondents match a single document,
the retagger won't assign these to the document. Specify `--use-first`
to override this behavior and just use the first correspondent or type
it finds. This option does not apply to tags, since any amount of tags
can be applied to a document.

Finally, `-f` specifies that you wish to overwrite already assigned
correspondents, types and/or tags. The default behavior is to not assign
correspondents and types to documents that have this data already
assigned. `-f` works differently for tags: By default, only additional
tags get added to documents, no tags will be removed. With `-f`, tags
that don't match a document anymore get removed as well.
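
For example, to apply tag matching to inbox documents only, using the
docker-compose invocation from above:

```shell-session
$ docker-compose exec webserver document_retagger -T -i
```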

### Managing the Automatic matching algorithm

The _Auto_ matching algorithm requires a trained neural network to work.
This network needs to be updated whenever something in your data
changes. The docker image takes care of that automatically with the task
scheduler. You can manually renew the classifier by invoking the
following management command:

```
document_create_classifier
```

This command takes no arguments.

### Managing the document search index {#index}

The document search index is responsible for delivering search results
for the website. The document index is automatically updated whenever
documents get added to, changed, or removed from paperless. However, if
the search yields non-existing documents or won't find anything, you
may need to recreate the index manually.

```
document_index {reindex,optimize}
```

Specify `reindex` to have the index created from scratch. This may take
some time.

Specify `optimize` to optimize the index. This updates certain aspects
of the index and usually makes queries faster and also ensures that the
autocompletion works properly. This command is regularly invoked by the
task scheduler.

### Managing filenames {#renamer}

If you use paperless' feature to
[assign custom filenames to your documents](/advanced_usage#file-name-handling),
you can use this command to move all your files after changing
the naming scheme.

!!! warning

    Since this command moves your documents, it is advised to do a backup
    beforehand. The renaming logic is robust and will never overwrite or
    delete a file, but you can't ever be careful enough.

```
document_renamer
```

The command takes no arguments and processes all your documents at once.

Learn how to use
[Management Utilities](#management-commands).

### Sanity checker {#sanity-checker}

Paperless has a built-in sanity checker that inspects your document
collection for issues.

The issues detected by the sanity checker are as follows:

- Missing original files.
- Missing archive files.
- Inaccessible original files due to improper permissions.
- Inaccessible archive files due to improper permissions.
- Corrupted original documents, detected by comparing their checksum
  against what is stored in the database.
- Corrupted archive documents, detected by comparing their checksum
  against what is stored in the database.
- Missing thumbnails.
- Inaccessible thumbnails due to improper permissions.
- Documents without any content (warning).
- Orphaned files in the media directory (warning). These are files
  that are not referenced by any document in paperless.

```
document_sanity_checker
```

The command takes no arguments. Depending on the size of your document
archive, this may take some time.

### Fetching e-mail

Paperless automatically fetches your e-mail every 10 minutes by default.
If you want to invoke the email consumer manually, call the following
management command:

```
mail_fetcher
```

The command takes no arguments and processes all your mail accounts and
rules.

!!! note

    As of October 2022, Microsoft no longer supports IMAP authentication
    for Exchange servers, thus Exchange is no longer supported until a
    solution is implemented in the Python IMAP library used by Paperless.
    See [learn.microsoft.com](https://learn.microsoft.com/en-us/exchange/clients-and-mobile-in-exchange-online/deprecation-of-basic-authentication-exchange-online)

### Creating archived documents {#archiver}

Paperless stores archived PDF/A documents alongside your original
documents. These archived documents will also contain selectable text
for image-only originals. These documents are derived from the
originals, which are always stored unmodified. If coming from an earlier
version of paperless, your documents won't have archived versions.

This command creates PDF/A documents for your documents.

```
document_archiver --overwrite --document <id>
```

This command will only attempt to create archived documents when no
archived document exists yet, unless `--overwrite` is specified. If
`--document <id>` is specified, the archiver will only process that
document.

!!! note

    This command essentially performs OCR on all your documents again,
    according to your settings. If you run this with
    `PAPERLESS_OCR_MODE=redo`, it will potentially run for a very long time.
    You can cancel the command at any time, since this command will skip
    already archived versions the next time it is run.

!!! note

    Some documents will cause errors and cannot be converted into PDF/A
    documents, such as encrypted PDF documents. The archiver will skip over
    these documents each time it sees them.

### Managing encryption {#encryption}

Documents can be stored in Paperless using GnuPG encryption.

!!! warning

    Encryption is deprecated since [paperless-ng 0.9](/changelog#paperless-ng-090) and doesn't really
    provide any additional security, since you have to store the passphrase
    in a configuration file on the same system as the encrypted documents
    for paperless to work. Furthermore, the entire text content of the
    documents is stored plain in the database, even if your documents are
    encrypted. Filenames are not encrypted either.

    Also, the web server provides transparent access to your encrypted
    documents.

    Consider running paperless on an encrypted filesystem instead, which
    will then at least provide security against physical hardware theft.

#### Enabling encryption

Enabling encryption is no longer supported.

#### Disabling encryption

Basic usage to disable encryption of your document store:

(Note: If `PAPERLESS_PASSPHRASE` isn't set already, you need to specify
it here)

```
decrypt_documents [--passphrase SECR3TP4SSPHRA$E]
```

@@ -1,499 +0,0 @@
476
docs/advanced_usage.md
Normal file
@@ -0,0 +1,476 @@

# Advanced Topics

Paperless offers a couple features that automate certain tasks and make
your life easier.

## Matching tags, correspondents, document types, and storage paths {#matching}

Paperless will compare the matching algorithms defined by every tag,
correspondent, document type, and storage path in your database to see
if they apply to the text in a document. In other words, if you define a
tag called `Home Utility` that has a `match` property of `bc hydro` and
a `matching_algorithm` of `literal`, Paperless will automatically tag
your newly-consumed document with your `Home Utility` tag so long as the
text `bc hydro` appears in the body of the document somewhere.

The matching logic is quite powerful. It supports searching the text of
your document with different algorithms, and as such, some
experimentation may be necessary to get things right.

In order to have a tag, correspondent, document type, or storage path
assigned automatically to newly consumed documents, assign a match and
matching algorithm using the web interface. These settings define when
to assign tags, correspondents, document types, and storage paths to
documents.

The following algorithms are available:

- **Any:** Looks for any occurrence of any word provided in match in
  the PDF. If you define the match as `Bank1 Bank2`, it will match
  documents containing either of these terms.
- **All:** Requires that every word provided appears in the PDF,
  albeit not in the order provided.
- **Literal:** Matches only if the match appears exactly as provided
  (i.e. preserving word order) in the PDF.
- **Regular expression:** Parses the match as a regular expression and
  tries to find a match within the document.
- **Fuzzy match:** Matches if the document text is sufficiently similar
  to the match, tolerating small deviations such as OCR errors.
- **Auto:** Tries to automatically match new documents. This does not
  require you to set a match. See the notes below.

When using the _any_ or _all_ matching algorithms, you can search for
terms that consist of multiple words by enclosing them in double quotes.
For example, defining a match text of `"Bank of America" BofA` using the
_any_ algorithm will match documents that contain either "Bank of
America" or "BofA", but will not match documents containing "Bank of
South America".

Then just save your tag, correspondent, document type, or storage path
and run another document through the consumer. Once complete, you should
see the newly-created document, automatically tagged with the
appropriate data.

### Automatic matching {#automatic-matching}

Paperless-ngx comes with a new matching algorithm called _Auto_. This
matching algorithm tries to assign tags, correspondents, document types,
and storage paths to your documents based on how you have already
assigned these on existing documents. It uses a neural network under the
hood.

If, for example, all your bank statements of your account 123 at the
Bank of America are tagged with the tag "bofa123" and the matching
algorithm of this tag is set to _Auto_, this neural network will examine
your documents and automatically learn when to assign this tag.

Paperless tries to hide much of the involved complexity with this
approach. However, there are a couple caveats you need to keep in mind
when using this feature:

- Changes to your documents are not immediately reflected by the
  matching algorithm. The neural network needs to be _trained_ on your
  documents after changes. Paperless periodically (default: once each
  hour) checks for changes and does this automatically for you.
- The Auto matching algorithm only takes documents into account which
  are NOT placed in your inbox (i.e. do not have any inbox tags assigned
  to them). This ensures that the neural network only learns from
  documents which you have correctly tagged before.
- The matching algorithm can only work if there is a correlation
  between the tag, correspondent, document type, or storage path and
  the document itself. Your bank statements usually contain your bank
  account number and the name of the bank, so this works reasonably
  well. However, tags such as "TODO" cannot be automatically
  assigned.
- The matching algorithm needs a reasonable number of documents to
  identify when to assign tags, correspondents, storage paths, and
  types. If one out of a thousand documents has the correspondent
  "Very obscure web shop I bought something five years ago", it will
  probably not assign this correspondent automatically if you buy
  something from them again. The more documents, the better.
- Paperless also needs a reasonable amount of negative examples to
  decide when not to assign a certain tag, correspondent, document
  type, or storage path. This will usually be the case as you start
  filling up paperless with documents. Example: If all your documents
  are either from "Webshop" or "Bank", paperless will assign one
  of these correspondents to ANY new document, if both are set to
  automatic matching.

## Hooking into the consumption process {#consume-hooks}

Sometimes you may want to do something arbitrary whenever a document is
consumed. Rather than try to predict what you may want to do, Paperless
lets you execute scripts of your own choosing just before or after a
document is consumed using a couple simple hooks.

Just write a script, put it somewhere that Paperless can read & execute,
and then put the path to that script in `paperless.conf` or
`docker-compose.env` with the variable name of either
`PAPERLESS_PRE_CONSUME_SCRIPT` or `PAPERLESS_POST_CONSUME_SCRIPT`.

!!! info

    These scripts are executed in a **blocking** process, which means that
    if a script takes a long time to run, it can significantly slow down
    your document consumption flow. If you want things to run
    asynchronously, you'll have to fork the process in your script and
    exit.

### Pre-consumption script {#pre-consume-script}

Executed after the consumer sees a new document in the consumption
folder, but before any processing of the document is performed. This
script can access the following relevant environment variables:

- `DOCUMENT_SOURCE_PATH`

A simple but common example for this would be creating a simple script
like this:

`/usr/local/bin/ocr-pdf`

```bash
#!/usr/bin/env bash
pdf2pdfocr.py -i ${DOCUMENT_SOURCE_PATH}
```

`/etc/paperless.conf`

```bash
...
PAPERLESS_PRE_CONSUME_SCRIPT="/usr/local/bin/ocr-pdf"
...
```

This will pass the path to the document about to be consumed to
`/usr/local/bin/ocr-pdf`, which will in turn call
[pdf2pdfocr.py](https://github.com/LeoFCardoso/pdf2pdfocr) on your
document, which will then overwrite the file with an OCR'd version of
the file and exit. At that point, the consumption process will begin
with the newly modified file.

The script's stdout and stderr will be logged line by line to the
webserver log, along with the exit code of the script.

### Post-consumption script {#post-consume-script}

Executed after the consumer has successfully processed a document and
has moved it into paperless. It receives the following environment
variables:

- `DOCUMENT_ID`
- `DOCUMENT_FILE_NAME`
- `DOCUMENT_CREATED`
- `DOCUMENT_MODIFIED`
- `DOCUMENT_ADDED`
- `DOCUMENT_SOURCE_PATH`
- `DOCUMENT_ARCHIVE_PATH`
- `DOCUMENT_THUMBNAIL_PATH`
- `DOCUMENT_DOWNLOAD_URL`
- `DOCUMENT_THUMBNAIL_URL`
- `DOCUMENT_CORRESPONDENT`
- `DOCUMENT_TAGS`
- `DOCUMENT_ORIGINAL_FILENAME`

The script can be in any language, but for a simple shell script
example, you can take a look at
[post-consumption-example.sh](https://github.com/paperless-ngx/paperless-ngx/blob/main/scripts/post-consumption-example.sh)
in this project.

The post consumption script cannot cancel the consumption process.

The script's stdout and stderr will be logged line by line to the
webserver log, along with the exit code of the script.
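
As a minimal sketch of such a script (the log file path is an assumption;
the variables are those listed above):

```bash
#!/usr/bin/env bash
# Append one line per consumed document to a log file.
echo "$(date -Is) consumed document ${DOCUMENT_ID}: ${DOCUMENT_FILE_NAME}" \
    >> /usr/src/paperless/scripts/post-consumption.log
```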

### Docker {#docker-consume-hooks}

To hook into the consumption process when using Docker, you
will need to pass the scripts into the container via a host mount
in your `docker-compose.yml`.

Assume you have
`/home/paperless-ngx/scripts/post-consumption-example.sh` as a
script which you'd like to run.

You can pass that script into the consumer container via a host mount:

```yaml
...
webserver:
  ...
  volumes:
    ...
    - /home/paperless-ngx/scripts:/path/in/container/scripts/ # (1)!
  environment: # (3)!
    ...
    PAPERLESS_POST_CONSUME_SCRIPT: /path/in/container/scripts/post-consumption-example.sh # (2)!
...
```

1. The external scripts directory is mounted to a location inside the container.
2. The internal location of the script is used to set the script to run.
3. This can also be set in `docker-compose.env`.

Troubleshooting:

- Monitor the docker-compose log:
  `cd ~/paperless-ngx; docker-compose logs -f`
- Check your script's permissions, e.g. in case of a permission error:
  `sudo chmod 755 post-consumption-example.sh`
- Pipe your script's output to a log file, e.g.:
  `echo "${DOCUMENT_ID}" | tee --append /usr/src/paperless/scripts/post-consumption-example.log`

## File name handling {#file-name-handling}

By default, paperless stores your documents in the media directory and
renames them using the identifier which it has assigned to each
document. You will end up getting files like `0000123.pdf` in your media
directory. This isn't necessarily a bad thing, because you normally
don't have to access these files manually. However, if you wish to name
your files differently, you can do that by adjusting the
`PAPERLESS_FILENAME_FORMAT` configuration option. Paperless adds the
correct file extension, e.g. `.pdf` or `.jpg`, automatically.

This variable allows you to configure the filename (folders are allowed)
using placeholders. For example, configuring this to

```bash
PAPERLESS_FILENAME_FORMAT={created_year}/{correspondent}/{title}
```

will create a directory structure as follows:

```
2019/
  My bank/
    Statement January.pdf
    Statement February.pdf
2020/
  My bank/
    Statement January.pdf
    Letter.pdf
    Letter_01.pdf
  Shoe store/
    My new shoes.pdf
```

!!! warning

    Do not manually move your files in the media folder. Paperless remembers
    the last filename a document was stored as. If you do rename a file,
    paperless will report your files as missing and won't be able to find
    them.

Paperless provides the following placeholders within filenames (an
example follows the list):

- `{asn}`: The archive serial number of the document, or "none".
- `{correspondent}`: The name of the correspondent, or "none".
- `{document_type}`: The name of the document type, or "none".
- `{tag_list}`: A comma separated list of all tags assigned to the
  document.
- `{title}`: The title of the document.
- `{created}`: The full date (ISO format) the document was created.
- `{created_year}`: Year created only, formatted as the year with
  century.
- `{created_year_short}`: Year created only, formatted as the year
  without century, zero padded.
- `{created_month}`: Month created only (number 01-12).
- `{created_month_name}`: Month created name, as per locale.
- `{created_month_name_short}`: Month created abbreviated name, as per
  locale.
- `{created_day}`: Day created only (number 01-31).
- `{added}`: The full date (ISO format) the document was added to
  paperless.
- `{added_year}`: Year added only.
- `{added_year_short}`: Year added only, formatted as the year without
  century, zero padded.
- `{added_month}`: Month added only (number 01-12).
- `{added_month_name}`: Month added name, as per locale.
- `{added_month_name_short}`: Month added abbreviated name, as per
  locale.
- `{added_day}`: Day added only (number 01-31).
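
For example, a scheme that combines several of these placeholders (the
exact format string is purely illustrative):

```bash
PAPERLESS_FILENAME_FORMAT={created_year}/{correspondent}/{created} {title}
```

With this, a 2020 statement from "My bank" would end up stored as
something like `2020/My bank/2020-01-31 Statement January.pdf`.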

Paperless will try to conserve the information from your database as
much as possible. However, some characters that you can use in document
titles and correspondent names (such as `: \ /` and a couple more) are
not allowed in filenames and will be replaced with dashes.

If paperless detects that two documents share the same filename,
paperless will automatically append `_01`, `_02`, etc to the filename.
This happens if all the placeholders in a filename evaluate to the same
value.

!!! tip

    You can affect how empty placeholders are treated by changing the
    following setting to `true`.

    ```
    PAPERLESS_FILENAME_FORMAT_REMOVE_NONE=True
    ```

    Doing this results in all empty placeholders resolving to "" instead
    of "none" as stated above. Spaces before empty placeholders are
    removed as well, and empty directories are omitted.

!!! tip

    Paperless checks the filename of a document whenever it is saved.
    Therefore, you need to update the filenames of your documents and move
    them after altering this setting by invoking the
    [`document renamer`](/administration#renamer).

!!! warning

    Make absolutely sure you get the spelling of the placeholders right, or
    else paperless will use the default naming scheme instead.

!!! caution

    As of now, you could totally tell paperless to store your files anywhere
    outside the media directory by setting

    ```
    PAPERLESS_FILENAME_FORMAT=../../my/custom/location/{title}
    ```

    However, keep in mind that inside docker, if files get stored outside of
    the predefined volumes, they will be lost after a restart of paperless.

## Storage paths

One of the best things in Paperless is that you can not only access the
documents via the web interface, but also via the file system.

When a single storage layout is not sufficient for your use case,
storage paths come to the rescue. Storage paths allow you to configure
more precisely where each document is stored in the file system.

- Each storage path is a `PAPERLESS_FILENAME_FORMAT` and
  follows the rules described above
- Each document is assigned a storage path using the matching
  algorithms described above, but can be overwritten at any time

For example, you could define the following two storage paths:

1. Normal communications are put into a folder structure sorted by
   `year/correspondent`
2. Communications with insurance companies are stored in a flat
   structure with longer file names, but containing the full date of
   the correspondence.

```
By Year = {created_year}/{correspondent}/{title}
Insurances = Insurances/{correspondent}/{created_year}-{created_month}-{created_day} {title}
```

If you then map these storage paths to the documents, you might get the
following result. For simplicity, `By Year` defines the same
structure as in the previous example above.

```text
2019/ # By Year
  My bank/
    Statement January.pdf
    Statement February.pdf

Insurances/ # Insurances
  Healthcare 123/
    2022-01-01 Statement January.pdf
    2022-02-02 Letter.pdf
    2022-02-03 Letter.pdf
  Dental 456/
    2021-12-01 New Conditions.pdf
```

!!! tip

    Defining a storage path is optional. If no storage path is defined for a
    document, the global `PAPERLESS_FILENAME_FORMAT` is applied.

!!! warning

    If you adjust the format of an existing storage path, old documents
    don't get relocated automatically. You need to run the
    [document renamer](/administration#renamer) to
    adjust their paths.
## Celery Monitoring {#celery-monitoring}
|
||||
|
||||
The monitoring tool
|
||||
[Flower](https://flower.readthedocs.io/en/latest/index.html) can be used
|
||||
to view more detailed information about the health of the celery workers
|
||||
used for asynchronous tasks. This includes details on currently running,
|
||||
queued and completed tasks, timing and more. Flower can also be used
|
||||
with Prometheus, as it exports metrics. For details on its capabilities,
|
||||
refer to the Flower documentation.
|
||||
|
||||
To configure Flower further, create a `flowerconfig.py` and
|
||||
place it into the `src/paperless` directory. For a Docker
|
||||
installation, you can use volumes to accomplish this:
|
||||
|
||||
```yaml
|
||||
services:
|
||||
# ...
|
||||
webserver:
|
||||
ports:
|
||||
- 5555:5555 # (2)!
|
||||
# ...
|
||||
volumes:
|
||||
- /path/to/my/flowerconfig.py:/usr/src/paperless/src/paperless/flowerconfig.py:ro # (1)!
|
||||
```
|
||||
|
||||
1. Note the `:ro` tag means the file will be mounted as read only.
|
||||
2. `flower` runs by default on port 5555, but this can be configured
|

## Custom Container Initialization

The Docker image includes the ability to run custom user scripts during
startup. This could be utilized for installing additional tools or
Python packages, for example. Scripts are expected to be shell scripts.

To utilize this, mount a folder containing your scripts to the custom
initialization directory, `/custom-cont-init.d`, and place the scripts
you wish to run inside. For security, the folder must be owned
by `root` and should have permissions of `a=rx`. Additionally, scripts
must only be writable by `root`.

Your scripts will be run directly before the webserver completes
startup. Scripts will be run by the `root` user.
If you would like to switch users, the utility `gosu` is available and
preferred over `sudo`.

This is an advanced functionality with which you could break functionality
or lose data. If you experience issues, please disable any custom scripts
and try again before reporting an issue.

For example, using Docker Compose:

```yaml
services:
  # ...
  webserver:
    # ...
    volumes:
      - /path/to/my/scripts:/custom-cont-init.d:ro # (1)!
```

1. Note the `:ro` tag means the folder will be mounted as read only. This is for extra security against changes.
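
For example, a minimal sketch of such a script (the file name and the
package installed are only illustrations):

```bash
#!/usr/bin/env bash
# /path/to/my/scripts/install-extra-tools.sh
# Runs as root during container startup, before the webserver is ready.
set -e
apt-get update
apt-get install --no-install-recommends --yes vim
```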

## MySQL Caveats {#mysql-caveats}

### Case Sensitivity

The database interface does not provide a method to configure a MySQL
database to be case sensitive. This prevents a user from creating both a
tag `Name` and a tag `NAME`, as they are considered the same.

Per Django documentation, enabling this requires manual intervention.
To enable case sensitive tables, you can execute the following command
against each table:

`ALTER TABLE <table_name> CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;`

You can also set the default for new tables (this does NOT affect
existing tables) with:

`ALTER DATABASE <db_name> CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;`
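
A sketch of one way to generate and apply that statement for every table at
once (this assumes the database is named `paperless`; adjust names and
credentials to your setup):

```bash
# Generate one ALTER TABLE statement per table, then pipe them back into mysql.
mysql -u root -p -N -e "SELECT CONCAT('ALTER TABLE ', table_name, ' CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;') FROM information_schema.tables WHERE table_schema = 'paperless';" | mysql -u root -p paperless
```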
@@ -1,292 +0,0 @@

***************
Advanced topics
***************

Paperless offers a couple of features that automate certain tasks and make
your life easier.

.. _advanced-matching:

Matching tags, correspondents and document types
################################################

Paperless will compare the matching algorithms defined by every tag and
correspondent already set in your database to see if they apply to the text
in a document. In other words, if you define a tag called ``Home Utility``
that has a ``match`` property of ``bc hydro`` and a ``matching_algorithm``
of ``literal``, Paperless will automatically tag your newly-consumed
document with your ``Home Utility`` tag so long as the text ``bc hydro``
appears in the body of the document somewhere.

The matching logic is quite powerful. It supports searching the text of your
document with different algorithms, and as such, some experimentation may be
necessary to get things right.

In order to have a tag, correspondent, or type assigned automatically to
newly consumed documents, assign a match and matching algorithm using the
web interface. These settings define when to assign correspondents, tags,
and types to documents.

The following algorithms are available:

* **Any:** Looks for any occurrence of any word provided in match in the PDF.
  If you define the match as ``Bank1 Bank2``, it will match documents
  containing either of these terms.
* **All:** Requires that every word provided appears in the PDF, albeit not
  in the order provided.
* **Literal:** Matches only if the match appears exactly as provided
  (i.e. preserving ordering) in the PDF.
* **Regular expression:** Parses the match as a regular expression and tries
  to find a match within the document.
* **Fuzzy match:** I don't know. Look at the source.
* **Auto:** Tries to automatically match new documents. This does not
  require you to set a match. See the notes below.

When using the *any* or *all* matching algorithms, you can search for terms
that consist of multiple words by enclosing them in double quotes. For
example, defining a match text of ``"Bank of America" BofA`` using the *any*
algorithm will match documents that contain either "Bank of America" or
"BofA", but will not match documents containing "Bank of South America".

Then just save your tag/correspondent and run another document through the
consumer. Once complete, you should see the newly-created document
automatically tagged with the appropriate data.

.. _advanced-automatic_matching:

Automatic matching
==================

Paperless-ngx comes with a new matching algorithm called *Auto*. This
matching algorithm tries to assign tags, correspondents, and document types
to your documents based on how you have already assigned these on existing
documents. It uses a neural network under the hood.

If, for example, all your bank statements of your account 123 at the Bank of
America are tagged with the tag "bofa_123" and the matching algorithm of
this tag is set to *Auto*, this neural network will examine your documents
and automatically learn when to assign this tag.

Paperless tries to hide much of the involved complexity with this approach.
However, there are a couple of caveats you need to keep in mind when using
this feature:

* Changes to your documents are not immediately reflected by the matching
  algorithm. The neural network needs to be *trained* on your documents
  after changes. Paperless periodically (default: once each hour) checks
  for changes and does this automatically for you.
* The Auto matching algorithm only takes documents into account which are
  NOT placed in your inbox (i.e. have no inbox tags assigned to them). This
  ensures that the neural network only learns from documents which you have
  correctly tagged before.
* The matching algorithm can only work if there is a correlation between
  the tag, correspondent, or document type and the document itself. Your
  bank statements usually contain your bank account number and the name of
  the bank, so this works reasonably well. However, tags such as "TODO"
  cannot be automatically assigned.
* The matching algorithm needs a reasonable number of documents to identify
  when to assign tags, correspondents, and types. If one out of a thousand
  documents has the correspondent "Very obscure web shop I bought something
  five years ago", it will probably not assign this correspondent
  automatically if you buy something from them again. The more documents,
  the better.
* Paperless also needs a reasonable amount of negative examples to decide
  when not to assign a certain tag, correspondent or type. This will
  usually be the case as you start filling up paperless with documents.
  Example: If all your documents are either from "Webshop" or "Bank",
  paperless will assign one of these correspondents to ANY new document,
  if both are set to automatic matching.

Hooking into the consumption process
####################################

Sometimes you may want to do something arbitrary whenever a document is
consumed. Rather than try to predict what you may want to do, Paperless lets
you execute scripts of your own choosing just before or after a document is
consumed using a couple of simple hooks.

Just write a script, put it somewhere that Paperless can read & execute, and
then put the path to that script in ``paperless.conf`` or
``docker-compose.env`` with the variable name of either
``PAPERLESS_PRE_CONSUME_SCRIPT`` or ``PAPERLESS_POST_CONSUME_SCRIPT``.

.. important::

    These scripts are executed in a **blocking** process, which means that
    if a script takes a long time to run, it can significantly slow down
    your document consumption flow. If you want things to run
    asynchronously, you'll have to fork the process in your script and exit.

Pre-consumption script
======================

Executed after the consumer sees a new document in the consumption folder,
but before any processing of the document is performed. This script receives
exactly one argument:

* Document file name

A common example would be creating a script like this:

``/usr/local/bin/ocr-pdf``

.. code:: bash

    #!/usr/bin/env bash
    pdf2pdfocr.py -i "${1}"

``/etc/paperless.conf``

.. code:: bash

    ...
    PAPERLESS_PRE_CONSUME_SCRIPT="/usr/local/bin/ocr-pdf"
    ...

This will pass the path of the document about to be consumed to
``/usr/local/bin/ocr-pdf``, which will in turn call `pdf2pdfocr.py`_ on your
document to overwrite the file with an OCR'd version and exit. At that
point, the consumption process will begin with the newly modified file.

.. _pdf2pdfocr.py: https://github.com/LeoFCardoso/pdf2pdfocr

.. _advanced-post_consume_script:

Post-consumption script
=======================

Executed after the consumer has successfully processed a document and has
moved it into paperless. It receives the following arguments:

* Document id
* Generated file name
* Source path
* Thumbnail path
* Download URL
* Thumbnail URL
* Correspondent
* Tags

The script can be in any language, but for a simple shell script example,
you can take a look at `post-consumption-example.sh`_ in this project.

The post consumption script cannot cancel the consumption process.
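
A minimal sketch of such a script, assuming the argument order listed above
(the log file path is only an example):

.. code:: bash

    #!/usr/bin/env bash
    # Positional arguments as documented above.
    DOCUMENT_ID="${1}"
    GENERATED_NAME="${2}"
    SOURCE_PATH="${3}"
    # Append one line per consumed document to a simple audit log.
    echo "$(date -Is) consumed #${DOCUMENT_ID} (${GENERATED_NAME}) from ${SOURCE_PATH}" >> /var/log/paperless-post-consume.log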

Docker
------

Assuming you have
``/home/foo/paperless-ngx/scripts/post-consumption-example.sh``, you can
pass that script into the consumer container via a host mount in your
``docker-compose.yml``:

.. code:: bash

    ...
    consumer:
        ...
        volumes:
            ...
            - /home/paperless-ngx/scripts:/path/in/container/scripts/
            ...

Example (docker-compose.yml): ``- /home/foo/paperless-ngx/scripts:/usr/src/paperless/scripts``

This in turn requires the variable ``PAPERLESS_POST_CONSUME_SCRIPT`` in
``docker-compose.env`` to point to
``/path/in/container/scripts/post-consumption-example.sh``.

Example (docker-compose.env): ``PAPERLESS_POST_CONSUME_SCRIPT=/usr/src/paperless/scripts/post-consumption-example.sh``

Troubleshooting:

- Monitor the docker-compose log: ``cd ~/paperless-ngx; docker-compose logs -f``
- Check your script's permissions, e.g. in case of a permission error:
  ``sudo chmod 755 post-consumption-example.sh``
- Pipe your script's output to a log file, e.g.
  ``echo "${DOCUMENT_ID}" | tee --append /usr/src/paperless/scripts/post-consumption-example.log``

.. _post-consumption-example.sh: https://github.com/paperless-ngx/paperless-ngx/blob/main/scripts/post-consumption-example.sh

.. _advanced-file_name_handling:

File name handling
##################

By default, paperless stores your documents in the media directory and
renames them using the identifier which it has assigned to each document.
You will end up getting files like ``0000123.pdf`` in your media directory.
This isn't necessarily a bad thing, because you normally don't have to
access these files manually. However, if you wish to name your files
differently, you can do that by adjusting the
``PAPERLESS_FILENAME_FORMAT`` configuration option.

This variable allows you to configure the filename (folders are allowed)
using placeholders. For example, configuring this to

.. code:: bash

    PAPERLESS_FILENAME_FORMAT={created_year}/{correspondent}/{title}

will create a directory structure as follows:

.. code::

    2019/
        My bank/
            Statement January.pdf
            Statement February.pdf
    2020/
        My bank/
            Statement January.pdf
            Letter.pdf
            Letter_01.pdf
        Shoe store/
            My new shoes.pdf

.. danger::

    Do not manually move your files in the media folder. Paperless remembers
    the last filename a document was stored as. If you do rename a file,
    paperless will report your files as missing and won't be able to find
    them.

Paperless provides the following placeholders within filenames:

* ``{asn}``: The archive serial number of the document, or "none".
* ``{correspondent}``: The name of the correspondent, or "none".
* ``{document_type}``: The name of the document type, or "none".
* ``{tag_list}``: A comma separated list of all tags assigned to the document.
* ``{title}``: The title of the document.
* ``{created}``: The full date (ISO format) the document was created.
* ``{created_year}``: Year created only.
* ``{created_month}``: Month created only (number 01-12).
* ``{created_day}``: Day created only (number 01-31).
* ``{added}``: The full date (ISO format) the document was added to paperless.
* ``{added_year}``: Year added only.
* ``{added_month}``: Month added only (number 01-12).
* ``{added_day}``: Day added only (number 01-31).

Paperless will try to conserve the information from your database as much
as possible. However, some characters that you can use in document titles
and correspondent names (such as ``: \ /`` and a couple more) are not
allowed in filenames and will be replaced with dashes.

If paperless detects that two documents share the same filename, paperless
will automatically append ``_01``, ``_02``, etc. to the filename. This
happens if all the placeholders in a filename evaluate to the same value.

.. hint::

    Paperless checks the filename of a document whenever it is saved.
    Therefore, you need to update the filenames of your documents and move
    them after altering this setting by invoking the
    :ref:`document renamer <utilities-renamer>`.

.. warning::

    Make absolutely sure you get the spelling of the placeholders right, or
    else paperless will use the default naming scheme instead.

.. caution::

    As of now, you could totally tell paperless to store your files anywhere
    outside the media directory by setting

    .. code::

        PAPERLESS_FILENAME_FORMAT=../../my/custom/location/{title}

    However, keep in mind that inside docker, if files get stored outside of
    the predefined volumes, they will be lost after a restart of paperless.
318
docs/api.md
Normal file
@@ -0,0 +1,318 @@

# The REST API

Paperless makes use of the [Django REST
Framework](https://django-rest-framework.org/) standard API interface. It
provides a browsable API for most of its endpoints, which you can
inspect at `http://<paperless-host>:<port>/api/`. This also documents
most of the available filters and ordering fields.

The API provides the following main endpoints:

- `/api/documents/`: Full CRUD support, except POSTing new documents.
  See below.
- `/api/correspondents/`: Full CRUD support.
- `/api/document_types/`: Full CRUD support.
- `/api/logs/`: Read-Only.
- `/api/tags/`: Full CRUD support.
- `/api/mail_accounts/`: Full CRUD support.
- `/api/mail_rules/`: Full CRUD support.

All of these endpoints except for the logging endpoint allow you to
fetch, edit and delete individual objects by appending their primary key
to the path, for example `/api/documents/454/`.

The objects served by the document endpoint contain the following
fields:

- `id`: ID of the document. Read-only.
- `title`: Title of the document.
- `content`: Plain text content of the document.
- `tags`: List of IDs of tags assigned to this document, or empty
  list.
- `document_type`: Document type of this document, or null.
- `correspondent`: Correspondent of this document or null.
- `created`: The date time at which this document was created.
- `created_date`: The date (YYYY-MM-DD) at which this document was
  created. Optional. If also passed with created, this is ignored.
- `modified`: The date at which this document was last edited in
  paperless. Read-only.
- `added`: The date at which this document was added to paperless.
  Read-only.
- `archive_serial_number`: The identifier of this document in a
  physical document archive.
- `original_file_name`: Verbose filename of the original document.
  Read-only.
- `archived_file_name`: Verbose filename of the archived document.
  Read-only. Null if no archived document is available.

## Downloading documents

In addition to that, the document endpoint offers these additional
actions on individual documents:

- `/api/documents/<pk>/download/`: Download the document.
- `/api/documents/<pk>/preview/`: Display the document inline, without
  downloading it.
- `/api/documents/<pk>/thumb/`: Download the PNG thumbnail of a
  document.

Paperless generates archived PDF/A documents from consumed files and
stores both the original files as well as the archived files. By
default, the endpoints for previews and downloads serve the archived
file, if it is available. Otherwise, the original file is served. Some
documents cannot be archived.

The endpoints correctly serve the response header fields
`Content-Disposition` and `Content-Type` to indicate the filename for
download and the type of content of the document.

In order to download or preview the original document when an archived
document is available, supply the query parameter `original=true`.
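
For example, with curl (host, credentials and document id are placeholders):

```bash
# Fetch the archived version (the default), then the original file
curl -u user:pass -o doc.pdf "http://localhost:8000/api/documents/454/download/"
curl -u user:pass -o original.pdf "http://localhost:8000/api/documents/454/download/?original=true"
```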

!!! tip

    Paperless used to provide this functionality at `/fetch/<pk>/preview`,
    `/fetch/<pk>/thumb` and `/fetch/<pk>/doc`. Redirects to the new URLs
    are in place. However, if you use these old URLs to access documents,
    you should update your app or script to use the new URLs.

## Getting document metadata

The API also has an endpoint to retrieve read-only metadata about
specific documents. This information is not served along with the
document objects, since it requires reading files and would therefore
slow down document lists considerably.

Access the metadata of a document with an ID `id` at
`/api/documents/<id>/metadata/`.

The endpoint reports the following data:

- `original_checksum`: MD5 checksum of the original document.
- `original_size`: Size of the original document, in bytes.
- `original_mime_type`: Mime type of the original document.
- `media_filename`: Current filename of the document, under which it
  is stored inside the media directory.
- `has_archive_version`: True, if this document is archived, false
  otherwise.
- `original_metadata`: A list of metadata associated with the original
  document. See below.
- `archive_checksum`: MD5 checksum of the archived document, or null.
- `archive_size`: Size of the archived document in bytes, or null.
- `archive_metadata`: Metadata associated with the archived document,
  or null. See below.

File metadata is reported as a list of objects in the following form:

```json
[
  {
    "namespace": "http://ns.adobe.com/pdf/1.3/",
    "prefix": "pdf",
    "key": "Producer",
    "value": "SparklePDF, Fancy edition"
  }
]
```

`namespace` and `prefix` can be null. The actual metadata reported
depends on the file type and the metadata available in that specific
document. Paperless only reports PDF metadata at this point.

## Authorization

The REST API provides three different forms of authentication.

1.  Basic authentication

    Authorize by providing an HTTP header in the form

    ```
    Authorization: Basic <credentials>
    ```

    where `credentials` is a base64-encoded string of
    `<username>:<password>`

2.  Session authentication

    When you're logged into paperless in your browser, you're
    automatically logged into the API as well and don't need to provide
    any authorization headers.

3.  Token authentication

    Paperless also offers an endpoint to acquire authentication tokens.

    POST a username and password as a form or json string to
    `/api/token/` and paperless will respond with a token, if the login
    data is correct. This token can be used to authenticate other
    requests with the following HTTP header:

    ```
    Authorization: Token <token>
    ```

    Tokens can be managed and revoked in the paperless admin.
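
For example, acquiring and then using a token with curl (credentials and
host are placeholders):

```bash
# Request a token
curl -d "username=user" -d "password=pass" "http://localhost:8000/api/token/"
# -> {"token":"..."}

# Use the token on subsequent requests
curl -H "Authorization: Token <token>" "http://localhost:8000/api/documents/"
```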

## Searching for documents

Full text searching is available on the `/api/documents/` endpoint. Two
specific query parameters cause the API to return full text search
results:

- `/api/documents/?query=your%20search%20query`: Search for a document
  using a full text query. For details on the syntax, see
  [Basic Usage - Searching](/usage#basic-usage_searching).
- `/api/documents/?more_like=1234`: Search for documents similar to
  the document with id 1234.

Pagination works exactly the same as it does for normal requests on this
endpoint.

Certain limitations apply to full text queries:

- Results are always sorted by search score. The results matching the
  query best will show up first.
- Only a small subset of filtering parameters are supported.

Furthermore, each returned document has an additional `__search_hit__`
attribute with various information about the search results:

```
{
  "count": 31,
  "next": "http://localhost:8000/api/documents/?page=2&query=test",
  "previous": null,
  "results": [

    ...

    {
      "id": 123,
      "title": "title",
      "content": "content",

      ...

      "__search_hit__": {
        "score": 0.343,
        "highlights": "text <span class=\"match\">Test</span> text",
        "rank": 23
      }
    },

    ...

  ]
}
```

- `score` is an indication of how well this document matches the query
  relative to the other search results.
- `highlights` is an excerpt from the document content and highlights
  the search terms with `<span>` tags as shown above.
- `rank` is the index of the search results. The first result will
  have rank 0.
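
For example (host and credentials are placeholders):

```bash
# Full text query
curl -u user:pass "http://localhost:8000/api/documents/?query=bank%20statement"
# Documents similar to document 1234
curl -u user:pass "http://localhost:8000/api/documents/?more_like=1234"
```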

### `/api/search/autocomplete/`

Get auto completions for a partial search term.

Query parameters:

- `term`: The incomplete term.
- `limit`: Number of results. Defaults to 10.

Results returned by the endpoint are ordered by importance of the term
in the document index. The first result is the term that has the highest
[Tf/Idf](https://en.wikipedia.org/wiki/Tf%E2%80%93idf) score in the index.

```json
["term1", "term3", "term6", "term4"]
```
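
For example (host and credentials are placeholders):

```bash
curl -u user:pass "http://localhost:8000/api/search/autocomplete/?term=stat&limit=5"
```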

## POSTing documents {#file-uploads}

The API provides a special endpoint for file uploads:

`/api/documents/post_document/`

POST a multipart form to this endpoint, where the form field `document`
contains the document that you want to upload to paperless. The filename
is sanitized and then used to store the document in a temporary
directory, and the consumer will be instructed to consume the document
from there.

The endpoint supports the following optional form fields:

- `title`: Specify a title that the consumer should use for the
  document.
- `created`: Specify a DateTime when the document was created (e.g.
  "2016-04-19" or "2016-04-19 06:15:00+02:00").
- `correspondent`: Specify the ID of a correspondent that the consumer
  should use for the document.
- `document_type`: Similar to correspondent.
- `tags`: Similar to correspondent. Specify this multiple times to
  have multiple tags added to the document.

The endpoint will immediately return "OK" if the document consumption
process was started successfully. No additional status information about
the consumption process itself is available, since that happens in a
different process.
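
For example, uploading a file with curl (all values are placeholders):

```bash
curl -u user:pass \
    -F "document=@/path/to/scan.pdf" \
    -F "title=My scanned document" \
    -F "tags=1" -F "tags=2" \
    "http://localhost:8000/api/documents/post_document/"
```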

## API Versioning

The REST API is versioned since Paperless-ngx 1.3.0.

- Versioning ensures that changes to the API don't break older
  clients.
- Clients specify the specific version of the API they wish to use
  with every request and Paperless will handle the request using the
  specified API version.
- Even if the underlying data model changes, older API versions will
  always serve compatible data.
- If no version is specified, Paperless will serve version 1 to ensure
  compatibility with older clients that do not request a specific API
  version.

API versions are specified by submitting an additional HTTP `Accept`
header with every request:

```
Accept: application/json; version=6
```

If an invalid version is specified, Paperless 1.3.0 will respond with
"406 Not Acceptable" and an error message in the body. Earlier
versions of Paperless will serve API version 1 regardless of whether a
version is specified via the `Accept` header.

If a client wishes to verify whether it is compatible with any given
server, the following procedure should be performed:

1.  Perform an _authenticated_ request against any API endpoint. If the
    server is on version 1.3.0 or newer, the server will add two custom
    headers to the response:

    ```
    X-Api-Version: 2
    X-Version: 1.3.0
    ```

2.  Determine whether the client is compatible with this server based on
    the presence/absence of these headers and their values if present.
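
For example, checking the version headers with curl (host and credentials
are placeholders):

```bash
# Dump the response headers and filter for the version fields
curl -s -o /dev/null -D - -u user:pass \
    -H "Accept: application/json; version=2" \
    "http://localhost:8000/api/documents/" | grep -i "x-api-version\|x-version"
```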

### API Changelog

#### Version 1

Initial API version.

#### Version 2

- Added field `Tag.color`. This read/write string field contains a hex
  color such as `#a6cee3`.
- Added read-only field `Tag.text_color`. This field contains the text
  color to use for a specific tag, which is either black or white
  depending on the brightness of `Tag.color`.
- Removed field `Tag.colour`.
300
docs/api.rst
@@ -1,300 +0,0 @@

************
The REST API
************

Paperless makes use of the `Django REST Framework`_ standard API interface.
It provides a browsable API for most of its endpoints, which you can inspect
at ``http://<paperless-host>:<port>/api/``. This also documents most of the
available filters and ordering fields.

.. _Django REST Framework: http://django-rest-framework.org/

The API provides 5 main endpoints:

* ``/api/documents/``: Full CRUD support, except POSTing new documents. See below.
* ``/api/correspondents/``: Full CRUD support.
* ``/api/document_types/``: Full CRUD support.
* ``/api/logs/``: Read-Only.
* ``/api/tags/``: Full CRUD support.

All of these endpoints except for the logging endpoint
allow you to fetch, edit and delete individual objects
by appending their primary key to the path, for example ``/api/documents/454/``.

The objects served by the document endpoint contain the following fields:

* ``id``: ID of the document. Read-only.
* ``title``: Title of the document.
* ``content``: Plain text content of the document.
* ``tags``: List of IDs of tags assigned to this document, or empty list.
* ``document_type``: Document type of this document, or null.
* ``correspondent``: Correspondent of this document or null.
* ``created``: The date at which this document was created.
* ``modified``: The date at which this document was last edited in paperless. Read-only.
* ``added``: The date at which this document was added to paperless. Read-only.
* ``archive_serial_number``: The identifier of this document in a physical document archive.
* ``original_file_name``: Verbose filename of the original document. Read-only.
* ``archived_file_name``: Verbose filename of the archived document. Read-only. Null if no archived document is available.

Downloading documents
#####################

In addition to that, the document endpoint offers these additional actions on
individual documents:

* ``/api/documents/<pk>/download/``: Download the document.
* ``/api/documents/<pk>/preview/``: Display the document inline,
  without downloading it.
* ``/api/documents/<pk>/thumb/``: Download the PNG thumbnail of a document.

Paperless generates archived PDF/A documents from consumed files and stores both
the original files as well as the archived files. By default, the endpoints
for previews and downloads serve the archived file, if it is available.
Otherwise, the original file is served.
Some documents cannot be archived.

The endpoints correctly serve the response header fields ``Content-Disposition``
and ``Content-Type`` to indicate the filename for download and the type of content of
the document.

In order to download or preview the original document when an archived document is available,
supply the query parameter ``original=true``.

.. hint::

    Paperless used to provide this functionality at ``/fetch/<pk>/preview``,
    ``/fetch/<pk>/thumb`` and ``/fetch/<pk>/doc``. Redirects to the new URLs
    are in place. However, if you use these old URLs to access documents, you
    should update your app or script to use the new URLs.

Getting document metadata
#########################

The API also has an endpoint to retrieve read-only metadata about specific
documents. This information is not served along with the document objects,
since it requires reading files and would therefore slow down document lists
considerably.

Access the metadata of a document with an ID ``id`` at ``/api/documents/<id>/metadata/``.

The endpoint reports the following data:

* ``original_checksum``: MD5 checksum of the original document.
* ``original_size``: Size of the original document, in bytes.
* ``original_mime_type``: Mime type of the original document.
* ``media_filename``: Current filename of the document, under which it is stored inside the media directory.
* ``has_archive_version``: True, if this document is archived, false otherwise.
* ``original_metadata``: A list of metadata associated with the original document. See below.
* ``archive_checksum``: MD5 checksum of the archived document, or null.
* ``archive_size``: Size of the archived document in bytes, or null.
* ``archive_metadata``: Metadata associated with the archived document, or null. See below.

File metadata is reported as a list of objects in the following form:

.. code:: json

    [
        {
            "namespace": "http://ns.adobe.com/pdf/1.3/",
            "prefix": "pdf",
            "key": "Producer",
            "value": "SparklePDF, Fancy edition"
        }
    ]

``namespace`` and ``prefix`` can be null. The actual metadata reported depends on the file type and the metadata
available in that specific document. Paperless only reports PDF metadata at this point.

Authorization
#############

The REST API provides three different forms of authentication.

1. Basic authentication

   Authorize by providing an HTTP header in the form

   .. code::

      Authorization: Basic <credentials>

   where ``credentials`` is a base64-encoded string of ``<username>:<password>``

2. Session authentication

   When you're logged into paperless in your browser, you're automatically
   logged into the API as well and don't need to provide any authorization
   headers.

3. Token authentication

   Paperless also offers an endpoint to acquire authentication tokens.

   POST a username and password as a form or json string to ``/api/token/``
   and paperless will respond with a token, if the login data is correct.
   This token can be used to authenticate other requests with the
   following HTTP header:

   .. code::

      Authorization: Token <token>

   Tokens can be managed and revoked in the paperless admin.

Searching for documents
#######################

Full text searching is available on the ``/api/documents/`` endpoint. Two specific
query parameters cause the API to return full text search results:

* ``/api/documents/?query=your%20search%20query``: Search for a document using a full text query.
  For details on the syntax, see :ref:`basic-usage_searching`.

* ``/api/documents/?more_like=1234``: Search for documents similar to the document with id 1234.

Pagination works exactly the same as it does for normal requests on this endpoint.

Certain limitations apply to full text queries:

* Results are always sorted by search score. The results matching the query best will show up first.

* Only a small subset of filtering parameters are supported.

Furthermore, each returned document has an additional ``__search_hit__`` attribute with various information
about the search results:

.. code::

    {
        "count": 31,
        "next": "http://localhost:8000/api/documents/?page=2&query=test",
        "previous": null,
        "results": [

            ...

            {
                "id": 123,
                "title": "title",
                "content": "content",

                ...

                "__search_hit__": {
                    "score": 0.343,
                    "highlights": "text <span class=\"match\">Test</span> text",
                    "rank": 23
                }
            },

            ...

        ]
    }

* ``score`` is an indication of how well this document matches the query relative to the other search results.
* ``highlights`` is an excerpt from the document content and highlights the search terms with ``<span>`` tags as shown above.
* ``rank`` is the index of the search results. The first result will have rank 0.

``/api/search/autocomplete/``
=============================

Get auto completions for a partial search term.

Query parameters:

* ``term``: The incomplete term.
* ``limit``: Number of results. Defaults to 10.

Results returned by the endpoint are ordered by importance of the term in the
document index. The first result is the term that has the highest Tf/Idf score
in the index.

.. code:: json

    [
        "term1",
        "term3",
        "term6",
        "term4"
    ]

.. _api-file_uploads:

POSTing documents
#################

The API provides a special endpoint for file uploads:

``/api/documents/post_document/``

POST a multipart form to this endpoint, where the form field ``document`` contains
the document that you want to upload to paperless. The filename is sanitized and
then used to store the document in a temporary directory, and the consumer will
be instructed to consume the document from there.

The endpoint supports the following optional form fields:

* ``title``: Specify a title that the consumer should use for the document.
* ``correspondent``: Specify the ID of a correspondent that the consumer should use for the document.
* ``document_type``: Similar to correspondent.
* ``tags``: Similar to correspondent. Specify this multiple times to have multiple tags added
  to the document.

The endpoint will immediately return "OK" if the document consumption process
was started successfully. No additional status information about the consumption
process itself is available, since that happens in a different process.

.. _api-versioning:

API Versioning
##############

The REST API is versioned since Paperless-ngx 1.3.0.

* Versioning ensures that changes to the API don't break older clients.
* Clients specify the specific version of the API they wish to use with every request and Paperless will handle the request using the specified API version.
* Even if the underlying data model changes, older API versions will always serve compatible data.
* If no version is specified, Paperless will serve version 1 to ensure compatibility with older clients that do not request a specific API version.

API versions are specified by submitting an additional HTTP ``Accept`` header with every request:

.. code::

    Accept: application/json; version=6

If an invalid version is specified, Paperless 1.3.0 will respond with "406 Not Acceptable" and an error message in the body.
Earlier versions of Paperless will serve API version 1 regardless of whether a version is specified via the ``Accept`` header.

If a client wishes to verify whether it is compatible with any given server, the following procedure should be performed:

1. Perform an *authenticated* request against any API endpoint. If the server is on version 1.3.0 or newer, the server will
   add two custom headers to the response:

   .. code::

      X-Api-Version: 2
      X-Version: 1.3.0

2. Determine whether the client is compatible with this server based on the presence/absence of these headers and their values if present.


API Changelog
=============

Version 1
---------

Initial API version.

Version 2
---------

* Added field ``Tag.color``. This read/write string field contains a hex color such as ``#a6cee3``.
* Added read-only field ``Tag.text_color``. This field contains the text color to use for a specific tag, which is either black or white depending on the brightness of ``Tag.color``.
* Removed field ``Tag.colour``.
36
docs/assets/extra.css
Normal file
@@ -0,0 +1,36 @@

:root > * {
  --md-primary-fg-color: #17541f;
  --md-primary-fg-color--dark: #17541f;
  --md-primary-fg-color--light: #17541f;
  --md-accent-fg-color: #2b8a38;
  --md-typeset-a-color: #21652a;
}

[data-md-color-scheme="slate"] {
  --md-hue: 222;
}

@media (min-width: 400px) {
  .grid-left {
    width: 33%;
    float: left;
  }
  .grid-right {
    width: 62%;
    margin-left: 4%;
    float: left;
  }
}

.grid-left > p {
  margin-bottom: 2rem;
}

.grid-right p {
  margin: 0;
}

.index-callout {
  margin-right: .5rem;
}

BIN  docs/assets/favicon.png (new file, 768 B)

12
docs/assets/logo.svg
Normal file
@@ -0,0 +1,12 @@

<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 27.0.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
     viewBox="0 0 1000 1000" style="enable-background:new 0 0 1000 1000;" xml:space="preserve">
<style type="text/css">
    .st0{fill:#FFFFFF;}
</style>
<path class="st0" d="M299,891.7c-4.2-19.8-12.5-59.6-13.6-59.6c-176.7-105.7-155.8-288.7-97.3-393.4
    c12.5,131.8,245.8,222.8,109.8,383.9c-1.1,2,6.2,27.2,12.5,50.2c27.2-46,68-101.4,65.8-106.7C208.9,358.2,731.9,326.9,840.6,73.7
    c49.1,244.8-25.1,623.5-445.5,719.7c-2,1.1-76.3,131.8-79.5,132.9c0-2-31.4-1.1-27.2-11.5C290.7,908.4,294.8,900.1,299,891.7
    L299,891.7z M293.8,793.4c53.3-61.8-9.4-167.4-47.1-201.9C310.5,701.3,306.3,765.1,293.8,793.4L293.8,793.4z"/>
</svg>

After: 869 B

68
docs/assets/logo_full_black.svg
Normal file
@@ -0,0 +1,68 @@

[SVG vector path data for the full Paperless-ngx wordmark logo, dark variant; 68 lines]

After: 6.3 KiB

69
docs/assets/logo_full_white.svg
Normal file
@@ -0,0 +1,69 @@

[SVG vector path data for the full Paperless-ngx wordmark logo, light variant; 69 lines]

After: 6.5 KiB

(modified image: 67 KiB before, 67 KiB after)
BIN  docs/assets/screenshots/bulk-edit.png (new file, 661 KiB)
BIN  docs/assets/screenshots/correspondents.png (new file, 457 KiB)
BIN  docs/assets/screenshots/dashboard.png (new file, 436 KiB)
BIN  docs/assets/screenshots/documents-filter.png (new file, 462 KiB)
BIN  docs/assets/screenshots/documents-largecards.png (new file, 608 KiB)
BIN  docs/assets/screenshots/documents-smallcards-dark.png (new file, 698 KiB)
BIN  docs/assets/screenshots/documents-smallcards.png (new file, 706 KiB)
BIN  docs/assets/screenshots/documents-table.png (new file, 480 KiB)
(modified image: 680 KiB before, 680 KiB after)