Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2026-01-20 22:24:24 -06:00)

Compare commits: feature-te... → fix-11679 (3 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | f23433bd57 |  |
|  | c968505f64 |  |
|  | fbb5864757 |  |
56 .codecov.yml
@@ -1,7 +1,6 @@
# https://docs.codecov.com/docs/codecovyml-reference#codecov
codecov:
require_ci_to_pass: true
# https://docs.codecov.com/docs/components
# https://docs.codecov.com/docs/components
component_management:
individual_components:
- component_id: backend
@@ -10,70 +9,35 @@ component_management:
- component_id: frontend
paths:
- src-ui/**
# https://docs.codecov.com/docs/flags#step-2-flag-management-in-yaml
# https://docs.codecov.com/docs/carryforward-flags
flags:
# Backend Python versions
backend-python-3.10:
backend:
paths:
- src/**
carryforward: true
backend-python-3.11:
paths:
- src/**
carryforward: true
backend-python-3.12:
paths:
- src/**
carryforward: true
# Frontend (shards merge into single flag)
frontend-node-24.x:
frontend:
paths:
- src-ui/**
carryforward: true
# https://docs.codecov.com/docs/pull-request-comments
comment:
layout: "header, diff, components, flags, files"
# https://docs.codecov.com/docs/javascript-bundle-analysis
require_bundle_changes: true
bundle_change_threshold: "50Kb"
coverage:
# https://docs.codecov.com/docs/commit-status
status:
project:
backend:
flags:
- backend-python-3.10
- backend-python-3.11
- backend-python-3.12
paths:
- src/**
default:
# https://docs.codecov.com/docs/commit-status#threshold
threshold: 1%
removed_code_behavior: adjust_base
frontend:
flags:
- frontend-node-24.x
paths:
- src-ui/**
threshold: 1%
removed_code_behavior: adjust_base
patch:
backend:
flags:
- backend-python-3.10
- backend-python-3.11
- backend-python-3.12
paths:
- src/**
target: 100%
threshold: 25%
frontend:
flags:
- frontend-node-24.x
paths:
- src-ui/**
default:
# For the changed lines only, target 100% covered, but
# allow as low as 75%
target: 100%
threshold: 25%
# https://docs.codecov.com/docs/javascript-bundle-analysis
bundle_analysis:
# Fail if the bundle size increases by more than 1MB
warning_threshold: "1MB"
status: true
1 .github/release-drafter.yml vendored
@@ -44,7 +44,6 @@ include-labels:
- 'notable'
exclude-labels:
- 'skip-changelog'
filter-by-commitish: true
category-template: '### $TITLE'
change-template: '- $TITLE @$AUTHOR ([#$NUMBER]($URL))'
change-title-escapes: '\<*_&#@'
4 .github/workflows/ci-backend.yml vendored
@@ -88,13 +88,13 @@ jobs:
if: always()
uses: codecov/codecov-action@v5
with:
flags: backend-python-${{ matrix.python-version }}
flags: backend,backend-python-${{ matrix.python-version }}
files: junit.xml
report_type: test_results
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5
with:
flags: backend-python-${{ matrix.python-version }}
flags: backend,backend-python-${{ matrix.python-version }}
files: coverage.xml
report_type: coverage
- name: Stop containers
42 .github/workflows/ci-docker.yml vendored
@@ -35,7 +35,7 @@ jobs:
contents: read
packages: write
outputs:
should-push: ${{ steps.check-push.outputs.should-push }}
can-push: ${{ steps.check-push.outputs.can-push }}
push-external: ${{ steps.check-push.outputs.push-external }}
repository: ${{ steps.repo.outputs.name }}
ref-name: ${{ steps.ref.outputs.name }}
@@ -59,28 +59,16 @@ jobs:
env:
REF_NAME: ${{ steps.ref.outputs.name }}
run: |
# should-push: Should we push to GHCR?
# True for:
# 1. Pushes (tags/dev/beta) - filtered via the workflow triggers
# 2. Internal PRs where the branch name starts with 'feature-' - filtered here when a PR is synced

should_push="false"

if [[ "${{ github.event_name }}" == "push" ]]; then
should_push="true"
elif [[ "${{ github.event_name }}" == "pull_request" && "${{ github.event.pull_request.head.repo.full_name }}" == "${{ github.repository }}" ]]; then
if [[ "${REF_NAME}" == feature-* || "${REF_NAME}" == fix-* ]]; then
should_push="true"
fi
fi

echo "should-push=${should_push}"
echo "should-push=${should_push}" >> $GITHUB_OUTPUT
# can-push: Can we push to GHCR?
# True for: pushes, or PRs from the same repo (not forks)
can_push=${{ github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository }}
echo "can-push=${can_push}"
echo "can-push=${can_push}" >> $GITHUB_OUTPUT

# push-external: Should we also push to Docker Hub and Quay.io?
# Only for main repo on dev/beta branches or version tags
push_external="false"
if [[ "${should_push}" == "true" && "${{ github.repository_owner }}" == "paperless-ngx" ]]; then
if [[ "${can_push}" == "true" && "${{ github.repository_owner }}" == "paperless-ngx" ]]; then
case "${REF_NAME}" in
dev|beta)
push_external="true"
@@ -110,12 +98,6 @@ jobs:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Maximize space
run: |
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf /usr/local/share/boost
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
- name: Docker metadata
id: docker-meta
uses: docker/metadata-action@v5.10.0
@@ -137,20 +119,20 @@ jobs:
labels: ${{ steps.docker-meta.outputs.labels }}
build-args: |
PNGX_TAG_VERSION=${{ steps.docker-meta.outputs.version }}
outputs: type=image,name=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }},push-by-digest=true,name-canonical=true,push=${{ steps.check-push.outputs.should-push }}
outputs: type=image,name=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }},push-by-digest=true,name-canonical=true,push=${{ steps.check-push.outputs.can-push }}
cache-from: |
type=registry,ref=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }}/cache/app:${{ steps.ref.outputs.cache-ref }}-${{ matrix.arch }}
type=registry,ref=${{ env.REGISTRY }}/${{ steps.repo.outputs.name }}/cache/app:dev-${{ matrix.arch }}
cache-to: ${{ steps.check-push.outputs.should-push == 'true' && format('type=registry,mode=max,ref={0}/{1}/cache/app:{2}-{3}', env.REGISTRY, steps.repo.outputs.name, steps.ref.outputs.cache-ref, matrix.arch) || '' }}
cache-to: ${{ steps.check-push.outputs.can-push == 'true' && format('type=registry,mode=max,ref={0}/{1}/cache/app:{2}-{3}', env.REGISTRY, steps.repo.outputs.name, steps.ref.outputs.cache-ref, matrix.arch) || '' }}
- name: Export digest
if: steps.check-push.outputs.should-push == 'true'
if: steps.check-push.outputs.can-push == 'true'
run: |
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
echo "digest=${digest}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
if: steps.check-push.outputs.should-push == 'true'
if: steps.check-push.outputs.can-push == 'true'
uses: actions/upload-artifact@v6.0.0
with:
name: digests-${{ matrix.arch }}
@@ -161,7 +143,7 @@ jobs:
name: Merge and Push Manifest
runs-on: ubuntu-24.04
needs: build-arch
if: needs.build-arch.outputs.should-push == 'true'
if: needs.build-arch.outputs.can-push == 'true'
permissions:
contents: read
packages: write
4 .github/workflows/ci-frontend.yml vendored
@@ -109,13 +109,13 @@ jobs:
if: always()
uses: codecov/codecov-action@v5
with:
flags: frontend-node-${{ matrix.node-version }}
flags: frontend,frontend-node-${{ matrix.node-version }}
directory: src-ui/
report_type: test_results
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5
with:
flags: frontend-node-${{ matrix.node-version }}
flags: frontend,frontend-node-${{ matrix.node-version }}
directory: src-ui/coverage/
e2e-tests:
name: "E2E Tests (${{ matrix.shard-index }}/${{ matrix.shard-count }})"
@@ -30,7 +30,7 @@ RUN set -eux \
# Purpose: Installs s6-overlay and rootfs
# Comments:
# - Don't leave anything extra in here either
FROM ghcr.io/astral-sh/uv:0.9.26-python3.12-trixie-slim AS s6-overlay-base
FROM ghcr.io/astral-sh/uv:0.9.15-python3.12-trixie-slim AS s6-overlay-base

WORKDIR /usr/src/s6

@@ -196,11 +196,7 @@ RUN set -eux \
&& apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
&& echo "Installing Python requirements" \
&& uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt \
&& uv pip install --no-cache --system --no-python-downloads --python-preference system \
--index https://pypi.org/simple \
--index https://download.pytorch.org/whl/cpu \
--index-strategy unsafe-best-match \
--requirements requirements.txt \
&& uv pip install --no-cache --system --no-python-downloads --python-preference system --requirements requirements.txt \
&& echo "Installing NLTK data" \
&& python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" snowball_data \
&& python3 -W ignore::RuntimeWarning -m nltk.downloader -d "/usr/share/nltk_data" stopwords \
@@ -11,7 +11,6 @@ for command in decrypt_documents \
mail_fetcher \
document_create_classifier \
document_index \
document_llmindex \
document_renamer \
document_retagger \
document_thumbnails \
@@ -1,14 +0,0 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash

set -e

cd "${PAPERLESS_SRC_DIR}"

if [[ $(id -u) == 0 ]]; then
s6-setuidgid paperless python3 manage.py document_llmindex "$@"
elif [[ $(id -un) == "paperless" ]]; then
python3 manage.py document_llmindex "$@"
else
echo "Unknown user."
fi
@@ -1,60 +1,9 @@
# Changelog

## paperless-ngx 2.20.5

### Bug Fixes

- Fix: ensure horizontal scroll for long tag names in list, wrap tags without parent [@shamoon](https://github.com/shamoon) ([#11811](https://github.com/paperless-ngx/paperless-ngx/pull/11811))
- Fix: use explicit order field for workflow actions [@shamoon](https://github.com/shamoon) [@stumpylog](https://github.com/stumpylog) ([#11781](https://github.com/paperless-ngx/paperless-ngx/pull/11781))

### All App Changes

<details>
<summary>2 changes</summary>

- Fix: ensure horizontal scroll for long tag names in list, wrap tags without parent [@shamoon](https://github.com/shamoon) ([#11811](https://github.com/paperless-ngx/paperless-ngx/pull/11811))
- Fix: use explicit order field for workflow actions [@shamoon](https://github.com/shamoon) [@stumpylog](https://github.com/stumpylog) ([#11781](https://github.com/paperless-ngx/paperless-ngx/pull/11781))
</details>

## paperless-ngx 2.20.4

### Security

- Resolve [GHSA-28cf-xvcf-hw6m](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-28cf-xvcf-hw6m)

### Bug Fixes

- Fix: propagate metadata override created value [@shamoon](https://github.com/shamoon) ([#11659](https://github.com/paperless-ngx/paperless-ngx/pull/11659))
- Fix: support ordering by storage path name [@shamoon](https://github.com/shamoon) ([#11661](https://github.com/paperless-ngx/paperless-ngx/pull/11661))
- Fix: validate cf integer values within PostgreSQL range [@shamoon](https://github.com/shamoon) ([#11666](https://github.com/paperless-ngx/paperless-ngx/pull/11666))
- Fixhancement: add error handling and retry when opening index [@shamoon](https://github.com/shamoon) ([#11731](https://github.com/paperless-ngx/paperless-ngx/pull/11731))
- Fix: fix recurring workflow to respect latest run time [@shamoon](https://github.com/shamoon) ([#11735](https://github.com/paperless-ngx/paperless-ngx/pull/11735))

### All App Changes

<details>
<summary>5 changes</summary>

- Fix: propagate metadata override created value [@shamoon](https://github.com/shamoon) ([#11659](https://github.com/paperless-ngx/paperless-ngx/pull/11659))
- Fix: support ordering by storage path name [@shamoon](https://github.com/shamoon) ([#11661](https://github.com/paperless-ngx/paperless-ngx/pull/11661))
- Fix: validate cf integer values within PostgreSQL range [@shamoon](https://github.com/shamoon) ([#11666](https://github.com/paperless-ngx/paperless-ngx/pull/11666))
- Fixhancement: add error handling and retry when opening index [@shamoon](https://github.com/shamoon) ([#11731](https://github.com/paperless-ngx/paperless-ngx/pull/11731))
- Fix: fix recurring workflow to respect latest run time [@shamoon](https://github.com/shamoon) ([#11735](https://github.com/paperless-ngx/paperless-ngx/pull/11735))
</details>

## paperless-ngx 2.20.3

### Security

- Resolve [GHSA-7cq3-mhxq-w946](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-7cq3-mhxq-w946)

## paperless-ngx 2.20.2

### Security

- Resolve [GHSA-6653-vcx4-69mc](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-6653-vcx4-69mc)
- Resolve [GHSA-24x5-wp64-9fcc](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-24x5-wp64-9fcc)

### Features / Enhancements

- Tweakhancement: dim inactive users in users-groups list [@shamoon](https://github.com/shamoon) ([#11537](https://github.com/paperless-ngx/paperless-ngx/pull/11537))
@@ -170,18 +170,11 @@ Available options are `postgresql` and `mariadb`.

!!! note

A pool of 8-10 connections per worker is typically sufficient.
If you encounter error messages such as `couldn't get a connection`
or database connection timeouts, you probably need to increase the pool size.

!!! warning
Make sure your PostgreSQL `max_connections` setting is large enough to handle the connection pools:
`(NB_PAPERLESS_WORKERS + NB_CELERY_WORKERS) × POOL_SIZE + SAFETY_MARGIN`. For example, with
4 Paperless workers and 2 Celery workers, and a pool size of 8: `(4 + 2) × 8 + 10 = 58`,
so `max_connections = 60` (or even more) is appropriate.

This assumes only Paperless-ngx connects to your PostgreSQL instance. If you have other applications,
you should increase `max_connections` accordingly.
A small pool is typically sufficient — for example, a size of 4.
Make sure your PostgreSQL server's max_connections setting is large enough to handle:
`(Paperless workers + Celery workers) × pool size + safety margin`
For example, with 4 Paperless workers and 2 Celery workers, and a pool size of 4:
(4 + 2) × 4 + 10 = 34 connections required.
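For illustration only, the worked example above could map onto a deployment sketch like the following; the service name, image tag and the rounded-up value are assumptions, not part of this change:

```yaml
# docker-compose.yml (sketch): size PostgreSQL for the example above,
# (4 Paperless workers + 2 Celery workers) * pool size 4 + safety margin 10 = 34,
# rounded up to 40 for a little headroom.
services:
  db:
    image: docker.io/library/postgres:17
    command: postgres -c max_connections=40
```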
#### [`PAPERLESS_DB_READ_CACHE_ENABLED=<bool>`](#PAPERLESS_DB_READ_CACHE_ENABLED) {#PAPERLESS_DB_READ_CACHE_ENABLED}

@@ -1831,67 +1824,3 @@ password. All of these options come from their similarly-named [Django settings]
: The endpoint to use for the remote OCR engine. This is required for Azure AI.

Defaults to None.

## AI {#ai}

#### [`PAPERLESS_AI_ENABLED=<bool>`](#PAPERLESS_AI_ENABLED) {#PAPERLESS_AI_ENABLED}

: Enables the AI features in Paperless. This includes the AI-based
suggestions. This setting is required to be set to true in order to use the AI features.

Defaults to false.

#### [`PAPERLESS_AI_LLM_EMBEDDING_BACKEND=<str>`](#PAPERLESS_AI_LLM_EMBEDDING_BACKEND) {#PAPERLESS_AI_LLM_EMBEDDING_BACKEND}

: The embedding backend to use for RAG. This can be either "openai" or "huggingface".

Defaults to None.

#### [`PAPERLESS_AI_LLM_EMBEDDING_MODEL=<str>`](#PAPERLESS_AI_LLM_EMBEDDING_MODEL) {#PAPERLESS_AI_LLM_EMBEDDING_MODEL}

: The model to use for the embedding backend for RAG. This can be set to any of the embedding models supported by the current embedding backend. If not supplied, defaults to "text-embedding-3-small" for OpenAI and "sentence-transformers/all-MiniLM-L6-v2" for Huggingface.

Defaults to None.

#### [`PAPERLESS_AI_LLM_BACKEND=<str>`](#PAPERLESS_AI_LLM_BACKEND) {#PAPERLESS_AI_LLM_BACKEND}

: The AI backend to use. This can be either "openai" or "ollama". If set to "ollama", the AI
features will be run locally on your machine. If set to "openai", the AI features will be run
using the OpenAI API. This setting is required to be set to use the AI features.

Defaults to None.

!!! note

The OpenAI API is a paid service. You will need to set up an OpenAI account and
will be charged for usage incurred by Paperless-ngx features and your document data
will (of course) be sent to the OpenAI API. Paperless-ngx does not endorse the use of the
OpenAI API in any way.

Refer to the OpenAI terms of service, and use at your own risk.

#### [`PAPERLESS_AI_LLM_MODEL=<str>`](#PAPERLESS_AI_LLM_MODEL) {#PAPERLESS_AI_LLM_MODEL}

: The model to use for the AI backend, i.e. "gpt-3.5-turbo", "gpt-4" or any of the models supported by the
current backend. If not supplied, defaults to "gpt-3.5-turbo" for OpenAI and "llama3.1" for Ollama.

Defaults to None.

#### [`PAPERLESS_AI_LLM_API_KEY=<str>`](#PAPERLESS_AI_LLM_API_KEY) {#PAPERLESS_AI_LLM_API_KEY}

: The API key to use for the AI backend. This is required for the OpenAI backend (optional for others).

Defaults to None.

#### [`PAPERLESS_AI_LLM_ENDPOINT=<str>`](#PAPERLESS_AI_LLM_ENDPOINT) {#PAPERLESS_AI_LLM_ENDPOINT}

: The endpoint / url to use for the AI backend. This is required for the Ollama backend (optional for others).

Defaults to None.

#### [`PAPERLESS_AI_LLM_INDEX_TASK_CRON=<cron expression>`](#PAPERLESS_AI_LLM_INDEX_TASK_CRON) {#PAPERLESS_AI_LLM_INDEX_TASK_CRON}

: Configures the schedule to update the AI embeddings of text content and metadata for all documents. Only performed if
AI is enabled and the LLM embedding backend is set.

Defaults to `10 2 * * *`, once per day.
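Taken together, a minimal opt-in setup for these settings might look like the following sketch; the compose service name, the Ollama endpoint and the choice of a local backend are assumptions rather than values taken from this diff:

```yaml
# docker-compose.override.yml (sketch): enable the optional AI features
# against a local Ollama instance.
services:
  webserver:
    environment:
      PAPERLESS_AI_ENABLED: "true"
      PAPERLESS_AI_LLM_BACKEND: "ollama"
      PAPERLESS_AI_LLM_MODEL: "llama3.1"               # documented Ollama default
      PAPERLESS_AI_LLM_ENDPOINT: "http://ollama:11434" # required for the Ollama backend
      # Optional: enable the LLM index (RAG) with local embeddings
      PAPERLESS_AI_LLM_EMBEDDING_BACKEND: "huggingface"
      PAPERLESS_AI_LLM_INDEX_TASK_CRON: "10 2 * * *"   # documented default schedule
```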
@@ -31,7 +31,7 @@ physical documents into a searchable online archive so you can keep, well, _less
- _New!_ Supports remote OCR with Azure AI (opt-in).
- Documents are saved as PDF/A format which is designed for long term storage, alongside the unaltered originals.
- Uses machine-learning to automatically add tags, correspondents and document types to your documents.
- **New**: Paperless-ngx can now leverage AI (Large Language Models or LLMs) for document suggestions. This is an optional feature that can be enabled (and is disabled by default).
- Supports PDF documents, images, plain text files, Office documents (Word, Excel, PowerPoint, and LibreOffice equivalents)[^1] and more.
- Paperless stores your documents plain on disk. Filenames and folders are managed by paperless and their format can be configured freely with different configurations assigned to different documents.
- **Beautiful, modern web application** that features:
@@ -278,28 +278,6 @@ Once setup, navigating to the email settings page in Paperless-ngx will allow yo
You can also submit a document using the REST API, see [POSTing documents](api.md#file-uploads)
for details.

## Document Suggestions

Paperless-ngx can suggest tags, correspondents, document types and storage paths for documents based on the content of the document. This is done using a (non-LLM) machine learning model that is trained on the documents in your database. The suggestions are shown in the document detail page and can be accepted or rejected by the user.

## AI Features

Paperless-ngx includes several features that use AI to enhance the document management experience. These features are optional and can be enabled or disabled in the settings. If you are using the AI features, you may want to also enable the "LLM index" feature, which supports Retrieval-Augmented Generation (RAG) designed to improve the quality of AI responses. The LLM index feature is not enabled by default and requires additional configuration.

!!! warning

Remember that Paperless-ngx will send document content to the AI provider you have configured, so consider the privacy implications of using these features, especially if using a remote model (e.g. OpenAI), instead of the default local model.

The AI features work by creating an embedding of the text content and metadata of documents, which is then used for various tasks such as similarity search and question answering. This uses the FAISS vector store.

### AI-Enhanced Suggestions

If enabled, Paperless-ngx can use an AI LLM model to suggest document titles, dates, tags, correspondents and document types for documents. This feature will always be "opt-in" and does not disable the existing classifier-based suggestion system. Currently, both remote (via the OpenAI API) and local (via Ollama) models are supported, see [configuration](configuration.md#ai) for details.

### Document Chat

Paperless-ngx can use an AI LLM model to answer questions about a document or across multiple documents. Again, this feature works best when RAG is enabled. The chat feature is available in the upper app toolbar and will switch between chatting across multiple documents or a single document based on the current view.

## Sharing documents from Paperless-ngx

Paperless-ngx supports sharing documents with other users by assigning them [permissions](#object-permissions)
@@ -1,6 +1,6 @@
[project]
name = "paperless-ngx"
version = "2.20.5"
version = "2.20.3"
description = "A community-supported supercharged document management system: scan, index and archive all your physical documents"
readme = "README.md"
requires-python = ">=3.10"
@@ -28,7 +28,7 @@ dependencies = [
# Only patch versions are guaranteed to not introduce breaking changes.
"django~=5.2.5",
"django-allauth[mfa,socialaccount]~=65.12.1",
"django-auditlog~=3.4.1",
"django-auditlog~=3.3.0",
"django-cachalot~=2.8.0",
"django-celery-results~=2.6.0",
"django-compression-middleware~=0.5.0",
@@ -44,24 +44,16 @@ dependencies = [
"drf-spectacular~=0.28",
"drf-spectacular-sidecar~=2025.10.1",
"drf-writable-nested~=0.7.1",
"faiss-cpu>=1.10",
"filelock~=3.20.0",
"flower~=2.0.1",
"gotenberg-client~=0.13.1",
"gotenberg-client~=0.12.0",
"httpx-oauth~=0.16",
"imap-tools~=1.11.0",
"inotifyrecursive~=0.3",
"jinja2~=3.1.5",
"langdetect~=1.0.9",
"llama-index-core>=0.14.12",
"llama-index-embeddings-huggingface>=0.6.1",
"llama-index-embeddings-openai>=0.5.1",
"llama-index-llms-ollama>=0.9.1",
"llama-index-llms-openai>=0.6.13",
"llama-index-vector-stores-faiss>=0.5.2",
"nltk~=3.9.1",
"ocrmypdf~=16.13.0",
"openai>=1.76",
"ocrmypdf~=16.12.0",
"pathvalidate~=3.3.1",
"pdf2image~=1.17.0",
"python-dateutil~=2.9.0",
@@ -74,10 +66,8 @@ dependencies = [
"redis[hiredis]~=5.2.1",
"regex>=2025.9.18",
"scikit-learn~=1.7.0",
"sentence-transformers>=4.1",
"setproctitle~=1.3.4",
"tika-client~=0.10.0",
"torch~=2.9.1",
"tqdm~=4.67.1",
"watchdog~=6.0",
"whitenoise~=6.9",
@@ -92,7 +82,7 @@ optional-dependencies.postgres = [
"psycopg[c,pool]==3.2.12",
# Direct dependency for proper resolution of the pre-built wheels
"psycopg-c==3.2.12",
"psycopg-pool==3.3",
"psycopg-pool==3.2.7",
]
optional-dependencies.webserver = [
"granian[uvloop]~=2.5.1",
@@ -127,7 +117,7 @@ testing = [
]

lint = [
"pre-commit~=4.5.1",
"pre-commit~=4.4.0",
"pre-commit-uv~=4.2.0",
"ruff~=0.14.0",
]
@@ -170,15 +160,6 @@ zxing-cpp = [
{ url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_aarch64.whl", marker = "sys_platform == 'linux' and platform_machine == 'aarch64' and python_version == '3.12'" },
]

torch = [
{ index = "pytorch-cpu" },
]

[[tool.uv.index]]
name = "pytorch-cpu"
url = "https://download.pytorch.org/whl/cpu"
explicit = true

[tool.ruff]
target-version = "py310"
line-length = 88
@@ -274,7 +255,6 @@ testpaths = [
"src/paperless_tika/tests",
"src/paperless_text/tests/",
"src/paperless_remote/tests/",
"src/paperless_ai/tests",
]
addopts = [
"--pythonwarnings=all",
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
{
"name": "paperless-ngx-ui",
"version": "2.20.5",
"version": "2.20.3",
"scripts": {
"preinstall": "npx only-allow pnpm",
"ng": "ng",
@@ -35,12 +35,8 @@
@case (ConfigOptionType.String) { <pngx-input-text [formControlName]="option.key" [error]="errors[option.key]"></pngx-input-text> }
@case (ConfigOptionType.JSON) { <pngx-input-text [formControlName]="option.key" [error]="errors[option.key]"></pngx-input-text> }
@case (ConfigOptionType.File) { <pngx-input-file [formControlName]="option.key" (upload)="uploadFile($event, option.key)" [error]="errors[option.key]"></pngx-input-file> }
@case (ConfigOptionType.Password) { <pngx-input-password [formControlName]="option.key" [error]="errors[option.key]"></pngx-input-password> }
}
</div>
@if (option.note) {
<div class="form-text fst-italic">{{option.note}}</div>
}
</div>
</div>
</div>
@@ -29,7 +29,6 @@ import { SettingsService } from 'src/app/services/settings.service'
import { ToastService } from 'src/app/services/toast.service'
import { FileComponent } from '../../common/input/file/file.component'
import { NumberComponent } from '../../common/input/number/number.component'
import { PasswordComponent } from '../../common/input/password/password.component'
import { SelectComponent } from '../../common/input/select/select.component'
import { SwitchComponent } from '../../common/input/switch/switch.component'
import { TextComponent } from '../../common/input/text/text.component'
@@ -47,7 +46,6 @@ import { LoadingComponentWithPermissions } from '../../loading-component/loading
TextComponent,
NumberComponent,
FileComponent,
PasswordComponent,
AsyncPipe,
NgbNavModule,
FormsModule,
@@ -91,9 +91,6 @@ const status: SystemStatus = {
sanity_check_status: SystemStatusItemStatus.ERROR,
sanity_check_last_run: new Date().toISOString(),
sanity_check_error: 'Error running sanity check.',
llmindex_status: SystemStatusItemStatus.DISABLED,
llmindex_last_modified: new Date().toISOString(),
llmindex_error: null,
},
}
@@ -30,9 +30,6 @@
</div>
</div>
<ul ngbNav class="order-sm-3">
@if (aiEnabled) {
<pngx-chat></pngx-chat>
}
<pngx-toasts-dropdown></pngx-toasts-dropdown>
<li ngbDropdown class="nav-item dropdown">
<button class="btn ps-1 border-0" id="userDropdown" ngbDropdownToggle>
@@ -44,7 +44,6 @@ import { SettingsService } from 'src/app/services/settings.service'
import { TasksService } from 'src/app/services/tasks.service'
import { ToastService } from 'src/app/services/toast.service'
import { environment } from 'src/environments/environment'
import { ChatComponent } from '../chat/chat/chat.component'
import { ProfileEditDialogComponent } from '../common/profile-edit-dialog/profile-edit-dialog.component'
import { DocumentDetailComponent } from '../document-detail/document-detail.component'
import { ComponentWithPermissions } from '../with-permissions/with-permissions.component'
@@ -60,7 +59,6 @@ import { ToastsDropdownComponent } from './toasts-dropdown/toasts-dropdown.compo
DocumentTitlePipe,
IfPermissionsDirective,
ToastsDropdownComponent,
ChatComponent,
RouterModule,
NgClass,
NgbDropdownModule,
@@ -186,10 +184,6 @@ export class AppFrameComponent
})
}

get aiEnabled(): boolean {
return this.settingsService.get(SETTINGS_KEYS.AI_ENABLED)
}

closeMenu() {
this.isMenuCollapsed = true
}
@@ -1,5 +1,5 @@

<li ngbDropdown class="nav-item mx-1" (openChange)="onOpenChange($event)">
<li ngbDropdown class="nav-item" (openChange)="onOpenChange($event)">
@if (toasts.length) {
<span class="badge rounded-pill z-3 pe-none bg-secondary me-2 position-absolute top-0 left-0">{{ toasts.length }}</span>
}
@@ -1,35 +0,0 @@

<li ngbDropdown class="nav-item me-n2" (openChange)="onOpenChange($event)">
<button class="btn border-0" id="chatDropdown" ngbDropdownToggle>
<i-bs width="1.3em" height="1.3em" name="chatSquareDots"></i-bs>
</button>
<div ngbDropdownMenu class="dropdown-menu-end shadow p-3" aria-labelledby="chatDropdown">
<div class="chat-container bg-light p-2">
<div class="chat-messages font-monospace small">
@for (message of messages; track message) {
<div class="message d-flex flex-row small" [class.justify-content-end]="message.role === 'user'">
<span class="p-2 m-2" [class.bg-dark]="message.role === 'user'">
{{ message.content }}
@if (message.isStreaming) { <span class="blinking-cursor">|</span> }
</span>
</div>
}
<div #scrollAnchor></div>
</div>

<form class="chat-input">
<div class="input-group">
<input
#chatInput
class="form-control form-control-sm" name="chatInput" type="text"
[placeholder]="placeholder"
[disabled]="loading"
[(ngModel)]="input"
(keydown)="searchInputKeyDown($event)"
/>
<button class="btn btn-sm btn-secondary" type="button" (click)="sendMessage()" [disabled]="loading">Send</button>
</div>
</form>
</div>
</div>
</li>
@@ -1,37 +0,0 @@
.dropdown-menu {
width: var(--pngx-toast-max-width);
}

.chat-messages {
max-height: 350px;
overflow-y: auto;
}

.dropdown-toggle::after {
display: none;
}

.dropdown-item {
white-space: initial;
}

@media screen and (max-width: 400px) {
:host ::ng-deep .dropdown-menu-end {
right: -3rem;
}
}

.blinking-cursor {
font-weight: bold;
font-size: 1.2em;
animation: blink 1s step-end infinite;
}

@keyframes blink {
from, to {
opacity: 0;
}
50% {
opacity: 1;
}
}
@@ -1,132 +0,0 @@
import { provideHttpClient, withInterceptorsFromDi } from '@angular/common/http'
import { provideHttpClientTesting } from '@angular/common/http/testing'
import { ElementRef } from '@angular/core'
import { ComponentFixture, TestBed } from '@angular/core/testing'
import { NavigationEnd, Router } from '@angular/router'
import { allIcons, NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
import { Subject } from 'rxjs'
import { ChatService } from 'src/app/services/chat.service'
import { ChatComponent } from './chat.component'

describe('ChatComponent', () => {
let component: ChatComponent
let fixture: ComponentFixture<ChatComponent>
let chatService: ChatService
let router: Router
let routerEvents$: Subject<NavigationEnd>
let mockStream$: Subject<string>

beforeEach(async () => {
TestBed.configureTestingModule({
imports: [NgxBootstrapIconsModule.pick(allIcons), ChatComponent],
providers: [
provideHttpClient(withInterceptorsFromDi()),
provideHttpClientTesting(),
],
}).compileComponents()

fixture = TestBed.createComponent(ChatComponent)
router = TestBed.inject(Router)
routerEvents$ = new Subject<any>()
jest
.spyOn(router, 'events', 'get')
.mockReturnValue(routerEvents$.asObservable())
chatService = TestBed.inject(ChatService)
mockStream$ = new Subject<string>()
jest
.spyOn(chatService, 'streamChat')
.mockReturnValue(mockStream$.asObservable())
component = fixture.componentInstance

jest.useFakeTimers()

fixture.detectChanges()

component.scrollAnchor.nativeElement.scrollIntoView = jest.fn()
})

it('should update documentId on initialization', () => {
jest.spyOn(router, 'url', 'get').mockReturnValue('/documents/123')
component.ngOnInit()
expect(component.documentId).toBe(123)
})

it('should update documentId on navigation', () => {
component.ngOnInit()
routerEvents$.next(new NavigationEnd(1, '/documents/456', '/documents/456'))
expect(component.documentId).toBe(456)
})

it('should return correct placeholder based on documentId', () => {
component.documentId = 123
expect(component.placeholder).toBe('Ask a question about this document...')
component.documentId = undefined
expect(component.placeholder).toBe('Ask a question about a document...')
})

it('should send a message and handle streaming response', () => {
component.input = 'Hello'
component.sendMessage()

expect(component.messages.length).toBe(2)
expect(component.messages[0].content).toBe('Hello')
expect(component.loading).toBe(true)

mockStream$.next('Hi')
expect(component.messages[1].content).toBe('H')
mockStream$.next('Hi there')
// advance time to process the typewriter effect
jest.advanceTimersByTime(1000)
expect(component.messages[1].content).toBe('Hi there')

mockStream$.complete()
expect(component.loading).toBe(false)
expect(component.messages[1].isStreaming).toBe(false)
})

it('should handle errors during streaming', () => {
component.input = 'Hello'
component.sendMessage()

mockStream$.error('Error')
expect(component.messages[1].content).toContain(
'⚠️ Error receiving response.'
)
expect(component.loading).toBe(false)
})

it('should enqueue typewriter chunks correctly', () => {
const message = { content: '', role: 'assistant', isStreaming: true }
component.enqueueTypewriter(null, message as any) // coverage for null
component.enqueueTypewriter('Hello', message as any)
expect(component['typewriterBuffer'].length).toBe(4)
})

it('should scroll to bottom after sending a message', () => {
const scrollSpy = jest.spyOn(
ChatComponent.prototype as any,
'scrollToBottom'
)
component.input = 'Test'
component.sendMessage()
expect(scrollSpy).toHaveBeenCalled()
})

it('should focus chat input when dropdown is opened', () => {
const focus = jest.fn()
component.chatInput = {
nativeElement: { focus: focus },
} as unknown as ElementRef<HTMLInputElement>

component.onOpenChange(true)
jest.advanceTimersByTime(15)
expect(focus).toHaveBeenCalled()
})

it('should send message on Enter key press', () => {
jest.spyOn(component, 'sendMessage')
const event = new KeyboardEvent('keydown', { key: 'Enter' })
component.searchInputKeyDown(event)
expect(component.sendMessage).toHaveBeenCalled()
})
})
@@ -1,140 +0,0 @@
import { Component, ElementRef, inject, OnInit, ViewChild } from '@angular/core'
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
import { NavigationEnd, Router } from '@angular/router'
import { NgbDropdownModule } from '@ng-bootstrap/ng-bootstrap'
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
import { filter, map } from 'rxjs'
import { ChatMessage, ChatService } from 'src/app/services/chat.service'

@Component({
selector: 'pngx-chat',
imports: [
FormsModule,
ReactiveFormsModule,
NgxBootstrapIconsModule,
NgbDropdownModule,
],
templateUrl: './chat.component.html',
styleUrl: './chat.component.scss',
})
export class ChatComponent implements OnInit {
public messages: ChatMessage[] = []
public loading = false
public input: string = ''
public documentId!: number

private chatService: ChatService = inject(ChatService)
private router: Router = inject(Router)

@ViewChild('scrollAnchor') scrollAnchor!: ElementRef<HTMLDivElement>
@ViewChild('chatInput') chatInput!: ElementRef<HTMLInputElement>

private typewriterBuffer: string[] = []
private typewriterActive = false

public get placeholder(): string {
return this.documentId
? $localize`Ask a question about this document...`
: $localize`Ask a question about a document...`
}

ngOnInit(): void {
this.updateDocumentId(this.router.url)
this.router.events
.pipe(
filter((event) => event instanceof NavigationEnd),
map((event) => (event as NavigationEnd).url)
)
.subscribe((url) => {
this.updateDocumentId(url)
})
}

private updateDocumentId(url: string): void {
const docIdRe = url.match(/^\/documents\/(\d+)/)
this.documentId = docIdRe ? +docIdRe[1] : undefined
}

sendMessage(): void {
if (!this.input.trim()) return

const userMessage: ChatMessage = { role: 'user', content: this.input }
this.messages.push(userMessage)
this.scrollToBottom()

const assistantMessage: ChatMessage = {
role: 'assistant',
content: '',
isStreaming: true,
}
this.messages.push(assistantMessage)
this.loading = true

let lastPartialLength = 0

this.chatService.streamChat(this.documentId, this.input).subscribe({
next: (chunk) => {
const delta = chunk.substring(lastPartialLength)
lastPartialLength = chunk.length
this.enqueueTypewriter(delta, assistantMessage)
},
error: () => {
assistantMessage.content += '\n\n⚠️ Error receiving response.'
assistantMessage.isStreaming = false
this.loading = false
},
complete: () => {
assistantMessage.isStreaming = false
this.loading = false
this.scrollToBottom()
},
})

this.input = ''
}

enqueueTypewriter(chunk: string, message: ChatMessage): void {
if (!chunk) return

this.typewriterBuffer.push(...chunk.split(''))

if (!this.typewriterActive) {
this.typewriterActive = true
this.playTypewriter(message)
}
}

playTypewriter(message: ChatMessage): void {
if (this.typewriterBuffer.length === 0) {
this.typewriterActive = false
return
}

const nextChar = this.typewriterBuffer.shift()
message.content += nextChar
this.scrollToBottom()

setTimeout(() => this.playTypewriter(message), 10) // 10ms per character
}

private scrollToBottom(): void {
setTimeout(() => {
this.scrollAnchor?.nativeElement?.scrollIntoView({ behavior: 'smooth' })
}, 50)
}

public onOpenChange(open: boolean): void {
if (open) {
setTimeout(() => {
this.chatInput.nativeElement.focus()
}, 10)
}
}

public searchInputKeyDown(event: KeyboardEvent) {
if (event.key === 'Enter') {
event.preventDefault()
this.sendMessage()
}
}
}
@@ -1,7 +1,7 @@
<div ngbDropdown #fieldDropdown="ngbDropdown" (openChange)="onOpenClose($event)" [popperOptions]="popperOptions">
<button type="button" class="btn btn-sm btn-outline-primary" id="customFieldsDropdown" [disabled]="disabled" ngbDropdownToggle>
<div ngbDropdown #fieldDropdown="ngbDropdown" (openChange)="onOpenClose($event)" [popperOptions]="popperOptions" placement="bottom-end">
<button class="btn btn-sm btn-outline-primary" id="customFieldsDropdown" [disabled]="disabled" ngbDropdownToggle>
<i-bs name="ui-radios"></i-bs>
<div class="d-none d-lg-inline"> <ng-container i18n>Custom Fields</ng-container></div>
<div class="d-none d-sm-inline"> <ng-container i18n>Custom Fields</ng-container></div>
</button>
<div ngbDropdownMenu aria-labelledby="customFieldsDropdown" class="shadow custom-fields-dropdown">
<div class="list-group list-group-flush" (keydown)="listKeyDown($event)">
@@ -252,7 +252,7 @@ describe('WorkflowEditDialogComponent', () => {
expect(component.object.actions.length).toEqual(2)
})

it('should update order on drag n drop', () => {
it('should update order and remove ids from actions on drag n drop', () => {
const action1 = workflow.actions[0]
const action2 = workflow.actions[1]
component.object = workflow
@@ -261,6 +261,8 @@ describe('WorkflowEditDialogComponent', () => {
WorkflowAction[]
>)
expect(component.object.actions).toEqual([action2, action1])
expect(action1.id).toBeNull()
expect(action2.id).toBeNull()
})

it('should not include auto matching in algorithms', () => {
@@ -1283,6 +1283,11 @@ export class WorkflowEditDialogComponent
const actionField = this.actionFields.at(event.previousIndex)
this.actionFields.removeAt(event.previousIndex)
this.actionFields.insert(event.currentIndex, actionField)
// removing id will effectively re-create the actions in this order
this.object.actions.forEach((a) => (a.id = null))
this.actionFields.controls.forEach((c) =>
c.get('id').setValue(null, { emitEvent: false })
)
}

save(): void {
@@ -1,24 +1,17 @@
<div class="mb-3" [class.pb-3]="error">
<div class="row">
<div class="d-flex align-items-center position-relative hidden-button-container" [class.col-md-3]="horizontal">
@if (title) {
<label class="form-label" [class.mb-md-0]="horizontal" [for]="inputId">{{title}}</label>
}
</div>
<div class="position-relative" [class.col-md-9]="horizontal">
<div class="input-group" [class.is-invalid]="error">
<input #inputField [type]="showReveal && textVisible ? 'text' : 'password'" class="form-control" [class.is-invalid]="error" [id]="inputId" [(ngModel)]="value" (focus)="onFocus()" (focusout)="onFocusOut()" (change)="onChange(value)" [disabled]="disabled" [autocomplete]="autocomplete">
@if (showReveal) {
<button type="button" class="btn btn-outline-secondary" (click)="toggleVisibility()" i18n-title title="Show password" [disabled]="disabled || disableRevealToggle">
<i-bs name="eye"></i-bs>
</button>
}
</div>
<div class="invalid-feedback">
{{error}}
</div>
@if (hint) {
<small class="form-text text-muted" [innerHTML]="hint"></small>
<div class="mb-3">
<label class="form-label" [for]="inputId">{{title}}</label>
<div class="input-group" [class.is-invalid]="error">
<input #inputField [type]="showReveal && textVisible ? 'text' : 'password'" class="form-control" [class.is-invalid]="error" [id]="inputId" [(ngModel)]="value" (focus)="onFocus()" (focusout)="onFocusOut()" (change)="onChange(value)" [disabled]="disabled" [autocomplete]="autocomplete">
@if (showReveal) {
<button type="button" class="btn btn-outline-secondary" (click)="toggleVisibility()" i18n-title title="Show password" [disabled]="disabled || disableRevealToggle">
<i-bs name="eye"></i-bs>
</button>
}
</div>
<div class="invalid-feedback">
{{error}}
</div>
@if (hint) {
<small class="form-text text-muted" [innerHTML]="hint"></small>
}
</div>
@@ -28,7 +28,7 @@
</button>
</ng-template>
<ng-template ng-option-tmp let-item="item" let-index="index" let-search="searchTerm">
<div class="tag-option-row d-flex align-items-center" [class.w-auto]="!getTag(item.id)?.parent">
<div class="tag-option-row d-flex align-items-center">
@if (item.id && tags) {
@if (getTag(item.id)?.parent) {
<i-bs name="list-nested" class="me-1"></i-bs>
@@ -23,7 +23,7 @@

// Dropdown hierarchy reveal for ng-select options
::ng-deep .ng-dropdown-panel .ng-option {
overflow-x: scroll !important;
overflow-x: scroll;

.tag-option-row {
font-size: 1rem;
@@ -15,12 +15,6 @@
@if (hint) {
<small class="form-text text-muted" [innerHTML]="hint"></small>
}
@if (getSuggestion()?.length > 0) {
<small>
<span i18n>Suggestion:</span>
<a (click)="applySuggestion(s)" [routerLink]="[]">{{getSuggestion()}}</a>
</small>
}
<div class="invalid-feedback position-absolute top-100">
{{error}}
</div>
@@ -26,20 +26,10 @@ describe('TextComponent', () => {

it('should support use of input field', () => {
expect(component.value).toBeUndefined()
input.value = 'foo'
input.dispatchEvent(new Event('input'))
fixture.detectChanges()
expect(component.value).toBe('foo')
})

it('should support suggestion', () => {
component.value = 'foo'
component.suggestion = 'foo'
expect(component.getSuggestion()).toBe('')
component.value = 'bar'
expect(component.getSuggestion()).toBe('foo')
component.applySuggestion()
fixture.detectChanges()
expect(component.value).toBe('foo')
// TODO: why doesn't this work?
// input.value = 'foo'
// input.dispatchEvent(new Event('change'))
// fixture.detectChanges()
// expect(component.value).toEqual('foo')
})
})
@@ -4,7 +4,6 @@ import {
NG_VALUE_ACCESSOR,
ReactiveFormsModule,
} from '@angular/forms'
import { RouterLink } from '@angular/router'
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
import { AbstractInputComponent } from '../abstract-input'

@@ -19,12 +18,7 @@ import { AbstractInputComponent } from '../abstract-input'
selector: 'pngx-input-text',
templateUrl: './text.component.html',
styleUrls: ['./text.component.scss'],
imports: [
FormsModule,
ReactiveFormsModule,
NgxBootstrapIconsModule,
RouterLink,
],
imports: [FormsModule, ReactiveFormsModule, NgxBootstrapIconsModule],
})
export class TextComponent extends AbstractInputComponent<string> {
@Input()
@@ -33,19 +27,7 @@ export class TextComponent extends AbstractInputComponent<string> {
@Input()
placeholder: string = ''

@Input()
suggestion: string = ''

constructor() {
super()
}

getSuggestion() {
return this.value !== this.suggestion ? this.suggestion : ''
}

applySuggestion() {
this.value = this.suggestion
this.onChange(this.value)
}
}
@@ -1,49 +0,0 @@
<div class="btn-group">
<button type="button" class="btn btn-sm btn-outline-primary" (click)="clickSuggest()" [disabled]="loading || (suggestions && !aiEnabled)">
@if (loading) {
<div class="spinner-border spinner-border-sm" role="status"></div>
} @else {
<i-bs width="1.2em" height="1.2em" name="stars"></i-bs>
}
<span class="d-none d-lg-inline ps-1" i18n>Suggest</span>
@if (totalSuggestions > 0) {
<span class="badge bg-primary ms-2">{{ totalSuggestions }}</span>
}
</button>

@if (aiEnabled) {
<div class="btn-group" ngbDropdown #dropdown="ngbDropdown" [popperOptions]="popperOptions">
<button type="button" class="btn btn-sm btn-outline-primary" ngbDropdownToggle [disabled]="loading || !suggestions" aria-expanded="false" aria-controls="suggestionsDropdown" aria-label="Suggestions dropdown">
<span class="visually-hidden" i18n>Show suggestions</span>
</button>

<div ngbDropdownMenu aria-labelledby="suggestionsDropdown" class="shadow suggestions-dropdown">
<div class="list-group list-group-flush small pb-0">
@if (!suggestions?.suggested_tags && !suggestions?.suggested_document_types && !suggestions?.suggested_correspondents) {
<div class="list-group-item text-muted fst-italic">
<small class="text-muted small fst-italic" i18n>No novel suggestions</small>
</div>
}
@if (suggestions?.suggested_tags.length > 0) {
<small class="list-group-item text-uppercase text-muted small">Tags</small>
@for (tag of suggestions.suggested_tags; track tag) {
<button type="button" class="list-group-item list-group-item-action bg-light" (click)="addTag.emit(tag)" i18n>{{ tag }}</button>
}
}
@if (suggestions?.suggested_document_types.length > 0) {
<div class="list-group-item text-uppercase text-muted small">Document Types</div>
@for (type of suggestions.suggested_document_types; track type) {
<button type="button" class="list-group-item list-group-item-action bg-light" (click)="addDocumentType.emit(type)" i18n>{{ type }}</button>
}
}
@if (suggestions?.suggested_correspondents.length > 0) {
<div class="list-group-item text-uppercase text-muted small">Correspondents</div>
@for (correspondent of suggestions.suggested_correspondents; track correspondent) {
<button type="button" class="list-group-item list-group-item-action bg-light" (click)="addCorrespondent.emit(correspondent)" i18n>{{ correspondent }}</button>
}
}
</div>
</div>
</div>
}
</div>
@@ -1,3 +0,0 @@
.suggestions-dropdown {
min-width: 250px;
}
@@ -1,51 +0,0 @@
import { ComponentFixture, TestBed } from '@angular/core/testing'
import { NgbDropdownModule } from '@ng-bootstrap/ng-bootstrap'
import { NgxBootstrapIconsModule, allIcons } from 'ngx-bootstrap-icons'
import { SuggestionsDropdownComponent } from './suggestions-dropdown.component'

describe('SuggestionsDropdownComponent', () => {
let component: SuggestionsDropdownComponent
let fixture: ComponentFixture<SuggestionsDropdownComponent>

beforeEach(() => {
TestBed.configureTestingModule({
imports: [
NgbDropdownModule,
NgxBootstrapIconsModule.pick(allIcons),
SuggestionsDropdownComponent,
],
providers: [],
})
fixture = TestBed.createComponent(SuggestionsDropdownComponent)
component = fixture.componentInstance
fixture.detectChanges()
})

it('should calculate totalSuggestions', () => {
component.suggestions = {
suggested_correspondents: ['John Doe'],
suggested_tags: ['Tag1', 'Tag2'],
suggested_document_types: ['Type1'],
}
expect(component.totalSuggestions).toBe(4)
})

it('should emit getSuggestions when clickSuggest is called and suggestions are null', () => {
jest.spyOn(component.getSuggestions, 'emit')
component.suggestions = null
component.clickSuggest()
expect(component.getSuggestions.emit).toHaveBeenCalled()
})

it('should toggle dropdown when clickSuggest is called and suggestions are not null', () => {
component.aiEnabled = true
fixture.detectChanges()
component.suggestions = {
suggested_correspondents: [],
suggested_tags: [],
suggested_document_types: [],
}
component.clickSuggest()
expect(component.dropdown.open).toBeTruthy()
})
})
@@ -1,64 +0,0 @@
import {
  Component,
  EventEmitter,
  Input,
  Output,
  ViewChild,
} from '@angular/core'
import { NgbDropdown, NgbDropdownModule } from '@ng-bootstrap/ng-bootstrap'
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
import { DocumentSuggestions } from 'src/app/data/document-suggestions'
import { pngxPopperOptions } from 'src/app/utils/popper-options'

@Component({
  selector: 'pngx-suggestions-dropdown',
  imports: [NgbDropdownModule, NgxBootstrapIconsModule],
  templateUrl: './suggestions-dropdown.component.html',
  styleUrl: './suggestions-dropdown.component.scss',
})
export class SuggestionsDropdownComponent {
  public popperOptions = pngxPopperOptions

  @ViewChild('dropdown') dropdown: NgbDropdown

  @Input()
  suggestions: DocumentSuggestions = null

  @Input()
  aiEnabled: boolean = false

  @Input()
  loading: boolean = false

  @Input()
  disabled: boolean = false

  @Output()
  getSuggestions: EventEmitter<SuggestionsDropdownComponent> =
    new EventEmitter()

  @Output()
  addTag: EventEmitter<string> = new EventEmitter()

  @Output()
  addDocumentType: EventEmitter<string> = new EventEmitter()

  @Output()
  addCorrespondent: EventEmitter<string> = new EventEmitter()

  public clickSuggest(): void {
    if (!this.suggestions) {
      this.getSuggestions.emit(this)
    } else {
      this.dropdown?.toggle()
    }
  }

  get totalSuggestions(): number {
    return (
      this.suggestions?.suggested_correspondents?.length +
        this.suggestions?.suggested_tags?.length +
        this.suggestions?.suggested_document_types?.length || 0
    )
  }
}
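The `totalSuggestions` getter above works because adding `undefined` to a number yields `NaN`, which the trailing `|| 0` collapses to zero whenever one of the three lists is missing. A more explicit equivalent, shown here only as an illustrative sketch (it is not part of this change), could be:

```typescript
import { DocumentSuggestions } from 'src/app/data/document-suggestions'

// Sketch of an equivalent, more defensive computation of the badge count.
function countSuggestions(s: DocumentSuggestions | null): number {
  if (!s) return 0
  return (
    (s.suggested_correspondents?.length ?? 0) +
    (s.suggested_tags?.length ?? 0) +
    (s.suggested_document_types?.length ?? 0)
  )
}
```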
@@ -266,43 +266,6 @@
|
||||
}
|
||||
</span>
|
||||
</dd>
|
||||
@if (aiEnabled) {
|
||||
<dt i18n>AI Index</dt>
|
||||
<dd class="d-flex align-items-center">
|
||||
<button class="btn btn-sm d-flex align-items-center btn-dark text-uppercase small" [ngbPopover]="llmIndexStatus" triggers="click mouseenter:mouseleave">
|
||||
{{status.tasks.llmindex_status}}
|
||||
@if (status.tasks.llmindex_status === 'OK') {
|
||||
@if (isStale(status.tasks.llmindex_last_modified)) {
|
||||
<i-bs name="exclamation-triangle-fill" class="text-warning ms-2 lh-1"></i-bs>
|
||||
} @else {
|
||||
<i-bs name="check-circle-fill" class="text-primary ms-2 lh-1"></i-bs>
|
||||
}
|
||||
} @else {
|
||||
<i-bs name="exclamation-triangle-fill" class="ms-2 lh-1"
|
||||
[class.text-danger]="status.tasks.llmindex_status === SystemStatusItemStatus.ERROR"
|
||||
[class.text-warning]="status.tasks.llmindex_status === SystemStatusItemStatus.WARNING"
|
||||
[class.text-muted]="status.tasks.llmindex_status === SystemStatusItemStatus.DISABLED"></i-bs>
|
||||
}
|
||||
</button>
|
||||
@if (currentUserIsSuperUser) {
|
||||
@if (isRunning(PaperlessTaskName.LLMIndexUpdate)) {
|
||||
<div class="spinner-border spinner-border-sm ms-2" role="status"></div>
|
||||
} @else {
|
||||
<button class="btn btn-sm d-flex align-items-center btn-dark small ms-2" (click)="runTask(PaperlessTaskName.LLMIndexUpdate)">
|
||||
<i-bs name="play-fill"></i-bs>
|
||||
<ng-container i18n>Run Task</ng-container>
|
||||
</button>
|
||||
}
|
||||
}
|
||||
</dd>
|
||||
<ng-template #llmIndexStatus>
|
||||
@if (status.tasks.llmindex_status === 'OK') {
|
||||
<h6><ng-container i18n>Last Run</ng-container>:</h6> <span class="font-monospace small">{{status.tasks.llmindex_last_modified | customDate:'medium'}}</span>
|
||||
} @else {
|
||||
<h6><ng-container i18n>Error</ng-container>:</h6> <span class="font-monospace small">{{status.tasks.llmindex_error}}</span>
|
||||
}
|
||||
</ng-template>
|
||||
}
|
||||
</dl>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -68,9 +68,6 @@ const status: SystemStatus = {
|
||||
sanity_check_status: SystemStatusItemStatus.OK,
|
||||
sanity_check_last_run: new Date().toISOString(),
|
||||
sanity_check_error: null,
|
||||
llmindex_status: SystemStatusItemStatus.OK,
|
||||
llmindex_last_modified: new Date().toISOString(),
|
||||
llmindex_error: null,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@@ -13,11 +13,9 @@ import {
|
||||
SystemStatus,
|
||||
SystemStatusItemStatus,
|
||||
} from 'src/app/data/system-status'
|
||||
import { SETTINGS_KEYS } from 'src/app/data/ui-settings'
|
||||
import { CustomDatePipe } from 'src/app/pipes/custom-date.pipe'
|
||||
import { FileSizePipe } from 'src/app/pipes/file-size.pipe'
|
||||
import { PermissionsService } from 'src/app/services/permissions.service'
|
||||
import { SettingsService } from 'src/app/services/settings.service'
|
||||
import { SystemStatusService } from 'src/app/services/system-status.service'
|
||||
import { TasksService } from 'src/app/services/tasks.service'
|
||||
import { ToastService } from 'src/app/services/toast.service'
|
||||
@@ -46,7 +44,6 @@ export class SystemStatusDialogComponent implements OnInit, OnDestroy {
|
||||
private toastService = inject(ToastService)
|
||||
private permissionsService = inject(PermissionsService)
|
||||
private websocketStatusService = inject(WebsocketStatusService)
|
||||
private settingsService = inject(SettingsService)
|
||||
|
||||
public SystemStatusItemStatus = SystemStatusItemStatus
|
||||
public PaperlessTaskName = PaperlessTaskName
|
||||
@@ -63,10 +60,6 @@ export class SystemStatusDialogComponent implements OnInit, OnDestroy {
|
||||
return this.permissionsService.isSuperUser()
|
||||
}
|
||||
|
||||
get aiEnabled(): boolean {
|
||||
return this.settingsService.get(SETTINGS_KEYS.AI_ENABLED)
|
||||
}
|
||||
|
||||
public ngOnInit() {
|
||||
this.versionMismatch =
|
||||
environment.production &&
|
||||
|
||||
@@ -74,6 +74,16 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<pngx-custom-fields-dropdown
|
||||
*pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.CustomField }"
|
||||
[documentId]="documentId"
|
||||
[disabled]="!userCanEdit"
|
||||
[existingFields]="document?.custom_fields"
|
||||
(created)="refreshCustomFields()"
|
||||
(added)="addField($event)">
|
||||
</pngx-custom-fields-dropdown>
|
||||
|
||||
|
||||
<div class="ms-auto" ngbDropdown>
|
||||
<button class="btn btn-sm btn-outline-primary" id="sendDropdown" ngbDropdownToggle>
|
||||
<i-bs name="send"></i-bs>
|
||||
@@ -94,7 +104,7 @@
|
||||
</pngx-page-header>
|
||||
|
||||
<div class="row">
|
||||
<div class="col-md-6 col-xl-5 mb-4">
|
||||
<div class="col-md-6 col-xl-4 mb-4">
|
||||
|
||||
<form [formGroup]='documentForm' (ngSubmit)="save()">
|
||||
|
||||
@@ -111,32 +121,6 @@
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<ng-container *pngxIfPermissions="{ action: PermissionAction.Change, type: PermissionType.Document }">
|
||||
<div class="btn-group pb-3 ms-auto">
|
||||
<pngx-suggestions-dropdown *pngxIfPermissions="{ action: PermissionAction.Change, type: PermissionType.Document }"
|
||||
[disabled]="!userCanEdit || suggestionsLoading"
|
||||
[loading]="suggestionsLoading"
|
||||
[suggestions]="suggestions"
|
||||
[aiEnabled]="aiEnabled"
|
||||
(getSuggestions)="getSuggestions()"
|
||||
(addTag)="createTag($event)"
|
||||
(addDocumentType)="createDocumentType($event)"
|
||||
(addCorrespondent)="createCorrespondent($event)">
|
||||
</pngx-suggestions-dropdown>
|
||||
</div>
|
||||
|
||||
<div class="btn-group pb-3 ms-2">
|
||||
<pngx-custom-fields-dropdown
|
||||
*pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.CustomField }"
|
||||
[documentId]="documentId"
|
||||
[disabled]="!userCanEdit"
|
||||
[existingFields]="document?.custom_fields"
|
||||
(created)="refreshCustomFields()"
|
||||
(added)="addField($event)">
|
||||
</pngx-custom-fields-dropdown>
|
||||
</div>
|
||||
</ng-container>
|
||||
|
||||
<ng-container *ngTemplateOutlet="saveButtons"></ng-container>
|
||||
</div>
|
||||
|
||||
@@ -145,7 +129,7 @@
|
||||
<a ngbNavLink i18n>Details</a>
|
||||
<ng-template ngbNavContent>
|
||||
<div>
|
||||
<pngx-input-text #inputTitle i18n-title title="Title" formControlName="title" [horizontal]="true" [suggestion]="suggestions?.title" (keyup)="titleKeyUp($event)" [error]="error?.title"></pngx-input-text>
|
||||
<pngx-input-text #inputTitle i18n-title title="Title" formControlName="title" [horizontal]="true" (keyup)="titleKeyUp($event)" [error]="error?.title"></pngx-input-text>
|
||||
<pngx-input-number i18n-title title="Archive serial number" [error]="error?.archive_serial_number" [horizontal]="true" formControlName='archive_serial_number'></pngx-input-number>
|
||||
<pngx-input-date i18n-title title="Date created" formControlName="created" [suggestions]="suggestions?.dates" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event)"
|
||||
[error]="error?.created"></pngx-input-date>
|
||||
@@ -155,7 +139,7 @@
|
||||
(createNew)="createDocumentType($event)" [hideAddButton]="createDisabled(DataType.DocumentType)" [suggestions]="suggestions?.document_types" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.DocumentType }"></pngx-input-select>
|
||||
<pngx-input-select [items]="storagePaths" i18n-title title="Storage path" formControlName="storage_path" [allowNull]="true" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.StoragePath)"
|
||||
(createNew)="createStoragePath($event)" [hideAddButton]="createDisabled(DataType.StoragePath)" [suggestions]="suggestions?.storage_paths" i18n-placeholder placeholder="Default" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.StoragePath }"></pngx-input-select>
|
||||
<pngx-input-tags #tagsInput formControlName="tags" [suggestions]="suggestions?.tags" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.Tag)" [hideAddButton]="createDisabled(DataType.Tag)" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Tag }"></pngx-input-tags>
|
||||
<pngx-input-tags formControlName="tags" [suggestions]="suggestions?.tags" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.Tag)" [hideAddButton]="createDisabled(DataType.Tag)" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Tag }"></pngx-input-tags>
|
||||
@for (fieldInstance of document?.custom_fields; track fieldInstance.field; let i = $index) {
|
||||
<div [formGroup]="customFieldFormFields.controls[i]">
|
||||
@switch (getCustomFieldFromInstance(fieldInstance)?.data_type) {
|
||||
@@ -377,14 +361,14 @@
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div class="col-md-6 col-xl-7 mb-3 d-none d-md-block position-relative" #pdfPreview>
|
||||
<div class="col-md-6 col-xl-8 mb-3 d-none d-md-block position-relative" #pdfPreview>
|
||||
<ng-container *ngTemplateOutlet="previewContent"></ng-container>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
<ng-template #saveButtons>
|
||||
<div class="btn-group pb-3 ms-4">
|
||||
<div class="btn-group pb-3 ms-auto">
|
||||
<ng-container *pngxIfPermissions="{ action: PermissionAction.Change, type: PermissionType.Document }">
|
||||
<button type="submit" class="order-3 btn btn-sm btn-primary" i18n [disabled]="!userCanEdit || networkActive || (isDirty$ | async) !== true">Save</button>
|
||||
@if (hasNext()) {
|
||||
|
||||
@@ -157,16 +157,6 @@ describe('DocumentDetailComponent', () => {
|
||||
{
|
||||
provide: TagService,
|
||||
useValue: {
|
||||
getCachedMany: (ids: number[]) =>
|
||||
of(
|
||||
ids.map((id) => ({
|
||||
id,
|
||||
name: `Tag${id}`,
|
||||
is_inbox_tag: true,
|
||||
color: '#ff0000',
|
||||
text_color: '#000000',
|
||||
}))
|
||||
),
|
||||
listAll: () =>
|
||||
of({
|
||||
count: 3,
|
||||
@@ -393,32 +383,8 @@ describe('DocumentDetailComponent', () => {
|
||||
currentUserCan = true
|
||||
})
|
||||
|
||||
it('should support creating tag, remove from suggestions', () => {
|
||||
it('should support creating document type', () => {
|
||||
initNormally()
|
||||
component.suggestions = {
|
||||
suggested_tags: ['Tag1', 'NewTag12'],
|
||||
}
|
||||
let openModal: NgbModalRef
|
||||
modalService.activeInstances.subscribe((modal) => (openModal = modal[0]))
|
||||
const modalSpy = jest.spyOn(modalService, 'open')
|
||||
component.createTag('NewTag12')
|
||||
expect(modalSpy).toHaveBeenCalled()
|
||||
openModal.componentInstance.succeeded.next({
|
||||
id: 12,
|
||||
name: 'NewTag12',
|
||||
is_inbox_tag: true,
|
||||
color: '#ff0000',
|
||||
text_color: '#000000',
|
||||
})
|
||||
expect(component.tagsInput.value).toContain(12)
|
||||
expect(component.suggestions.suggested_tags).not.toContain('NewTag12')
|
||||
})
|
||||
|
||||
it('should support creating document type, remove from suggestions', () => {
|
||||
initNormally()
|
||||
component.suggestions = {
|
||||
suggested_document_types: ['DocumentType1', 'NewDocType2'],
|
||||
}
|
||||
let openModal: NgbModalRef
|
||||
modalService.activeInstances.subscribe((modal) => (openModal = modal[0]))
|
||||
const modalSpy = jest.spyOn(modalService, 'open')
|
||||
@@ -426,16 +392,10 @@ describe('DocumentDetailComponent', () => {
|
||||
expect(modalSpy).toHaveBeenCalled()
|
||||
openModal.componentInstance.succeeded.next({ id: 12, name: 'NewDocType12' })
|
||||
expect(component.documentForm.get('document_type').value).toEqual(12)
|
||||
expect(component.suggestions.suggested_document_types).not.toContain(
|
||||
'NewDocType2'
|
||||
)
|
||||
})
|
||||
|
||||
it('should support creating correspondent, remove from suggestions', () => {
|
||||
it('should support creating correspondent', () => {
|
||||
initNormally()
|
||||
component.suggestions = {
|
||||
suggested_correspondents: ['Correspondent1', 'NewCorrrespondent12'],
|
||||
}
|
||||
let openModal: NgbModalRef
|
||||
modalService.activeInstances.subscribe((modal) => (openModal = modal[0]))
|
||||
const modalSpy = jest.spyOn(modalService, 'open')
|
||||
@@ -446,9 +406,6 @@ describe('DocumentDetailComponent', () => {
|
||||
name: 'NewCorrrespondent12',
|
||||
})
|
||||
expect(component.documentForm.get('correspondent').value).toEqual(12)
|
||||
expect(component.suggestions.suggested_correspondents).not.toContain(
|
||||
'NewCorrrespondent12'
|
||||
)
|
||||
})
|
||||
|
||||
it('should support creating storage path', () => {
|
||||
@@ -1039,7 +996,7 @@ describe('DocumentDetailComponent', () => {
|
||||
expect(component.document.custom_fields).toHaveLength(initialLength - 1)
|
||||
expect(component.customFieldFormFields).toHaveLength(initialLength - 1)
|
||||
expect(
|
||||
fixture.debugElement.query(By.css('form ul')).nativeElement.textContent
|
||||
fixture.debugElement.query(By.css('form')).nativeElement.textContent
|
||||
).not.toContain('Field 1')
|
||||
const patchSpy = jest.spyOn(documentService, 'patch')
|
||||
component.save(true)
|
||||
@@ -1130,22 +1087,10 @@ describe('DocumentDetailComponent', () => {
|
||||
|
||||
it('should get suggestions', () => {
|
||||
const suggestionsSpy = jest.spyOn(documentService, 'getSuggestions')
|
||||
suggestionsSpy.mockReturnValue(
|
||||
of({
|
||||
tags: [42, 43],
|
||||
suggested_tags: [],
|
||||
suggested_document_types: [],
|
||||
suggested_correspondents: [],
|
||||
})
|
||||
)
|
||||
suggestionsSpy.mockReturnValue(of({ tags: [42, 43] }))
|
||||
initNormally()
|
||||
expect(suggestionsSpy).toHaveBeenCalled()
|
||||
expect(component.suggestions).toEqual({
|
||||
tags: [42, 43],
|
||||
suggested_tags: [],
|
||||
suggested_document_types: [],
|
||||
suggested_correspondents: [],
|
||||
})
|
||||
expect(component.suggestions).toEqual({ tags: [42, 43] })
|
||||
})
|
||||
|
||||
it('should show error if needed for get suggestions', () => {
|
||||
|
||||
@@ -31,7 +31,6 @@ import {
|
||||
map,
|
||||
switchMap,
|
||||
takeUntil,
|
||||
tap,
|
||||
} from 'rxjs/operators'
|
||||
import { Correspondent } from 'src/app/data/correspondent'
|
||||
import { CustomField, CustomFieldDataType } from 'src/app/data/custom-field'
|
||||
@@ -77,7 +76,6 @@ import { DocumentTypeService } from 'src/app/services/rest/document-type.service
|
||||
import { DocumentService } from 'src/app/services/rest/document.service'
|
||||
import { SavedViewService } from 'src/app/services/rest/saved-view.service'
|
||||
import { StoragePathService } from 'src/app/services/rest/storage-path.service'
|
||||
import { TagService } from 'src/app/services/rest/tag.service'
|
||||
import { UserService } from 'src/app/services/rest/user.service'
|
||||
import { SettingsService } from 'src/app/services/settings.service'
|
||||
import { ToastService } from 'src/app/services/toast.service'
|
||||
@@ -91,7 +89,6 @@ import { CorrespondentEditDialogComponent } from '../common/edit-dialog/correspo
|
||||
import { DocumentTypeEditDialogComponent } from '../common/edit-dialog/document-type-edit-dialog/document-type-edit-dialog.component'
|
||||
import { EditDialogMode } from '../common/edit-dialog/edit-dialog.component'
|
||||
import { StoragePathEditDialogComponent } from '../common/edit-dialog/storage-path-edit-dialog/storage-path-edit-dialog.component'
|
||||
import { TagEditDialogComponent } from '../common/edit-dialog/tag-edit-dialog/tag-edit-dialog.component'
|
||||
import { EmailDocumentDialogComponent } from '../common/email-document-dialog/email-document-dialog.component'
|
||||
import { CheckComponent } from '../common/input/check/check.component'
|
||||
import { DateComponent } from '../common/input/date/date.component'
|
||||
@@ -110,7 +107,6 @@ import {
|
||||
PdfEditorEditMode,
|
||||
} from '../common/pdf-editor/pdf-editor.component'
|
||||
import { ShareLinksDialogComponent } from '../common/share-links-dialog/share-links-dialog.component'
|
||||
import { SuggestionsDropdownComponent } from '../common/suggestions-dropdown/suggestions-dropdown.component'
|
||||
import { DocumentHistoryComponent } from '../document-history/document-history.component'
|
||||
import { DocumentNotesComponent } from '../document-notes/document-notes.component'
|
||||
import { ComponentWithPermissions } from '../with-permissions/with-permissions.component'
|
||||
@@ -167,7 +163,6 @@ export enum ZoomSetting {
|
||||
NumberComponent,
|
||||
MonetaryComponent,
|
||||
UrlComponent,
|
||||
SuggestionsDropdownComponent,
|
||||
CustomDatePipe,
|
||||
FileSizePipe,
|
||||
IfPermissionsDirective,
|
||||
@@ -189,7 +184,6 @@ export class DocumentDetailComponent
|
||||
{
|
||||
private documentsService = inject(DocumentService)
|
||||
private route = inject(ActivatedRoute)
|
||||
private tagService = inject(TagService)
|
||||
private correspondentService = inject(CorrespondentService)
|
||||
private documentTypeService = inject(DocumentTypeService)
|
||||
private router = inject(Router)
|
||||
@@ -212,8 +206,6 @@ export class DocumentDetailComponent
|
||||
@ViewChild('inputTitle')
|
||||
titleInput: TextComponent
|
||||
|
||||
@ViewChild('tagsInput') tagsInput: TagsComponent
|
||||
|
||||
expandOriginalMetadata = false
|
||||
expandArchivedMetadata = false
|
||||
|
||||
@@ -225,7 +217,6 @@ export class DocumentDetailComponent
|
||||
document: Document
|
||||
metadata: DocumentMetadata
|
||||
suggestions: DocumentSuggestions
|
||||
suggestionsLoading: boolean = false
|
||||
users: User[]
|
||||
|
||||
title: string
|
||||
@@ -285,10 +276,10 @@ export class DocumentDetailComponent
|
||||
if (
|
||||
element &&
|
||||
element.nativeElement.offsetParent !== null &&
|
||||
this.nav?.activeId == DocumentDetailNavIDs.Preview
|
||||
this.nav?.activeId == 4
|
||||
) {
|
||||
// its visible
|
||||
setTimeout(() => this.nav?.select(DocumentDetailNavIDs.Details))
|
||||
setTimeout(() => this.nav?.select(1))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -307,10 +298,6 @@ export class DocumentDetailComponent
|
||||
return this.deviceDetectorService.isMobile()
|
||||
}
|
||||
|
||||
get aiEnabled(): boolean {
|
||||
return this.settings.get(SETTINGS_KEYS.AI_ENABLED)
|
||||
}
|
||||
|
||||
get archiveContentRenderType(): ContentRenderType {
|
||||
return this.document?.archived_file_name
|
||||
? this.getRenderType('application/pdf')
|
||||
@@ -695,12 +682,25 @@ export class DocumentDetailComponent
|
||||
PermissionType.Document
|
||||
)
|
||||
) {
|
||||
this.tagService.getCachedMany(doc.tags).subscribe((tags) => {
|
||||
// only show suggestions if document has inbox tags
|
||||
if (tags.some((tag) => tag.is_inbox_tag)) {
|
||||
this.getSuggestions()
|
||||
}
|
||||
})
|
||||
this.documentsService
|
||||
.getSuggestions(doc.id)
|
||||
.pipe(
|
||||
first(),
|
||||
takeUntil(this.unsubscribeNotifier),
|
||||
takeUntil(this.docChangeNotifier)
|
||||
)
|
||||
.subscribe({
|
||||
next: (result) => {
|
||||
this.suggestions = result
|
||||
},
|
||||
error: (error) => {
|
||||
this.suggestions = null
|
||||
this.toastService.showError(
|
||||
$localize`Error retrieving suggestions.`,
|
||||
error
|
||||
)
|
||||
},
|
||||
})
|
||||
}
|
||||
this.title = this.documentTitlePipe.transform(doc.title)
|
||||
this.prepareForm(doc)
|
||||
@@ -710,63 +710,6 @@ export class DocumentDetailComponent
|
||||
return this.documentForm.get('custom_fields') as FormArray
|
||||
}
|
||||
|
||||
getSuggestions() {
|
||||
this.suggestionsLoading = true
|
||||
this.documentsService
|
||||
.getSuggestions(this.documentId)
|
||||
.pipe(
|
||||
first(),
|
||||
takeUntil(this.unsubscribeNotifier),
|
||||
takeUntil(this.docChangeNotifier)
|
||||
)
|
||||
.subscribe({
|
||||
next: (result) => {
|
||||
this.suggestions = result
|
||||
this.suggestionsLoading = false
|
||||
},
|
||||
error: (error) => {
|
||||
this.suggestions = null
|
||||
this.suggestionsLoading = false
|
||||
this.toastService.showError(
|
||||
$localize`Error retrieving suggestions.`,
|
||||
error
|
||||
)
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
createTag(newName: string) {
|
||||
var modal = this.modalService.open(TagEditDialogComponent, {
|
||||
backdrop: 'static',
|
||||
})
|
||||
modal.componentInstance.dialogMode = EditDialogMode.CREATE
|
||||
if (newName) modal.componentInstance.object = { name: newName }
|
||||
modal.componentInstance.succeeded
|
||||
.pipe(
|
||||
tap((newTag: Tag) => {
|
||||
// remove from suggestions if present
|
||||
if (this.suggestions) {
|
||||
this.suggestions = {
|
||||
...this.suggestions,
|
||||
suggested_tags: this.suggestions.suggested_tags.filter(
|
||||
(tag) => tag !== newTag.name
|
||||
),
|
||||
}
|
||||
}
|
||||
}),
|
||||
switchMap((newTag: Tag) => {
|
||||
return this.tagService
|
||||
.listAll()
|
||||
.pipe(map((tags) => ({ newTag, tags })))
|
||||
}),
|
||||
takeUntil(this.unsubscribeNotifier)
|
||||
)
|
||||
.subscribe(({ newTag, tags }) => {
|
||||
this.tagsInput.tags = tags.results
|
||||
this.tagsInput.addTag(newTag.id)
|
||||
})
|
||||
}
|
||||
|
||||
createDocumentType(newName: string) {
|
||||
var modal = this.modalService.open(DocumentTypeEditDialogComponent, {
|
||||
backdrop: 'static',
|
||||
@@ -786,12 +729,6 @@ export class DocumentDetailComponent
|
||||
this.documentTypes = documentTypes.results
|
||||
this.documentForm.get('document_type').setValue(newDocumentType.id)
|
||||
this.documentForm.get('document_type').markAsDirty()
|
||||
if (this.suggestions) {
|
||||
this.suggestions.suggested_document_types =
|
||||
this.suggestions.suggested_document_types.filter(
|
||||
(dt) => dt !== newName
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -816,12 +753,6 @@ export class DocumentDetailComponent
|
||||
this.correspondents = correspondents.results
|
||||
this.documentForm.get('correspondent').setValue(newCorrespondent.id)
|
||||
this.documentForm.get('correspondent').markAsDirty()
|
||||
if (this.suggestions) {
|
||||
this.suggestions.suggested_correspondents =
|
||||
this.suggestions.suggested_correspondents.filter(
|
||||
(c) => c !== newName
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -1,17 +1,11 @@
export interface DocumentSuggestions {
  title?: string

  tags?: number[]
  suggested_tags?: string[]

  correspondents?: number[]
  suggested_correspondents?: string[]

  document_types?: number[]
  suggested_document_types?: string[]

  storage_paths?: number[]
  suggested_storage_paths?: string[]

  dates?: string[] // ISO-formatted date string e.g. 2022-11-03
}
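For reference, a payload matching this interface might look like the following; the values are made up for illustration and are not taken from the diff:

```typescript
import { DocumentSuggestions } from 'src/app/data/document-suggestions'

// Hypothetical example only.
const example: DocumentSuggestions = {
  title: 'Electricity invoice March',
  tags: [3, 7], // IDs resolved by the classifier
  suggested_tags: ['Invoice'], // free-text suggestions, e.g. from an LLM backend
  correspondents: [12],
  suggested_correspondents: ['ACME Corp'],
  dates: ['2022-11-03'],
}
```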
@@ -44,24 +44,12 @@ export enum ConfigOptionType {
|
||||
Boolean = 'boolean',
|
||||
JSON = 'json',
|
||||
File = 'file',
|
||||
Password = 'password',
|
||||
}
|
||||
|
||||
export const ConfigCategory = {
|
||||
General: $localize`General Settings`,
|
||||
OCR: $localize`OCR Settings`,
|
||||
Barcode: $localize`Barcode Settings`,
|
||||
AI: $localize`AI Settings`,
|
||||
}
|
||||
|
||||
export const LLMEmbeddingBackendConfig = {
|
||||
OPENAI: 'openai',
|
||||
HUGGINGFACE: 'huggingface',
|
||||
}
|
||||
|
||||
export const LLMBackendConfig = {
|
||||
OPENAI: 'openai',
|
||||
OLLAMA: 'ollama',
|
||||
}
|
||||
|
||||
export interface ConfigOption {
|
||||
@@ -71,7 +59,6 @@ export interface ConfigOption {
|
||||
choices?: Array<{ id: string; name: string }>
|
||||
config_key?: string
|
||||
category: string
|
||||
note?: string
|
||||
}
|
||||
|
||||
function mapToItems(enumObj: Object): Array<{ id: string; name: string }> {
|
||||
@@ -271,58 +258,6 @@ export const PaperlessConfigOptions: ConfigOption[] = [
|
||||
config_key: 'PAPERLESS_CONSUMER_TAG_BARCODE_MAPPING',
|
||||
category: ConfigCategory.Barcode,
|
||||
},
|
||||
{
|
||||
key: 'ai_enabled',
|
||||
title: $localize`AI Enabled`,
|
||||
type: ConfigOptionType.Boolean,
|
||||
config_key: 'PAPERLESS_AI_ENABLED',
|
||||
category: ConfigCategory.AI,
|
||||
note: $localize`Consider privacy implications when enabling AI features, especially if using a remote model.`,
|
||||
},
|
||||
{
|
||||
key: 'llm_embedding_backend',
|
||||
title: $localize`LLM Embedding Backend`,
|
||||
type: ConfigOptionType.Select,
|
||||
choices: mapToItems(LLMEmbeddingBackendConfig),
|
||||
config_key: 'PAPERLESS_AI_LLM_EMBEDDING_BACKEND',
|
||||
category: ConfigCategory.AI,
|
||||
},
|
||||
{
|
||||
key: 'llm_embedding_model',
|
||||
title: $localize`LLM Embedding Model`,
|
||||
type: ConfigOptionType.String,
|
||||
config_key: 'PAPERLESS_AI_LLM_EMBEDDING_MODEL',
|
||||
category: ConfigCategory.AI,
|
||||
},
|
||||
{
|
||||
key: 'llm_backend',
|
||||
title: $localize`LLM Backend`,
|
||||
type: ConfigOptionType.Select,
|
||||
choices: mapToItems(LLMBackendConfig),
|
||||
config_key: 'PAPERLESS_AI_LLM_BACKEND',
|
||||
category: ConfigCategory.AI,
|
||||
},
|
||||
{
|
||||
key: 'llm_model',
|
||||
title: $localize`LLM Model`,
|
||||
type: ConfigOptionType.String,
|
||||
config_key: 'PAPERLESS_AI_LLM_MODEL',
|
||||
category: ConfigCategory.AI,
|
||||
},
|
||||
{
|
||||
key: 'llm_api_key',
|
||||
title: $localize`LLM API Key`,
|
||||
type: ConfigOptionType.Password,
|
||||
config_key: 'PAPERLESS_AI_LLM_API_KEY',
|
||||
category: ConfigCategory.AI,
|
||||
},
|
||||
{
|
||||
key: 'llm_endpoint',
|
||||
title: $localize`LLM Endpoint`,
|
||||
type: ConfigOptionType.String,
|
||||
config_key: 'PAPERLESS_AI_LLM_ENDPOINT',
|
||||
category: ConfigCategory.AI,
|
||||
},
|
||||
]
|
||||
|
||||
export interface PaperlessConfig extends ObjectWithId {
|
||||
@@ -352,11 +287,4 @@ export interface PaperlessConfig extends ObjectWithId {
|
||||
barcode_max_pages: number
|
||||
barcode_enable_tag: boolean
|
||||
barcode_tag_mapping: object
|
||||
ai_enabled: boolean
|
||||
llm_embedding_backend: string
|
||||
llm_embedding_model: string
|
||||
llm_backend: string
|
||||
llm_model: string
|
||||
llm_api_key: string
|
||||
llm_endpoint: string
|
||||
}
|
||||
|
||||
@@ -11,7 +11,6 @@ export enum PaperlessTaskName {
|
||||
TrainClassifier = 'train_classifier',
|
||||
SanityCheck = 'check_sanity',
|
||||
IndexOptimize = 'index_optimize',
|
||||
LLMIndexUpdate = 'llmindex_update',
|
||||
}
|
||||
|
||||
export enum PaperlessTaskStatus {
|
||||
|
||||
@@ -7,7 +7,6 @@ export enum SystemStatusItemStatus {
|
||||
OK = 'OK',
|
||||
ERROR = 'ERROR',
|
||||
WARNING = 'WARNING',
|
||||
DISABLED = 'DISABLED',
|
||||
}
|
||||
|
||||
export interface SystemStatus {
|
||||
@@ -44,9 +43,6 @@ export interface SystemStatus {
|
||||
sanity_check_status: SystemStatusItemStatus
|
||||
sanity_check_last_run: string // ISO date string
|
||||
sanity_check_error: string
|
||||
llmindex_status: SystemStatusItemStatus
|
||||
llmindex_last_modified: string // ISO date string
|
||||
llmindex_error: string
|
||||
}
|
||||
websocket_connected?: SystemStatusItemStatus // added client-side
|
||||
}
|
||||
|
||||
@@ -76,7 +76,6 @@ export const SETTINGS_KEYS = {
|
||||
GMAIL_OAUTH_URL: 'gmail_oauth_url',
|
||||
OUTLOOK_OAUTH_URL: 'outlook_oauth_url',
|
||||
EMAIL_ENABLED: 'email_enabled',
|
||||
AI_ENABLED: 'ai_enabled',
|
||||
}
|
||||
|
||||
export const SETTINGS: UiSetting[] = [
|
||||
@@ -290,9 +289,4 @@ export const SETTINGS: UiSetting[] = [
|
||||
type: 'string',
|
||||
default: 'page-width', // ZoomSetting from 'document-detail.component'
|
||||
},
|
||||
{
|
||||
key: SETTINGS_KEYS.AI_ENABLED,
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
},
|
||||
]
|
||||
|
||||
@@ -4,15 +4,15 @@ import {
|
||||
HttpInterceptor,
|
||||
HttpRequest,
|
||||
} from '@angular/common/http'
|
||||
import { inject, Injectable } from '@angular/core'
|
||||
import { Injectable, inject } from '@angular/core'
|
||||
import { Meta } from '@angular/platform-browser'
|
||||
import { CookieService } from 'ngx-cookie-service'
|
||||
import { Observable } from 'rxjs'
|
||||
|
||||
@Injectable()
|
||||
export class CsrfInterceptor implements HttpInterceptor {
|
||||
private cookieService: CookieService = inject(CookieService)
|
||||
private meta: Meta = inject(Meta)
|
||||
private cookieService = inject(CookieService)
|
||||
private meta = inject(Meta)
|
||||
|
||||
intercept(
|
||||
request: HttpRequest<unknown>,
|
||||
|
||||
@@ -1,58 +0,0 @@
|
||||
import {
|
||||
HttpEventType,
|
||||
provideHttpClient,
|
||||
withInterceptorsFromDi,
|
||||
} from '@angular/common/http'
|
||||
import {
|
||||
HttpTestingController,
|
||||
provideHttpClientTesting,
|
||||
} from '@angular/common/http/testing'
|
||||
import { TestBed } from '@angular/core/testing'
|
||||
import { environment } from 'src/environments/environment'
|
||||
import { ChatService } from './chat.service'
|
||||
|
||||
describe('ChatService', () => {
|
||||
let service: ChatService
|
||||
let httpMock: HttpTestingController
|
||||
|
||||
beforeEach(() => {
|
||||
TestBed.configureTestingModule({
|
||||
imports: [],
|
||||
providers: [
|
||||
ChatService,
|
||||
provideHttpClient(withInterceptorsFromDi()),
|
||||
provideHttpClientTesting(),
|
||||
],
|
||||
})
|
||||
service = TestBed.inject(ChatService)
|
||||
httpMock = TestBed.inject(HttpTestingController)
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
httpMock.verify()
|
||||
})
|
||||
|
||||
it('should stream chat messages', (done) => {
|
||||
const documentId = 1
|
||||
const prompt = 'Hello, world!'
|
||||
const mockResponse = 'Partial response text'
|
||||
const apiUrl = `${environment.apiBaseUrl}documents/chat/`
|
||||
|
||||
service.streamChat(documentId, prompt).subscribe((chunk) => {
|
||||
expect(chunk).toBe(mockResponse)
|
||||
done()
|
||||
})
|
||||
|
||||
const req = httpMock.expectOne(apiUrl)
|
||||
expect(req.request.method).toBe('POST')
|
||||
expect(req.request.body).toEqual({
|
||||
document_id: documentId,
|
||||
q: prompt,
|
||||
})
|
||||
|
||||
req.event({
|
||||
type: HttpEventType.DownloadProgress,
|
||||
partialText: mockResponse,
|
||||
} as any)
|
||||
})
|
||||
})
|
||||
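The spec above drives a single `DownloadProgress` event through the mocked request. To simulate a multi-chunk stream with the same `HttpTestingController` request object, the pattern can be repeated before the request completes; a sketch, assuming the same test setup as above:

```typescript
// Emit successive accumulated partial texts, then finish the request.
req.event({
  type: HttpEventType.DownloadProgress,
  partialText: 'Partial',
} as any)
req.event({
  type: HttpEventType.DownloadProgress,
  partialText: 'Partial response',
} as any)
req.flush('Partial response text')
```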
@@ -1,46 +0,0 @@
import {
  HttpClient,
  HttpDownloadProgressEvent,
  HttpEventType,
} from '@angular/common/http'
import { inject, Injectable } from '@angular/core'
import { filter, map, Observable } from 'rxjs'
import { environment } from 'src/environments/environment'

export interface ChatMessage {
  role: 'user' | 'assistant'
  content: string
  isStreaming?: boolean
}

@Injectable({
  providedIn: 'root',
})
export class ChatService {
  private http: HttpClient = inject(HttpClient)

  streamChat(documentId: number, prompt: string): Observable<string> {
    return this.http
      .post(
        `${environment.apiBaseUrl}documents/chat/`,
        {
          document_id: documentId,
          q: prompt,
        },
        {
          observe: 'events',
          reportProgress: true,
          responseType: 'text',
          withCredentials: true,
        }
      )
      .pipe(
        map((event) => {
          if (event.type === HttpEventType.DownloadProgress) {
            return (event as HttpDownloadProgressEvent).partialText!
          }
        }),
        filter((chunk) => !!chunk)
      )
  }
}
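A minimal sketch of how a consumer of the removed `streamChat()` might accumulate the streamed text into a `ChatMessage`; the component name, selector and empty template below are hypothetical and not taken from the codebase:

```typescript
import { Component, inject } from '@angular/core'
import { ChatMessage, ChatService } from 'src/app/services/chat.service'

@Component({ selector: 'pngx-chat-demo', template: '' })
export class ChatDemoComponent {
  private chatService = inject(ChatService)

  message: ChatMessage = { role: 'assistant', content: '', isStreaming: true }

  ask(documentId: number, prompt: string): void {
    this.chatService.streamChat(documentId, prompt).subscribe({
      // Each emission is the accumulated partial text so far (not a delta),
      // since it originates from HttpDownloadProgressEvent.partialText.
      next: (partialText) => (this.message.content = partialText),
      complete: () => (this.message.isStreaming = false),
    })
  }
}
```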
@@ -6,7 +6,7 @@ export const environment = {
|
||||
apiVersion: '9', // match src/paperless/settings.py
|
||||
appTitle: 'Paperless-ngx',
|
||||
tag: 'prod',
|
||||
version: '2.20.5',
|
||||
version: '2.20.3',
|
||||
webSocketHost: window.location.host,
|
||||
webSocketProtocol: window.location.protocol == 'https:' ? 'wss:' : 'ws:',
|
||||
webSocketBaseUrl: base_url.pathname + 'ws/',
|
||||
|
||||
@@ -10,7 +10,6 @@ import { DatePipe, registerLocaleData } from '@angular/common'
|
||||
import {
|
||||
HTTP_INTERCEPTORS,
|
||||
provideHttpClient,
|
||||
withFetch,
|
||||
withInterceptorsFromDi,
|
||||
} from '@angular/common/http'
|
||||
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
|
||||
@@ -50,7 +49,6 @@ import {
|
||||
caretDown,
|
||||
caretUp,
|
||||
chatLeftText,
|
||||
chatSquareDots,
|
||||
check,
|
||||
check2All,
|
||||
checkAll,
|
||||
@@ -126,7 +124,6 @@ import {
|
||||
sliders2Vertical,
|
||||
sortAlphaDown,
|
||||
sortAlphaUpAlt,
|
||||
stars,
|
||||
tag,
|
||||
tagFill,
|
||||
tags,
|
||||
@@ -269,7 +266,6 @@ const icons = {
|
||||
caretDown,
|
||||
caretUp,
|
||||
chatLeftText,
|
||||
chatSquareDots,
|
||||
check,
|
||||
check2All,
|
||||
checkAll,
|
||||
@@ -345,7 +341,6 @@ const icons = {
|
||||
sliders2Vertical,
|
||||
sortAlphaDown,
|
||||
sortAlphaUpAlt,
|
||||
stars,
|
||||
tagFill,
|
||||
tag,
|
||||
tags,
|
||||
@@ -412,6 +407,6 @@ bootstrapApplication(AppComponent, {
    CorrespondentNamePipe,
    DocumentTypeNamePipe,
    StoragePathNamePipe,
    provideHttpClient(withInterceptorsFromDi(), withFetch()),
    provideHttpClient(withInterceptorsFromDi()),
  ],
}).catch((err) => console.error(err))
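Side by side, the two `provideHttpClient` configurations touched by this hunk are shown below. Dropping `withFetch()` plausibly goes together with removing the streaming chat feature, but that is an inference from the diff, not something the change itself states:

```typescript
import {
  provideHttpClient,
  withFetch,
  withInterceptorsFromDi,
} from '@angular/common/http'

// Before this change: fetch-based HTTP backend enabled.
const providersBefore = [provideHttpClient(withInterceptorsFromDi(), withFetch())]

// After this change: default backend only.
const providersAfter = [provideHttpClient(withInterceptorsFromDi())]
```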
@@ -11,7 +11,6 @@ class DocumentsConfig(AppConfig):
|
||||
from documents.signals import document_consumption_finished
|
||||
from documents.signals import document_updated
|
||||
from documents.signals.handlers import add_inbox_tags
|
||||
from documents.signals.handlers import add_or_update_document_in_llm_index
|
||||
from documents.signals.handlers import add_to_index
|
||||
from documents.signals.handlers import run_workflows_added
|
||||
from documents.signals.handlers import run_workflows_updated
|
||||
@@ -27,7 +26,6 @@ class DocumentsConfig(AppConfig):
|
||||
document_consumption_finished.connect(set_storage_path)
|
||||
document_consumption_finished.connect(add_to_index)
|
||||
document_consumption_finished.connect(run_workflows_added)
|
||||
document_consumption_finished.connect(add_or_update_document_in_llm_index)
|
||||
document_updated.connect(run_workflows_updated)
|
||||
|
||||
import documents.schema # noqa: F401
|
||||
|
||||
@@ -13,6 +13,7 @@ from pikepdf import Page
|
||||
from pikepdf import PasswordError
|
||||
from pikepdf import Pdf
|
||||
|
||||
from documents.consumer import ConsumerPreflightPlugin
|
||||
from documents.converters import convert_from_tiff_to_pdf
|
||||
from documents.data_models import ConsumableDocument
|
||||
from documents.data_models import DocumentMetadataOverrides
|
||||
@@ -193,6 +194,15 @@ class BarcodePlugin(ConsumeTaskPlugin):
|
||||
):
|
||||
logger.info(f"Found ASN in barcode: {located_asn}")
|
||||
self.metadata.asn = located_asn
|
||||
# (Re-)run the preflight ASN check
|
||||
preflight_plugin = ConsumerPreflightPlugin(
|
||||
input_doc=self.input_doc,
|
||||
metadata=self.metadata,
|
||||
status_mgr=self.status_mgr,
|
||||
base_tmp_dir=self.base_tmp_dir,
|
||||
task_id=self.task_id,
|
||||
)
|
||||
preflight_plugin.pre_check_asn_value()
|
||||
|
||||
def cleanup(self) -> None:
|
||||
self.temp_dir.cleanup()
|
||||
|
||||
@@ -41,7 +41,6 @@ class SuggestionCacheData:
|
||||
CLASSIFIER_VERSION_KEY: Final[str] = "classifier_version"
|
||||
CLASSIFIER_HASH_KEY: Final[str] = "classifier_hash"
|
||||
CLASSIFIER_MODIFIED_KEY: Final[str] = "classifier_modified"
|
||||
LLM_CACHE_CLASSIFIER_VERSION: Final[int] = 1000 # Marker distinguishing LLM suggestions
|
||||
|
||||
CACHE_1_MINUTE: Final[int] = 60
|
||||
CACHE_5_MINUTES: Final[int] = 5 * CACHE_1_MINUTE
|
||||
@@ -197,54 +196,6 @@ def refresh_suggestions_cache(
|
||||
cache.touch(doc_key, timeout)
|
||||
|
||||
|
||||
def get_llm_suggestion_cache(
|
||||
document_id: int,
|
||||
backend: str,
|
||||
) -> SuggestionCacheData | None:
|
||||
doc_key = get_suggestion_cache_key(document_id)
|
||||
data: SuggestionCacheData = cache.get(doc_key)
|
||||
|
||||
if data and data.classifier_hash == backend:
|
||||
return data
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def set_llm_suggestions_cache(
|
||||
document_id: int,
|
||||
suggestions: dict,
|
||||
*,
|
||||
backend: str,
|
||||
timeout: int = CACHE_50_MINUTES,
|
||||
) -> None:
|
||||
"""
|
||||
Cache LLM-generated suggestions using a backend-specific identifier (e.g. 'openai:gpt-4').
|
||||
"""
|
||||
doc_key = get_suggestion_cache_key(document_id)
|
||||
cache.set(
|
||||
doc_key,
|
||||
SuggestionCacheData(
|
||||
classifier_version=LLM_CACHE_CLASSIFIER_VERSION,
|
||||
classifier_hash=backend,
|
||||
suggestions=suggestions,
|
||||
),
|
||||
timeout,
|
||||
)
|
||||
|
||||
|
||||
def invalidate_llm_suggestions_cache(
|
||||
document_id: int,
|
||||
) -> None:
|
||||
"""
|
||||
Invalidate the LLM suggestions cache for a specific document and backend.
|
||||
"""
|
||||
doc_key = get_suggestion_cache_key(document_id)
|
||||
data: SuggestionCacheData = cache.get(doc_key)
|
||||
|
||||
if data:
|
||||
cache.delete(doc_key)
|
||||
|
||||
|
||||
def get_metadata_cache_key(document_id: int) -> str:
|
||||
"""
|
||||
Returns the basic key for a document's metadata
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
from django.core.management import BaseCommand
|
||||
from django.db import transaction
|
||||
|
||||
from documents.management.commands.mixins import ProgressBarMixin
|
||||
from documents.tasks import llmindex_index
|
||||
|
||||
|
||||
class Command(ProgressBarMixin, BaseCommand):
|
||||
help = "Manages the LLM-based vector index for Paperless."
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument("command", choices=["rebuild", "update"])
|
||||
self.add_argument_progress_bar_mixin(parser)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
self.handle_progress_bar_mixin(**options)
|
||||
with transaction.atomic():
|
||||
llmindex_index(
|
||||
progress_bar_disable=self.no_progress_bar,
|
||||
rebuild=options["command"] == "rebuild",
|
||||
scheduled=False,
|
||||
)
|
||||
File diff suppressed because it is too large
src/documents/migrations/0002_auto_20151226_1316.py (new file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
# Generated by Django 1.9 on 2015-12-26 13:16
|
||||
|
||||
import django.utils.timezone
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name="document",
|
||||
options={"ordering": ("sender", "title")},
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="created",
|
||||
field=models.DateTimeField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
),
|
||||
),
|
||||
]
|
||||
src/documents/migrations/0003_sender.py (new file, 70 lines)
@@ -0,0 +1,70 @@
|
||||
# Generated by Django 1.9 on 2016-01-11 12:21
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
from django.template.defaultfilters import slugify
|
||||
|
||||
DOCUMENT_SENDER_MAP = {}
|
||||
|
||||
|
||||
def move_sender_strings_to_sender_model(apps, schema_editor):
|
||||
sender_model = apps.get_model("documents", "Sender")
|
||||
document_model = apps.get_model("documents", "Document")
|
||||
|
||||
# Create the sender and log the relationship with the document
|
||||
for document in document_model.objects.all():
|
||||
if document.sender:
|
||||
(
|
||||
DOCUMENT_SENDER_MAP[document.pk],
|
||||
_,
|
||||
) = sender_model.objects.get_or_create(
|
||||
name=document.sender,
|
||||
defaults={"slug": slugify(document.sender)},
|
||||
)
|
||||
|
||||
|
||||
def realign_senders(apps, schema_editor):
|
||||
document_model = apps.get_model("documents", "Document")
|
||||
for pk, sender in DOCUMENT_SENDER_MAP.items():
|
||||
document_model.objects.filter(pk=pk).update(sender=sender)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0002_auto_20151226_1316"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Sender",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("name", models.CharField(max_length=128, unique=True)),
|
||||
("slug", models.SlugField()),
|
||||
],
|
||||
),
|
||||
migrations.RunPython(move_sender_strings_to_sender_model),
|
||||
migrations.RemoveField(
|
||||
model_name="document",
|
||||
name="sender",
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="document",
|
||||
name="sender",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="documents.Sender",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(realign_senders),
|
||||
]
|
||||
src/documents/migrations/0004_auto_20160114_1844.py (new file, 25 lines)
@@ -0,0 +1,25 @@
|
||||
# Generated by Django 1.9 on 2016-01-14 18:44
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0003_sender"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="sender",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="documents",
|
||||
to="documents.Sender",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,178 @@
|
||||
# Generated by Django 4.2.13 on 2024-06-28 17:52
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
replaces = [
|
||||
("documents", "0004_auto_20160114_1844"),
|
||||
("documents", "0005_auto_20160123_0313"),
|
||||
("documents", "0006_auto_20160123_0430"),
|
||||
("documents", "0007_auto_20160126_2114"),
|
||||
("documents", "0008_document_file_type"),
|
||||
("documents", "0009_auto_20160214_0040"),
|
||||
("documents", "0010_log"),
|
||||
("documents", "0011_auto_20160303_1929"),
|
||||
]
|
||||
|
||||
dependencies = [
|
||||
("documents", "0003_sender"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="sender",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="documents",
|
||||
to="documents.sender",
|
||||
),
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="sender",
|
||||
options={"ordering": ("name",)},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="Tag",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("name", models.CharField(max_length=128, unique=True)),
|
||||
("slug", models.SlugField(blank=True)),
|
||||
(
|
||||
"colour",
|
||||
models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "#a6cee3"),
|
||||
(2, "#1f78b4"),
|
||||
(3, "#b2df8a"),
|
||||
(4, "#33a02c"),
|
||||
(5, "#fb9a99"),
|
||||
(6, "#e31a1c"),
|
||||
(7, "#fdbf6f"),
|
||||
(8, "#ff7f00"),
|
||||
(9, "#cab2d6"),
|
||||
(10, "#6a3d9a"),
|
||||
(11, "#b15928"),
|
||||
(12, "#000000"),
|
||||
(13, "#cccccc"),
|
||||
],
|
||||
default=1,
|
||||
),
|
||||
),
|
||||
("match", models.CharField(blank=True, max_length=256)),
|
||||
(
|
||||
"matching_algorithm",
|
||||
models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "Any"),
|
||||
(2, "All"),
|
||||
(3, "Literal"),
|
||||
(4, "Regular Expression"),
|
||||
],
|
||||
default=1,
|
||||
help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. If you don\'t know what a regex is, you probably don\'t want this option.',
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="sender",
|
||||
name="slug",
|
||||
field=models.SlugField(blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="document",
|
||||
name="file_type",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("pdf", "PDF"),
|
||||
("png", "PNG"),
|
||||
("jpg", "JPG"),
|
||||
("gif", "GIF"),
|
||||
("tiff", "TIFF"),
|
||||
],
|
||||
default="pdf",
|
||||
editable=False,
|
||||
max_length=4,
|
||||
),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="document",
|
||||
name="tags",
|
||||
field=models.ManyToManyField(
|
||||
blank=True,
|
||||
related_name="documents",
|
||||
to="documents.tag",
|
||||
),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="Log",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("group", models.UUIDField(blank=True)),
|
||||
("message", models.TextField()),
|
||||
(
|
||||
"level",
|
||||
models.PositiveIntegerField(
|
||||
choices=[
|
||||
(10, "Debugging"),
|
||||
(20, "Informational"),
|
||||
(30, "Warning"),
|
||||
(40, "Error"),
|
||||
(50, "Critical"),
|
||||
],
|
||||
default=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"component",
|
||||
models.PositiveIntegerField(
|
||||
choices=[(1, "Consumer"), (2, "Mail Fetcher")],
|
||||
),
|
||||
),
|
||||
("created", models.DateTimeField(auto_now_add=True)),
|
||||
("modified", models.DateTimeField(auto_now=True)),
|
||||
],
|
||||
options={
|
||||
"ordering": ("-modified",),
|
||||
},
|
||||
),
|
||||
migrations.RenameModel(
|
||||
old_name="Sender",
|
||||
new_name="Correspondent",
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="document",
|
||||
options={"ordering": ("correspondent", "title")},
|
||||
),
|
||||
migrations.RenameField(
|
||||
model_name="document",
|
||||
old_name="sender",
|
||||
new_name="correspondent",
|
||||
),
|
||||
]
|
||||
src/documents/migrations/0005_auto_20160123_0313.py (new file, 16 lines)
@@ -0,0 +1,16 @@
|
||||
# Generated by Django 1.9 on 2016-01-23 03:13
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0004_auto_20160114_1844"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name="sender",
|
||||
options={"ordering": ("name",)},
|
||||
),
|
||||
]
|
||||
src/documents/migrations/0006_auto_20160123_0430.py (new file, 64 lines)
@@ -0,0 +1,64 @@
|
||||
# Generated by Django 1.9 on 2016-01-23 04:30
|
||||
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0005_auto_20160123_0313"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Tag",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("name", models.CharField(max_length=128, unique=True)),
|
||||
("slug", models.SlugField(blank=True)),
|
||||
(
|
||||
"colour",
|
||||
models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "#a6cee3"),
|
||||
(2, "#1f78b4"),
|
||||
(3, "#b2df8a"),
|
||||
(4, "#33a02c"),
|
||||
(5, "#fb9a99"),
|
||||
(6, "#e31a1c"),
|
||||
(7, "#fdbf6f"),
|
||||
(8, "#ff7f00"),
|
||||
(9, "#cab2d6"),
|
||||
(10, "#6a3d9a"),
|
||||
(11, "#ffff99"),
|
||||
(12, "#b15928"),
|
||||
(13, "#000000"),
|
||||
(14, "#cccccc"),
|
||||
],
|
||||
default=1,
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="sender",
|
||||
name="slug",
|
||||
field=models.SlugField(blank=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="document",
|
||||
name="tags",
|
||||
field=models.ManyToManyField(related_name="documents", to="documents.Tag"),
|
||||
),
|
||||
]
|
||||
src/documents/migrations/0007_auto_20160126_2114.py (new file, 55 lines)
@@ -0,0 +1,55 @@
|
||||
# Generated by Django 1.9 on 2016-01-26 21:14
|
||||
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0006_auto_20160123_0430"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="tag",
|
||||
name="match",
|
||||
field=models.CharField(blank=True, max_length=256),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="tag",
|
||||
name="matching_algorithm",
|
||||
field=models.PositiveIntegerField(
|
||||
blank=True,
|
||||
choices=[
|
||||
(1, "Any"),
|
||||
(2, "All"),
|
||||
(3, "Literal"),
|
||||
(4, "Regular Expression"),
|
||||
],
|
||||
help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. If you don\'t know what a regex is, you probably don\'t want this option.',
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="tag",
|
||||
name="colour",
|
||||
field=models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "#a6cee3"),
|
||||
(2, "#1f78b4"),
|
||||
(3, "#b2df8a"),
|
||||
(4, "#33a02c"),
|
||||
(5, "#fb9a99"),
|
||||
(6, "#e31a1c"),
|
||||
(7, "#fdbf6f"),
|
||||
(8, "#ff7f00"),
|
||||
(9, "#cab2d6"),
|
||||
(10, "#6a3d9a"),
|
||||
(11, "#b15928"),
|
||||
(12, "#000000"),
|
||||
(13, "#cccccc"),
|
||||
],
|
||||
default=1,
|
||||
),
|
||||
),
|
||||
]
|
||||
src/documents/migrations/0008_document_file_type.py (new file, 39 lines)
@@ -0,0 +1,39 @@
|
||||
# Generated by Django 1.9 on 2016-01-29 22:58
|
||||
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0007_auto_20160126_2114"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="document",
|
||||
name="file_type",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("pdf", "PDF"),
|
||||
("png", "PNG"),
|
||||
("jpg", "JPG"),
|
||||
("gif", "GIF"),
|
||||
("tiff", "TIFF"),
|
||||
],
|
||||
default="pdf",
|
||||
editable=False,
|
||||
max_length=4,
|
||||
),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="tags",
|
||||
field=models.ManyToManyField(
|
||||
blank=True,
|
||||
related_name="documents",
|
||||
to="documents.Tag",
|
||||
),
|
||||
),
|
||||
]
|
||||
src/documents/migrations/0009_auto_20160214_0040.py (new file, 27 lines)
@@ -0,0 +1,27 @@
|
||||
# Generated by Django 1.9 on 2016-02-14 00:40
|
||||
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0008_document_file_type"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="tag",
|
||||
name="matching_algorithm",
|
||||
field=models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "Any"),
|
||||
(2, "All"),
|
||||
(3, "Literal"),
|
||||
(4, "Regular Expression"),
|
||||
],
|
||||
default=1,
|
||||
help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. If you don\'t know what a regex is, you probably don\'t want this option.',
|
||||
),
|
||||
),
|
||||
]
|
||||
src/documents/migrations/0010_log.py (new file, 53 lines)
@@ -0,0 +1,53 @@
|
||||
# Generated by Django 1.9 on 2016-02-27 17:54
|
||||
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0009_auto_20160214_0040"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Log",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("group", models.UUIDField(blank=True)),
|
||||
("message", models.TextField()),
|
||||
(
|
||||
"level",
|
||||
models.PositiveIntegerField(
|
||||
choices=[
|
||||
(10, "Debugging"),
|
||||
(20, "Informational"),
|
||||
(30, "Warning"),
|
||||
(40, "Error"),
|
||||
(50, "Critical"),
|
||||
],
|
||||
default=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"component",
|
||||
models.PositiveIntegerField(
|
||||
choices=[(1, "Consumer"), (2, "Mail Fetcher")],
|
||||
),
|
||||
),
|
||||
("created", models.DateTimeField(auto_now_add=True)),
|
||||
("modified", models.DateTimeField(auto_now=True)),
|
||||
],
|
||||
options={
|
||||
"ordering": ("-modified",),
|
||||
},
|
||||
),
|
||||
]
|
||||
src/documents/migrations/0011_auto_20160303_1929.py (new file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
# Generated by Django 1.9.2 on 2016-03-03 19:29
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
dependencies = [
|
||||
("documents", "0010_log"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RenameModel(
|
||||
old_name="Sender",
|
||||
new_name="Correspondent",
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="document",
|
||||
options={"ordering": ("correspondent", "title")},
|
||||
),
|
||||
migrations.RenameField(
|
||||
model_name="document",
|
||||
old_name="sender",
|
||||
new_name="correspondent",
|
||||
),
|
||||
]
|
||||
src/documents/migrations/0012_auto_20160305_0040.py (new file, 128 lines)
@@ -0,0 +1,128 @@
|
||||
# Generated by Django 1.9.2 on 2016-03-05 00:40
|
||||
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
import gnupg
|
||||
from django.conf import settings
|
||||
from django.db import migrations
|
||||
from django.utils.termcolors import colorize as colourise # Spelling hurts me
|
||||
|
||||
|
||||
class GnuPG:
|
||||
"""
|
||||
A handy singleton to use when handling encrypted files.
|
||||
"""
|
||||
|
||||
gpg = gnupg.GPG(gnupghome=settings.GNUPG_HOME)
|
||||
|
||||
@classmethod
|
||||
def decrypted(cls, file_handle):
|
||||
return cls.gpg.decrypt_file(file_handle, passphrase=settings.PASSPHRASE).data
|
||||
|
||||
@classmethod
|
||||
def encrypted(cls, file_handle):
|
||||
return cls.gpg.encrypt_file(
|
||||
file_handle,
|
||||
recipients=None,
|
||||
passphrase=settings.PASSPHRASE,
|
||||
symmetric=True,
|
||||
).data
|
||||
|
||||
|
||||
def move_documents_and_create_thumbnails(apps, schema_editor):
|
||||
(Path(settings.MEDIA_ROOT) / "documents" / "originals").mkdir(
|
||||
parents=True,
|
||||
exist_ok=True,
|
||||
)
|
||||
(Path(settings.MEDIA_ROOT) / "documents" / "thumbnails").mkdir(
|
||||
parents=True,
|
||||
exist_ok=True,
|
||||
)
|
||||
|
||||
documents: list[str] = os.listdir(Path(settings.MEDIA_ROOT) / "documents") # noqa: PTH208
|
||||
|
||||
if set(documents) == {"originals", "thumbnails"}:
|
||||
return
|
||||
|
||||
print(
|
||||
colourise(
|
||||
"\n\n"
|
||||
" This is a one-time only migration to generate thumbnails for all of your\n"
|
||||
" documents so that future UIs will have something to work with. If you have\n"
|
||||
" a lot of documents though, this may take a while, so a coffee break may be\n"
|
||||
" in order."
|
||||
"\n",
|
||||
opts=("bold",),
|
||||
),
|
||||
)
|
||||
|
||||
Path(settings.SCRATCH_DIR).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
for f in sorted(documents):
|
||||
if not f.endswith("gpg"):
|
||||
continue
|
||||
|
||||
print(
|
||||
" {} {} {}".format(
|
||||
colourise("*", fg="green"),
|
||||
colourise("Generating a thumbnail for", fg="white"),
|
||||
colourise(f, fg="cyan"),
|
||||
),
|
||||
)
|
||||
|
||||
thumb_temp: str = tempfile.mkdtemp(prefix="paperless", dir=settings.SCRATCH_DIR)
|
||||
orig_temp: str = tempfile.mkdtemp(prefix="paperless", dir=settings.SCRATCH_DIR)
|
||||
|
||||
orig_source: Path = Path(settings.MEDIA_ROOT) / "documents" / f
|
||||
orig_target: Path = Path(orig_temp) / f.replace(".gpg", "")
|
||||
|
||||
with orig_source.open("rb") as encrypted, orig_target.open("wb") as unencrypted:
|
||||
unencrypted.write(GnuPG.decrypted(encrypted))
|
||||
|
||||
subprocess.Popen(
|
||||
(
|
||||
settings.CONVERT_BINARY,
|
||||
"-scale",
|
||||
"500x5000",
|
||||
"-alpha",
|
||||
"remove",
|
||||
orig_target,
|
||||
Path(thumb_temp) / "convert-%04d.png",
|
||||
),
|
||||
).wait()
|
||||
|
||||
thumb_source: Path = Path(thumb_temp) / "convert-0000.png"
|
||||
thumb_target: Path = (
|
||||
Path(settings.MEDIA_ROOT)
|
||||
/ "documents"
|
||||
/ "thumbnails"
|
||||
/ re.sub(r"(\d+)\.\w+(\.gpg)", "\\1.png\\2", f)
|
||||
)
|
||||
with (
|
||||
thumb_source.open("rb") as unencrypted,
|
||||
thumb_target.open("wb") as encrypted,
|
||||
):
|
||||
encrypted.write(GnuPG.encrypted(unencrypted))
|
||||
|
||||
shutil.rmtree(thumb_temp)
|
||||
shutil.rmtree(orig_temp)
|
||||
|
||||
shutil.move(
|
||||
Path(settings.MEDIA_ROOT) / "documents" / f,
|
||||
Path(settings.MEDIA_ROOT) / "documents" / "originals" / f,
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0011_auto_20160303_1929"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(move_documents_and_create_thumbnails),
|
||||
]
|
||||
src/documents/migrations/0013_auto_20160325_2111.py (new file, 42 lines)
@@ -0,0 +1,42 @@
# Generated by Django 1.9.4 on 2016-03-25 21:11

import django.utils.timezone
from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0012_auto_20160305_0040"),
    ]

    operations = [
        migrations.AddField(
            model_name="correspondent",
            name="match",
            field=models.CharField(blank=True, max_length=256),
        ),
        migrations.AddField(
            model_name="correspondent",
            name="matching_algorithm",
            field=models.PositiveIntegerField(
                choices=[
                    (1, "Any"),
                    (2, "All"),
                    (3, "Literal"),
                    (4, "Regular Expression"),
                ],
                default=1,
                help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. If you don\'t know what a regex is, you probably don\'t want this option.',
            ),
        ),
        migrations.AlterField(
            model_name="document",
            name="created",
            field=models.DateTimeField(default=django.utils.timezone.now),
        ),
        migrations.RemoveField(
            model_name="log",
            name="component",
        ),
    ]
src/documents/migrations/0014_document_checksum.py (new file, 182 lines)
@@ -0,0 +1,182 @@
# Generated by Django 1.9.4 on 2016-03-28 19:09
|
||||
|
||||
import hashlib
|
||||
from pathlib import Path
|
||||
|
||||
import django.utils.timezone
|
||||
import gnupg
|
||||
from django.conf import settings
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
from django.template.defaultfilters import slugify
|
||||
from django.utils.termcolors import colorize as colourise # Spelling hurts me
|
||||
|
||||
|
||||
class GnuPG:
|
||||
"""
|
||||
A handy singleton to use when handling encrypted files.
|
||||
"""
|
||||
|
||||
gpg = gnupg.GPG(gnupghome=settings.GNUPG_HOME)
|
||||
|
||||
@classmethod
|
||||
def decrypted(cls, file_handle):
|
||||
return cls.gpg.decrypt_file(file_handle, passphrase=settings.PASSPHRASE).data
|
||||
|
||||
@classmethod
|
||||
def encrypted(cls, file_handle):
|
||||
return cls.gpg.encrypt_file(
|
||||
file_handle,
|
||||
recipients=None,
|
||||
passphrase=settings.PASSPHRASE,
|
||||
symmetric=True,
|
||||
).data
|
||||
|
||||
|
||||
class Document:
|
||||
"""
|
||||
Django's migrations restrict access to model methods, so this is a snapshot
|
||||
of the methods that existed at the time this migration was written, since
|
||||
we need to make use of a lot of these shortcuts here.
|
||||
"""
|
||||
|
||||
def __init__(self, doc):
|
||||
self.pk = doc.pk
|
||||
self.correspondent = doc.correspondent
|
||||
self.title = doc.title
|
||||
self.file_type = doc.file_type
|
||||
self.tags = doc.tags
|
||||
self.created = doc.created
|
||||
|
||||
def __str__(self):
|
||||
created = self.created.strftime("%Y%m%d%H%M%S")
|
||||
if self.correspondent and self.title:
|
||||
return f"{created}: {self.correspondent} - {self.title}"
|
||||
if self.correspondent or self.title:
|
||||
return f"{created}: {self.correspondent or self.title}"
|
||||
return str(created)
|
||||
|
||||
@property
|
||||
def source_path(self):
|
||||
return (
|
||||
Path(settings.MEDIA_ROOT)
|
||||
/ "documents"
|
||||
/ "originals"
|
||||
/ f"{self.pk:07}.{self.file_type}.gpg"
|
||||
)
|
||||
|
||||
@property
|
||||
def source_file(self):
|
||||
return self.source_path.open("rb")
|
||||
|
||||
@property
|
||||
def file_name(self):
|
||||
return slugify(str(self)) + "." + self.file_type
|
||||
|
||||
|
||||
def set_checksums(apps, schema_editor):
|
||||
document_model = apps.get_model("documents", "Document")
|
||||
|
||||
if not document_model.objects.all().exists():
|
||||
return
|
||||
|
||||
print(
|
||||
colourise(
|
||||
"\n\n"
|
||||
" This is a one-time only migration to generate checksums for all\n"
|
||||
" of your existing documents. If you have a lot of documents\n"
|
||||
" though, this may take a while, so a coffee break may be in\n"
|
||||
" order."
|
||||
"\n",
|
||||
opts=("bold",),
|
||||
),
|
||||
)
|
||||
|
||||
sums = {}
|
||||
for d in document_model.objects.all():
|
||||
document = Document(d)
|
||||
|
||||
print(
|
||||
" {} {} {}".format(
|
||||
colourise("*", fg="green"),
|
||||
colourise("Generating a checksum for", fg="white"),
|
||||
colourise(document.file_name, fg="cyan"),
|
||||
),
|
||||
)
|
||||
|
||||
with document.source_file as encrypted:
|
||||
checksum = hashlib.md5(GnuPG.decrypted(encrypted)).hexdigest()
|
||||
|
||||
if checksum in sums:
|
||||
error = "\n{line}{p1}\n\n{doc1}\n{doc2}\n\n{p2}\n\n{code}\n\n{p3}{line}".format(
|
||||
p1=colourise(
|
||||
"It appears that you have two identical documents in your collection and \nPaperless no longer supports this (see issue #97). The documents in question\nare:",
|
||||
fg="yellow",
|
||||
),
|
||||
p2=colourise(
|
||||
"To fix this problem, you'll have to remove one of them from the database, a task\nmost easily done by running the following command in the same\ndirectory as manage.py:",
|
||||
fg="yellow",
|
||||
),
|
||||
p3=colourise(
|
||||
"When that's finished, re-run the migrate, and provided that there aren't any\nother duplicates, you should be good to go.",
|
||||
fg="yellow",
|
||||
),
|
||||
doc1=colourise(
|
||||
f" * {sums[checksum][1]} (id: {sums[checksum][0]})",
|
||||
fg="red",
|
||||
),
|
||||
doc2=colourise(
|
||||
f" * {document.file_name} (id: {document.pk})",
|
||||
fg="red",
|
||||
),
|
||||
code=colourise(
|
||||
f" $ echo 'DELETE FROM documents_document WHERE id = {document.pk};' | ./manage.py dbshell",
|
||||
fg="green",
|
||||
),
|
||||
line=colourise("\n{}\n".format("=" * 80), fg="white", opts=("bold",)),
|
||||
)
|
||||
raise RuntimeError(error)
|
||||
sums[checksum] = (document.pk, document.file_name)
|
||||
|
||||
document_model.objects.filter(pk=document.pk).update(checksum=checksum)
|
||||
|
||||
|
||||
def do_nothing(apps, schema_editor):
|
||||
pass
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0013_auto_20160325_2111"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="document",
|
||||
name="checksum",
|
||||
field=models.CharField(
|
||||
default="-",
|
||||
db_index=True,
|
||||
editable=False,
|
||||
max_length=32,
|
||||
help_text="The checksum of the original document (before it "
|
||||
"was encrypted). We use this to prevent duplicate "
|
||||
"document imports.",
|
||||
),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.RunPython(set_checksums, do_nothing),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="created",
|
||||
field=models.DateTimeField(
|
||||
db_index=True,
|
||||
default=django.utils.timezone.now,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="modified",
|
||||
field=models.DateTimeField(auto_now=True, db_index=True),
|
||||
),
|
||||
]
|
||||
src/documents/migrations/0015_add_insensitive_to_match.py (new file, 33 lines)
@@ -0,0 +1,33 @@
# Generated by Django 1.10.2 on 2016-10-05 21:38

from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0014_document_checksum"),
    ]

    operations = [
        migrations.AlterField(
            model_name="document",
            name="checksum",
            field=models.CharField(
                editable=False,
                help_text="The checksum of the original document (before it was encrypted). We use this to prevent duplicate document imports.",
                max_length=32,
                unique=True,
            ),
        ),
        migrations.AddField(
            model_name="correspondent",
            name="is_insensitive",
            field=models.BooleanField(default=True),
        ),
        migrations.AddField(
            model_name="tag",
            name="is_insensitive",
            field=models.BooleanField(default=True),
        ),
    ]
@@ -0,0 +1,92 @@
|
||||
# Generated by Django 4.2.13 on 2024-06-28 17:57
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
replaces = [
|
||||
("documents", "0015_add_insensitive_to_match"),
|
||||
("documents", "0016_auto_20170325_1558"),
|
||||
("documents", "0017_auto_20170512_0507"),
|
||||
("documents", "0018_auto_20170715_1712"),
|
||||
]
|
||||
|
||||
dependencies = [
|
||||
("documents", "0014_document_checksum"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="checksum",
|
||||
field=models.CharField(
|
||||
editable=False,
|
||||
help_text="The checksum of the original document (before it was encrypted). We use this to prevent duplicate document imports.",
|
||||
max_length=32,
|
||||
unique=True,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="correspondent",
|
||||
name="is_insensitive",
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="tag",
|
||||
name="is_insensitive",
|
||||
field=models.BooleanField(default=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="content",
|
||||
field=models.TextField(
|
||||
blank=True,
|
||||
db_index=("mysql" not in settings.DATABASES["default"]["ENGINE"]),
|
||||
help_text="The raw, text-only data of the document. This field is primarily used for searching.",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="correspondent",
|
||||
name="matching_algorithm",
|
||||
field=models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "Any"),
|
||||
(2, "All"),
|
||||
(3, "Literal"),
|
||||
(4, "Regular Expression"),
|
||||
(5, "Fuzzy Match"),
|
||||
],
|
||||
default=1,
|
||||
help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. (If you don\'t know what a regex is, you probably don\'t want this option.) Finally, a "fuzzy match" looks for words or phrases that are mostly—but not exactly—the same, which can be useful for matching against documents containing imperfections that foil accurate OCR.',
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="tag",
|
||||
name="matching_algorithm",
|
||||
field=models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "Any"),
|
||||
(2, "All"),
|
||||
(3, "Literal"),
|
||||
(4, "Regular Expression"),
|
||||
(5, "Fuzzy Match"),
|
||||
],
|
||||
default=1,
|
||||
help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. (If you don\'t know what a regex is, you probably don\'t want this option.) Finally, a "fuzzy match" looks for words or phrases that are mostly—but not exactly—the same, which can be useful for matching against documents containing imperfections that foil accurate OCR.',
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="correspondent",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="documents",
|
||||
to="documents.correspondent",
|
||||
),
|
||||
),
|
||||
]
|
||||
src/documents/migrations/0016_auto_20170325_1558.py (new file, 23 lines)
@@ -0,0 +1,23 @@
# Generated by Django 1.10.5 on 2017-03-25 15:58

from django.conf import settings
from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0015_add_insensitive_to_match"),
    ]

    operations = [
        migrations.AlterField(
            model_name="document",
            name="content",
            field=models.TextField(
                blank=True,
                db_index=("mysql" not in settings.DATABASES["default"]["ENGINE"]),
                help_text="The raw, text-only data of the document. This field is primarily used for searching.",
            ),
        ),
    ]
src/documents/migrations/0017_auto_20170512_0507.py (new file, 43 lines)
@@ -0,0 +1,43 @@
# Generated by Django 1.10.5 on 2017-05-12 05:07
|
||||
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0016_auto_20170325_1558"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="correspondent",
|
||||
name="matching_algorithm",
|
||||
field=models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "Any"),
|
||||
(2, "All"),
|
||||
(3, "Literal"),
|
||||
(4, "Regular Expression"),
|
||||
(5, "Fuzzy Match"),
|
||||
],
|
||||
default=1,
|
||||
help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. (If you don\'t know what a regex is, you probably don\'t want this option.) Finally, a "fuzzy match" looks for words or phrases that are mostly—but not exactly—the same, which can be useful for matching against documents containing imperfections that foil accurate OCR.',
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="tag",
|
||||
name="matching_algorithm",
|
||||
field=models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "Any"),
|
||||
(2, "All"),
|
||||
(3, "Literal"),
|
||||
(4, "Regular Expression"),
|
||||
(5, "Fuzzy Match"),
|
||||
],
|
||||
default=1,
|
||||
help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. (If you don\'t know what a regex is, you probably don\'t want this option.) Finally, a "fuzzy match" looks for words or phrases that are mostly—but not exactly—the same, which can be useful for matching against documents containing imperfections that foil accurate OCR.',
|
||||
),
|
||||
),
|
||||
]
|
||||
src/documents/migrations/0018_auto_20170715_1712.py (new file, 25 lines)
@@ -0,0 +1,25 @@
# Generated by Django 1.10.5 on 2017-07-15 17:12

import django.db.models.deletion
from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0017_auto_20170512_0507"),
    ]

    operations = [
        migrations.AlterField(
            model_name="document",
            name="correspondent",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="documents",
                to="documents.Correspondent",
            ),
        ),
    ]
src/documents/migrations/0019_add_consumer_user.py (new file, 22 lines)
@@ -0,0 +1,22 @@
# Generated by Django 1.10.5 on 2017-07-15 17:12

from django.contrib.auth.models import User
from django.db import migrations


def forwards_func(apps, schema_editor):
    User.objects.create(username="consumer")


def reverse_func(apps, schema_editor):
    User.objects.get(username="consumer").delete()


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0018_auto_20170715_1712"),
    ]

    operations = [
        migrations.RunPython(forwards_func, reverse_func),
    ]
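Note: the RunPython functions above import the concrete User model directly. Django's usual guidance for data migrations is to resolve models through the historical app registry passed in as `apps`; a minimal sketch of the same forward step written that way (illustrative only, not the code committed in this file):

def forwards_func(apps, schema_editor):
    # Look up the historical model instead of importing it directly (sketch)
    User = apps.get_model("auth", "User")
    User.objects.create(username="consumer")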
src/documents/migrations/0020_document_added.py (new file, 29 lines)
@@ -0,0 +1,29 @@
import django.utils.timezone
from django.db import migrations
from django.db import models


def set_added_time_to_created_time(apps, schema_editor):
    Document = apps.get_model("documents", "Document")
    for doc in Document.objects.all():
        doc.added = doc.created
        doc.save()


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "0019_add_consumer_user"),
    ]

    operations = [
        migrations.AddField(
            model_name="document",
            name="added",
            field=models.DateTimeField(
                db_index=True,
                default=django.utils.timezone.now,
                editable=False,
            ),
        ),
        migrations.RunPython(set_added_time_to_created_time),
    ]
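The backfill above saves each document individually, which is simple but issues one UPDATE per row. A single queryset update with an F() expression is a common alternative for large tables; a sketch under the same historical-model access (not what this migration ships):

from django.db.models import F

def set_added_time_to_created_time(apps, schema_editor):
    Document = apps.get_model("documents", "Document")
    # One UPDATE statement instead of a per-row save loop (sketch)
    Document.objects.update(added=F("created"))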
src/documents/migrations/0021_document_storage_type.py (new file, 41 lines)
@@ -0,0 +1,41 @@
# Generated by Django 1.11.10 on 2018-02-04 13:07
|
||||
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0020_document_added"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
# Add the field with the default GPG-encrypted value
|
||||
migrations.AddField(
|
||||
model_name="document",
|
||||
name="storage_type",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("unencrypted", "Unencrypted"),
|
||||
("gpg", "Encrypted with GNU Privacy Guard"),
|
||||
],
|
||||
default="gpg",
|
||||
editable=False,
|
||||
max_length=11,
|
||||
),
|
||||
),
|
||||
# Now that the field is added, change the default to unencrypted
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="storage_type",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("unencrypted", "Unencrypted"),
|
||||
("gpg", "Encrypted with GNU Privacy Guard"),
|
||||
],
|
||||
default="unencrypted",
|
||||
editable=False,
|
||||
max_length=11,
|
||||
),
|
||||
),
|
||||
]
|
||||
src/documents/migrations/0022_auto_20181007_1420.py (new file, 61 lines)
@@ -0,0 +1,61 @@
# Generated by Django 2.0.8 on 2018-10-07 14:20
|
||||
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
from django.utils.text import slugify
|
||||
|
||||
|
||||
def re_slug_all_the_things(apps, schema_editor):
|
||||
"""
|
||||
Rewrite all slug values to make sure they're actually slugs before we brand
|
||||
them as uneditable.
|
||||
"""
|
||||
|
||||
Tag = apps.get_model("documents", "Tag")
|
||||
Correspondent = apps.get_model("documents", "Correspondent")
|
||||
|
||||
for klass in (Tag, Correspondent):
|
||||
for instance in klass.objects.all():
|
||||
klass.objects.filter(pk=instance.pk).update(slug=slugify(instance.slug))
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0021_document_storage_type"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name="tag",
|
||||
options={"ordering": ("name",)},
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="correspondent",
|
||||
name="slug",
|
||||
field=models.SlugField(blank=True, editable=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="file_type",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("pdf", "PDF"),
|
||||
("png", "PNG"),
|
||||
("jpg", "JPG"),
|
||||
("gif", "GIF"),
|
||||
("tiff", "TIFF"),
|
||||
("txt", "TXT"),
|
||||
("csv", "CSV"),
|
||||
("md", "MD"),
|
||||
],
|
||||
editable=False,
|
||||
max_length=4,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="tag",
|
||||
name="slug",
|
||||
field=models.SlugField(blank=True, editable=False),
|
||||
),
|
||||
migrations.RunPython(re_slug_all_the_things, migrations.RunPython.noop),
|
||||
]
|
||||
src/documents/migrations/0023_document_current_filename.py (new file, 39 lines)
@@ -0,0 +1,39 @@
# Generated by Django 2.0.10 on 2019-04-26 18:57
|
||||
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
def set_filename(apps, schema_editor):
|
||||
Document = apps.get_model("documents", "Document")
|
||||
for doc in Document.objects.all():
|
||||
file_name = f"{doc.pk:07}.{doc.file_type}"
|
||||
if doc.storage_type == "gpg":
|
||||
file_name += ".gpg"
|
||||
|
||||
# Set filename
|
||||
doc.filename = file_name
|
||||
|
||||
# Save document
|
||||
doc.save()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0022_auto_20181007_1420"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="document",
|
||||
name="filename",
|
||||
field=models.FilePathField(
|
||||
default=None,
|
||||
null=True,
|
||||
editable=False,
|
||||
help_text="Current filename in storage",
|
||||
max_length=256,
|
||||
),
|
||||
),
|
||||
migrations.RunPython(set_filename),
|
||||
]
|
||||
src/documents/migrations/1000_update_paperless_all.py (new file, 147 lines)
@@ -0,0 +1,147 @@
# Generated by Django 3.1.3 on 2020-11-07 12:35
|
||||
import uuid
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
def logs_set_default_group(apps, schema_editor):
|
||||
Log = apps.get_model("documents", "Log")
|
||||
for log in Log.objects.all():
|
||||
if log.group is None:
|
||||
log.group = uuid.uuid4()
|
||||
log.save()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0023_document_current_filename"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="document",
|
||||
name="archive_serial_number",
|
||||
field=models.IntegerField(
|
||||
blank=True,
|
||||
db_index=True,
|
||||
help_text="The position of this document in your physical document archive.",
|
||||
null=True,
|
||||
unique=True,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="tag",
|
||||
name="is_inbox_tag",
|
||||
field=models.BooleanField(
|
||||
default=False,
|
||||
help_text="Marks this tag as an inbox tag: All newly consumed documents will be tagged with inbox tags.",
|
||||
),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="DocumentType",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("name", models.CharField(max_length=128, unique=True)),
|
||||
("slug", models.SlugField(blank=True, editable=False)),
|
||||
("match", models.CharField(blank=True, max_length=256)),
|
||||
(
|
||||
"matching_algorithm",
|
||||
models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "Any"),
|
||||
(2, "All"),
|
||||
(3, "Literal"),
|
||||
(4, "Regular Expression"),
|
||||
(5, "Fuzzy Match"),
|
||||
(6, "Automatic Classification"),
|
||||
],
|
||||
default=1,
|
||||
help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. (If you don\'t know what a regex is, you probably don\'t want this option.) Finally, a "fuzzy match" looks for words or phrases that are mostly—but not exactly—the same, which can be useful for matching against documents containing imperfections that foil accurate OCR.',
|
||||
),
|
||||
),
|
||||
("is_insensitive", models.BooleanField(default=True)),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
"ordering": ("name",),
|
||||
},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="document",
|
||||
name="document_type",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="documents",
|
||||
to="documents.documenttype",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="correspondent",
|
||||
name="matching_algorithm",
|
||||
field=models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "Any"),
|
||||
(2, "All"),
|
||||
(3, "Literal"),
|
||||
(4, "Regular Expression"),
|
||||
(5, "Fuzzy Match"),
|
||||
(6, "Automatic Classification"),
|
||||
],
|
||||
default=1,
|
||||
help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. (If you don\'t know what a regex is, you probably don\'t want this option.) Finally, a "fuzzy match" looks for words or phrases that are mostly—but not exactly—the same, which can be useful for matching against documents containing imperfections that foil accurate OCR.',
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="tag",
|
||||
name="matching_algorithm",
|
||||
field=models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "Any"),
|
||||
(2, "All"),
|
||||
(3, "Literal"),
|
||||
(4, "Regular Expression"),
|
||||
(5, "Fuzzy Match"),
|
||||
(6, "Automatic Classification"),
|
||||
],
|
||||
default=1,
|
||||
help_text='Which algorithm you want to use when matching text to the OCR\'d PDF. Here, "any" looks for any occurrence of any word provided in the PDF, while "all" requires that every word provided appear in the PDF, albeit not in the order provided. A "literal" match means that the text you enter must appear in the PDF exactly as you\'ve entered it, and "regular expression" uses a regex to match the PDF. (If you don\'t know what a regex is, you probably don\'t want this option.) Finally, a "fuzzy match" looks for words or phrases that are mostly—but not exactly—the same, which can be useful for matching against documents containing imperfections that foil accurate OCR.',
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="content",
|
||||
field=models.TextField(
|
||||
blank=True,
|
||||
help_text="The raw, text-only data of the document. This field is primarily used for searching.",
|
||||
),
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="log",
|
||||
options={"ordering": ("-created",)},
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="log",
|
||||
name="modified",
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="log",
|
||||
name="group",
|
||||
field=models.UUIDField(blank=True, null=True),
|
||||
),
|
||||
migrations.RunPython(
|
||||
code=django.db.migrations.operations.special.RunPython.noop,
|
||||
reverse_code=logs_set_default_group,
|
||||
),
|
||||
]
|
||||
src/documents/migrations/1001_auto_20201109_1636.py (new file, 13 lines)
@@ -0,0 +1,13 @@
# Generated by Django 3.1.3 on 2020-11-09 16:36

from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1000_update_paperless_all"),
    ]

    operations = [
        migrations.RunPython(migrations.RunPython.noop, migrations.RunPython.noop),
    ]
src/documents/migrations/1002_auto_20201111_1105.py (new file, 24 lines)
@@ -0,0 +1,24 @@
# Generated by Django 3.1.3 on 2020-11-11 11:05

from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1001_auto_20201109_1636"),
    ]

    operations = [
        migrations.AlterField(
            model_name="document",
            name="filename",
            field=models.FilePathField(
                default=None,
                editable=False,
                help_text="Current filename in storage",
                max_length=1024,
                null=True,
            ),
        ),
    ]
src/documents/migrations/1003_mime_types.py (new file, 92 lines)
@@ -0,0 +1,92 @@
# Generated by Django 3.1.3 on 2020-11-20 11:21
|
||||
from pathlib import Path
|
||||
|
||||
import magic
|
||||
from django.conf import settings
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
from paperless.db import GnuPG
|
||||
|
||||
STORAGE_TYPE_UNENCRYPTED = "unencrypted"
|
||||
STORAGE_TYPE_GPG = "gpg"
|
||||
|
||||
|
||||
def source_path(self) -> Path:
|
||||
if self.filename:
|
||||
fname: str = str(self.filename)
|
||||
else:
|
||||
fname = f"{self.pk:07}.{self.file_type}"
|
||||
if self.storage_type == STORAGE_TYPE_GPG:
|
||||
fname += ".gpg"
|
||||
|
||||
return Path(settings.ORIGINALS_DIR) / fname
|
||||
|
||||
|
||||
def add_mime_types(apps, schema_editor):
|
||||
Document = apps.get_model("documents", "Document")
|
||||
documents = Document.objects.all()
|
||||
|
||||
for d in documents:
|
||||
with Path(source_path(d)).open("rb") as f:
|
||||
if d.storage_type == STORAGE_TYPE_GPG:
|
||||
data = GnuPG.decrypted(f)
|
||||
else:
|
||||
data = f.read(1024)
|
||||
|
||||
d.mime_type = magic.from_buffer(data, mime=True)
|
||||
d.save()
|
||||
|
||||
|
||||
def add_file_extensions(apps, schema_editor):
|
||||
Document = apps.get_model("documents", "Document")
|
||||
documents = Document.objects.all()
|
||||
|
||||
for d in documents:
|
||||
d.file_type = Path(d.filename).suffix.lstrip(".")
|
||||
d.save()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "1002_auto_20201111_1105"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="document",
|
||||
name="mime_type",
|
||||
field=models.CharField(default="-", editable=False, max_length=256),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.RunPython(add_mime_types, migrations.RunPython.noop),
|
||||
# This operation is here so that we can revert the entire migration:
|
||||
# By allowing this field to be blank and null, we can revert the
|
||||
# remove operation further down and the database won't complain about
|
||||
# NOT NULL violations.
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="file_type",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("pdf", "PDF"),
|
||||
("png", "PNG"),
|
||||
("jpg", "JPG"),
|
||||
("gif", "GIF"),
|
||||
("tiff", "TIFF"),
|
||||
("txt", "TXT"),
|
||||
("csv", "CSV"),
|
||||
("md", "MD"),
|
||||
],
|
||||
editable=False,
|
||||
max_length=4,
|
||||
null=True,
|
||||
blank=True,
|
||||
),
|
||||
),
|
||||
migrations.RunPython(migrations.RunPython.noop, add_file_extensions),
|
||||
migrations.RemoveField(
|
||||
model_name="document",
|
||||
name="file_type",
|
||||
),
|
||||
]
|
||||
src/documents/migrations/1004_sanity_check_schedule.py (new file, 12 lines)
@@ -0,0 +1,12 @@
# Generated by Django 3.1.3 on 2020-11-25 14:53

from django.db import migrations
from django.db.migrations import RunPython


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1003_mime_types"),
    ]

    operations = [RunPython(migrations.RunPython.noop, migrations.RunPython.noop)]
src/documents/migrations/1005_checksums.py (new file, 34 lines)
@@ -0,0 +1,34 @@
# Generated by Django 3.1.3 on 2020-11-29 00:48

from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1004_sanity_check_schedule"),
    ]

    operations = [
        migrations.AddField(
            model_name="document",
            name="archive_checksum",
            field=models.CharField(
                blank=True,
                editable=False,
                help_text="The checksum of the archived document.",
                max_length=32,
                null=True,
            ),
        ),
        migrations.AlterField(
            model_name="document",
            name="checksum",
            field=models.CharField(
                editable=False,
                help_text="The checksum of the original document.",
                max_length=32,
                unique=True,
            ),
        ),
    ]
src/documents/migrations/1006_auto_20201208_2209.py (new file, 24 lines)
@@ -0,0 +1,24 @@
# Generated by Django 3.1.4 on 2020-12-08 22:09

from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1005_checksums"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="correspondent",
            name="slug",
        ),
        migrations.RemoveField(
            model_name="documenttype",
            name="slug",
        ),
        migrations.RemoveField(
            model_name="tag",
            name="slug",
        ),
    ]
@@ -0,0 +1,485 @@
|
||||
# Generated by Django 4.2.13 on 2024-06-28 18:01
|
||||
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
from django.conf import settings
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
replaces = [
|
||||
("documents", "1006_auto_20201208_2209"),
|
||||
("documents", "1007_savedview_savedviewfilterrule"),
|
||||
("documents", "1008_auto_20201216_1736"),
|
||||
("documents", "1009_auto_20201216_2005"),
|
||||
("documents", "1010_auto_20210101_2159"),
|
||||
("documents", "1011_auto_20210101_2340"),
|
||||
]
|
||||
|
||||
dependencies = [
|
||||
("documents", "1005_checksums"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name="correspondent",
|
||||
name="slug",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="documenttype",
|
||||
name="slug",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="tag",
|
||||
name="slug",
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="SavedView",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("name", models.CharField(max_length=128, verbose_name="name")),
|
||||
(
|
||||
"show_on_dashboard",
|
||||
models.BooleanField(verbose_name="show on dashboard"),
|
||||
),
|
||||
(
|
||||
"show_in_sidebar",
|
||||
models.BooleanField(verbose_name="show in sidebar"),
|
||||
),
|
||||
(
|
||||
"sort_field",
|
||||
models.CharField(max_length=128, verbose_name="sort field"),
|
||||
),
|
||||
(
|
||||
"sort_reverse",
|
||||
models.BooleanField(default=False, verbose_name="sort reverse"),
|
||||
),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
verbose_name="user",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"ordering": ("name",),
|
||||
"verbose_name": "saved view",
|
||||
"verbose_name_plural": "saved views",
|
||||
},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="correspondent",
|
||||
options={
|
||||
"ordering": ("name",),
|
||||
"verbose_name": "correspondent",
|
||||
"verbose_name_plural": "correspondents",
|
||||
},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="document",
|
||||
options={
|
||||
"ordering": ("-created",),
|
||||
"verbose_name": "document",
|
||||
"verbose_name_plural": "documents",
|
||||
},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="documenttype",
|
||||
options={
|
||||
"verbose_name": "document type",
|
||||
"verbose_name_plural": "document types",
|
||||
},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="log",
|
||||
options={
|
||||
"ordering": ("-created",),
|
||||
"verbose_name": "log",
|
||||
"verbose_name_plural": "logs",
|
||||
},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="tag",
|
||||
options={"verbose_name": "tag", "verbose_name_plural": "tags"},
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="correspondent",
|
||||
name="is_insensitive",
|
||||
field=models.BooleanField(default=True, verbose_name="is insensitive"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="correspondent",
|
||||
name="match",
|
||||
field=models.CharField(blank=True, max_length=256, verbose_name="match"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="correspondent",
|
||||
name="matching_algorithm",
|
||||
field=models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "Any word"),
|
||||
(2, "All words"),
|
||||
(3, "Exact match"),
|
||||
(4, "Regular expression"),
|
||||
(5, "Fuzzy word"),
|
||||
(6, "Automatic"),
|
||||
],
|
||||
default=1,
|
||||
verbose_name="matching algorithm",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="correspondent",
|
||||
name="name",
|
||||
field=models.CharField(max_length=128, unique=True, verbose_name="name"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="added",
|
||||
field=models.DateTimeField(
|
||||
db_index=True,
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="added",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="archive_checksum",
|
||||
field=models.CharField(
|
||||
blank=True,
|
||||
editable=False,
|
||||
help_text="The checksum of the archived document.",
|
||||
max_length=32,
|
||||
null=True,
|
||||
verbose_name="archive checksum",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="archive_serial_number",
|
||||
field=models.IntegerField(
|
||||
blank=True,
|
||||
db_index=True,
|
||||
help_text="The position of this document in your physical document archive.",
|
||||
null=True,
|
||||
unique=True,
|
||||
verbose_name="archive serial number",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="checksum",
|
||||
field=models.CharField(
|
||||
editable=False,
|
||||
help_text="The checksum of the original document.",
|
||||
max_length=32,
|
||||
unique=True,
|
||||
verbose_name="checksum",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="content",
|
||||
field=models.TextField(
|
||||
blank=True,
|
||||
help_text="The raw, text-only data of the document. This field is primarily used for searching.",
|
||||
verbose_name="content",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="correspondent",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="documents",
|
||||
to="documents.correspondent",
|
||||
verbose_name="correspondent",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="created",
|
||||
field=models.DateTimeField(
|
||||
db_index=True,
|
||||
default=django.utils.timezone.now,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="document_type",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="documents",
|
||||
to="documents.documenttype",
|
||||
verbose_name="document type",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="filename",
|
||||
field=models.FilePathField(
|
||||
default=None,
|
||||
editable=False,
|
||||
help_text="Current filename in storage",
|
||||
max_length=1024,
|
||||
null=True,
|
||||
verbose_name="filename",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="mime_type",
|
||||
field=models.CharField(
|
||||
editable=False,
|
||||
max_length=256,
|
||||
verbose_name="mime type",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="modified",
|
||||
field=models.DateTimeField(
|
||||
auto_now=True,
|
||||
db_index=True,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="storage_type",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("unencrypted", "Unencrypted"),
|
||||
("gpg", "Encrypted with GNU Privacy Guard"),
|
||||
],
|
||||
default="unencrypted",
|
||||
editable=False,
|
||||
max_length=11,
|
||||
verbose_name="storage type",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="tags",
|
||||
field=models.ManyToManyField(
|
||||
blank=True,
|
||||
related_name="documents",
|
||||
to="documents.tag",
|
||||
verbose_name="tags",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="title",
|
||||
field=models.CharField(
|
||||
blank=True,
|
||||
db_index=True,
|
||||
max_length=128,
|
||||
verbose_name="title",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="documenttype",
|
||||
name="is_insensitive",
|
||||
field=models.BooleanField(default=True, verbose_name="is insensitive"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="documenttype",
|
||||
name="match",
|
||||
field=models.CharField(blank=True, max_length=256, verbose_name="match"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="documenttype",
|
||||
name="matching_algorithm",
|
||||
field=models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "Any word"),
|
||||
(2, "All words"),
|
||||
(3, "Exact match"),
|
||||
(4, "Regular expression"),
|
||||
(5, "Fuzzy word"),
|
||||
(6, "Automatic"),
|
||||
],
|
||||
default=1,
|
||||
verbose_name="matching algorithm",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="documenttype",
|
||||
name="name",
|
||||
field=models.CharField(max_length=128, unique=True, verbose_name="name"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="log",
|
||||
name="created",
|
||||
field=models.DateTimeField(auto_now_add=True, verbose_name="created"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="log",
|
||||
name="group",
|
||||
field=models.UUIDField(blank=True, null=True, verbose_name="group"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="log",
|
||||
name="level",
|
||||
field=models.PositiveIntegerField(
|
||||
choices=[
|
||||
(10, "debug"),
|
||||
(20, "information"),
|
||||
(30, "warning"),
|
||||
(40, "error"),
|
||||
(50, "critical"),
|
||||
],
|
||||
default=20,
|
||||
verbose_name="level",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="log",
|
||||
name="message",
|
||||
field=models.TextField(verbose_name="message"),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="SavedViewFilterRule",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
(
|
||||
"rule_type",
|
||||
models.PositiveIntegerField(
|
||||
choices=[
|
||||
(0, "title contains"),
|
||||
(1, "content contains"),
|
||||
(2, "ASN is"),
|
||||
(3, "correspondent is"),
|
||||
(4, "document type is"),
|
||||
(5, "is in inbox"),
|
||||
(6, "has tag"),
|
||||
(7, "has any tag"),
|
||||
(8, "created before"),
|
||||
(9, "created after"),
|
||||
(10, "created year is"),
|
||||
(11, "created month is"),
|
||||
(12, "created day is"),
|
||||
(13, "added before"),
|
||||
(14, "added after"),
|
||||
(15, "modified before"),
|
||||
(16, "modified after"),
|
||||
(17, "does not have tag"),
|
||||
],
|
||||
verbose_name="rule type",
|
||||
),
|
||||
),
|
||||
(
|
||||
"value",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
max_length=128,
|
||||
null=True,
|
||||
verbose_name="value",
|
||||
),
|
||||
),
|
||||
(
|
||||
"saved_view",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="filter_rules",
|
||||
to="documents.savedview",
|
||||
verbose_name="saved view",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "filter rule",
|
||||
"verbose_name_plural": "filter rules",
|
||||
},
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="tag",
|
||||
name="colour",
|
||||
field=models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "#a6cee3"),
|
||||
(2, "#1f78b4"),
|
||||
(3, "#b2df8a"),
|
||||
(4, "#33a02c"),
|
||||
(5, "#fb9a99"),
|
||||
(6, "#e31a1c"),
|
||||
(7, "#fdbf6f"),
|
||||
(8, "#ff7f00"),
|
||||
(9, "#cab2d6"),
|
||||
(10, "#6a3d9a"),
|
||||
(11, "#b15928"),
|
||||
(12, "#000000"),
|
||||
(13, "#cccccc"),
|
||||
],
|
||||
default=1,
|
||||
verbose_name="color",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="tag",
|
||||
name="is_inbox_tag",
|
||||
field=models.BooleanField(
|
||||
default=False,
|
||||
help_text="Marks this tag as an inbox tag: All newly consumed documents will be tagged with inbox tags.",
|
||||
verbose_name="is inbox tag",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="tag",
|
||||
name="is_insensitive",
|
||||
field=models.BooleanField(default=True, verbose_name="is insensitive"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="tag",
|
||||
name="match",
|
||||
field=models.CharField(blank=True, max_length=256, verbose_name="match"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="tag",
|
||||
name="matching_algorithm",
|
||||
field=models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "Any word"),
|
||||
(2, "All words"),
|
||||
(3, "Exact match"),
|
||||
(4, "Regular expression"),
|
||||
(5, "Fuzzy word"),
|
||||
(6, "Automatic"),
|
||||
],
|
||||
default=1,
|
||||
verbose_name="matching algorithm",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="tag",
|
||||
name="name",
|
||||
field=models.CharField(max_length=128, unique=True, verbose_name="name"),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,90 @@
|
||||
# Generated by Django 3.1.4 on 2020-12-12 14:41
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
("documents", "1006_auto_20201208_2209"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="SavedView",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("name", models.CharField(max_length=128)),
|
||||
("show_on_dashboard", models.BooleanField()),
|
||||
("show_in_sidebar", models.BooleanField()),
|
||||
("sort_field", models.CharField(max_length=128)),
|
||||
("sort_reverse", models.BooleanField(default=False)),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="SavedViewFilterRule",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
(
|
||||
"rule_type",
|
||||
models.PositiveIntegerField(
|
||||
choices=[
|
||||
(0, "Title contains"),
|
||||
(1, "Content contains"),
|
||||
(2, "ASN is"),
|
||||
(3, "Correspondent is"),
|
||||
(4, "Document type is"),
|
||||
(5, "Is in inbox"),
|
||||
(6, "Has tag"),
|
||||
(7, "Has any tag"),
|
||||
(8, "Created before"),
|
||||
(9, "Created after"),
|
||||
(10, "Created year is"),
|
||||
(11, "Created month is"),
|
||||
(12, "Created day is"),
|
||||
(13, "Added before"),
|
||||
(14, "Added after"),
|
||||
(15, "Modified before"),
|
||||
(16, "Modified after"),
|
||||
(17, "Does not have tag"),
|
||||
],
|
||||
),
|
||||
),
|
||||
("value", models.CharField(max_length=128)),
|
||||
(
|
||||
"saved_view",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="filter_rules",
|
||||
to="documents.savedview",
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
]
|
||||
src/documents/migrations/1008_auto_20201216_1736.py (new file, 33 lines)
@@ -0,0 +1,33 @@
# Generated by Django 3.1.4 on 2020-12-16 17:36

import django.db.models.functions.text
from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1007_savedview_savedviewfilterrule"),
    ]

    operations = [
        migrations.AlterModelOptions(
            name="correspondent",
            options={"ordering": (django.db.models.functions.text.Lower("name"),)},
        ),
        migrations.AlterModelOptions(
            name="document",
            options={"ordering": ("-created",)},
        ),
        migrations.AlterModelOptions(
            name="documenttype",
            options={"ordering": (django.db.models.functions.text.Lower("name"),)},
        ),
        migrations.AlterModelOptions(
            name="savedview",
            options={"ordering": (django.db.models.functions.text.Lower("name"),)},
        ),
        migrations.AlterModelOptions(
            name="tag",
            options={"ordering": (django.db.models.functions.text.Lower("name"),)},
        ),
    ]
src/documents/migrations/1009_auto_20201216_2005.py (new file, 28 lines)
@@ -0,0 +1,28 @@
# Generated by Django 3.1.4 on 2020-12-16 20:05

from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1008_auto_20201216_1736"),
    ]

    operations = [
        migrations.AlterModelOptions(
            name="correspondent",
            options={"ordering": ("name",)},
        ),
        migrations.AlterModelOptions(
            name="documenttype",
            options={"ordering": ("name",)},
        ),
        migrations.AlterModelOptions(
            name="savedview",
            options={"ordering": ("name",)},
        ),
        migrations.AlterModelOptions(
            name="tag",
            options={"ordering": ("name",)},
        ),
    ]
src/documents/migrations/1010_auto_20210101_2159.py (new file, 18 lines)
@@ -0,0 +1,18 @@
# Generated by Django 3.1.4 on 2021-01-01 21:59

from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
    dependencies = [
        ("documents", "1009_auto_20201216_2005"),
    ]

    operations = [
        migrations.AlterField(
            model_name="savedviewfilterrule",
            name="value",
            field=models.CharField(blank=True, max_length=128, null=True),
        ),
    ]
src/documents/migrations/1011_auto_20210101_2340.py (new file, 454 lines)
@@ -0,0 +1,454 @@
# Generated by Django 3.1.4 on 2021-01-01 23:40
|
||||
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
from django.conf import settings
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
("documents", "1010_auto_20210101_2159"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name="correspondent",
|
||||
options={
|
||||
"ordering": ("name",),
|
||||
"verbose_name": "correspondent",
|
||||
"verbose_name_plural": "correspondents",
|
||||
},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="document",
|
||||
options={
|
||||
"ordering": ("-created",),
|
||||
"verbose_name": "document",
|
||||
"verbose_name_plural": "documents",
|
||||
},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="documenttype",
|
||||
options={
|
||||
"verbose_name": "document type",
|
||||
"verbose_name_plural": "document types",
|
||||
},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="log",
|
||||
options={
|
||||
"ordering": ("-created",),
|
||||
"verbose_name": "log",
|
||||
"verbose_name_plural": "logs",
|
||||
},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="savedview",
|
||||
options={
|
||||
"ordering": ("name",),
|
||||
"verbose_name": "saved view",
|
||||
"verbose_name_plural": "saved views",
|
||||
},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="savedviewfilterrule",
|
||||
options={
|
||||
"verbose_name": "filter rule",
|
||||
"verbose_name_plural": "filter rules",
|
||||
},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="tag",
|
||||
options={"verbose_name": "tag", "verbose_name_plural": "tags"},
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="correspondent",
|
||||
name="is_insensitive",
|
||||
field=models.BooleanField(default=True, verbose_name="is insensitive"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="correspondent",
|
||||
name="match",
|
||||
field=models.CharField(blank=True, max_length=256, verbose_name="match"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="correspondent",
|
||||
name="matching_algorithm",
|
||||
field=models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "Any word"),
|
||||
(2, "All words"),
|
||||
(3, "Exact match"),
|
||||
(4, "Regular expression"),
|
||||
(5, "Fuzzy word"),
|
||||
(6, "Automatic"),
|
||||
],
|
||||
default=1,
|
||||
verbose_name="matching algorithm",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="correspondent",
|
||||
name="name",
|
||||
field=models.CharField(max_length=128, unique=True, verbose_name="name"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="added",
|
||||
field=models.DateTimeField(
|
||||
db_index=True,
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="added",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="archive_checksum",
|
||||
field=models.CharField(
|
||||
blank=True,
|
||||
editable=False,
|
||||
help_text="The checksum of the archived document.",
|
||||
max_length=32,
|
||||
null=True,
|
||||
verbose_name="archive checksum",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="archive_serial_number",
|
||||
field=models.IntegerField(
|
||||
blank=True,
|
||||
db_index=True,
|
||||
help_text="The position of this document in your physical document archive.",
|
||||
null=True,
|
||||
unique=True,
|
||||
verbose_name="archive serial number",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="checksum",
|
||||
field=models.CharField(
|
||||
editable=False,
|
||||
help_text="The checksum of the original document.",
|
||||
max_length=32,
|
||||
unique=True,
|
||||
verbose_name="checksum",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="content",
|
||||
field=models.TextField(
|
||||
blank=True,
|
||||
help_text="The raw, text-only data of the document. This field is primarily used for searching.",
|
||||
verbose_name="content",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="correspondent",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="documents",
|
||||
to="documents.correspondent",
|
||||
verbose_name="correspondent",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="created",
|
||||
field=models.DateTimeField(
|
||||
db_index=True,
|
||||
default=django.utils.timezone.now,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="document_type",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="documents",
|
||||
to="documents.documenttype",
|
||||
verbose_name="document type",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="filename",
|
||||
field=models.FilePathField(
|
||||
default=None,
|
||||
editable=False,
|
||||
help_text="Current filename in storage",
|
||||
max_length=1024,
|
||||
null=True,
|
||||
verbose_name="filename",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="mime_type",
|
||||
field=models.CharField(
|
||||
editable=False,
|
||||
max_length=256,
|
||||
verbose_name="mime type",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="modified",
|
||||
field=models.DateTimeField(
|
||||
auto_now=True,
|
||||
db_index=True,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="storage_type",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("unencrypted", "Unencrypted"),
|
||||
("gpg", "Encrypted with GNU Privacy Guard"),
|
||||
],
|
||||
default="unencrypted",
|
||||
editable=False,
|
||||
max_length=11,
|
||||
verbose_name="storage type",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="tags",
|
||||
field=models.ManyToManyField(
|
||||
blank=True,
|
||||
related_name="documents",
|
||||
to="documents.Tag",
|
||||
verbose_name="tags",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="document",
|
||||
name="title",
|
||||
field=models.CharField(
|
||||
blank=True,
|
||||
db_index=True,
|
||||
max_length=128,
|
||||
verbose_name="title",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="documenttype",
|
||||
name="is_insensitive",
|
||||
field=models.BooleanField(default=True, verbose_name="is insensitive"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="documenttype",
|
||||
name="match",
|
||||
field=models.CharField(blank=True, max_length=256, verbose_name="match"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="documenttype",
|
||||
name="matching_algorithm",
|
||||
field=models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "Any word"),
|
||||
(2, "All words"),
|
||||
(3, "Exact match"),
|
||||
(4, "Regular expression"),
|
||||
(5, "Fuzzy word"),
|
||||
(6, "Automatic"),
|
||||
],
|
||||
default=1,
|
||||
verbose_name="matching algorithm",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="documenttype",
|
||||
name="name",
|
||||
field=models.CharField(max_length=128, unique=True, verbose_name="name"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="log",
|
||||
name="created",
|
||||
field=models.DateTimeField(auto_now_add=True, verbose_name="created"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="log",
|
||||
name="group",
|
||||
field=models.UUIDField(blank=True, null=True, verbose_name="group"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="log",
|
||||
name="level",
|
||||
field=models.PositiveIntegerField(
|
||||
choices=[
|
||||
(10, "debug"),
|
||||
(20, "information"),
|
||||
(30, "warning"),
|
||||
(40, "error"),
|
||||
(50, "critical"),
|
||||
],
|
||||
default=20,
|
||||
verbose_name="level",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="log",
|
||||
name="message",
|
||||
field=models.TextField(verbose_name="message"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="savedview",
|
||||
name="name",
|
||||
field=models.CharField(max_length=128, verbose_name="name"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="savedview",
|
||||
name="show_in_sidebar",
|
||||
field=models.BooleanField(verbose_name="show in sidebar"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="savedview",
|
||||
name="show_on_dashboard",
|
||||
field=models.BooleanField(verbose_name="show on dashboard"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="savedview",
|
||||
name="sort_field",
|
||||
field=models.CharField(max_length=128, verbose_name="sort field"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="savedview",
|
||||
name="sort_reverse",
|
||||
field=models.BooleanField(default=False, verbose_name="sort reverse"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="savedview",
|
||||
name="user",
|
||||
field=models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
verbose_name="user",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="savedviewfilterrule",
|
||||
name="rule_type",
|
||||
field=models.PositiveIntegerField(
|
||||
choices=[
|
||||
(0, "title contains"),
|
||||
(1, "content contains"),
|
||||
(2, "ASN is"),
|
||||
(3, "correspondent is"),
|
||||
(4, "document type is"),
|
||||
(5, "is in inbox"),
|
||||
(6, "has tag"),
|
||||
(7, "has any tag"),
|
||||
(8, "created before"),
|
||||
(9, "created after"),
|
||||
(10, "created year is"),
|
||||
(11, "created month is"),
|
||||
(12, "created day is"),
|
||||
(13, "added before"),
|
||||
(14, "added after"),
|
||||
(15, "modified before"),
|
||||
(16, "modified after"),
|
||||
(17, "does not have tag"),
|
||||
],
|
||||
verbose_name="rule type",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="savedviewfilterrule",
|
||||
name="saved_view",
|
||||
field=models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="filter_rules",
|
||||
to="documents.savedview",
|
||||
verbose_name="saved view",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="savedviewfilterrule",
|
||||
name="value",
|
||||
field=models.CharField(
|
||||
blank=True,
|
||||
max_length=128,
|
||||
null=True,
|
||||
verbose_name="value",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="tag",
|
||||
name="colour",
|
||||
field=models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "#a6cee3"),
|
||||
(2, "#1f78b4"),
|
||||
(3, "#b2df8a"),
|
||||
(4, "#33a02c"),
|
||||
(5, "#fb9a99"),
|
||||
(6, "#e31a1c"),
|
||||
(7, "#fdbf6f"),
|
||||
(8, "#ff7f00"),
|
||||
(9, "#cab2d6"),
|
||||
(10, "#6a3d9a"),
|
||||
(11, "#b15928"),
|
||||
(12, "#000000"),
|
||||
(13, "#cccccc"),
|
||||
],
|
||||
default=1,
|
||||
verbose_name="color",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="tag",
|
||||
name="is_inbox_tag",
|
||||
field=models.BooleanField(
|
||||
default=False,
|
||||
help_text="Marks this tag as an inbox tag: All newly consumed documents will be tagged with inbox tags.",
|
||||
verbose_name="is inbox tag",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="tag",
|
||||
name="is_insensitive",
|
||||
field=models.BooleanField(default=True, verbose_name="is insensitive"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="tag",
|
||||
name="match",
|
||||
field=models.CharField(blank=True, max_length=256, verbose_name="match"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="tag",
|
||||
name="matching_algorithm",
|
||||
field=models.PositiveIntegerField(
|
||||
choices=[
|
||||
(1, "Any word"),
|
||||
(2, "All words"),
|
||||
(3, "Exact match"),
|
||||
(4, "Regular expression"),
|
||||
(5, "Fuzzy word"),
|
||||
(6, "Automatic"),
|
||||
],
|
||||
default=1,
|
||||
verbose_name="matching algorithm",
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="tag",
|
||||
name="name",
|
||||
field=models.CharField(max_length=128, unique=True, verbose_name="name"),
|
||||
),
|
||||
]
|
||||
367
src/documents/migrations/1012_fix_archive_files.py
Normal file
367
src/documents/migrations/1012_fix_archive_files.py
Normal file
@@ -0,0 +1,367 @@
# Generated by Django 3.1.6 on 2021-02-07 22:26
import datetime
import hashlib
import logging
import os
import shutil
from collections import defaultdict
from pathlib import Path
from time import sleep

import pathvalidate
from django.conf import settings
from django.db import migrations
from django.db import models
from django.template.defaultfilters import slugify

logger = logging.getLogger("paperless.migrations")


###############################################################################
# This is code copied straight from paperless before the change.
###############################################################################
class defaultdictNoStr(defaultdict):
def __str__(self): # pragma: no cover
raise ValueError("Don't use {tags} directly.")


def many_to_dictionary(field): # pragma: no cover
# Converts ManyToManyField to dictionary by assuming that field
# entries contain an _ or - which will be used as a delimiter
mydictionary = dict()

for index, t in enumerate(field.all()):
# Populate tag names by index
mydictionary[index] = slugify(t.name)

# Find delimiter
delimiter = t.name.find("_")

if delimiter == -1:
delimiter = t.name.find("-")

if delimiter == -1:
continue

key = t.name[:delimiter]
value = t.name[delimiter + 1 :]

mydictionary[slugify(key)] = slugify(value)

return mydictionary


def archive_name_from_filename(filename: Path) -> Path:
return Path(filename.stem + ".pdf")


def archive_path_old(doc) -> Path:
if doc.filename:
fname = archive_name_from_filename(Path(doc.filename))
else:
fname = Path(f"{doc.pk:07}.pdf")

return settings.ARCHIVE_DIR / fname


STORAGE_TYPE_GPG = "gpg"


def archive_path_new(doc) -> Path | None:
if doc.archive_filename is not None:
return settings.ARCHIVE_DIR / doc.archive_filename
else:
return None


def source_path(doc) -> Path:
if doc.filename:
fname = doc.filename
else:
fname = f"{doc.pk:07}{doc.file_type}"
if doc.storage_type == STORAGE_TYPE_GPG:
fname = Path(str(fname) + ".gpg") # pragma: no cover

return settings.ORIGINALS_DIR / fname


def generate_unique_filename(doc, *, archive_filename=False):
if archive_filename:
old_filename = doc.archive_filename
root = settings.ARCHIVE_DIR
else:
old_filename = doc.filename
root = settings.ORIGINALS_DIR

counter = 0

while True:
new_filename = generate_filename(
doc,
counter=counter,
archive_filename=archive_filename,
)
if new_filename == old_filename:
# still the same as before.
return new_filename

if (root / new_filename).exists():
counter += 1
else:
return new_filename


def generate_filename(doc, *, counter=0, append_gpg=True, archive_filename=False):
path = ""

try:
if settings.FILENAME_FORMAT is not None:
tags = defaultdictNoStr(lambda: slugify(None), many_to_dictionary(doc.tags))

tag_list = pathvalidate.sanitize_filename(
",".join(sorted([tag.name for tag in doc.tags.all()])),
replacement_text="-",
)

if doc.correspondent:
correspondent = pathvalidate.sanitize_filename(
doc.correspondent.name,
replacement_text="-",
)
else:
correspondent = "none"

if doc.document_type:
document_type = pathvalidate.sanitize_filename(
doc.document_type.name,
replacement_text="-",
)
else:
document_type = "none"

path = settings.FILENAME_FORMAT.format(
title=pathvalidate.sanitize_filename(doc.title, replacement_text="-"),
correspondent=correspondent,
document_type=document_type,
created=datetime.date.isoformat(doc.created),
created_year=doc.created.year if doc.created else "none",
created_month=f"{doc.created.month:02}" if doc.created else "none",
created_day=f"{doc.created.day:02}" if doc.created else "none",
added=datetime.date.isoformat(doc.added),
added_year=doc.added.year if doc.added else "none",
added_month=f"{doc.added.month:02}" if doc.added else "none",
added_day=f"{doc.added.day:02}" if doc.added else "none",
tags=tags,
tag_list=tag_list,
).strip()

path = path.strip(os.sep)

except (ValueError, KeyError, IndexError):
logger.warning(
f"Invalid PAPERLESS_FILENAME_FORMAT: "
f"{settings.FILENAME_FORMAT}, falling back to default",
)

counter_str = f"_{counter:02}" if counter else ""

filetype_str = ".pdf" if archive_filename else doc.file_type

if len(path) > 0:
filename = f"{path}{counter_str}{filetype_str}"
else:
filename = f"{doc.pk:07}{counter_str}{filetype_str}"

# Append .gpg for encrypted files
if append_gpg and doc.storage_type == STORAGE_TYPE_GPG:
filename += ".gpg"

return filename


###############################################################################
# This code performs bidirectional archive file transformation.
###############################################################################


def parse_wrapper(parser, path, mime_type, file_name):
# this is here so that I can mock this out for testing.
parser.parse(path, mime_type, file_name)


def create_archive_version(doc, retry_count=3):
from documents.parsers import DocumentParser
from documents.parsers import ParseError
from documents.parsers import get_parser_class_for_mime_type

logger.info(f"Regenerating archive document for document ID:{doc.id}")
parser_class = get_parser_class_for_mime_type(doc.mime_type)
for try_num in range(retry_count):
parser: DocumentParser = parser_class(None, None)
try:
parse_wrapper(
parser,
source_path(doc),
doc.mime_type,
Path(doc.filename).name,
)
doc.content = parser.get_text()

if parser.get_archive_path() and Path(parser.get_archive_path()).is_file():
doc.archive_filename = generate_unique_filename(
doc,
archive_filename=True,
)
with Path(parser.get_archive_path()).open("rb") as f:
doc.archive_checksum = hashlib.md5(f.read()).hexdigest()
archive_path_new(doc).parent.mkdir(parents=True, exist_ok=True)
shutil.copy2(parser.get_archive_path(), archive_path_new(doc))
else:
doc.archive_checksum = None
logger.error(
f"Parser did not return an archive document for document "
f"ID:{doc.id}. Removing archive document.",
)
doc.save()
return
except ParseError:
if try_num + 1 == retry_count:
logger.exception(
f"Unable to regenerate archive document for ID:{doc.id}. You "
f"need to invoke the document_archiver management command "
f"manually for that document.",
)
doc.archive_checksum = None
doc.save()
return
else:
# This is mostly here for the tika parser in docker
# environments. The servers for parsing need to come up first,
# and the docker setup doesn't ensure that tika is running
# before attempting migrations.
logger.error("Parse error, will try again in 5 seconds...")
sleep(5)
finally:
parser.cleanup()


def move_old_to_new_locations(apps, schema_editor):
Document = apps.get_model("documents", "Document")

affected_document_ids = set()

old_archive_path_to_id = {}

# check for documents that have incorrect archive versions
for doc in Document.objects.filter(archive_checksum__isnull=False):
old_path = archive_path_old(doc)

if old_path in old_archive_path_to_id:
affected_document_ids.add(doc.id)
affected_document_ids.add(old_archive_path_to_id[old_path])
else:
old_archive_path_to_id[old_path] = doc.id

# check that archive files of all unaffected documents are in place
for doc in Document.objects.filter(archive_checksum__isnull=False):
old_path = archive_path_old(doc)
if doc.id not in affected_document_ids and not old_path.is_file():
raise ValueError(
f"Archived document ID:{doc.id} does not exist at: {old_path}",
)

# check that we can regenerate affected archive versions
for doc_id in affected_document_ids:
from documents.parsers import get_parser_class_for_mime_type

doc = Document.objects.get(id=doc_id)
parser_class = get_parser_class_for_mime_type(doc.mime_type)
if not parser_class:
raise ValueError(
f"Document ID:{doc.id} has an invalid archived document, "
f"but no parsers are available. Cannot migrate.",
)

for doc in Document.objects.filter(archive_checksum__isnull=False):
if doc.id in affected_document_ids:
old_path = archive_path_old(doc)
# remove affected archive versions
if old_path.is_file():
logger.debug(f"Removing {old_path}")
old_path.unlink()
else:
# Set archive path for unaffected files
doc.archive_filename = archive_name_from_filename(Path(doc.filename))
Document.objects.filter(id=doc.id).update(
archive_filename=doc.archive_filename,
)

# regenerate archive documents
for doc_id in affected_document_ids:
doc = Document.objects.get(id=doc_id)
create_archive_version(doc)


def move_new_to_old_locations(apps, schema_editor):
Document = apps.get_model("documents", "Document")

old_archive_paths = set()

for doc in Document.objects.filter(archive_checksum__isnull=False):
new_archive_path = archive_path_new(doc)
old_archive_path = archive_path_old(doc)
if old_archive_path in old_archive_paths:
raise ValueError(
f"Cannot migrate: Archive file name {old_archive_path} of "
f"document {doc.filename} would clash with another archive "
f"filename.",
)
old_archive_paths.add(old_archive_path)
if new_archive_path != old_archive_path and old_archive_path.is_file():
raise ValueError(
f"Cannot migrate: Cannot move {new_archive_path} to "
f"{old_archive_path}: file already exists.",
)

for doc in Document.objects.filter(archive_checksum__isnull=False):
new_archive_path = archive_path_new(doc)
old_archive_path = archive_path_old(doc)
if new_archive_path != old_archive_path:
logger.debug(f"Moving {new_archive_path} to {old_archive_path}")
shutil.move(new_archive_path, old_archive_path)


class Migration(migrations.Migration):
dependencies = [
("documents", "1011_auto_20210101_2340"),
]

operations = [
migrations.AddField(
model_name="document",
name="archive_filename",
field=models.FilePathField(
default=None,
editable=False,
help_text="Current archive filename in storage",
max_length=1024,
null=True,
unique=True,
verbose_name="archive filename",
),
),
migrations.AlterField(
model_name="document",
name="filename",
field=models.FilePathField(
default=None,
editable=False,
help_text="Current filename in storage",
max_length=1024,
null=True,
unique=True,
verbose_name="filename",
),
),
migrations.RunPython(move_old_to_new_locations, move_new_to_old_locations),
]
74
src/documents/migrations/1013_migrate_tag_colour.py
Normal file
74
src/documents/migrations/1013_migrate_tag_colour.py
Normal file
@@ -0,0 +1,74 @@
# Generated by Django 3.1.4 on 2020-12-02 21:43

from django.db import migrations
from django.db import models

COLOURS_OLD = {
1: "#a6cee3",
2: "#1f78b4",
3: "#b2df8a",
4: "#33a02c",
5: "#fb9a99",
6: "#e31a1c",
7: "#fdbf6f",
8: "#ff7f00",
9: "#cab2d6",
10: "#6a3d9a",
11: "#b15928",
12: "#000000",
13: "#cccccc",
}


def forward(apps, schema_editor):
Tag = apps.get_model("documents", "Tag")

for tag in Tag.objects.all():
colour_old_id = tag.colour_old
rgb = COLOURS_OLD[colour_old_id]
tag.color = rgb
tag.save()


def reverse(apps, schema_editor):
Tag = apps.get_model("documents", "Tag")

def _get_colour_id(rdb):
for idx, rdbx in COLOURS_OLD.items():
if rdbx == rdb:
return idx
# Return colour 1 if we can't match anything
return 1

for tag in Tag.objects.all():
colour_id = _get_colour_id(tag.color)
tag.colour_old = colour_id
tag.save()


class Migration(migrations.Migration):
dependencies = [
("documents", "1012_fix_archive_files"),
]

operations = [
migrations.RenameField(
model_name="tag",
old_name="colour",
new_name="colour_old",
),
migrations.AddField(
model_name="tag",
name="color",
field=models.CharField(
default="#a6cee3",
max_length=7,
verbose_name="color",
),
),
migrations.RunPython(forward, reverse),
migrations.RemoveField(
model_name="tag",
name="colour_old",
),
]
42
src/documents/migrations/1014_auto_20210228_1614.py
Normal file
42
src/documents/migrations/1014_auto_20210228_1614.py
Normal file
@@ -0,0 +1,42 @@
# Generated by Django 3.1.7 on 2021-02-28 15:14

from django.db import migrations
from django.db import models


class Migration(migrations.Migration):
dependencies = [
("documents", "1013_migrate_tag_colour"),
]

operations = [
migrations.AlterField(
model_name="savedviewfilterrule",
name="rule_type",
field=models.PositiveIntegerField(
choices=[
(0, "title contains"),
(1, "content contains"),
(2, "ASN is"),
(3, "correspondent is"),
(4, "document type is"),
(5, "is in inbox"),
(6, "has tag"),
(7, "has any tag"),
(8, "created before"),
(9, "created after"),
(10, "created year is"),
(11, "created month is"),
(12, "created day is"),
(13, "added before"),
(14, "added after"),
(15, "modified before"),
(16, "modified after"),
(17, "does not have tag"),
(18, "does not have ASN"),
(19, "title or content contains"),
],
verbose_name="rule type",
),
),
]
27
src/documents/migrations/1015_remove_null_characters.py
Normal file
27
src/documents/migrations/1015_remove_null_characters.py
Normal file
@@ -0,0 +1,27 @@
# Generated by Django 3.1.7 on 2021-04-04 18:28
import logging

from django.db import migrations

logger = logging.getLogger("paperless.migrations")


def remove_null_characters(apps, schema_editor):
Document = apps.get_model("documents", "Document")

for doc in Document.objects.all():
content: str = doc.content
if "\0" in content:
logger.info(f"Removing null characters from document {doc}...")
doc.content = content.replace("\0", " ")
doc.save()


class Migration(migrations.Migration):
dependencies = [
("documents", "1014_auto_20210228_1614"),
]

operations = [
migrations.RunPython(remove_null_characters, migrations.RunPython.noop),
]
Some files were not shown because too many files have changed in this diff.