Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2025-05-21 12:52:13 -05:00)

Commit 2f8b2944f1: Merge branch 'dev'
@ -21,19 +21,17 @@
# This file is intended only to be used through VSCode devcontainers. See README.md
# in the folder .devcontainer.

services:
broker:
image: docker.io/library/redis:7
restart: unless-stopped
volumes:
- ./redisdata:/data

# No ports need to be exposed; the VSCode DevContainer plugin manages them.
paperless-development:
image: paperless-ngx
build:
context: ../ # Dockerfile cannot access files from parent directories if context is not set.
context: ../ # Dockerfile cannot access files from parent directories if context is not set.
dockerfile: ./.devcontainer/Dockerfile
restart: unless-stopped
depends_on:
@ -60,25 +58,20 @@ services:
PAPERLESS_TIKA_ENDPOINT: http://tika:9998
PAPERLESS_STATICDIR: ./src/documents/static
PAPERLESS_DEBUG: true

# Overrides default command so things don't shut down after the process ends.
command: /bin/sh -c "chown -R paperless:paperless /usr/src/paperless/paperless-ngx/src/documents/static/frontend && chown -R paperless:paperless /usr/src/paperless/paperless-ngx/.ruff_cache && while sleep 1000; do :; done"

gotenberg:
image: docker.io/gotenberg/gotenberg:8.17
restart: unless-stopped

# The Gotenberg Chromium route is used to convert .eml files. We do not
# want to allow external content like tracking pixels or even JavaScript.
command:
- "gotenberg"
- "--chromium-disable-javascript=true"
- "--chromium-allow-list=file:///tmp/.*"

tika:
image: docker.io/apache/tika:latest
restart: unless-stopped

volumes:
data:
media:
.github/dependabot.yml (8 changed lines, vendored)

@ -5,7 +5,6 @@ version: 2
# Required for uv support for now
enable-beta-ecosystems: true
updates:

# Enable version updates for pnpm
- package-ecosystem: "npm"
target-branch: "dev"
@ -35,7 +34,6 @@ updates:
patterns:
- "@typescript-eslint*"
- "eslint"

# Enable version updates for Python
- package-ecosystem: "uv"
target-branch: "dev"
@ -59,6 +57,7 @@ updates:
django:
patterns:
- "*django*"
- "drf-*"
major-versions:
update-types:
- "major"
@ -70,7 +69,6 @@ updates:
patterns:
- psycopg*
- zxing-cpp

# Enable updates for GitHub Actions
- package-ecosystem: "github-actions"
target-branch: "dev"
@ -90,7 +88,6 @@ updates:
- "major"
- "minor"
- "patch"

# Update Dockerfile in root directory
- package-ecosystem: "docker"
directory: "/"
@ -100,12 +97,10 @@ updates:
reviewers:
- "paperless-ngx/ci-cd"
labels:
- "ci-cd"
- "dependencies"
commit-message:
prefix: "docker"
include: "scope"

# Update Docker Compose files in docker/compose directory
- package-ecosystem: "docker-compose"
directory: "/docker/compose/"
@ -115,7 +110,6 @@ updates:
reviewers:
- "paperless-ngx/ci-cd"
labels:
- "ci-cd"
- "dependencies"
commit-message:
prefix: "docker-compose"
.github/labeler.yml (new file, 19 lines, vendored)

@ -0,0 +1,19 @@
backend:
- changed-files:
- any-glob-to-any-file:
- 'src/**'
- 'pyproject.toml'
- 'uv.lock'
- 'requirements.txt'
frontend:
- changed-files:
- any-glob-to-any-file:
- 'src-ui/**'
documentation:
- changed-files:
- any-glob-to-any-file:
- 'docs/**'
ci-cd:
- changed-files:
- any-glob-to-any-file:
- '.github/**'
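(For context: `.github/labeler.yml` is the default configuration file read by the `actions/labeler` action, which the new PR Bot workflow later in this diff calls in its "Label by file path" step.)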
.github/workflows/ci.yml (300 changed lines, vendored)
@ -1,5 +1,4 @@
|
||||
name: ci
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
@ -12,72 +11,57 @@ on:
|
||||
pull_request:
|
||||
branches-ignore:
|
||||
- 'translations**'
|
||||
|
||||
env:
|
||||
DEFAULT_UV_VERSION: "0.6.x"
|
||||
# This is the default version of Python to use in most steps which aren't specific
|
||||
DEFAULT_PYTHON_VERSION: "3.11"
|
||||
|
||||
jobs:
|
||||
pre-commit:
|
||||
# We want to run on external PRs, but not on our own internal PRs as they'll be run
|
||||
# by the push to the branch. Without this if check, checks are duplicated since
|
||||
# internal PRs match both the push and pull_request events.
|
||||
if:
|
||||
github.event_name == 'push' || github.event.pull_request.head.repo.full_name !=
|
||||
github.repository
|
||||
|
||||
if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
|
||||
name: Linting Checks
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
-
|
||||
name: Checkout repository
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
-
|
||||
name: Install python
|
||||
- name: Install python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
|
||||
-
|
||||
name: Check files
|
||||
- name: Check files
|
||||
uses: pre-commit/action@v3.0.1
|
||||
|
||||
documentation:
|
||||
name: "Build & Deploy Documentation"
|
||||
runs-on: ubuntu-24.04
|
||||
needs:
|
||||
- pre-commit
|
||||
steps:
|
||||
-
|
||||
name: Checkout
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
-
|
||||
name: Set up Python
|
||||
- name: Set up Python
|
||||
id: setup-python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
|
||||
-
|
||||
name: Install uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
version: ${{ env.DEFAULT_UV_VERSION }}
|
||||
enable-cache: true
|
||||
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
|
||||
-
|
||||
name: Install Python dependencies
|
||||
- name: Install Python dependencies
|
||||
run: |
|
||||
uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
|
||||
-
|
||||
name: Make documentation
|
||||
- name: Make documentation
|
||||
run: |
|
||||
uv run \
|
||||
--python ${{ steps.setup-python.outputs.python-version }} \
|
||||
--dev \
|
||||
--frozen \
|
||||
mkdocs build --config-file ./mkdocs.yml
|
||||
-
|
||||
name: Deploy documentation
|
||||
- name: Deploy documentation
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
|
||||
run: |
|
||||
echo "docs.paperless-ngx.com" > "${{ github.workspace }}/docs/CNAME"
|
||||
@ -88,14 +72,12 @@ jobs:
|
||||
--dev \
|
||||
--frozen \
|
||||
mkdocs gh-deploy --force --no-history
|
||||
-
|
||||
name: Upload artifact
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: documentation
|
||||
path: site/
|
||||
retention-days: 7
|
||||
|
||||
tests-backend:
|
||||
name: "Backend Tests (Python ${{ matrix.python-version }})"
|
||||
runs-on: ubuntu-24.04
|
||||
@ -106,49 +88,40 @@ jobs:
|
||||
python-version: ['3.10', '3.11', '3.12']
|
||||
fail-fast: false
|
||||
steps:
|
||||
-
|
||||
name: Checkout
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
-
|
||||
name: Start containers
|
||||
- name: Start containers
|
||||
run: |
|
||||
docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml pull --quiet
|
||||
docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml up --detach
|
||||
-
|
||||
name: Set up Python
|
||||
- name: Set up Python
|
||||
id: setup-python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "${{ matrix.python-version }}"
|
||||
-
|
||||
name: Install uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
version: ${{ env.DEFAULT_UV_VERSION }}
|
||||
enable-cache: true
|
||||
python-version: ${{ steps.setup-python.outputs.python-version }}
|
||||
-
|
||||
name: Install system dependencies
|
||||
- name: Install system dependencies
|
||||
run: |
|
||||
sudo apt-get update -qq
|
||||
sudo apt-get install -qq --no-install-recommends unpaper tesseract-ocr imagemagick ghostscript libzbar0 poppler-utils
|
||||
-
|
||||
name: Configure ImageMagick
|
||||
- name: Configure ImageMagick
|
||||
run: |
|
||||
sudo cp docker/rootfs/etc/ImageMagick-6/paperless-policy.xml /etc/ImageMagick-6/policy.xml
|
||||
-
|
||||
name: Install Python dependencies
|
||||
- name: Install Python dependencies
|
||||
run: |
|
||||
uv sync \
|
||||
--python ${{ steps.setup-python.outputs.python-version }} \
|
||||
--group testing \
|
||||
--frozen
|
||||
-
|
||||
name: List installed Python dependencies
|
||||
- name: List installed Python dependencies
|
||||
run: |
|
||||
uv pip list
|
||||
-
|
||||
name: Tests
|
||||
- name: Tests
|
||||
env:
|
||||
PAPERLESS_CI_TEST: 1
|
||||
# Enable paperless_mail testing against real server
|
||||
@ -161,28 +134,24 @@ jobs:
|
||||
--dev \
|
||||
--frozen \
|
||||
pytest
|
||||
-
|
||||
name: Upload backend test results to Codecov
|
||||
- name: Upload backend test results to Codecov
|
||||
if: always()
|
||||
uses: codecov/test-results-action@v1
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
flags: backend-python-${{ matrix.python-version }}
|
||||
files: junit.xml
|
||||
-
|
||||
name: Upload backend coverage to Codecov
|
||||
- name: Upload backend coverage to Codecov
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
flags: backend-python-${{ matrix.python-version }}
|
||||
files: coverage.xml
|
||||
-
|
||||
name: Stop containers
|
||||
- name: Stop containers
|
||||
if: always()
|
||||
run: |
|
||||
docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml logs
|
||||
docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml down
|
||||
|
||||
install-frontend-dependencies:
|
||||
name: "Install Frontend Dependencies"
|
||||
runs-on: ubuntu-24.04
|
||||
@ -194,8 +163,7 @@ jobs:
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 10
|
||||
-
|
||||
name: Use Node.js 20
|
||||
- name: Use Node.js 20
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20.x
|
||||
@ -209,17 +177,10 @@ jobs:
|
||||
~/.pnpm-store
|
||||
~/.cache
|
||||
key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
|
||||
-
|
||||
name: Install dependencies
|
||||
if: steps.cache-frontend-deps.outputs.cache-hit != 'true'
|
||||
- name: Install dependencies
|
||||
run: cd src-ui && pnpm install
|
||||
-
|
||||
name: Install Playwright
|
||||
if: steps.cache-frontend-deps.outputs.cache-hit != 'true'
|
||||
run: cd src-ui && pnpm playwright install --with-deps
|
||||
|
||||
tests-frontend:
|
||||
name: "Frontend Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
|
||||
name: "Frontend Unit Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
|
||||
runs-on: ubuntu-24.04
|
||||
needs:
|
||||
- install-frontend-dependencies
|
||||
@ -235,8 +196,7 @@ jobs:
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 10
|
||||
-
|
||||
name: Use Node.js 20
|
||||
- name: Use Node.js 20
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20.x
|
||||
@ -252,52 +212,90 @@ jobs:
|
||||
key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
|
||||
- name: Re-link Angular cli
|
||||
run: cd src-ui && pnpm link @angular/cli
|
||||
-
|
||||
name: Linting checks
|
||||
- name: Linting checks
|
||||
run: cd src-ui && pnpm run lint
|
||||
-
|
||||
name: Run Jest unit tests
|
||||
- name: Run Jest unit tests
|
||||
run: cd src-ui && pnpm run test --max-workers=2 --shard=${{ matrix.shard-index }}/${{ matrix.shard-count }}
|
||||
-
|
||||
name: Run Playwright e2e tests
|
||||
run: cd src-ui && pnpm exec playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}
|
||||
-
|
||||
name: Upload frontend test results to Codecov
|
||||
- name: Upload frontend test results to Codecov
|
||||
uses: codecov/test-results-action@v1
|
||||
if: always()
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
flags: frontend-node-${{ matrix.node-version }}
|
||||
directory: src-ui/
|
||||
-
|
||||
name: Upload frontend coverage to Codecov
|
||||
- name: Upload frontend coverage to Codecov
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
flags: frontend-node-${{ matrix.node-version }}
|
||||
directory: src-ui/coverage/
|
||||
|
||||
frontend-bundle-analysis:
|
||||
name: "Frontend Bundle Analysis"
|
||||
tests-frontend-e2e:
|
||||
name: "Frontend E2E Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
|
||||
runs-on: ubuntu-24.04
|
||||
needs:
|
||||
- tests-frontend
|
||||
- install-frontend-dependencies
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
node-version: [20.x]
|
||||
shard-index: [1, 2]
|
||||
shard-count: [2]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
-
|
||||
name: Install pnpm
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 10
|
||||
-
|
||||
name: Use Node.js 20
|
||||
- name: Use Node.js 20
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20.x
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
|
||||
-
|
||||
name: Cache frontend dependencies
|
||||
- name: Cache frontend dependencies
|
||||
id: cache-frontend-deps
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.pnpm-store
|
||||
~/.cache
|
||||
key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
|
||||
- name: Re-link Angular cli
|
||||
run: cd src-ui && pnpm link @angular/cli
|
||||
- name: Cache Playwright browsers
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.cache/ms-playwright
|
||||
key: ${{ runner.os }}-playwright-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-playwright-
|
||||
- name: Install Playwright system dependencies
|
||||
run: npx playwright install-deps
|
||||
- name: Install dependencies
|
||||
run: cd src-ui && pnpm install --no-frozen-lockfile
|
||||
- name: Install Playwright
|
||||
run: cd src-ui && pnpm exec playwright install
|
||||
- name: Run Playwright e2e tests
|
||||
run: cd src-ui && pnpm exec playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}
|
||||
frontend-bundle-analysis:
|
||||
name: "Frontend Bundle Analysis"
|
||||
runs-on: ubuntu-24.04
|
||||
needs:
|
||||
- tests-frontend
|
||||
- tests-frontend-e2e
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 10
|
||||
- name: Use Node.js 20
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20.x
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
|
||||
- name: Cache frontend dependencies
|
||||
id: cache-frontend-deps
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
@ -305,15 +303,12 @@ jobs:
|
||||
~/.pnpm-store
|
||||
~/.cache
|
||||
key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/package-lock.json') }}
|
||||
-
|
||||
name: Re-link Angular cli
|
||||
- name: Re-link Angular cli
|
||||
run: cd src-ui && pnpm link @angular/cli
|
||||
-
|
||||
name: Build frontend and upload analysis
|
||||
- name: Build frontend and upload analysis
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
run: cd src-ui && pnpm run build --configuration=production
|
||||
|
||||
build-docker-image:
|
||||
name: Build Docker image for ${{ github.ref_name }}
|
||||
runs-on: ubuntu-24.04
|
||||
@ -324,9 +319,9 @@ jobs:
|
||||
needs:
|
||||
- tests-backend
|
||||
- tests-frontend
|
||||
- tests-frontend-e2e
|
||||
steps:
|
||||
-
|
||||
name: Check pushing to Docker Hub
|
||||
- name: Check pushing to Docker Hub
|
||||
id: push-other-places
|
||||
# Only push to Dockerhub from the main repo AND the ref is either:
|
||||
# main
|
||||
@ -342,15 +337,13 @@ jobs:
|
||||
echo "Not pushing to DockerHub"
|
||||
echo "enable=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
-
|
||||
name: Set ghcr repository name
|
||||
- name: Set ghcr repository name
|
||||
id: set-ghcr-repository
|
||||
run: |
|
||||
ghcr_name=$(echo "${{ github.repository }}" | awk '{ print tolower($0) }')
|
||||
echo "Name is ${ghcr_name}"
|
||||
echo "ghcr-repository=${ghcr_name}" >> $GITHUB_OUTPUT
|
||||
-
|
||||
name: Gather Docker metadata
|
||||
- name: Gather Docker metadata
|
||||
id: docker-meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
@ -365,37 +358,31 @@ jobs:
|
||||
# For a tag x.y.z or vX.Y.Z, output an x.y.z and x.y image tag
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
-
|
||||
name: Checkout
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
# If https://github.com/docker/buildx/issues/1044 is resolved,
|
||||
# the append input with a native arm64 arch could be used to
|
||||
# significantly speed up building
|
||||
-
|
||||
name: Set up Docker Buildx
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
-
|
||||
name: Set up QEMU
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
with:
|
||||
platforms: arm64
|
||||
-
|
||||
name: Login to GitHub Container Registry
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
-
|
||||
name: Login to Docker Hub
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
# Don't attempt to login if not pushing to Docker Hub
|
||||
if: steps.push-other-places.outputs.enable == 'true'
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
-
|
||||
name: Login to Quay.io
|
||||
- name: Login to Quay.io
|
||||
uses: docker/login-action@v3
|
||||
# Don't attempt to login if not pushing to Quay.io
|
||||
if: steps.push-other-places.outputs.enable == 'true'
|
||||
@ -403,8 +390,7 @@ jobs:
|
||||
registry: quay.io
|
||||
username: ${{ secrets.QUAY_USERNAME }}
|
||||
password: ${{ secrets.QUAY_ROBOT_TOKEN }}
|
||||
-
|
||||
name: Build and push
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
@ -422,23 +408,19 @@ jobs:
|
||||
type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:dev
|
||||
cache-to: |
|
||||
type=registry,mode=max,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
|
||||
-
|
||||
name: Inspect image
|
||||
- name: Inspect image
|
||||
run: |
|
||||
docker buildx imagetools inspect ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
|
||||
-
|
||||
name: Export frontend artifact from docker
|
||||
- name: Export frontend artifact from docker
|
||||
run: |
|
||||
docker create --name frontend-extract ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
|
||||
docker cp frontend-extract:/usr/src/paperless/src/documents/static/frontend src/documents/static/frontend/
|
||||
-
|
||||
name: Upload frontend artifact
|
||||
- name: Upload frontend artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: frontend-compiled
|
||||
path: src/documents/static/frontend/
|
||||
retention-days: 7
|
||||
|
||||
build-release:
|
||||
name: "Build Release"
|
||||
needs:
|
||||
@ -446,63 +428,52 @@ jobs:
|
||||
- documentation
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
-
|
||||
name: Checkout
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
-
|
||||
name: Set up Python
|
||||
- name: Set up Python
|
||||
id: setup-python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
|
||||
-
|
||||
name: Install uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
version: ${{ env.DEFAULT_UV_VERSION }}
|
||||
enable-cache: true
|
||||
python-version: ${{ steps.setup-python.outputs.python-version }}
|
||||
-
|
||||
name: Install Python dependencies
|
||||
- name: Install Python dependencies
|
||||
run: |
|
||||
uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
|
||||
-
|
||||
name: Install system dependencies
|
||||
- name: Install system dependencies
|
||||
run: |
|
||||
sudo apt-get update -qq
|
||||
sudo apt-get install -qq --no-install-recommends gettext liblept5
|
||||
-
|
||||
name: Download frontend artifact
|
||||
- name: Download frontend artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: frontend-compiled
|
||||
path: src/documents/static/frontend/
|
||||
-
|
||||
name: Download documentation artifact
|
||||
- name: Download documentation artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: documentation
|
||||
path: docs/_build/html/
|
||||
-
|
||||
name: Generate requirements file
|
||||
- name: Generate requirements file
|
||||
run: |
|
||||
uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt
|
||||
-
|
||||
name: Compile messages
|
||||
uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt
|
||||
- name: Compile messages
|
||||
run: |
|
||||
cd src/
|
||||
uv run \
|
||||
--python ${{ steps.setup-python.outputs.python-version }} \
|
||||
manage.py compilemessages
|
||||
-
|
||||
name: Collect static files
|
||||
- name: Collect static files
|
||||
run: |
|
||||
cd src/
|
||||
uv run \
|
||||
--python ${{ steps.setup-python.outputs.python-version }} \
|
||||
manage.py collectstatic --no-input
|
||||
-
|
||||
name: Move files
|
||||
- name: Move files
|
||||
run: |
|
||||
echo "Making dist folders"
|
||||
for directory in dist \
|
||||
@ -539,21 +510,18 @@ jobs:
|
||||
cp --recursive docs/_build/html/ dist/paperless-ngx/docs
|
||||
|
||||
mv --verbose static dist/paperless-ngx
|
||||
-
|
||||
name: Make release package
|
||||
- name: Make release package
|
||||
run: |
|
||||
echo "Creating release archive"
|
||||
cd dist
|
||||
sudo chown -R 1000:1000 paperless-ngx/
|
||||
tar -cJf paperless-ngx.tar.xz paperless-ngx/
|
||||
-
|
||||
name: Upload release artifact
|
||||
- name: Upload release artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: release
|
||||
path: dist/paperless-ngx.tar.xz
|
||||
retention-days: 7
|
||||
|
||||
publish-release:
|
||||
name: "Publish Release"
|
||||
runs-on: ubuntu-24.04
|
||||
@ -565,14 +533,12 @@ jobs:
|
||||
- build-release
|
||||
if: github.ref_type == 'tag' && (startsWith(github.ref_name, 'v') || contains(github.ref_name, '-beta.rc'))
|
||||
steps:
|
||||
-
|
||||
name: Download release artifact
|
||||
- name: Download release artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: release
|
||||
path: ./
|
||||
-
|
||||
name: Get version
|
||||
- name: Get version
|
||||
id: get_version
|
||||
run: |
|
||||
echo "version=${{ github.ref_name }}" >> $GITHUB_OUTPUT
|
||||
@ -581,8 +547,7 @@ jobs:
|
||||
else
|
||||
echo "prerelease=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
-
|
||||
name: Create Release and Changelog
|
||||
- name: Create Release and Changelog
|
||||
id: create-release
|
||||
uses: release-drafter/release-drafter@v6
|
||||
with:
|
||||
@ -593,8 +558,7 @@ jobs:
|
||||
publish: true # ensures release is not marked as draft
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
-
|
||||
name: Upload release archive
|
||||
- name: Upload release archive
|
||||
id: upload-release-asset
|
||||
uses: shogo82148/actions-upload-release-asset@v1
|
||||
with:
|
||||
@ -603,7 +567,6 @@ jobs:
|
||||
asset_path: ./paperless-ngx.tar.xz
|
||||
asset_name: paperless-ngx-${{ steps.get_version.outputs.version }}.tar.xz
|
||||
asset_content_type: application/x-xz
|
||||
|
||||
append-changelog:
|
||||
name: "Append Changelog"
|
||||
runs-on: ubuntu-24.04
|
||||
@ -611,26 +574,22 @@ jobs:
|
||||
- publish-release
|
||||
if: needs.publish-release.outputs.prerelease == 'false'
|
||||
steps:
|
||||
-
|
||||
name: Checkout
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: main
|
||||
-
|
||||
name: Set up Python
|
||||
- name: Set up Python
|
||||
id: setup-python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
|
||||
-
|
||||
name: Install uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
version: ${{ env.DEFAULT_UV_VERSION }}
|
||||
enable-cache: true
|
||||
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
|
||||
-
|
||||
name: Append Changelog to docs
|
||||
- name: Append Changelog to docs
|
||||
id: append-Changelog
|
||||
working-directory: docs
|
||||
run: |
|
||||
@ -652,8 +611,7 @@ jobs:
|
||||
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||
git commit -am "Changelog ${{ needs.publish-release.outputs.version }} - GHA"
|
||||
git push origin ${{ needs.publish-release.outputs.version }}-changelog
|
||||
-
|
||||
name: Create Pull Request
|
||||
- name: Create Pull Request
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
|
.github/workflows/cleanup-tags.yml (10 changed lines, vendored)

@ -6,17 +6,14 @@
# This workflow will not trigger runs on forked repos.

name: Cleanup Image Tags

on:
delete:
push:
paths:
- ".github/workflows/cleanup-tags.yml"

concurrency:
group: registry-tags-cleanup
cancel-in-progress: false

jobs:
cleanup-images:
name: Cleanup Image Tags for ${{ matrix.primary-name }}
@ -30,8 +27,7 @@ jobs:
# Requires a personal access token with the OAuth scope delete:packages
TOKEN: ${{ secrets.GHA_CONTAINER_DELETE_TOKEN }}
steps:
-
name: Clean temporary images
- name: Clean temporary images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/ephemeral@v0.10.0
with:
@ -43,7 +39,6 @@ jobs:
repo_name: "paperless-ngx"
match_regex: "(feature|fix)"
do_delete: "true"

cleanup-untagged-images:
name: Cleanup Untagged Images Tags for ${{ matrix.primary-name }}
if: github.repository_owner == 'paperless-ngx'
@ -58,8 +53,7 @@ jobs:
# Requires a personal access token with the OAuth scope delete:packages
TOKEN: ${{ secrets.GHA_CONTAINER_DELETE_TOKEN }}
steps:
-
name: Clean untagged images
- name: Clean untagged images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/untagged@v0.10.0
with:
.github/workflows/codeql-analysis.yml (38 changed lines, vendored)

@ -10,16 +10,14 @@
# supported CodeQL languages.
#
name: "CodeQL"

on:
push:
branches: [ main, dev ]
branches: [main, dev]
pull_request:
# The branches below must be a subset of the branches above
branches: [ dev ]
branches: [dev]
schedule:
- cron: '28 13 * * 5'

jobs:
analyze:
name: Analyze
@ -28,27 +26,23 @@ jobs:
actions: read
contents: read
security-events: write

strategy:
fail-fast: false
matrix:
language: [ 'javascript', 'python' ]
language: ['javascript', 'python']
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Learn more about CodeQL language support at https://git.io/codeql-language-support

steps:
- name: Checkout repository
uses: actions/checkout@v4

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
.github/workflows/crowdin.yml (39 changed lines, vendored)

@ -1,35 +1,28 @@
name: Crowdin Action

on:
workflow_dispatch:
schedule:
- cron: '2 */12 * * *'
push:
paths: [
'src/locale/**',
'src-ui/messages.xlf',
'src-ui/src/locale/**'
]
branches: [ dev ]

paths: ['src/locale/**', 'src-ui/messages.xlf', 'src-ui/src/locale/**']
branches: [dev]
jobs:
synchronize-with-crowdin:
name: Crowdin Sync
if: github.repository_owner == 'paperless-ngx'
runs-on: ubuntu-24.04

steps:
- name: Checkout
uses: actions/checkout@v4
- name: crowdin action
uses: crowdin/github-action@v2
with:
upload_translations: false
download_translations: true
crowdin_branch_name: 'dev'
localization_branch_name: l10n_dev
pull_request_labels: 'skip-changelog, translation'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CROWDIN_PROJECT_ID: ${{ secrets.CROWDIN_PROJECT_ID }}
CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}
- name: Checkout
uses: actions/checkout@v4
- name: crowdin action
uses: crowdin/github-action@v2
with:
upload_translations: false
download_translations: true
crowdin_branch_name: 'dev'
localization_branch_name: l10n_dev
pull_request_labels: 'skip-changelog, translation'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CROWDIN_PROJECT_ID: ${{ secrets.CROWDIN_PROJECT_ID }}
CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}
.github/workflows/pr-bot.yml (new file, 86 lines, vendored)

@ -0,0 +1,86 @@
name: PR Bot
on:
pull_request_target:
types: [opened]
permissions:
contents: read
pull-requests: write
jobs:
pr-bot:
name: Automated PR Bot
runs-on: ubuntu-latest
steps:
- name: Label by file path
uses: actions/labeler@v5
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
- name: Label by size
uses: Gascon1/pr-size-labeler@v1.3.0
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
xs_label: 'small-change'
xs_diff: '9'
s_label: 'non-trivial'
s_diff: '99999'
fail_if_xl: 'false'
excluded_files: /\.lock$/ /\.txt$/ ^src-ui/pnpm-lock\.yaml$ ^src-ui/messages\.xlf$ ^src/locale/en_US/LC_MESSAGES/django\.po$
- name: Label bot-generated PRs
if: ${{ contains(github.actor, 'dependabot') || contains(github.actor, 'crowdin-bot') }}
uses: actions/github-script@v7
with:
script: |
const pr = context.payload.pull_request;
const user = pr.user.login.toLowerCase();
const labels = [];

if (user.includes('dependabot')) {
labels.push('dependencies');
}

if (user.includes('crowdin-bot')) {
labels.push('translation', 'skip-changelog');
}

if (labels.length) {
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: pr.number,
labels,
});
}
- name: Welcome comment
if: ${{ !contains(github.actor, 'bot') }}
uses: actions/github-script@v7
with:
script: |
const pr = context.payload.pull_request;
const user = pr.user.login;

const { data: members } = await github.rest.orgs.listMembers({
org: 'paperless-ngx',
});

const memberLogins = members.map(m => m.login.toLowerCase());
if (memberLogins.includes(user.toLowerCase())) {
core.info('Skipping comment: user is org member');
return;
}

const body =
"Hello @" + user + ",\n\n" +
"Thank you very much for submitting this PR to us!\n\n" +
"This is what will happen next:\n\n" +
"1. CI tests will run against your PR to ensure quality and consistency.\n" +
"2. Next, human contributors from paperless-ngx review your changes.\n" +
"3. Please address any issues that come up during the review as soon as you are able to.\n" +
"4. If accepted, your pull request will be merged into the `dev` branch and changes there will be tested further.\n" +
"5. Eventually, changes from you and other contributors will be merged into `main` and a new release will be made.\n\n" +
"You'll be hearing from us soon, and thank you again for contributing to our project.";

await github.rest.issues.createComment({
issue_number: pr.number,
owner: context.repo.owner,
repo: context.repo.repo,
body,
});
.github/workflows/project-actions.yml (3 changed lines, vendored)

@ -1,5 +1,4 @@
name: Project Automations

on:
pull_request_target: #_target allows access to secrets
types:
@ -8,10 +7,8 @@ on:
branches:
- main
- dev

permissions:
contents: read

jobs:
pr_opened_or_reopened:
name: pr_opened_or_reopened
.github/workflows/repo-maintenance.yml (27 changed lines, vendored)

@ -1,18 +1,14 @@
name: 'Repository Maintenance'

on:
schedule:
- cron: '0 3 * * *'
workflow_dispatch:

permissions:
issues: write
pull-requests: write
discussions: write

concurrency:
group: lock

jobs:
stale:
name: 'Stale'
@ -27,9 +23,8 @@ jobs:
stale-issue-label: stale
stale-pr-label: stale
stale-issue-message: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions. See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions. See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.

lock-threads:
name: 'Lock Old Threads'
if: github.repository_owner == 'paperless-ngx'
@ -42,20 +37,14 @@ jobs:
discussion-inactive-days: '30'
log-output: true
issue-comment: >
This issue has been automatically locked since there
has not been any recent activity after it was closed.
Please open a new discussion or issue for related concerns.
See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
This issue has been automatically locked since there has not been any recent activity after it was closed. Please open a new discussion or issue for related concerns. See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.

pr-comment: >
This pull request has been automatically locked since there
has not been any recent activity after it was closed.
Please open a new discussion or issue for related concerns.
See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
This pull request has been automatically locked since there has not been any recent activity after it was closed. Please open a new discussion or issue for related concerns. See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.

discussion-comment: >
This discussion has been automatically locked since there
has not been any recent activity after it was closed.
Please open a new discussion for related concerns.
See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
This discussion has been automatically locked since there has not been any recent activity after it was closed. Please open a new discussion for related concerns. See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.

close-answered-discussions:
name: 'Close Answered Discussions'
if: github.repository_owner == 'paperless-ngx'
.github/workflows/translate-strings.yml (new file, 69 lines, vendored)

@ -0,0 +1,69 @@
name: Generate Translation Strings
on:
push:
branches:
- dev
jobs:
generate-translate-strings:
name: Generate Translation Strings
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
token: ${{ secrets.PNGX_BOT_PAT }}
ref: ${{ github.head_ref }}
- name: Set up Python
id: setup-python
uses: actions/setup-python@v5
- name: Install system dependencies
run: |
sudo apt-get update -qq
sudo apt-get install -qq --no-install-recommends gettext
- name: Install uv
uses: astral-sh/setup-uv@v6
with:
enable-cache: true
- name: Install backend python dependencies
run: |
uv sync \
--group dev \
--frozen
- name: Generate backend translation strings
run: cd src/ && uv run manage.py makemessages -l en_US -i "samples*"
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 10
- name: Use Node.js 20
uses: actions/setup-node@v4
with:
node-version: 20.x
cache: 'pnpm'
cache-dependency-path: 'src-ui/pnpm-lock.yaml'
- name: Cache frontend dependencies
id: cache-frontend-deps
uses: actions/cache@v4
with:
path: |
~/.pnpm-store
~/.cache
key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
- name: Install frontend dependencies
if: steps.cache-frontend-deps.outputs.cache-hit != 'true'
run: cd src-ui && pnpm install
- name: Re-link Angular cli
run: cd src-ui && pnpm link @angular/cli
- name: Generate frontend translation strings
run: |
cd src-ui
pnpm run ng extract-i18n
- name: Commit changes
uses: stefanzweifel/git-auto-commit-action@v5
with:
file_pattern: 'src-ui/messages.xlf src/locale/en_US/LC_MESSAGES/django.po'
commit_message: "Auto translate strings"
commit_user_name: "GitHub Actions"
commit_author: "GitHub Actions <41898282+github-actions[bot]@users.noreply.github.com>"
@ -76,3 +76,8 @@ repos:
rev: "v0.10.0.1"
hooks:
- id: shellcheck
- repo: https://github.com/google/yamlfmt
rev: v0.14.0
hooks:
- id: yamlfmt
exclude: "^src-ui/pnpm-lock.yaml"
@ -32,7 +32,7 @@ RUN set -eux \
# Purpose: Installs s6-overlay and rootfs
# Comments:
# - Don't leave anything extra in here either
FROM ghcr.io/astral-sh/uv:0.6.13-python3.12-bookworm-slim AS s6-overlay-base
FROM ghcr.io/astral-sh/uv:0.6.16-python3.12-bookworm-slim AS s6-overlay-base

WORKDIR /usr/src/s6

@ -47,7 +47,7 @@ ENV \
ARG TARGETARCH
ARG TARGETVARIANT
# Lock this version
ARG S6_OVERLAY_VERSION=3.2.0.2
ARG S6_OVERLAY_VERSION=3.2.1.0

ARG S6_BUILD_TIME_PKGS="curl \
xz-utils"
@ -5,7 +5,7 @@
|
||||
|
||||
services:
|
||||
gotenberg:
|
||||
image: docker.io/gotenberg/gotenberg:8.19
|
||||
image: docker.io/gotenberg/gotenberg:8.20
|
||||
hostname: gotenberg
|
||||
container_name: gotenberg
|
||||
network_mode: host
|
||||
|
@ -32,11 +32,10 @@
|
||||
|
||||
services:
|
||||
broker:
|
||||
image: docker.io/library/redis:7
|
||||
image: docker.io/library/redis:8
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- redisdata:/data
|
||||
|
||||
db:
|
||||
image: docker.io/library/mariadb:11
|
||||
restart: unless-stopped
|
||||
@ -48,7 +47,6 @@ services:
|
||||
MARIADB_USER: paperless
|
||||
MARIADB_PASSWORD: paperless
|
||||
MARIADB_ROOT_PASSWORD: paperless
|
||||
|
||||
webserver:
|
||||
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||
restart: unless-stopped
|
||||
@ -75,9 +73,8 @@ services:
|
||||
PAPERLESS_TIKA_ENABLED: 1
|
||||
PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
|
||||
PAPERLESS_TIKA_ENDPOINT: http://tika:9998
|
||||
|
||||
gotenberg:
|
||||
image: docker.io/gotenberg/gotenberg:8.19
|
||||
image: docker.io/gotenberg/gotenberg:8.20
|
||||
restart: unless-stopped
|
||||
# The gotenberg chromium route is used to convert .eml files. We do not
|
||||
# want to allow external content like tracking pixels or even javascript.
|
||||
@ -85,11 +82,9 @@ services:
|
||||
- "gotenberg"
|
||||
- "--chromium-disable-javascript=true"
|
||||
- "--chromium-allow-list=file:///tmp/.*"
|
||||
|
||||
tika:
|
||||
image: docker.io/apache/tika:latest
|
||||
restart: unless-stopped
|
||||
|
||||
volumes:
|
||||
data:
|
||||
media:
|
||||
|
@ -27,11 +27,10 @@
|
||||
|
||||
services:
|
||||
broker:
|
||||
image: docker.io/library/redis:7
|
||||
image: docker.io/library/redis:8
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- redisdata:/data
|
||||
|
||||
db:
|
||||
image: docker.io/library/mariadb:11
|
||||
restart: unless-stopped
|
||||
@ -43,7 +42,6 @@ services:
|
||||
MARIADB_USER: paperless
|
||||
MARIADB_PASSWORD: paperless
|
||||
MARIADB_ROOT_PASSWORD: paperless
|
||||
|
||||
webserver:
|
||||
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||
restart: unless-stopped
|
||||
@ -65,7 +63,6 @@ services:
|
||||
PAPERLESS_DBUSER: paperless # only needed if non-default username
|
||||
PAPERLESS_DBPASS: paperless # only needed if non-default password
|
||||
PAPERLESS_DBPORT: 3306
|
||||
|
||||
volumes:
|
||||
data:
|
||||
media:
|
||||
|
@ -28,11 +28,10 @@
|
||||
|
||||
services:
|
||||
broker:
|
||||
image: docker.io/library/redis:7
|
||||
image: docker.io/library/redis:8
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- redisdata:/data
|
||||
|
||||
db:
|
||||
image: docker.io/library/postgres:17
|
||||
restart: unless-stopped
|
||||
@ -42,7 +41,6 @@ services:
|
||||
POSTGRES_DB: paperless
|
||||
POSTGRES_USER: paperless
|
||||
POSTGRES_PASSWORD: paperless
|
||||
|
||||
webserver:
|
||||
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||
restart: unless-stopped
|
||||
@ -61,7 +59,6 @@ services:
|
||||
PAPERLESS_DBHOST: db
|
||||
env_file:
|
||||
- stack.env
|
||||
|
||||
volumes:
|
||||
data:
|
||||
media:
|
||||
|
@ -31,11 +31,10 @@
|
||||
|
||||
services:
|
||||
broker:
|
||||
image: docker.io/library/redis:7
|
||||
image: docker.io/library/redis:8
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- redisdata:/data
|
||||
|
||||
db:
|
||||
image: docker.io/library/postgres:17
|
||||
restart: unless-stopped
|
||||
@ -45,7 +44,6 @@ services:
|
||||
POSTGRES_DB: paperless
|
||||
POSTGRES_USER: paperless
|
||||
POSTGRES_PASSWORD: paperless
|
||||
|
||||
webserver:
|
||||
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||
restart: unless-stopped
|
||||
@ -68,22 +66,18 @@ services:
|
||||
PAPERLESS_TIKA_ENABLED: 1
|
||||
PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
|
||||
PAPERLESS_TIKA_ENDPOINT: http://tika:9998
|
||||
|
||||
gotenberg:
|
||||
image: docker.io/gotenberg/gotenberg:8.19
|
||||
image: docker.io/gotenberg/gotenberg:8.20
|
||||
restart: unless-stopped
|
||||
|
||||
# The gotenberg chromium route is used to convert .eml files. We do not
|
||||
# want to allow external content like tracking pixels or even javascript.
|
||||
command:
|
||||
- "gotenberg"
|
||||
- "--chromium-disable-javascript=true"
|
||||
- "--chromium-allow-list=file:///tmp/.*"
|
||||
|
||||
tika:
|
||||
image: docker.io/apache/tika:latest
|
||||
restart: unless-stopped
|
||||
|
||||
volumes:
|
||||
data:
|
||||
media:
|
||||
|
@ -27,11 +27,10 @@
|
||||
|
||||
services:
|
||||
broker:
|
||||
image: docker.io/library/redis:7
|
||||
image: docker.io/library/redis:8
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- redisdata:/data
|
||||
|
||||
db:
|
||||
image: docker.io/library/postgres:17
|
||||
restart: unless-stopped
|
||||
@ -41,7 +40,6 @@ services:
|
||||
POSTGRES_DB: paperless
|
||||
POSTGRES_USER: paperless
|
||||
POSTGRES_PASSWORD: paperless
|
||||
|
||||
webserver:
|
||||
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||
restart: unless-stopped
|
||||
@ -59,7 +57,6 @@ services:
|
||||
environment:
|
||||
PAPERLESS_REDIS: redis://broker:6379
|
||||
PAPERLESS_DBHOST: db
|
||||
|
||||
volumes:
|
||||
data:
|
||||
media:
|
||||
|
@ -31,11 +31,10 @@
|
||||
|
||||
services:
|
||||
broker:
|
||||
image: docker.io/library/redis:7
|
||||
image: docker.io/library/redis:8
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- redisdata:/data
|
||||
|
||||
webserver:
|
||||
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||
restart: unless-stopped
|
||||
@ -56,22 +55,18 @@ services:
|
||||
PAPERLESS_TIKA_ENABLED: 1
|
||||
PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
|
||||
PAPERLESS_TIKA_ENDPOINT: http://tika:9998
|
||||
|
||||
gotenberg:
|
||||
image: docker.io/gotenberg/gotenberg:8.19
|
||||
image: docker.io/gotenberg/gotenberg:8.20
|
||||
restart: unless-stopped
|
||||
|
||||
# The gotenberg chromium route is used to convert .eml files. We do not
|
||||
# want to allow external content like tracking pixels or even javascript.
|
||||
command:
|
||||
- "gotenberg"
|
||||
- "--chromium-disable-javascript=true"
|
||||
- "--chromium-allow-list=file:///tmp/.*"
|
||||
|
||||
tika:
|
||||
image: docker.io/apache/tika:latest
|
||||
restart: unless-stopped
|
||||
|
||||
volumes:
|
||||
data:
|
||||
media:
|
||||
|
@ -24,11 +24,10 @@
|
||||
|
||||
services:
|
||||
broker:
|
||||
image: docker.io/library/redis:7
|
||||
image: docker.io/library/redis:8
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- redisdata:/data
|
||||
|
||||
webserver:
|
||||
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||
restart: unless-stopped
|
||||
@ -44,7 +43,6 @@ services:
|
||||
env_file: docker-compose.env
|
||||
environment:
|
||||
PAPERLESS_REDIS: redis://broker:6379
|
||||
|
||||
volumes:
|
||||
data:
|
||||
media:
|
||||
|
@ -9,7 +9,7 @@ if find /run/s6/container_environment/*"_FILE" -maxdepth 1 > /dev/null 2>&1; the
for FILENAME in /run/s6/container_environment/*; do
if [[ "${FILENAME##*/}" == PAPERLESS_*_FILE ]]; then
# This should have been named differently.
if [[ ${FILENAME} == "PAPERLESS_OCR_SKIP_ARCHIVE_FILE" || ${FILENAME} == "PAPERLESS_MODEL_FILE" ]]; then
if [[ "${FILENAME##*/}" == "PAPERLESS_OCR_SKIP_ARCHIVE_FILE" || "${FILENAME##*/}" == "PAPERLESS_MODEL_FILE" ]]; then
continue
fi
SECRETFILE=$(cat "${FILENAME}")
@ -418,3 +418,9 @@ Initial API version.

- The user field of document notes now returns a simplified user object
  rather than just the user ID.

#### Version 9

- The document `created` field is now a date, not a datetime. The
  `created_date` field is considered deprecated and will be removed in a
  future version.
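To make the Version 9 change above concrete, here is a minimal sketch; the timestamps are hypothetical values, and only the field semantics (`created` becoming a date, `created_date` being deprecated) come from the changelog entry above.

```python
from datetime import date, datetime

# Hypothetical example values; only the field semantics come from the changelog above.
created_v8 = datetime.fromisoformat("2025-05-21T12:52:13-05:00")  # before: full datetime
created_v9 = date.fromisoformat("2025-05-21")                      # version 9: date only

# The deprecated `created_date` field carried just the date portion; with version 9
# clients can read `created` directly and drop the extra field.
assert created_v9 == created_v8.date()
```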
@ -629,7 +629,13 @@ If both the [PAPERLESS_ACCOUNT_DEFAULT_GROUPS](#PAPERLESS_ACCOUNT_DEFAULT_GROUPS

!!! note

    If you do not have a working email server set up you should set this to 'none'.
    If you do not have a working email server set up this will be set to 'none'.

#### [`PAPERLESS_ACCOUNT_EMAIL_UNKNOWN_ACCOUNTS=<bool>`](#PAPERLESS_ACCOUNT_EMAIL_UNKNOWN_ACCOUNTS) {#PAPERLESS_ACCOUNT_EMAIL_UNKNOWN_ACCOUNTS}

: See the relevant [django-allauth documentation](https://docs.allauth.org/en/latest/account/configuration.html)

    Defaults to True (from allauth)

#### [`PAPERLESS_DISABLE_REGULAR_LOGIN=<bool>`](#PAPERLESS_DISABLE_REGULAR_LOGIN) {#PAPERLESS_DISABLE_REGULAR_LOGIN}
@ -407,7 +407,8 @@ Currently, there are three events that correspond to workflow trigger 'types':

3. **Document Updated**: when a document is updated. Similar to 'added' events, triggers can include filtering by content matching,
   tags, doc type, or correspondent.
4. **Scheduled**: a scheduled trigger that can be used to run workflows at a specific time. The date used can be either the document
   added, created, updated date or you can specify a (date) custom field. You can also specify a day offset from the date.
   added, created, updated date or you can specify a (date) custom field. You can also specify a day offset from the date (positive
   offsets will trigger before the date, negative offsets will trigger after). A sketch of this offset behavior follows below.
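A minimal sketch of the day-offset behavior described in item 4, assuming the offset is a simple signed integer; the function name and example dates are illustrative, not the actual Paperless-ngx implementation:

```python
from datetime import date, timedelta

def scheduled_trigger_date(base_date: date, offset_days: int) -> date:
    """Illustrative only: per the text above, a positive offset fires the
    workflow before the base date, a negative offset fires it after."""
    return base_date - timedelta(days=offset_days)

# Example with a document dated 2025-06-15:
created = date(2025, 6, 15)
assert scheduled_trigger_date(created, 7) == date(2025, 6, 8)    # 7 days before
assert scheduled_trigger_date(created, -7) == date(2025, 6, 22)  # 7 days after
```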

The following flow diagram illustrates the three document trigger types:
mkdocs.yml (28 changed lines)

@ -11,14 +11,12 @@ theme:
toggle:
icon: material/brightness-auto
name: Switch to light mode

# Palette toggle for light mode
- media: "(prefers-color-scheme: light)"
scheme: default
toggle:
icon: material/brightness-7
name: Switch to dark mode

# Palette toggle for dark mode
- media: "(prefers-color-scheme: dark)"
scheme: slate
@ -60,17 +58,17 @@ markdown_extensions:
emoji_generator: !!python/name:material.extensions.emoji.to_svg
strict: true
nav:
- index.md
- setup.md
- 'Basic Usage': usage.md
- configuration.md
- administration.md
- advanced_usage.md
- 'REST API': api.md
- development.md
- 'FAQs': faq.md
- troubleshooting.md
- changelog.md
- index.md
- setup.md
- 'Basic Usage': usage.md
- configuration.md
- administration.md
- advanced_usage.md
- 'REST API': api.md
- development.md
- 'FAQs': faq.md
- troubleshooting.md
- changelog.md
copyright: Copyright © 2016 - 2023 Daniel Quinn, Jonas Winkler, and the Paperless-ngx team
extra:
social:
@ -83,5 +81,5 @@ extra:
plugins:
- search
- glightbox:
skip_classes:
- no-lightbox
skip_classes:
- no-lightbox
@ -23,13 +23,13 @@ dependencies = [
|
||||
"dateparser~=1.2",
|
||||
# WARNING: django does not use semver.
|
||||
# Only patch versions are guaranteed to not introduce breaking changes.
|
||||
"django~=5.1.7",
|
||||
"django~=5.2.1",
|
||||
"django-allauth[socialaccount,mfa]~=65.4.0",
|
||||
"django-auditlog~=3.0.0",
|
||||
"django-celery-results~=2.5.1",
|
||||
"django-auditlog~=3.1.2",
|
||||
"django-celery-results~=2.6.0",
|
||||
"django-compression-middleware~=0.5.0",
|
||||
"django-cors-headers~=4.7.0",
|
||||
"django-extensions~=3.2.3",
|
||||
"django-extensions~=4.1",
|
||||
"django-filter~=25.1",
|
||||
"django-guardian~=2.4.0",
|
||||
"django-multiselectfield~=0.1.13",
|
||||
@ -37,11 +37,11 @@ dependencies = [
|
||||
"djangorestframework~=3.15",
|
||||
"djangorestframework-guardian~=0.3.0",
|
||||
"drf-spectacular~=0.28",
|
||||
"drf-spectacular-sidecar~=2025.3.1",
|
||||
"drf-spectacular-sidecar~=2025.5.1",
|
||||
"drf-writable-nested~=0.7.1",
|
||||
"filelock~=3.17.0",
|
||||
"filelock~=3.18.0",
|
||||
"flower~=2.0.1",
|
||||
"gotenberg-client~=0.9.0",
|
||||
"gotenberg-client~=0.10.0",
|
||||
"httpx-oauth~=0.16",
|
||||
"imap-tools~=1.10.0",
|
||||
"inotifyrecursive~=0.3",
|
||||
@ -52,12 +52,12 @@ dependencies = [
|
||||
"pathvalidate~=3.2.3",
|
||||
"pdf2image~=1.17.0",
|
||||
"python-dateutil~=2.9.0",
|
||||
"python-dotenv~=1.0.1",
|
||||
"python-dotenv~=1.1.0",
|
||||
"python-gnupg~=0.5.4",
|
||||
"python-ipware~=3.0.0",
|
||||
"python-magic~=0.4.27",
|
||||
"pyzbar~=0.1.9",
|
||||
"rapidfuzz~=3.12.1",
|
||||
"rapidfuzz~=3.13.0",
|
||||
"redis[hiredis]~=5.2.1",
|
||||
"scikit-learn~=1.6.1",
|
||||
"setproctitle~=1.3.4",
|
||||
@ -227,27 +227,9 @@ lint.per-file-ignores."src/documents/tests/test_consumer.py" = [
|
||||
lint.per-file-ignores."src/documents/tests/test_file_handling.py" = [
|
||||
"PTH",
|
||||
] # TODO Enable & remove
|
||||
lint.per-file-ignores."src/documents/tests/test_management.py" = [
|
||||
"PTH",
|
||||
] # TODO Enable & remove
|
||||
lint.per-file-ignores."src/documents/tests/test_management_consumer.py" = [
|
||||
"PTH",
|
||||
] # TODO Enable & remove
|
||||
lint.per-file-ignores."src/documents/tests/test_management_exporter.py" = [
|
||||
"PTH",
|
||||
] # TODO Enable & remove
|
||||
lint.per-file-ignores."src/documents/tests/test_migration_archive_files.py" = [
|
||||
"PTH",
|
||||
] # TODO Enable & remove
|
||||
lint.per-file-ignores."src/documents/tests/test_migration_document_pages_count.py" = [
|
||||
"PTH",
|
||||
] # TODO Enable & remove
|
||||
lint.per-file-ignores."src/documents/tests/test_migration_mime_type.py" = [
|
||||
"PTH",
|
||||
] # TODO Enable & remove
|
||||
lint.per-file-ignores."src/documents/tests/test_sanity_check.py" = [
|
||||
"PTH",
|
||||
] # TODO Enable & remove
|
||||
lint.per-file-ignores."src/documents/views.py" = [
|
||||
"PTH",
|
||||
] # TODO Enable & remove
|
||||
|
src-ui/__mocks__/pdfjs-dist.ts (new file, 13 lines)

@ -0,0 +1,13 @@
export const getDocument = jest.fn(() => ({
promise: Promise.resolve({ numPages: 3 }),
}))

export const GlobalWorkerOptions = { workerSrc: '' }
export const VerbosityLevel = { ERRORS: 0 }

globalThis.pdfjsLib = {
getDocument,
GlobalWorkerOptions,
VerbosityLevel,
AbortException: class AbortException extends Error {},
}

(Diff of one large file suppressed by the viewer.)
@ -7,22 +7,21 @@
    "start": "ng serve",
    "build": "ng build",
    "test": "ng test --no-watch --coverage",
    "lint": "ng lint",
    "postinstall": "patch-package"
    "lint": "ng lint"
  },
  "private": true,
"dependencies": {
|
||||
"@angular/cdk": "^19.2.7",
|
||||
"@angular/common": "~19.2.4",
|
||||
"@angular/compiler": "~19.2.4",
|
||||
"@angular/core": "~19.2.4",
|
||||
"@angular/forms": "~19.2.4",
|
||||
"@angular/localize": "~19.2.4",
|
||||
"@angular/platform-browser": "~19.2.4",
|
||||
"@angular/platform-browser-dynamic": "~19.2.4",
|
||||
"@angular/router": "~19.2.4",
|
||||
"@angular/cdk": "^19.2.14",
|
||||
"@angular/common": "~19.2.9",
|
||||
"@angular/compiler": "~19.2.9",
|
||||
"@angular/core": "~19.2.9",
|
||||
"@angular/forms": "~19.2.9",
|
||||
"@angular/localize": "~19.2.9",
|
||||
"@angular/platform-browser": "~19.2.9",
|
||||
"@angular/platform-browser-dynamic": "~19.2.9",
|
||||
"@angular/router": "~19.2.9",
|
||||
"@ng-bootstrap/ng-bootstrap": "^18.0.0",
|
||||
"@ng-select/ng-select": "^14.2.6",
|
||||
"@ng-select/ng-select": "^14.7.0",
|
||||
"@ngneat/dirty-check-forms": "^3.0.3",
|
||||
"@popperjs/core": "^2.11.8",
|
||||
"bootstrap": "^5.3.3",
|
||||
@ -33,7 +32,6 @@
|
||||
"ngx-color": "^10.0.0",
|
||||
"ngx-cookie-service": "^19.1.2",
|
||||
"ngx-device-detector": "^9.0.0",
|
||||
"ngx-file-drop": "^16.0.0",
|
||||
"ngx-ui-tour-ng-bootstrap": "^16.0.0",
|
||||
"rxjs": "^7.8.2",
|
||||
"tslib": "^2.8.1",
|
||||
@ -42,32 +40,31 @@
|
||||
"zone.js": "^0.15.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@angular-builders/custom-webpack": "^19.0.0",
|
||||
"@angular-builders/jest": "^19.0.0",
|
||||
"@angular-devkit/build-angular": "^19.2.5",
|
||||
"@angular-devkit/core": "^19.2.5",
|
||||
"@angular-devkit/schematics": "^19.2.5",
|
||||
"@angular-builders/custom-webpack": "^19.0.1",
|
||||
"@angular-builders/jest": "^19.0.1",
|
||||
"@angular-devkit/build-angular": "^19.2.10",
|
||||
"@angular-devkit/core": "^19.2.10",
|
||||
"@angular-devkit/schematics": "^19.2.10",
|
||||
"@angular-eslint/builder": "19.3.0",
|
||||
"@angular-eslint/eslint-plugin": "19.3.0",
|
||||
"@angular-eslint/eslint-plugin-template": "19.3.0",
|
||||
"@angular-eslint/schematics": "19.3.0",
|
||||
"@angular-eslint/template-parser": "19.3.0",
|
||||
"@angular/cli": "~19.2.5",
|
||||
"@angular/compiler-cli": "~19.2.4",
|
||||
"@angular/cli": "~19.2.10",
|
||||
"@angular/compiler-cli": "~19.2.9",
|
||||
"@codecov/webpack-plugin": "^1.9.0",
|
||||
"@playwright/test": "^1.51.1",
|
||||
"@types/jest": "^29.5.14",
|
||||
"@types/node": "^22.13.17",
|
||||
"@typescript-eslint/eslint-plugin": "^8.29.0",
|
||||
"@typescript-eslint/parser": "^8.29.0",
|
||||
"@typescript-eslint/utils": "^8.29.0",
|
||||
"eslint": "^9.23.0",
|
||||
"@types/node": "^22.15.3",
|
||||
"@typescript-eslint/eslint-plugin": "^8.31.1",
|
||||
"@typescript-eslint/parser": "^8.31.1",
|
||||
"@typescript-eslint/utils": "^8.31.1",
|
||||
"eslint": "^9.25.1",
|
||||
"jest": "29.7.0",
|
||||
"jest-environment-jsdom": "^29.7.0",
|
||||
"jest-junit": "^16.0.0",
|
||||
"jest-preset-angular": "^14.5.4",
|
||||
"jest-preset-angular": "^14.5.5",
|
||||
"jest-websocket-mock": "^2.5.0",
|
||||
"patch-package": "^8.0.0",
|
||||
"prettier-plugin-organize-imports": "^4.1.0",
|
||||
"ts-node": "~10.9.1",
|
||||
"typescript": "^5.5.4"
|
||||
|
File diff suppressed because one or more lines are too long
2936 src-ui/pnpm-lock.yaml generated
File diff suppressed because it is too large
@ -121,19 +121,4 @@ HTMLCanvasElement.prototype.getContext = <
  typeof HTMLCanvasElement.prototype.getContext
>jest.fn()

// pdfjs
jest.mock('pdfjs-dist', () => ({
  getDocument: jest.fn(() => ({
    promise: Promise.resolve({ numPages: 3 }),
  })),
  GlobalWorkerOptions: { workerSrc: '' },
  VerbosityLevel: { ERRORS: 0 },
  globalThis: {
    pdfjsLib: {
      GlobalWorkerOptions: {
        workerSrc: '',
      },
    },
  },
}))
jest.mock('pdfjs-dist/web/pdf_viewer', () => ({}))
jest.mock('pdfjs-dist')
@ -9,7 +9,6 @@ import {
|
||||
import { Router, RouterModule } from '@angular/router'
|
||||
import { NgbModalModule } from '@ng-bootstrap/ng-bootstrap'
|
||||
import { allIcons, NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
|
||||
import { NgxFileDropModule } from 'ngx-file-drop'
|
||||
import { TourNgBootstrapModule, TourService } from 'ngx-ui-tour-ng-bootstrap'
|
||||
import { Subject } from 'rxjs'
|
||||
import { routes } from './app-routing.module'
|
||||
@ -43,7 +42,6 @@ describe('AppComponent', () => {
|
||||
imports: [
|
||||
TourNgBootstrapModule,
|
||||
RouterModule.forRoot(routes),
|
||||
NgxFileDropModule,
|
||||
NgbModalModule,
|
||||
AppComponent,
|
||||
ToastsComponent,
|
||||
|
@ -105,9 +105,9 @@ describe('ConfigComponent', () => {
|
||||
|
||||
it('should support JSON validation for e.g. user_args', () => {
|
||||
component.configForm.patchValue({ user_args: '{ foo bar }' })
|
||||
expect(component.errors).toEqual({ user_args: 'Invalid JSON' })
|
||||
expect(component.errors['user_args']).toEqual('Invalid JSON')
|
||||
component.configForm.patchValue({ user_args: '{ "foo": "bar" }' })
|
||||
expect(component.errors).toEqual({ user_args: null })
|
||||
expect(component.errors['user_args']).toBeNull()
|
||||
})
|
||||
|
||||
it('should upload file, show error if necessary', () => {
|
||||
|
@ -405,7 +405,7 @@ describe('GlobalSearchComponent', () => {
|
||||
expect(toastErrorSpy).toHaveBeenCalled()
|
||||
|
||||
// succeed
|
||||
editDialog.succeeded.emit(true)
|
||||
editDialog.succeeded.emit(object as any)
|
||||
expect(toastInfoSpy).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
@ -456,7 +456,7 @@ describe('GlobalSearchComponent', () => {
|
||||
expect(toastErrorSpy).toHaveBeenCalled()
|
||||
|
||||
// succeed
|
||||
editDialog.succeeded.emit(true)
|
||||
editDialog.succeeded.emit(searchResults.tags[0] as any)
|
||||
expect(toastInfoSpy).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
|
@ -47,7 +47,7 @@ export abstract class EditDialogComponent<
  object: T

  @Output()
  succeeded = new EventEmitter()
  succeeded = new EventEmitter<T>()

  @Output()
  failed = new EventEmitter()
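A side note, not from the diff: typing the emitter as EventEmitter<T> lets callers receive the saved object rather than a bare boolean, which is what the updated specs above rely on when they emit the edited object. A minimal sketch, with a hypothetical Tag-typed dialog reference:

    // Sketch only: subscribing to a typed `succeeded` emitter from an edit dialog instance
    const dialog = modalRef.componentInstance as EditDialogComponent<Tag> // modalRef is hypothetical
    dialog.succeeded.subscribe((savedTag: Tag) => {
      // savedTag is strongly typed; with a plain EventEmitter() it would have been `any`
      console.log('created tag id', savedTag.id)
    })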
@ -123,7 +123,15 @@
|
||||
<p class="small" i18n>Set scheduled trigger offset and which date field to use.</p>
|
||||
<div class="row">
|
||||
<div class="col-4">
|
||||
<pngx-input-number i18n-title title="Offset days" formControlName="schedule_offset_days" [showAdd]="false" [error]="error?.schedule_offset_days"></pngx-input-number>
|
||||
<pngx-input-number
|
||||
i18n-title
|
||||
title="Offset days"
|
||||
formControlName="schedule_offset_days"
|
||||
[showAdd]="false"
|
||||
[error]="error?.schedule_offset_days"
|
||||
hint="Positive values will trigger the workflow before the date, negative values after."
|
||||
i18n-hint
|
||||
></pngx-input-number>
|
||||
</div>
|
||||
<div class="col-4">
|
||||
<pngx-input-select i18n-title title="Relative to" formControlName="schedule_date_field" [items]="scheduleDateFieldOptions" [error]="error?.schedule_date_field"></pngx-input-select>
|
||||
|
@ -586,6 +586,8 @@ export class FilterableDropdownComponent
|
||||
this.selectionModel.reset()
|
||||
this.modelIsDirty = false
|
||||
}
|
||||
this.selectionModel.singleSelect =
|
||||
this.editing && !this.selectionModel.manyToOne
|
||||
this.opened.next(this)
|
||||
} else {
|
||||
if (this.creating) {
|
||||
|
@ -33,7 +33,7 @@
  </ng-template>
</ng-select>
@if (allowCreate && !hideAddButton) {
  <button class="btn btn-outline-secondary" type="button" (click)="createTag()" [disabled]="disabled">
  <button class="btn btn-outline-secondary" type="button" (click)="createTag(null, true)" [disabled]="disabled">
    <i-bs width="1.2em" height="1.2em" name="plus"></i-bs>
  </button>
}
@ -130,7 +130,7 @@ export class TagsComponent implements OnInit, ControlValueAccessor {
    }
  }

  createTag(name: string = null) {
  createTag(name: string = null, add: boolean = false) {
    var modal = this.modalService.open(TagEditDialogComponent, {
      backdrop: 'static',
    })
@ -143,9 +143,10 @@ export class TagsComponent implements OnInit, ControlValueAccessor {
    return firstValueFrom(
      (modal.componentInstance as TagEditDialogComponent).succeeded.pipe(
        first(),
        tap(() => {
        tap((newTag) => {
          this.tagService.listAll().subscribe((tags) => {
            this.tags = tags.results
            add && this.addTag(newTag.id)
          })
        })
      )
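For readers unfamiliar with the pattern used here (commentary, not part of the change): firstValueFrom converts the first `succeeded` emission into a Promise, first() completes the stream after one value, and tap() runs the side effect of refreshing the tag list and, when add is true, attaching the new tag. A rough standalone sketch under those assumptions, with an illustrative Tag stand-in:

    // Sketch: awaiting a dialog's first `succeeded` emission as a Promise
    import { Observable, firstValueFrom } from 'rxjs'
    import { first, tap } from 'rxjs/operators'

    interface Tag { id: number } // stand-in type for the sketch

    async function createAndMaybeAdd(dialog: { succeeded: Observable<Tag> }, add: boolean) {
      return await firstValueFrom(
        dialog.succeeded.pipe(
          first(), // complete after the first emission
          tap((tag) => {
            if (add) {
              // equivalent to the `add && this.addTag(newTag.id)` guard in the diff
              console.log('would add tag', tag.id)
            }
          })
        )
      )
    }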
@ -43,7 +43,7 @@
|
||||
<a routerLink="/documents/{{doc.id}}" class="btn-link text-dark text-decoration-none py-2 py-md-3" title="Open document" i18n-title>{{doc.added | customDate}}</a>
|
||||
}
|
||||
@case (DisplayField.CREATED) {
|
||||
<a routerLink="/documents/{{doc.id}}" class="btn-link text-dark text-decoration-none py-2 py-md-3" title="Open document" i18n-title>{{doc.created_date | customDate}}</a>
|
||||
<a routerLink="/documents/{{doc.id}}" class="btn-link text-dark text-decoration-none py-2 py-md-3" title="Open document" i18n-title>{{doc.created | customDate}}</a>
|
||||
}
|
||||
@case (DisplayField.TITLE) {
|
||||
<a routerLink="/documents/{{doc.id}}" title="Open document" i18n-title class="btn-link text-dark text-decoration-none py-2 py-md-3">{{doc.title | documentTitle}}</a>
|
||||
|
@ -82,10 +82,20 @@ describe('UploadFileWidgetComponent', () => {
|
||||
})
|
||||
|
||||
it('should upload files', () => {
|
||||
const uploadSpy = jest.spyOn(uploadDocumentsService, 'uploadFiles')
|
||||
fixture.debugElement
|
||||
.query(By.css('input'))
|
||||
.nativeElement.dispatchEvent(new Event('change'))
|
||||
const uploadSpy = jest.spyOn(uploadDocumentsService, 'uploadFile')
|
||||
const file = new File(
|
||||
[new Blob(['testing'], { type: 'application/pdf' })],
|
||||
'file.pdf'
|
||||
)
|
||||
const fileInput = fixture.debugElement.query(By.css('input'))
|
||||
jest.spyOn(fileInput.nativeElement, 'files', 'get').mockReturnValue({
|
||||
item: () => file,
|
||||
length: 1,
|
||||
[Symbol.iterator]: () => ({
|
||||
next: () => ({ done: false, value: file }),
|
||||
}),
|
||||
} as any)
|
||||
fileInput.nativeElement.dispatchEvent(new Event('change'))
|
||||
expect(uploadSpy).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
|
@ -134,9 +134,11 @@ export class UploadFileWidgetComponent extends ComponentWithPermissions {
  }

  public onFileSelected(event: Event) {
    this.uploadDocumentsService.uploadFiles(
      (event.target as HTMLInputElement).files
    )
    const files = (event.target as HTMLInputElement).files
    for (let i = 0; i < files?.length; i++) {
      const file = files.item(i)
      file && this.uploadDocumentsService.uploadFile(file)
    }
  }

  get slimSidebarEnabled(): boolean {
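An aside, not part of the diff: FileList is array-like rather than an Array, which is why the loop above indexes it through item(i). An equivalent, null-safe formulation of the same handler could look like this sketch (same hypothetical service API):

    // Sketch: iterating a possibly-null FileList without manual index handling
    public onFileSelected(event: Event) {
      const files = (event.target as HTMLInputElement).files
      for (const file of Array.from(files ?? [])) {
        this.uploadDocumentsService.uploadFile(file)
      }
    }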
@ -9,9 +9,9 @@
|
||||
}
|
||||
<div class="input-group input-group-sm me-md-5 d-none d-md-flex">
|
||||
<button class="btn btn-outline-secondary" (click)="decreaseZoom()" i18n>-</button>
|
||||
<select class="form-select" (change)="setZoom($event.target.value)">
|
||||
<select class="form-select" (change)="setZoom($event.target.value)" [ngModel]="currentZoom">
|
||||
@for (setting of zoomSettings; track setting) {
|
||||
<option [value]="setting" [attr.selected]="isZoomSelected(setting) ? 'selected' : null">
|
||||
<option [value]="setting">
|
||||
{{ getZoomSettingTitle(setting) }}
|
||||
</option>
|
||||
}
|
||||
@ -129,8 +129,8 @@
|
||||
<div>
|
||||
<pngx-input-text #inputTitle i18n-title title="Title" formControlName="title" [horizontal]="true" (keyup)="titleKeyUp($event)" [error]="error?.title"></pngx-input-text>
|
||||
<pngx-input-number i18n-title title="Archive serial number" [error]="error?.archive_serial_number" [horizontal]="true" formControlName='archive_serial_number'></pngx-input-number>
|
||||
<pngx-input-date i18n-title title="Date created" formControlName="created_date" [suggestions]="suggestions?.dates" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event)"
|
||||
[error]="error?.created_date"></pngx-input-date>
|
||||
<pngx-input-date i18n-title title="Date created" formControlName="created" [suggestions]="suggestions?.dates" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event)"
|
||||
[error]="error?.created"></pngx-input-date>
|
||||
<pngx-input-select [items]="correspondents" i18n-title title="Correspondent" formControlName="correspondent" [allowNull]="true" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.Correspondent)"
|
||||
(createNew)="createCorrespondent($event)" [hideAddButton]="createDisabled(DataType.Correspondent)" [suggestions]="suggestions?.correspondents" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Correspondent }"></pngx-input-select>
|
||||
<pngx-input-select [items]="documentTypes" i18n-title title="Document type" formControlName="document_type" [allowNull]="true" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.DocumentType)"
|
||||
|
@ -456,11 +456,11 @@ describe('DocumentDetailComponent', () => {
|
||||
initNormally()
|
||||
component.title = 'Foo Bar'
|
||||
const closeSpy = jest.spyOn(component, 'close')
|
||||
const updateSpy = jest.spyOn(documentService, 'update')
|
||||
const patchSpy = jest.spyOn(documentService, 'patch')
|
||||
const toastSpy = jest.spyOn(toastService, 'showInfo')
|
||||
updateSpy.mockImplementation((o) => of(doc))
|
||||
patchSpy.mockImplementation((o) => of(doc))
|
||||
component.save(true)
|
||||
expect(updateSpy).toHaveBeenCalled()
|
||||
expect(patchSpy).toHaveBeenCalled()
|
||||
expect(closeSpy).toHaveBeenCalled()
|
||||
expect(toastSpy).toHaveBeenCalledWith(
|
||||
'Document "Doc 3" saved successfully.'
|
||||
@ -471,11 +471,11 @@ describe('DocumentDetailComponent', () => {
|
||||
initNormally()
|
||||
component.title = 'Foo Bar'
|
||||
const closeSpy = jest.spyOn(component, 'close')
|
||||
const updateSpy = jest.spyOn(documentService, 'update')
|
||||
const patchSpy = jest.spyOn(documentService, 'patch')
|
||||
const toastSpy = jest.spyOn(toastService, 'showInfo')
|
||||
updateSpy.mockImplementation((o) => of(doc))
|
||||
patchSpy.mockImplementation((o) => of(doc))
|
||||
component.save()
|
||||
expect(updateSpy).toHaveBeenCalled()
|
||||
expect(patchSpy).toHaveBeenCalled()
|
||||
expect(closeSpy).not.toHaveBeenCalled()
|
||||
expect(toastSpy).toHaveBeenCalledWith(
|
||||
'Document "Doc 3" saved successfully.'
|
||||
@ -487,12 +487,12 @@ describe('DocumentDetailComponent', () => {
|
||||
initNormally()
|
||||
component.title = 'Foo Bar'
|
||||
const closeSpy = jest.spyOn(component, 'close')
|
||||
const updateSpy = jest.spyOn(documentService, 'update')
|
||||
const patchSpy = jest.spyOn(documentService, 'patch')
|
||||
const toastSpy = jest.spyOn(toastService, 'showError')
|
||||
const error = new Error('failed to save')
|
||||
updateSpy.mockImplementation(() => throwError(() => error))
|
||||
patchSpy.mockImplementation(() => throwError(() => error))
|
||||
component.save()
|
||||
expect(updateSpy).toHaveBeenCalled()
|
||||
expect(patchSpy).toHaveBeenCalled()
|
||||
expect(closeSpy).not.toHaveBeenCalled()
|
||||
expect(toastSpy).toHaveBeenCalledWith(
|
||||
'Error saving document "Doc 3"',
|
||||
@ -505,13 +505,13 @@ describe('DocumentDetailComponent', () => {
|
||||
initNormally()
|
||||
component.title = 'Foo Bar'
|
||||
const closeSpy = jest.spyOn(component, 'close')
|
||||
const updateSpy = jest.spyOn(documentService, 'update')
|
||||
const patchSpy = jest.spyOn(documentService, 'patch')
|
||||
const toastSpy = jest.spyOn(toastService, 'showInfo')
|
||||
updateSpy.mockImplementation(() =>
|
||||
patchSpy.mockImplementation(() =>
|
||||
throwError(() => new Error('failed to save'))
|
||||
)
|
||||
component.save(true)
|
||||
expect(updateSpy).toHaveBeenCalled()
|
||||
expect(patchSpy).toHaveBeenCalled()
|
||||
expect(closeSpy).toHaveBeenCalled()
|
||||
expect(toastSpy).toHaveBeenCalledWith(
|
||||
'Document "Doc 3" saved successfully.'
|
||||
@ -522,8 +522,8 @@ describe('DocumentDetailComponent', () => {
|
||||
initNormally()
|
||||
const nextDocId = 100
|
||||
component.title = 'Foo Bar'
|
||||
const updateSpy = jest.spyOn(documentService, 'update')
|
||||
updateSpy.mockReturnValue(of(doc))
|
||||
const patchSpy = jest.spyOn(documentService, 'patch')
|
||||
patchSpy.mockReturnValue(of(doc))
|
||||
const nextSpy = jest.spyOn(documentListViewService, 'getNext')
|
||||
nextSpy.mockReturnValue(of(nextDocId))
|
||||
const closeSpy = jest.spyOn(openDocumentsService, 'closeDocument')
|
||||
@ -531,7 +531,7 @@ describe('DocumentDetailComponent', () => {
|
||||
const navigateSpy = jest.spyOn(router, 'navigate')
|
||||
|
||||
component.saveEditNext()
|
||||
expect(updateSpy).toHaveBeenCalled()
|
||||
expect(patchSpy).toHaveBeenCalled()
|
||||
expect(navigateSpy).toHaveBeenCalledWith(['documents', nextDocId])
|
||||
expect
|
||||
})
|
||||
@ -541,12 +541,12 @@ describe('DocumentDetailComponent', () => {
|
||||
initNormally()
|
||||
component.title = 'Foo Bar'
|
||||
const closeSpy = jest.spyOn(component, 'close')
|
||||
const updateSpy = jest.spyOn(documentService, 'update')
|
||||
const patchSpy = jest.spyOn(documentService, 'patch')
|
||||
const toastSpy = jest.spyOn(toastService, 'showError')
|
||||
const error = new Error('failed to save')
|
||||
updateSpy.mockImplementation(() => throwError(() => error))
|
||||
patchSpy.mockImplementation(() => throwError(() => error))
|
||||
component.saveEditNext()
|
||||
expect(updateSpy).toHaveBeenCalled()
|
||||
expect(patchSpy).toHaveBeenCalled()
|
||||
expect(closeSpy).not.toHaveBeenCalled()
|
||||
expect(toastSpy).toHaveBeenCalledWith('Error saving document', error)
|
||||
})
|
||||
@ -791,14 +791,9 @@ describe('DocumentDetailComponent', () => {
|
||||
it('should select correct zoom setting in dropdown', () => {
|
||||
initNormally()
|
||||
component.setZoom(ZoomSetting.PageFit)
|
||||
expect(component.isZoomSelected(ZoomSetting.PageFit)).toBeTruthy()
|
||||
expect(component.isZoomSelected(ZoomSetting.One)).toBeFalsy()
|
||||
component.setZoom(ZoomSetting.PageWidth)
|
||||
expect(component.isZoomSelected(ZoomSetting.One)).toBeTruthy()
|
||||
expect(component.isZoomSelected(ZoomSetting.PageFit)).toBeFalsy()
|
||||
expect(component.currentZoom).toEqual(ZoomSetting.PageFit)
|
||||
component.setZoom(ZoomSetting.Quarter)
|
||||
expect(component.isZoomSelected(ZoomSetting.Quarter)).toBeTruthy()
|
||||
expect(component.isZoomSelected(ZoomSetting.PageFit)).toBeFalsy()
|
||||
expect(component.currentZoom).toEqual(ZoomSetting.Quarter)
|
||||
})
|
||||
|
||||
it('should support updating notes dynamically', () => {
|
||||
@ -970,10 +965,10 @@ describe('DocumentDetailComponent', () => {
|
||||
expect(fixture.debugElement.nativeElement.textContent).toContain(
|
||||
customFields[1].name
|
||||
)
|
||||
const updateSpy = jest.spyOn(documentService, 'update')
|
||||
const patchSpy = jest.spyOn(documentService, 'patch')
|
||||
component.save(true)
|
||||
expect(updateSpy.mock.lastCall[0].custom_fields).toHaveLength(2)
|
||||
expect(updateSpy.mock.lastCall[0].custom_fields[1]).toEqual({
|
||||
expect(patchSpy.mock.lastCall[0].custom_fields).toHaveLength(2)
|
||||
expect(patchSpy.mock.lastCall[0].custom_fields[1]).toEqual({
|
||||
field: customFields[1].id,
|
||||
value: null,
|
||||
})
|
||||
@ -990,13 +985,51 @@ describe('DocumentDetailComponent', () => {
|
||||
expect(
|
||||
fixture.debugElement.query(By.css('form')).nativeElement.textContent
|
||||
).not.toContain('Field 1')
|
||||
const updateSpy = jest.spyOn(documentService, 'update')
|
||||
const patchSpy = jest.spyOn(documentService, 'patch')
|
||||
component.save(true)
|
||||
expect(updateSpy.mock.lastCall[0].custom_fields).toHaveLength(
|
||||
expect(patchSpy.mock.lastCall[0].custom_fields).toHaveLength(
|
||||
initialLength - 1
|
||||
)
|
||||
})
|
||||
|
||||
it('should correctly determine changed fields', () => {
|
||||
initNormally()
|
||||
expect(component['getChangedFields']()).toEqual({
|
||||
id: doc.id,
|
||||
})
|
||||
component.documentForm.get('title').setValue('Foo Bar')
|
||||
component.documentForm.get('permissions_form').setValue({
|
||||
owner: 1,
|
||||
set_permissions: {
|
||||
view: {
|
||||
users: [2],
|
||||
groups: [],
|
||||
},
|
||||
change: {
|
||||
users: [3],
|
||||
groups: [],
|
||||
},
|
||||
},
|
||||
})
|
||||
component.documentForm.get('title').markAsDirty()
|
||||
component.documentForm.get('permissions_form').markAsDirty()
|
||||
expect(component['getChangedFields']()).toEqual({
|
||||
id: doc.id,
|
||||
title: 'Foo Bar',
|
||||
owner: 1,
|
||||
set_permissions: {
|
||||
view: {
|
||||
users: [2],
|
||||
groups: [],
|
||||
},
|
||||
change: {
|
||||
users: [3],
|
||||
groups: [],
|
||||
},
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it('should show custom field errors', () => {
|
||||
initNormally()
|
||||
component.error = {
|
||||
|
@ -208,7 +208,7 @@ export class DocumentDetailComponent
  documentForm: FormGroup = new FormGroup({
    title: new FormControl(''),
    content: new FormControl(''),
    created_date: new FormControl(),
    created: new FormControl(),
    correspondent: new FormControl(),
    document_type: new FormControl(),
    storage_path: new FormControl(),
@ -490,7 +490,7 @@ export class DocumentDetailComponent
    this.store = new BehaviorSubject({
      title: doc.title,
      content: doc.content,
      created_date: doc.created_date,
      created: doc.created,
      correspondent: doc.correspondent,
      document_type: doc.document_type,
      storage_path: doc.storage_path,
@ -784,6 +784,7 @@ export class DocumentDetailComponent
          this.title = doc.title
          this.updateFormForCustomFields()
          this.documentForm.patchValue(doc)
          this.documentForm.markAsPristine()
          this.openDocumentService.setDirty(doc, false)
        },
        error: () => {
@ -794,11 +795,30 @@ export class DocumentDetailComponent
      })
  }

  private getChangedFields(): any {
    const changes = {
      id: this.document.id,
    }
    Object.keys(this.documentForm.controls).forEach((key) => {
      if (this.documentForm.get(key).dirty) {
        if (key === 'permissions_form') {
          changes['owner'] =
            this.documentForm.get('permissions_form').value['owner']
          changes['set_permissions'] =
            this.documentForm.get('permissions_form').value['set_permissions']
        } else {
          changes[key] = this.documentForm.get(key).value
        }
      }
    })
    return changes
  }

  save(close: boolean = false) {
    this.networkActive = true
    ;(document.activeElement as HTMLElement)?.dispatchEvent(new Event('change'))
    this.documentsService
      .update(this.document)
      .patch(this.getChangedFields())
      .pipe(first())
      .subscribe({
        next: (docValues) => {
@ -852,7 +872,7 @@ export class DocumentDetailComponent
    this.networkActive = true
    this.store.next(this.documentForm.value)
    this.documentsService
      .update(this.document)
      .patch(this.getChangedFields())
      .pipe(
        switchMap((updateResult) => {
          return this.documentListViewService
@ -1099,12 +1119,10 @@ export class DocumentDetailComponent
    )
  }

  isZoomSelected(setting: ZoomSetting): boolean {
  get currentZoom() {
    if (this.previewZoomScale === ZoomSetting.PageFit) {
      return setting === ZoomSetting.PageFit
    }

    return this.previewZoomSetting === setting
      return ZoomSetting.PageFit
    } else return this.previewZoomSetting
  }

  getZoomSettingTitle(setting: ZoomSetting): string {
@ -1305,6 +1323,8 @@ export class DocumentDetailComponent
      created: new Date(),
    })
    this.updateFormForCustomFields(true)
    this.documentForm.get('custom_fields').markAsDirty()
    this.documentForm.updateValueAndValidity()
  }

  public removeField(fieldInstance: CustomFieldInstance) {
@ -1313,6 +1333,7 @@ export class DocumentDetailComponent
      1
    )
    this.updateFormForCustomFields(true)
    this.documentForm.get('custom_fields').markAsDirty()
    this.documentForm.updateValueAndValidity()
  }
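A brief aside on the idea behind getChangedFields (not part of the diff): only controls that Angular marks dirty are copied into the payload, so the subsequent patch() sends a partial document instead of every field. A self-contained sketch of the same idea with a plain FormGroup; the field names are illustrative:

    // Sketch: building a partial PATCH payload from dirty controls only
    import { FormControl, FormGroup } from '@angular/forms'

    function changedFields(id: number, form: FormGroup): Record<string, any> {
      const changes: Record<string, any> = { id }
      for (const key of Object.keys(form.controls)) {
        if (form.controls[key].dirty) {
          changes[key] = form.controls[key].value
        }
      }
      return changes
    }

    const form = new FormGroup({
      title: new FormControl('Invoice'),
      correspondent: new FormControl(7),
    })
    form.controls['title'].setValue('Invoice 2024')
    form.controls['title'].markAsDirty()
    // changedFields(3, form) -> { id: 3, title: 'Invoice 2024' }; correspondent is untouched, so omitted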
@ -112,14 +112,14 @@
|
||||
@if (displayFields.includes(DisplayField.CREATED) || displayFields.includes(DisplayField.ADDED)) {
|
||||
<ng-template #dateTooltip>
|
||||
<div class="d-flex flex-column text-light">
|
||||
<span i18n>Created: {{ document.created_date | customDate }}</span>
|
||||
<span i18n>Created: {{ document.created | customDate }}</span>
|
||||
<span i18n>Added: {{ document.added | customDate }}</span>
|
||||
<span i18n>Modified: {{ document.modified | customDate }}</span>
|
||||
</div>
|
||||
</ng-template>
|
||||
@if (displayFields.includes(DisplayField.CREATED)) {
|
||||
<div class="list-group-item bg-light text-dark p-1 border-0 d-flex align-items-center" [ngbTooltip]="dateTooltip">
|
||||
<i-bs width=".9em" height=".9em" class="me-2 text-muted" name="calendar-event"></i-bs><small>{{document.created_date | customDate:'mediumDate'}}</small>
|
||||
<i-bs width=".9em" height=".9em" class="me-2 text-muted" name="calendar-event"></i-bs><small>{{document.created | customDate:'mediumDate'}}</small>
|
||||
</div>
|
||||
}
|
||||
@if (displayFields.includes(DisplayField.ADDED)) {
|
||||
|
@ -73,14 +73,14 @@
|
||||
<div class="list-group-item bg-transparent p-0 border-0 d-flex flex-wrap-reverse justify-content-between">
|
||||
<ng-template #dateTooltip>
|
||||
<div class="d-flex flex-column text-light">
|
||||
<span i18n>Created: {{ document.created_date | customDate }}</span>
|
||||
<span i18n>Created: {{ document.created | customDate }}</span>
|
||||
<span i18n>Added: {{ document.added | customDate }}</span>
|
||||
<span i18n>Modified: {{ document.modified | customDate }}</span>
|
||||
</div>
|
||||
</ng-template>
|
||||
<div class="ps-0 p-1" placement="top" [ngbTooltip]="dateTooltip">
|
||||
<i-bs width="1em" height="1em" class="me-2 text-muted" name="calendar-event"></i-bs>
|
||||
<small>{{document.created_date | customDate:'mediumDate'}}</small>
|
||||
<small>{{document.created | customDate:'mediumDate'}}</small>
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
|
@ -348,7 +348,7 @@
|
||||
}
|
||||
@if (activeDisplayFields.includes(DisplayField.CREATED)) {
|
||||
<td>
|
||||
{{d.created_date | customDate}}
|
||||
{{d.created | customDate}}
|
||||
</td>
|
||||
}
|
||||
@if (activeDisplayFields.includes(DisplayField.ADDED)) {
|
||||
|
@ -2,13 +2,6 @@
  <ng-content select="[content]"></ng-content>
</div>

<div class="global-dropzone-overlay position-fixed top-0 start-0 bottom-0 end-0 text-center pe-none fade" [class.show]="fileIsOver" [class.hide]="hidden">
<div class="global-dropzone-overlay position-fixed top-0 start-0 bottom-0 end-0 text-center pe-none" [class.active]="fileIsOver && !hidden">
  <h2 class="pe-none position-absolute top-50 start-50 translate-middle" i18n>Drop files to begin upload</h2>
</div>

<ngx-file-drop
  dropZoneClassName="visually-hidden"
  contentClassName="visually-hidden"
  (onFileDrop)="dropped($event)"
  #ngxFileDrop>
</ngx-file-drop>

@ -1,8 +1,14 @@
.global-dropzone-overlay {
  opacity: 0;
  transition: opacity 0.25s ease-in-out;
  background-color: hsla(var(--pngx-primary), var(--pngx-primary-lightness), .8);
  z-index: 1200;

  h2 {
    color: var(--pngx-primary-text-contrast)
  }

  &.active {
    opacity: 1;
  }
}
@ -9,7 +9,6 @@ import {
|
||||
tick,
|
||||
} from '@angular/core/testing'
|
||||
import { By } from '@angular/platform-browser'
|
||||
import { NgxFileDropEntry, NgxFileDropModule } from 'ngx-file-drop'
|
||||
import { PermissionsService } from 'src/app/services/permissions.service'
|
||||
import { SettingsService } from 'src/app/services/settings.service'
|
||||
import { ToastService } from 'src/app/services/toast.service'
|
||||
@ -27,7 +26,7 @@ describe('FileDropComponent', () => {
|
||||
|
||||
beforeEach(() => {
|
||||
TestBed.configureTestingModule({
|
||||
imports: [NgxFileDropModule, FileDropComponent, ToastsComponent],
|
||||
imports: [FileDropComponent, ToastsComponent],
|
||||
providers: [
|
||||
provideHttpClient(withInterceptorsFromDi()),
|
||||
provideHttpClientTesting(),
|
||||
@ -66,12 +65,12 @@ describe('FileDropComponent', () => {
|
||||
const dropzone = fixture.debugElement.query(
|
||||
By.css('.global-dropzone-overlay')
|
||||
)
|
||||
expect(dropzone.classes['hide']).toBeTruthy()
|
||||
expect(dropzone.classes['active']).toBeFalsy()
|
||||
component.onDragLeave(new Event('dragleave') as DragEvent)
|
||||
tick(700)
|
||||
fixture.detectChanges()
|
||||
// drop
|
||||
const uploadSpy = jest.spyOn(uploadDocumentsService, 'uploadFiles')
|
||||
const uploadSpy = jest.spyOn(uploadDocumentsService, 'uploadFile')
|
||||
const dragEvent = new Event('drop')
|
||||
dragEvent['dataTransfer'] = {
|
||||
files: {
|
||||
@ -93,53 +92,209 @@ describe('FileDropComponent', () => {
|
||||
tick(1)
|
||||
fixture.detectChanges()
|
||||
expect(component.fileIsOver).toBeTruthy()
|
||||
const dropzone = fixture.debugElement.query(
|
||||
By.css('.global-dropzone-overlay')
|
||||
)
|
||||
component.onDragLeave(new Event('dragleave') as DragEvent)
|
||||
tick(700)
|
||||
fixture.detectChanges()
|
||||
expect(dropzone.classes['hide']).toBeTruthy()
|
||||
// drop
|
||||
const toastSpy = jest.spyOn(toastService, 'show')
|
||||
const uploadSpy = jest.spyOn(
|
||||
UploadDocumentsService.prototype as any,
|
||||
'uploadFile'
|
||||
const uploadSpy = jest.spyOn(uploadDocumentsService, 'uploadFile')
|
||||
const file = new File(
|
||||
[new Blob(['testing'], { type: 'application/pdf' })],
|
||||
'file.pdf'
|
||||
)
|
||||
const dragEvent = new Event('drop')
|
||||
dragEvent['dataTransfer'] = {
|
||||
files: {
|
||||
item: () => {
|
||||
return new File(
|
||||
[new Blob(['testing'], { type: 'application/pdf' })],
|
||||
'file.pdf'
|
||||
)
|
||||
items: [
|
||||
{
|
||||
kind: 'file',
|
||||
type: 'application/pdf',
|
||||
getAsFile: () => file,
|
||||
},
|
||||
length: 1,
|
||||
} as unknown as FileList,
|
||||
],
|
||||
}
|
||||
component.onDrop(dragEvent as DragEvent)
|
||||
component.dropped([
|
||||
{
|
||||
fileEntry: {
|
||||
isFile: true,
|
||||
file: (callback) => {
|
||||
callback(
|
||||
new File(
|
||||
[new Blob(['testing'], { type: 'application/pdf' })],
|
||||
'file.pdf'
|
||||
)
|
||||
)
|
||||
},
|
||||
},
|
||||
} as unknown as NgxFileDropEntry,
|
||||
])
|
||||
tick(3000)
|
||||
expect(toastSpy).toHaveBeenCalled()
|
||||
expect(uploadSpy).toHaveBeenCalled()
|
||||
discardPeriodicTasks()
|
||||
}))
|
||||
|
||||
it('should support drag drop, initiate upload with webkitGetAsEntry', fakeAsync(() => {
|
||||
jest.spyOn(permissionsService, 'currentUserCan').mockReturnValue(true)
|
||||
expect(component.fileIsOver).toBeFalsy()
|
||||
const overEvent = new Event('dragover') as DragEvent
|
||||
;(overEvent as any).dataTransfer = { types: ['Files'] }
|
||||
component.onDragOver(overEvent)
|
||||
tick(1)
|
||||
fixture.detectChanges()
|
||||
expect(component.fileIsOver).toBeTruthy()
|
||||
component.onDragLeave(new Event('dragleave') as DragEvent)
|
||||
tick(700)
|
||||
fixture.detectChanges()
|
||||
// drop
|
||||
const toastSpy = jest.spyOn(toastService, 'show')
|
||||
const uploadSpy = jest.spyOn(uploadDocumentsService, 'uploadFile')
|
||||
const file = new File(
|
||||
[new Blob(['testing'], { type: 'application/pdf' })],
|
||||
'file.pdf'
|
||||
)
|
||||
const dragEvent = new Event('drop')
|
||||
dragEvent['dataTransfer'] = {
|
||||
items: [
|
||||
{
|
||||
kind: 'file',
|
||||
type: 'application/pdf',
|
||||
webkitGetAsEntry: () => ({
|
||||
isFile: true,
|
||||
isDirectory: false,
|
||||
file: (cb: (file: File) => void) => cb(file),
|
||||
}),
|
||||
},
|
||||
],
|
||||
files: [],
|
||||
}
|
||||
component.onDrop(dragEvent as DragEvent)
|
||||
tick(3000)
|
||||
expect(toastSpy).toHaveBeenCalled()
|
||||
expect(uploadSpy).toHaveBeenCalled()
|
||||
discardPeriodicTasks()
|
||||
}))
|
||||
|
||||
it('should show an error on traverseFileTree error', fakeAsync(() => {
|
||||
jest.spyOn(permissionsService, 'currentUserCan').mockReturnValue(true)
|
||||
const toastSpy = jest.spyOn(toastService, 'showError')
|
||||
const traverseSpy = jest
|
||||
.spyOn(component as any, 'traverseFileTree')
|
||||
.mockReturnValue(Promise.reject(new Error('Error traversing file tree')))
|
||||
fixture.detectChanges()
|
||||
|
||||
// Simulate a drop with a directory entry
|
||||
const mockEntry = {
|
||||
isDirectory: true,
|
||||
isFile: false,
|
||||
createReader: () => ({ readEntries: jest.fn() }),
|
||||
} as unknown as FileSystemDirectoryEntry
|
||||
|
||||
const event = {
|
||||
preventDefault: () => {},
|
||||
stopImmediatePropagation: () => {},
|
||||
dataTransfer: {
|
||||
items: [
|
||||
{
|
||||
kind: 'file',
|
||||
webkitGetAsEntry: () => mockEntry,
|
||||
},
|
||||
],
|
||||
},
|
||||
} as unknown as DragEvent
|
||||
|
||||
component.onDrop(event)
|
||||
|
||||
tick() // flush microtasks (e.g., Promise.reject)
|
||||
|
||||
expect(traverseSpy).toHaveBeenCalled()
|
||||
expect(toastSpy).toHaveBeenCalledWith(
|
||||
$localize`Failed to read dropped items: Error traversing file tree`
|
||||
)
|
||||
|
||||
discardPeriodicTasks()
|
||||
}))
|
||||
|
||||
it('should support drag drop, initiate upload without DataTransfer API support', fakeAsync(() => {
|
||||
jest.spyOn(permissionsService, 'currentUserCan').mockReturnValue(true)
|
||||
expect(component.fileIsOver).toBeFalsy()
|
||||
const overEvent = new Event('dragover') as DragEvent
|
||||
;(overEvent as any).dataTransfer = { types: ['Files'] }
|
||||
component.onDragOver(overEvent)
|
||||
tick(1)
|
||||
fixture.detectChanges()
|
||||
expect(component.fileIsOver).toBeTruthy()
|
||||
component.onDragLeave(new Event('dragleave') as DragEvent)
|
||||
tick(700)
|
||||
fixture.detectChanges()
|
||||
// drop
|
||||
const toastSpy = jest.spyOn(toastService, 'show')
|
||||
const uploadSpy = jest.spyOn(uploadDocumentsService, 'uploadFile')
|
||||
const file = new File(
|
||||
[new Blob(['testing'], { type: 'application/pdf' })],
|
||||
'file.pdf'
|
||||
)
|
||||
const dragEvent = new Event('drop')
|
||||
dragEvent['dataTransfer'] = {
|
||||
items: [],
|
||||
files: [file],
|
||||
}
|
||||
component.onDrop(dragEvent as DragEvent)
|
||||
tick(3000)
|
||||
expect(toastSpy).toHaveBeenCalled()
|
||||
expect(uploadSpy).toHaveBeenCalled()
|
||||
discardPeriodicTasks()
|
||||
}))
|
||||
|
||||
it('should resolve a single file when entry isFile', () => {
|
||||
const mockFile = new File(['data'], 'test.txt', { type: 'text/plain' })
|
||||
const mockEntry = {
|
||||
isFile: true,
|
||||
isDirectory: false,
|
||||
file: (cb: (f: File) => void) => cb(mockFile),
|
||||
} as unknown as FileSystemFileEntry
|
||||
|
||||
return (component as any)
|
||||
.traverseFileTree(mockEntry)
|
||||
.then((result: File[]) => {
|
||||
expect(result).toEqual([mockFile])
|
||||
})
|
||||
})
|
||||
|
||||
it('should resolve all files in a flat directory', async () => {
|
||||
const file1 = new File(['data'], 'file1.txt')
|
||||
const file2 = new File(['data'], 'file2.txt')
|
||||
|
||||
const mockFileEntry1 = {
|
||||
isFile: true,
|
||||
isDirectory: false,
|
||||
file: (cb: (f: File) => void) => cb(file1),
|
||||
} as unknown as FileSystemFileEntry
|
||||
|
||||
const mockFileEntry2 = {
|
||||
isFile: true,
|
||||
isDirectory: false,
|
||||
file: (cb: (f: File) => void) => cb(file2),
|
||||
} as unknown as FileSystemFileEntry
|
||||
|
||||
let callCount = 0
|
||||
|
||||
const mockDirEntry = {
|
||||
isFile: false,
|
||||
isDirectory: true,
|
||||
createReader: () => ({
|
||||
readEntries: (cb: (batch: FileSystemEntry[]) => void) => {
|
||||
if (callCount++ === 0) {
|
||||
cb([mockFileEntry1, mockFileEntry2])
|
||||
} else {
|
||||
cb([]) // second call: signal EOF
|
||||
}
|
||||
},
|
||||
}),
|
||||
} as unknown as FileSystemDirectoryEntry
|
||||
|
||||
const result = await (component as any).traverseFileTree(mockDirEntry)
|
||||
expect(result).toEqual([file1, file2])
|
||||
})
|
||||
|
||||
it('should resolve a non-file non-directory entry as an empty array', () => {
|
||||
const mockEntry = {
|
||||
isFile: false,
|
||||
isDirectory: false,
|
||||
file: (cb: (f: File) => void) => cb(new File([], '')),
|
||||
} as unknown as FileSystemEntry
|
||||
return (component as any)
|
||||
.traverseFileTree(mockEntry)
|
||||
.then((result: File[]) => {
|
||||
expect(result).toEqual([])
|
||||
})
|
||||
})
|
||||
|
||||
it('should ignore events if disabled', fakeAsync(() => {
|
||||
settingsService.globalDropzoneEnabled = false
|
||||
expect(settingsService.globalDropzoneActive).toBeFalsy()
|
||||
|
@ -1,9 +1,4 @@
import { Component, HostListener, ViewChild } from '@angular/core'
import {
  NgxFileDropComponent,
  NgxFileDropEntry,
  NgxFileDropModule,
} from 'ngx-file-drop'
import { Component, HostListener } from '@angular/core'
import {
  PermissionAction,
  PermissionsService,
@ -17,7 +12,7 @@ import { UploadDocumentsService } from 'src/app/services/upload-documents.servic
  selector: 'pngx-file-drop',
  templateUrl: './file-drop.component.html',
  styleUrls: ['./file-drop.component.scss'],
  imports: [NgxFileDropModule],
  imports: [],
})
export class FileDropComponent {
  private fileLeaveTimeoutID: any
@ -41,8 +36,6 @@ export class FileDropComponent {
    )
  }

  @ViewChild('ngxFileDrop') ngxFileDrop: NgxFileDropComponent

  @HostListener('document:dragover', ['$event']) onDragOver(event: DragEvent) {
    if (!this.dragDropEnabled || !event.dataTransfer?.types?.includes('Files'))
      return
@ -78,19 +71,85 @@ export class FileDropComponent {
    }, ms)
  }

  private traverseFileTree(entry: FileSystemEntry): Promise<File[]> {
    if (entry.isFile) {
      return new Promise((resolve, reject) => {
        ;(entry as FileSystemFileEntry).file(resolve, reject)
      }).then((file: File) => [file])
    }

    if (entry.isDirectory) {
      return new Promise<File[]>((resolve, reject) => {
        const dirReader = (entry as FileSystemDirectoryEntry).createReader()
        const allEntries: FileSystemEntry[] = []

        const readEntries = () => {
          dirReader.readEntries((batch) => {
            if (batch.length === 0) {
              const promises = allEntries.map((child) =>
                this.traverseFileTree(child)
              )
              Promise.all(promises)
                .then((results) => resolve([].concat(...results)))
                .catch(reject)
            } else {
              allEntries.push(...batch)
              readEntries() // keep reading
            }
          }, reject)
        }

        readEntries()
      })
    }

    return Promise.resolve([])
  }

  @HostListener('document:drop', ['$event']) public onDrop(event: DragEvent) {
    if (!this.dragDropEnabled) return
    event.preventDefault()
    event.stopImmediatePropagation()
    // pass event onto ngx-file-drop to handle files
    this.ngxFileDrop.dropFiles(event)
    this.onDragLeave(event, true)
  }

  public dropped(files: NgxFileDropEntry[]) {
    this.uploadDocumentsService.onNgxFileDrop(files)
    if (files.length > 0)
    const files: File[] = []
    const entries: FileSystemEntry[] = []
    if (event.dataTransfer?.items && event.dataTransfer.items.length) {
      for (const item of Array.from(event.dataTransfer.items)) {
        if (item.webkitGetAsEntry) {
          // webkitGetAsEntry not standard, but is widely supported
          const entry = item.webkitGetAsEntry()
          if (entry) entries.push(entry)
        } else if (item.kind === 'file') {
          const file = item.getAsFile()
          if (file) files.push(file)
        }
      }
    } else if (event.dataTransfer?.files) {
      // Fallback for browsers without DataTransferItem API
      for (const file of Array.from(event.dataTransfer.files)) {
        files.push(file)
      }
    }

    if (entries.length) {
      const promises = entries.map((entry) => this.traverseFileTree(entry))
      Promise.all(promises)
        .then((results) => {
          files.push(...[].concat(...results))
          this.toastService.showInfo($localize`Initiating upload...`, 3000)
          files.forEach((file) => this.uploadDocumentsService.uploadFile(file))
        })
        .catch((e) => {
          this.toastService.showError(
            $localize`Failed to read dropped items: ${e.message}`
          )
        })
    } else if (files.length) {
      this.toastService.showInfo($localize`Initiating upload...`, 3000)
      files.forEach((file) => this.uploadDocumentsService.uploadFile(file))
    }

    this.onDragLeave(event, true)
  }

  @HostListener('window:blur', ['$event']) public onWindowBlur() {
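Two notes on the directory traversal above (commentary, not from the diff): readEntries() returns its results in batches and has to be called repeatedly until it yields an empty array (Chromium, for instance, hands back at most roughly 100 entries per call), which is why the reader is drained in a loop; and the same drain can be written with async/await. A hypothetical sketch of that alternative formulation:

    // Sketch: promise-based drain of a FileSystemDirectoryReader, equivalent to the readEntries loop above
    async function readAllEntries(dir: FileSystemDirectoryEntry): Promise<FileSystemEntry[]> {
      const reader = dir.createReader()
      const all: FileSystemEntry[] = []
      while (true) {
        const batch = await new Promise<FileSystemEntry[]>((resolve, reject) =>
          reader.readEntries(resolve, reject)
        )
        if (batch.length === 0) break // an empty batch signals end-of-directory
        all.push(...batch)
      }
      return all
    }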
@ -188,7 +188,7 @@ describe('MailComponent', () => {
|
||||
const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
|
||||
editDialog.failed.emit()
|
||||
expect(toastErrorSpy).toBeCalled()
|
||||
editDialog.succeeded.emit(mailAccounts[0])
|
||||
editDialog.succeeded.emit(mailAccounts[0] as any)
|
||||
expect(toastInfoSpy).toHaveBeenCalledWith(
|
||||
`Saved account "${mailAccounts[0].name}".`
|
||||
)
|
||||
@ -246,7 +246,7 @@ describe('MailComponent', () => {
|
||||
const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
|
||||
editDialog.failed.emit()
|
||||
expect(toastErrorSpy).toBeCalled()
|
||||
editDialog.succeeded.emit(mailRules[0])
|
||||
editDialog.succeeded.emit(mailRules[0] as any)
|
||||
expect(toastInfoSpy).toHaveBeenCalledWith(
|
||||
`Saved rule "${mailRules[0].name}".`
|
||||
)
|
||||
|
@ -130,9 +130,6 @@ export interface Document extends ObjectWithPermissions {
  // UTC
  created?: Date

  // localized date
  created_date?: Date

  modified?: Date

  added?: Date
@ -49,6 +49,7 @@ export enum ConfigOptionType {
|
||||
export const ConfigCategory = {
|
||||
General: $localize`General Settings`,
|
||||
OCR: $localize`OCR Settings`,
|
||||
Barcode: $localize`Barcode Settings`,
|
||||
}
|
||||
|
||||
export interface ConfigOption {
|
||||
@ -180,6 +181,83 @@ export const PaperlessConfigOptions: ConfigOption[] = [
|
||||
config_key: 'PAPERLESS_APP_TITLE',
|
||||
category: ConfigCategory.General,
|
||||
},
|
||||
{
|
||||
key: 'barcodes_enabled',
|
||||
title: $localize`Enable Barcodes`,
|
||||
type: ConfigOptionType.Boolean,
|
||||
config_key: 'PAPERLESS_CONSUMER_ENABLE_BARCODES',
|
||||
category: ConfigCategory.Barcode,
|
||||
},
|
||||
{
|
||||
key: 'barcode_enable_tiff_support',
|
||||
title: $localize`Enable TIFF Support`,
|
||||
type: ConfigOptionType.Boolean,
|
||||
config_key: 'PAPERLESS_CONSUMER_BARCODE_TIFF_SUPPORT',
|
||||
category: ConfigCategory.Barcode,
|
||||
},
|
||||
{
|
||||
key: 'barcode_string',
|
||||
title: $localize`Barcode String`,
|
||||
type: ConfigOptionType.String,
|
||||
config_key: 'PAPERLESS_CONSUMER_BARCODE_STRING',
|
||||
category: ConfigCategory.Barcode,
|
||||
},
|
||||
{
|
||||
key: 'barcode_retain_split_pages',
|
||||
title: $localize`Retain Split Pages`,
|
||||
type: ConfigOptionType.Boolean,
|
||||
config_key: 'PAPERLESS_CONSUMER_BARCODE_RETAIN_SPLIT_PAGES',
|
||||
category: ConfigCategory.Barcode,
|
||||
},
|
||||
{
|
||||
key: 'barcode_enable_asn',
|
||||
title: $localize`Enable ASN`,
|
||||
type: ConfigOptionType.Boolean,
|
||||
config_key: 'PAPERLESS_CONSUMER_ENABLE_ASN_BARCODE',
|
||||
category: ConfigCategory.Barcode,
|
||||
},
|
||||
{
|
||||
key: 'barcode_asn_prefix',
|
||||
title: $localize`ASN Prefix`,
|
||||
type: ConfigOptionType.String,
|
||||
config_key: 'PAPERLESS_CONSUMER_ASN_BARCODE_PREFIX',
|
||||
category: ConfigCategory.Barcode,
|
||||
},
|
||||
{
|
||||
key: 'barcode_upscale',
|
||||
title: $localize`Upscale`,
|
||||
type: ConfigOptionType.Number,
|
||||
config_key: 'PAPERLESS_CONSUMER_BARCODE_UPSCALE',
|
||||
category: ConfigCategory.Barcode,
|
||||
},
|
||||
{
|
||||
key: 'barcode_dpi',
|
||||
title: $localize`DPI`,
|
||||
type: ConfigOptionType.Number,
|
||||
config_key: 'PAPERLESS_CONSUMER_BARCODE_DPI',
|
||||
category: ConfigCategory.Barcode,
|
||||
},
|
||||
{
|
||||
key: 'barcode_max_pages',
|
||||
title: $localize`Max Pages`,
|
||||
type: ConfigOptionType.Number,
|
||||
config_key: 'PAPERLESS_CONSUMER_BARCODE_MAX_PAGES',
|
||||
category: ConfigCategory.Barcode,
|
||||
},
|
||||
{
|
||||
key: 'barcode_enable_tag',
|
||||
title: $localize`Enable Tag Detection`,
|
||||
type: ConfigOptionType.Boolean,
|
||||
config_key: 'PAPERLESS_CONSUMER_ENABLE_TAG_BARCODE',
|
||||
category: ConfigCategory.Barcode,
|
||||
},
|
||||
{
|
||||
key: 'barcode_tag_mapping',
|
||||
title: $localize`Tag Mapping`,
|
||||
type: ConfigOptionType.JSON,
|
||||
config_key: 'PAPERLESS_CONSUMER_TAG_BARCODE_MAPPING',
|
||||
category: ConfigCategory.Barcode,
|
||||
},
|
||||
]
|
||||
|
||||
export interface PaperlessConfig extends ObjectWithId {
|
||||
@ -198,4 +276,15 @@ export interface PaperlessConfig extends ObjectWithId {
|
||||
user_args: object
|
||||
app_logo: string
|
||||
app_title: string
|
||||
barcodes_enabled: boolean
|
||||
barcode_enable_tiff_support: boolean
|
||||
barcode_string: string
|
||||
barcode_retain_split_pages: boolean
|
||||
barcode_enable_asn: boolean
|
||||
barcode_asn_prefix: string
|
||||
barcode_upscale: number
|
||||
barcode_dpi: number
|
||||
barcode_max_pages: number
|
||||
barcode_enable_tag: boolean
|
||||
barcode_tag_mapping: object
|
||||
}
|
||||
|
@ -7,4 +7,6 @@ export interface WebsocketProgressMessage {
  message?: string
  document_id: number
  owner_id?: number
  users_can_view?: number[]
  groups_can_view?: number[]
}
@ -268,15 +268,15 @@ describe(`DocumentService`, () => {
|
||||
expect(req.request.method).toEqual('GET')
|
||||
})
|
||||
|
||||
it('should pass remove_inbox_tags setting to update', () => {
|
||||
subscription = service.update(documents[0]).subscribe()
|
||||
it('should pass remove_inbox_tags setting to patch', () => {
|
||||
subscription = service.patch(documents[0]).subscribe()
|
||||
let req = httpTestingController.expectOne(
|
||||
`${environment.apiBaseUrl}${endpoint}/${documents[0].id}/`
|
||||
)
|
||||
expect(req.request.body.remove_inbox_tags).toEqual(false)
|
||||
|
||||
settingsService.set(SETTINGS_KEYS.DOCUMENT_EDITING_REMOVE_INBOX_TAGS, true)
|
||||
subscription = service.update(documents[0]).subscribe()
|
||||
subscription = service.patch(documents[0]).subscribe()
|
||||
req = httpTestingController.expectOne(
|
||||
`${environment.apiBaseUrl}${endpoint}/${documents[0].id}/`
|
||||
)
|
||||
|
@ -189,13 +189,11 @@ export class DocumentService extends AbstractPaperlessService<Document> {
    return this.http.get<number>(this.getResourceUrl(null, 'next_asn'))
  }

  update(o: Document): Observable<Document> {
    // we want to only set created_date
    o.created = undefined
  patch(o: Document): Observable<Document> {
    o.remove_inbox_tags = !!this.settingsService.get(
      SETTINGS_KEYS.DOCUMENT_EDITING_REMOVE_INBOX_TAGS
    )
    return super.update(o)
    return super.patch(o)
  }

  uploadDocument(formData) {
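For orientation (not part of the diff): the base service's update() sends the full object, whereas patch() issues an HTTP PATCH, so combined with getChangedFields() earlier only the edited fields travel over the wire. A rough sketch of the difference, assuming a generic REST service shaped like the one in this codebase:

    // Sketch: PUT replaces the whole resource, PATCH sends only the supplied fields
    update(o: Document): Observable<Document> {
      return this.http.put<Document>(this.getResourceUrl(o.id), o)   // full payload
    }

    patch(o: Partial<Document> & { id: number }): Observable<Document> {
      return this.http.patch<Document>(this.getResourceUrl(o.id), o) // partial payload
    }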
@ -15,33 +15,6 @@ import {
|
||||
WebsocketStatusService,
|
||||
} from './websocket-status.service'
|
||||
|
||||
const files = [
|
||||
{
|
||||
lastModified: 1693349892540,
|
||||
lastModifiedDate: new Date(),
|
||||
name: 'file1.pdf',
|
||||
size: 386,
|
||||
type: 'application/pdf',
|
||||
},
|
||||
{
|
||||
lastModified: 1695618533892,
|
||||
lastModifiedDate: new Date(),
|
||||
name: 'file2.pdf',
|
||||
size: 358265,
|
||||
type: 'application/pdf',
|
||||
},
|
||||
]
|
||||
|
||||
const fileList = {
|
||||
item: (x) => {
|
||||
return new File(
|
||||
[new Blob(['testing'], { type: files[x].type })],
|
||||
files[x].name
|
||||
)
|
||||
},
|
||||
length: files.length,
|
||||
} as unknown as FileList
|
||||
|
||||
describe('UploadDocumentsService', () => {
|
||||
let httpTestingController: HttpTestingController
|
||||
let uploadDocumentsService: UploadDocumentsService
|
||||
@ -68,7 +41,11 @@ describe('UploadDocumentsService', () => {
|
||||
})
|
||||
|
||||
it('calls post_document api endpoint on upload', () => {
|
||||
uploadDocumentsService.uploadFiles(fileList)
|
||||
const file = new File(
|
||||
[new Blob(['testing'], { type: 'application/pdf' })],
|
||||
'file.pdf'
|
||||
)
|
||||
uploadDocumentsService.uploadFile(file)
|
||||
const req = httpTestingController.match(
|
||||
`${environment.apiBaseUrl}documents/post_document/`
|
||||
)
|
||||
@ -78,7 +55,16 @@ describe('UploadDocumentsService', () => {
|
||||
})
|
||||
|
||||
it('updates progress during upload and failure', () => {
|
||||
uploadDocumentsService.uploadFiles(fileList)
|
||||
const file = new File(
|
||||
[new Blob(['testing'], { type: 'application/pdf' })],
|
||||
'file.pdf'
|
||||
)
|
||||
const file2 = new File(
|
||||
[new Blob(['testing'], { type: 'application/pdf' })],
|
||||
'file2.pdf'
|
||||
)
|
||||
uploadDocumentsService.uploadFile(file)
|
||||
uploadDocumentsService.uploadFile(file2)
|
||||
|
||||
expect(websocketStatusService.getConsumerStatusNotCompleted()).toHaveLength(
|
||||
2
|
||||
@ -103,7 +89,11 @@ describe('UploadDocumentsService', () => {
|
||||
})
|
||||
|
||||
it('updates progress on failure', () => {
|
||||
uploadDocumentsService.uploadFiles(fileList)
|
||||
const file = new File(
|
||||
[new Blob(['testing'], { type: 'application/pdf' })],
|
||||
'file.pdf'
|
||||
)
|
||||
uploadDocumentsService.uploadFile(file)
|
||||
|
||||
let req = httpTestingController.match(
|
||||
`${environment.apiBaseUrl}documents/post_document/`
|
||||
@ -125,7 +115,7 @@ describe('UploadDocumentsService', () => {
|
||||
websocketStatusService.getConsumerStatus(FileStatusPhase.FAILED)
|
||||
).toHaveLength(1)
|
||||
|
||||
uploadDocumentsService.uploadFiles(fileList)
|
||||
uploadDocumentsService.uploadFile(file)
|
||||
|
||||
req = httpTestingController.match(
|
||||
`${environment.apiBaseUrl}documents/post_document/`
|
||||
@ -143,35 +133,4 @@ describe('UploadDocumentsService', () => {
|
||||
websocketStatusService.getConsumerStatus(FileStatusPhase.FAILED)
|
||||
).toHaveLength(2)
|
||||
})
|
||||
|
||||
it('accepts files via drag and drop', () => {
|
||||
const uploadSpy = jest.spyOn(
|
||||
UploadDocumentsService.prototype as any,
|
||||
'uploadFile'
|
||||
)
|
||||
const fileEntry = {
|
||||
name: 'file.pdf',
|
||||
isDirectory: false,
|
||||
isFile: true,
|
||||
file: (callback) => {
|
||||
return callback(
|
||||
new File(
|
||||
[new Blob(['testing'], { type: 'application/pdf' })],
|
||||
'file.pdf'
|
||||
)
|
||||
)
|
||||
},
|
||||
}
|
||||
uploadDocumentsService.onNgxFileDrop([
|
||||
{
|
||||
relativePath: 'path/to/file.pdf',
|
||||
fileEntry,
|
||||
},
|
||||
])
|
||||
expect(uploadSpy).toHaveBeenCalled()
|
||||
|
||||
let req = httpTestingController.match(
|
||||
`${environment.apiBaseUrl}documents/post_document/`
|
||||
)
|
||||
})
|
||||
})
|
||||
|
@ -1,6 +1,5 @@
import { HttpEventType } from '@angular/common/http'
import { Injectable } from '@angular/core'
import { FileSystemFileEntry, NgxFileDropEntry } from 'ngx-file-drop'
import { Subscription } from 'rxjs'
import { DocumentService } from './rest/document.service'
import {
@ -19,22 +18,7 @@ export class UploadDocumentsService {
    private websocketStatusService: WebsocketStatusService
  ) {}

  onNgxFileDrop(files: NgxFileDropEntry[]) {
    for (const droppedFile of files) {
      if (droppedFile.fileEntry.isFile) {
        const fileEntry = droppedFile.fileEntry as FileSystemFileEntry
        fileEntry.file((file: File) => this.uploadFile(file))
      }
    }
  }

  uploadFiles(files: FileList) {
    for (let index = 0; index < files.length; index++) {
      this.uploadFile(files.item(index))
    }
  }

  private uploadFile(file: File) {
  public uploadFile(file: File) {
    let formData = new FormData()
    formData.append('document', file, file.name)
    formData.append('from_webui', 'true')
@ -355,6 +355,50 @@ describe('ConsumerStatusService', () => {
|
||||
)
|
||||
})
|
||||
|
||||
it('should notify user if user can view or is in group', () => {
|
||||
settingsService.currentUser = {
|
||||
id: 1,
|
||||
username: 'testuser',
|
||||
is_superuser: false,
|
||||
groups: [1],
|
||||
}
|
||||
websocketStatusService.connect()
|
||||
server.send({
|
||||
type: WebsocketStatusType.STATUS_UPDATE,
|
||||
data: {
|
||||
task_id: '1234',
|
||||
filename: 'file1.pdf',
|
||||
current_progress: 50,
|
||||
max_progress: 100,
|
||||
docuement_id: 12,
|
||||
owner_id: 2,
|
||||
status: 'WORKING',
|
||||
users_can_view: [1],
|
||||
groups_can_view: [],
|
||||
},
|
||||
})
|
||||
expect(websocketStatusService.getConsumerStatusNotCompleted()).toHaveLength(
|
||||
1
|
||||
)
|
||||
server.send({
|
||||
type: WebsocketStatusType.STATUS_UPDATE,
|
||||
data: {
|
||||
task_id: '5678',
|
||||
filename: 'file2.pdf',
|
||||
current_progress: 50,
|
||||
max_progress: 100,
|
||||
docuement_id: 13,
|
||||
owner_id: 2,
|
||||
status: 'WORKING',
|
||||
users_can_view: [],
|
||||
groups_can_view: [1],
|
||||
},
|
||||
})
|
||||
expect(websocketStatusService.getConsumerStatusNotCompleted()).toHaveLength(
|
||||
2
|
||||
)
|
||||
})
|
||||
|
||||
it('should trigger deleted subject on document deleted', () => {
|
||||
let deleted = false
|
||||
websocketStatusService.onDocumentDeleted().subscribe(() => {
|
||||
|
@ -1,6 +1,7 @@
import { Injectable } from '@angular/core'
import { Subject } from 'rxjs'
import { environment } from 'src/environments/environment'
import { User } from '../data/user'
import { WebsocketDocumentsDeletedMessage } from '../data/websocket-documents-deleted-message'
import { WebsocketProgressMessage } from '../data/websocket-progress-message'
import { SettingsService } from './settings.service'
@ -173,13 +174,25 @@ export class WebsocketStatusService {
    }
  }

  private canViewMessage(messageData: WebsocketProgressMessage): boolean {
    // see paperless.consumers.StatusConsumer._can_view
    const user: User = this.settingsService.currentUser
    return (
      !messageData.owner_id ||
      user.is_superuser ||
      (messageData.owner_id && messageData.owner_id === user.id) ||
      (messageData.users_can_view &&
        messageData.users_can_view.includes(user.id)) ||
      (messageData.groups_can_view &&
        messageData.groups_can_view.some((groupId) =>
          user.groups?.includes(groupId)
        ))
    )
  }

  handleProgressUpdate(messageData: WebsocketProgressMessage) {
    // fallback if backend didn't restrict message
    if (
      messageData.owner_id &&
      messageData.owner_id !== this.settingsService.currentUser?.id &&
      !this.settingsService.currentUser?.is_superuser
    ) {
    if (!this.canViewMessage(messageData)) {
      return
    }
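A quick illustration of the predicate above (not part of the diff): a status message is shown when it has no owner, when the current user owns it or is a superuser, or when the user or one of their groups is explicitly allowed to view it. Sketch with made-up IDs:

    // Sketch: evaluating the visibility rule against a sample progress message
    const user = { id: 5, is_superuser: false, groups: [2] }
    const message = { owner_id: 9, users_can_view: [], groups_can_view: [2] }

    const canView =
      !message.owner_id ||
      user.is_superuser ||
      message.owner_id === user.id ||
      message.users_can_view.includes(user.id) ||
      message.groups_can_view.some((g) => user.groups.includes(g))
    // canView === true here, because the user belongs to group 2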
@ -3,7 +3,7 @@ const base_url = new URL(document.baseURI)
export const environment = {
  production: true,
  apiBaseUrl: document.baseURI + 'api/',
  apiVersion: '7',
  apiVersion: '9', // match src/paperless/settings.py
  appTitle: 'Paperless-ngx',
  version: '2.15.3',
  webSocketHost: window.location.host,
Some files were not shown because too many files have changed in this diff.