Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2025-05-21 12:52:13 -05:00)

Merge branch 'dev'
This commit is contained in commit 2f8b2944f1.
Devcontainer docker-compose file

@@ -21,19 +21,17 @@
# This file is intended only to be used through VSCOde devcontainers. See README.md
# in the folder .devcontainer.

services:
  broker:
    image: docker.io/library/redis:7
    restart: unless-stopped
    volumes:
      - ./redisdata:/data

  # No ports need to be exposed; the VSCode DevContainer plugin manages them.
  paperless-development:
    image: paperless-ngx
    build:
      context: ../ # Dockerfile cannot access files from parent directories if context is not set.
      dockerfile: ./.devcontainer/Dockerfile
    restart: unless-stopped
    depends_on:

@@ -60,25 +58,20 @@ services:
      PAPERLESS_TIKA_ENDPOINT: http://tika:9998
      PAPERLESS_STATICDIR: ./src/documents/static
      PAPERLESS_DEBUG: true

    # Overrides default command so things don't shut down after the process ends.
    command: /bin/sh -c "chown -R paperless:paperless /usr/src/paperless/paperless-ngx/src/documents/static/frontend && chown -R paperless:paperless /usr/src/paperless/paperless-ngx/.ruff_cache && while sleep 1000; do :; done"

  gotenberg:
    image: docker.io/gotenberg/gotenberg:8.17
    restart: unless-stopped

    # The Gotenberg Chromium route is used to convert .eml files. We do not
    # want to allow external content like tracking pixels or even JavaScript.
    command:
      - "gotenberg"
      - "--chromium-disable-javascript=true"
      - "--chromium-allow-list=file:///tmp/.*"

  tika:
    image: docker.io/apache/tika:latest
    restart: unless-stopped

volumes:
  data:
  media:
.github/dependabot.yml (8 lines changed)

@@ -5,7 +5,6 @@ version: 2
# Required for uv support for now
enable-beta-ecosystems: true
updates:

  # Enable version updates for pnpm
  - package-ecosystem: "npm"
    target-branch: "dev"

@@ -35,7 +34,6 @@ updates:
        patterns:
          - "@typescript-eslint*"
          - "eslint"

  # Enable version updates for Python
  - package-ecosystem: "uv"
    target-branch: "dev"

@@ -59,6 +57,7 @@ updates:
        django:
          patterns:
            - "*django*"
+           - "drf-*"
        major-versions:
          update-types:
            - "major"

@@ -70,7 +69,6 @@ updates:
          patterns:
            - psycopg*
            - zxing-cpp

  # Enable updates for GitHub Actions
  - package-ecosystem: "github-actions"
    target-branch: "dev"

@@ -90,7 +88,6 @@ updates:
            - "major"
            - "minor"
            - "patch"

  # Update Dockerfile in root directory
  - package-ecosystem: "docker"
    directory: "/"

@@ -100,12 +97,10 @@ updates:
    reviewers:
      - "paperless-ngx/ci-cd"
    labels:
-     - "ci-cd"
      - "dependencies"
    commit-message:
      prefix: "docker"
      include: "scope"

  # Update Docker Compose files in docker/compose directory
  - package-ecosystem: "docker-compose"
    directory: "/docker/compose/"

@@ -115,7 +110,6 @@ updates:
    reviewers:
      - "paperless-ngx/ci-cd"
    labels:
-     - "ci-cd"
      - "dependencies"
    commit-message:
      prefix: "docker-compose"
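For reference, the added "drf-*" pattern extends the existing Dependabot django group, so Django REST framework packages are batched into the same grouped update PR as Django itself. A sketch of the resulting group definition, reconstructed from the hunks above (the placement of the surrounding keys is assumed, since the full file is not shown here):

groups:
  django:
    patterns:
      - "*django*"
      - "drf-*"
  major-versions:
    update-types:
      - "major"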
.github/labeler.yml (new file, 19 lines)

@@ -0,0 +1,19 @@
backend:
  - changed-files:
      - any-glob-to-any-file:
          - 'src/**'
          - 'pyproject.toml'
          - 'uv.lock'
          - 'requirements.txt'
frontend:
  - changed-files:
      - any-glob-to-any-file:
          - 'src-ui/**'
documentation:
  - changed-files:
      - any-glob-to-any-file:
          - 'docs/**'
ci-cd:
  - changed-files:
      - any-glob-to-any-file:
          - '.github/**'
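The new labeler.yml maps changed file paths to pull request labels. It is consumed by the actions/labeler step in the new PR Bot workflow later in this diff; a minimal sketch of that wiring, mirroring the step from .github/workflows/pr-bot.yml below:

- name: Label by file path
  uses: actions/labeler@v5
  with:
    repo-token: ${{ secrets.GITHUB_TOKEN }}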
.github/workflows/ci.yml (300 lines changed)

@@ -1,5 +1,4 @@
name: ci

on:
  push:
    tags:

@@ -12,72 +11,57 @@ on:
  pull_request:
    branches-ignore:
      - 'translations**'

env:
  DEFAULT_UV_VERSION: "0.6.x"
  # This is the default version of Python to use in most steps which aren't specific
  DEFAULT_PYTHON_VERSION: "3.11"

jobs:
  pre-commit:
    # We want to run on external PRs, but not on our own internal PRs as they'll be run
    # by the push to the branch. Without this if check, checks are duplicated since
    # internal PRs match both the push and pull_request events.
    if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
    name: Linting Checks
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Install python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Check files
        uses: pre-commit/action@v3.0.1

  documentation:
    name: "Build & Deploy Documentation"
    runs-on: ubuntu-24.04
    needs:
      - pre-commit
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install uv
-       uses: astral-sh/setup-uv@v5
+       uses: astral-sh/setup-uv@v6
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install Python dependencies
        run: |
          uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
      - name: Make documentation
        run: |
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --dev \
            --frozen \
            mkdocs build --config-file ./mkdocs.yml
      - name: Deploy documentation
        if: github.event_name == 'push' && github.ref == 'refs/heads/main'
        run: |
          echo "docs.paperless-ngx.com" > "${{ github.workspace }}/docs/CNAME"

@@ -88,14 +72,12 @@ jobs:
            --dev \
            --frozen \
            mkdocs gh-deploy --force --no-history
      - name: Upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: documentation
          path: site/
          retention-days: 7

  tests-backend:
    name: "Backend Tests (Python ${{ matrix.python-version }})"
    runs-on: ubuntu-24.04

@@ -106,49 +88,40 @@ jobs:
        python-version: ['3.10', '3.11', '3.12']
      fail-fast: false
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Start containers
        run: |
          docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml pull --quiet
          docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml up --detach
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v5
        with:
          python-version: "${{ matrix.python-version }}"
      - name: Install uv
-       uses: astral-sh/setup-uv@v5
+       uses: astral-sh/setup-uv@v6
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ steps.setup-python.outputs.python-version }}
      - name: Install system dependencies
        run: |
          sudo apt-get update -qq
          sudo apt-get install -qq --no-install-recommends unpaper tesseract-ocr imagemagick ghostscript libzbar0 poppler-utils
      - name: Configure ImageMagick
        run: |
          sudo cp docker/rootfs/etc/ImageMagick-6/paperless-policy.xml /etc/ImageMagick-6/policy.xml
      - name: Install Python dependencies
        run: |
          uv sync \
            --python ${{ steps.setup-python.outputs.python-version }} \
            --group testing \
            --frozen
      - name: List installed Python dependencies
        run: |
          uv pip list
      - name: Tests
        env:
          PAPERLESS_CI_TEST: 1
          # Enable paperless_mail testing against real server

@@ -161,28 +134,24 @@ jobs:
            --dev \
            --frozen \
            pytest
      - name: Upload backend test results to Codecov
        if: always()
        uses: codecov/test-results-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: backend-python-${{ matrix.python-version }}
          files: junit.xml
      - name: Upload backend coverage to Codecov
        uses: codecov/codecov-action@v5
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: backend-python-${{ matrix.python-version }}
          files: coverage.xml
      - name: Stop containers
        if: always()
        run: |
          docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml logs
          docker compose --file ${{ github.workspace }}/docker/compose/docker-compose.ci-test.yml down

  install-frontend-dependencies:
    name: "Install Frontend Dependencies"
    runs-on: ubuntu-24.04

@@ -194,8 +163,7 @@ jobs:
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 20
        uses: actions/setup-node@v4
        with:
          node-version: 20.x

@@ -209,17 +177,10 @@ jobs:
            ~/.pnpm-store
            ~/.cache
          key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
      - name: Install dependencies
-       if: steps.cache-frontend-deps.outputs.cache-hit != 'true'
        run: cd src-ui && pnpm install
-     - name: Install Playwright
-       if: steps.cache-frontend-deps.outputs.cache-hit != 'true'
-       run: cd src-ui && pnpm playwright install --with-deps

  tests-frontend:
-   name: "Frontend Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
+   name: "Frontend Unit Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
    runs-on: ubuntu-24.04
    needs:
      - install-frontend-dependencies

@@ -235,8 +196,7 @@ jobs:
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 20
        uses: actions/setup-node@v4
        with:
          node-version: 20.x

@@ -252,52 +212,90 @@ jobs:
          key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
      - name: Re-link Angular cli
        run: cd src-ui && pnpm link @angular/cli
      - name: Linting checks
        run: cd src-ui && pnpm run lint
      - name: Run Jest unit tests
        run: cd src-ui && pnpm run test --max-workers=2 --shard=${{ matrix.shard-index }}/${{ matrix.shard-count }}
-     - name: Run Playwright e2e tests
-       run: cd src-ui && pnpm exec playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}
      - name: Upload frontend test results to Codecov
        uses: codecov/test-results-action@v1
        if: always()
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: frontend-node-${{ matrix.node-version }}
          directory: src-ui/
      - name: Upload frontend coverage to Codecov
        uses: codecov/codecov-action@v5
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: frontend-node-${{ matrix.node-version }}
          directory: src-ui/coverage/

+ tests-frontend-e2e:
+   name: "Frontend E2E Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
+   runs-on: ubuntu-24.04
+   needs:
+     - install-frontend-dependencies
+   strategy:
+     fail-fast: false
+     matrix:
+       node-version: [20.x]
+       shard-index: [1, 2]
+       shard-count: [2]
+   steps:
+     - uses: actions/checkout@v4
+     - name: Install pnpm
+       uses: pnpm/action-setup@v4
+       with:
+         version: 10
+     - name: Use Node.js 20
+       uses: actions/setup-node@v4
+       with:
+         node-version: 20.x
+         cache: 'pnpm'
+         cache-dependency-path: 'src-ui/pnpm-lock.yaml'
+     - name: Cache frontend dependencies
+       id: cache-frontend-deps
+       uses: actions/cache@v4
+       with:
+         path: |
+           ~/.pnpm-store
+           ~/.cache
+         key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
+     - name: Re-link Angular cli
+       run: cd src-ui && pnpm link @angular/cli
+     - name: Cache Playwright browsers
+       uses: actions/cache@v4
+       with:
+         path: ~/.cache/ms-playwright
+         key: ${{ runner.os }}-playwright-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
+         restore-keys: |
+           ${{ runner.os }}-playwright-
+     - name: Install Playwright system dependencies
+       run: npx playwright install-deps
+     - name: Install dependencies
+       run: cd src-ui && pnpm install --no-frozen-lockfile
+     - name: Install Playwright
+       run: cd src-ui && pnpm exec playwright install
+     - name: Run Playwright e2e tests
+       run: cd src-ui && pnpm exec playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}

  frontend-bundle-analysis:
    name: "Frontend Bundle Analysis"
    runs-on: ubuntu-24.04
    needs:
      - tests-frontend
+     - tests-frontend-e2e
    steps:
      - uses: actions/checkout@v4
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 20
        uses: actions/setup-node@v4
        with:
          node-version: 20.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Cache frontend dependencies
        id: cache-frontend-deps
        uses: actions/cache@v4
        with:

@@ -305,15 +303,12 @@ jobs:
            ~/.pnpm-store
            ~/.cache
          key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/package-lock.json') }}
      - name: Re-link Angular cli
        run: cd src-ui && pnpm link @angular/cli
      - name: Build frontend and upload analysis
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
        run: cd src-ui && pnpm run build --configuration=production

  build-docker-image:
    name: Build Docker image for ${{ github.ref_name }}
    runs-on: ubuntu-24.04

@@ -324,9 +319,9 @@ jobs:
    needs:
      - tests-backend
      - tests-frontend
+     - tests-frontend-e2e
    steps:
      - name: Check pushing to Docker Hub
        id: push-other-places
        # Only push to Dockerhub from the main repo AND the ref is either:
        # main

@@ -342,15 +337,13 @@ jobs:
            echo "Not pushing to DockerHub"
            echo "enable=false" >> $GITHUB_OUTPUT
          fi
      - name: Set ghcr repository name
        id: set-ghcr-repository
        run: |
          ghcr_name=$(echo "${{ github.repository }}" | awk '{ print tolower($0) }')
          echo "Name is ${ghcr_name}"
          echo "ghcr-repository=${ghcr_name}" >> $GITHUB_OUTPUT
      - name: Gather Docker metadata
        id: docker-meta
        uses: docker/metadata-action@v5
        with:

@@ -365,37 +358,31 @@ jobs:
          # For a tag x.y.z or vX.Y.Z, output an x.y.z and x.y image tag
          type=semver,pattern={{version}}
          type=semver,pattern={{major}}.{{minor}}
      - name: Checkout
        uses: actions/checkout@v4
      # If https://github.com/docker/buildx/issues/1044 is resolved,
      # the append input with a native arm64 arch could be used to
      # significantly speed up building
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
        with:
          platforms: arm64
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        # Don't attempt to login if not pushing to Docker Hub
        if: steps.push-other-places.outputs.enable == 'true'
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Login to Quay.io
        uses: docker/login-action@v3
        # Don't attempt to login if not pushing to Quay.io
        if: steps.push-other-places.outputs.enable == 'true'

@@ -403,8 +390,7 @@ jobs:
          registry: quay.io
          username: ${{ secrets.QUAY_USERNAME }}
          password: ${{ secrets.QUAY_ROBOT_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v6
        with:
          context: .

@@ -422,23 +408,19 @@ jobs:
            type=registry,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:dev
          cache-to: |
            type=registry,mode=max,ref=ghcr.io/${{ steps.set-ghcr-repository.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
      - name: Inspect image
        run: |
          docker buildx imagetools inspect ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
      - name: Export frontend artifact from docker
        run: |
          docker create --name frontend-extract ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
          docker cp frontend-extract:/usr/src/paperless/src/documents/static/frontend src/documents/static/frontend/
      - name: Upload frontend artifact
        uses: actions/upload-artifact@v4
        with:
          name: frontend-compiled
          path: src/documents/static/frontend/
          retention-days: 7

  build-release:
    name: "Build Release"
    needs:

@@ -446,63 +428,52 @@ jobs:
      - documentation
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install uv
-       uses: astral-sh/setup-uv@v5
+       uses: astral-sh/setup-uv@v6
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ steps.setup-python.outputs.python-version }}
      - name: Install Python dependencies
        run: |
          uv sync --python ${{ steps.setup-python.outputs.python-version }} --dev --frozen
      - name: Install system dependencies
        run: |
          sudo apt-get update -qq
          sudo apt-get install -qq --no-install-recommends gettext liblept5
      - name: Download frontend artifact
        uses: actions/download-artifact@v4
        with:
          name: frontend-compiled
          path: src/documents/static/frontend/
      - name: Download documentation artifact
        uses: actions/download-artifact@v4
        with:
          name: documentation
          path: docs/_build/html/
      - name: Generate requirements file
        run: |
          uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt
      - name: Compile messages
        run: |
          cd src/
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            manage.py compilemessages
      - name: Collect static files
        run: |
          cd src/
          uv run \
            --python ${{ steps.setup-python.outputs.python-version }} \
            manage.py collectstatic --no-input
      - name: Move files
        run: |
          echo "Making dist folders"
          for directory in dist \

@@ -539,21 +510,18 @@ jobs:
          cp --recursive docs/_build/html/ dist/paperless-ngx/docs

          mv --verbose static dist/paperless-ngx
      - name: Make release package
        run: |
          echo "Creating release archive"
          cd dist
          sudo chown -R 1000:1000 paperless-ngx/
          tar -cJf paperless-ngx.tar.xz paperless-ngx/
      - name: Upload release artifact
        uses: actions/upload-artifact@v4
        with:
          name: release
          path: dist/paperless-ngx.tar.xz
          retention-days: 7

  publish-release:
    name: "Publish Release"
    runs-on: ubuntu-24.04

@@ -565,14 +533,12 @@ jobs:
      - build-release
    if: github.ref_type == 'tag' && (startsWith(github.ref_name, 'v') || contains(github.ref_name, '-beta.rc'))
    steps:
      - name: Download release artifact
        uses: actions/download-artifact@v4
        with:
          name: release
          path: ./
      - name: Get version
        id: get_version
        run: |
          echo "version=${{ github.ref_name }}" >> $GITHUB_OUTPUT

@@ -581,8 +547,7 @@ jobs:
          else
            echo "prerelease=false" >> $GITHUB_OUTPUT
          fi
      - name: Create Release and Changelog
        id: create-release
        uses: release-drafter/release-drafter@v6
        with:

@@ -593,8 +558,7 @@ jobs:
          publish: true # ensures release is not marked as draft
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Upload release archive
        id: upload-release-asset
        uses: shogo82148/actions-upload-release-asset@v1
        with:

@@ -603,7 +567,6 @@ jobs:
          asset_path: ./paperless-ngx.tar.xz
          asset_name: paperless-ngx-${{ steps.get_version.outputs.version }}.tar.xz
          asset_content_type: application/x-xz

  append-changelog:
    name: "Append Changelog"
    runs-on: ubuntu-24.04

@@ -611,26 +574,22 @@ jobs:
      - publish-release
    if: needs.publish-release.outputs.prerelease == 'false'
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          ref: main
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Install uv
-       uses: astral-sh/setup-uv@v5
+       uses: astral-sh/setup-uv@v6
        with:
          version: ${{ env.DEFAULT_UV_VERSION }}
          enable-cache: true
          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
      - name: Append Changelog to docs
        id: append-Changelog
        working-directory: docs
        run: |

@@ -652,8 +611,7 @@ jobs:
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git commit -am "Changelog ${{ needs.publish-release.outputs.version }} - GHA"
          git push origin ${{ needs.publish-release.outputs.version }}-changelog
      - name: Create Pull Request
        uses: actions/github-script@v7
        with:
          script: |
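Taken together, the ci.yml changes split the Playwright end-to-end tests out of tests-frontend into a new tests-frontend-e2e job, bump astral-sh/setup-uv from v5 to v6, and wire the new job into the downstream jobs. A condensed sketch of the resulting needs graph, derived from the hunks above:

tests-frontend:            # Jest unit tests only, sharded
  needs: [install-frontend-dependencies]
tests-frontend-e2e:        # Playwright e2e tests, sharded
  needs: [install-frontend-dependencies]
frontend-bundle-analysis:
  needs: [tests-frontend, tests-frontend-e2e]
build-docker-image:
  needs: [tests-backend, tests-frontend, tests-frontend-e2e]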
.github/workflows/cleanup-tags.yml (10 lines changed)

@@ -6,17 +6,14 @@
# This workflow will not trigger runs on forked repos.

name: Cleanup Image Tags

on:
  delete:
  push:
    paths:
      - ".github/workflows/cleanup-tags.yml"

concurrency:
  group: registry-tags-cleanup
  cancel-in-progress: false

jobs:
  cleanup-images:
    name: Cleanup Image Tags for ${{ matrix.primary-name }}

@@ -30,8 +27,7 @@ jobs:
      # Requires a personal access token with the OAuth scope delete:packages
      TOKEN: ${{ secrets.GHA_CONTAINER_DELETE_TOKEN }}
    steps:
      - name: Clean temporary images
        if: "${{ env.TOKEN != '' }}"
        uses: stumpylog/image-cleaner-action/ephemeral@v0.10.0
        with:

@@ -43,7 +39,6 @@ jobs:
          repo_name: "paperless-ngx"
          match_regex: "(feature|fix)"
          do_delete: "true"

  cleanup-untagged-images:
    name: Cleanup Untagged Images Tags for ${{ matrix.primary-name }}
    if: github.repository_owner == 'paperless-ngx'

@@ -58,8 +53,7 @@ jobs:
      # Requires a personal access token with the OAuth scope delete:packages
      TOKEN: ${{ secrets.GHA_CONTAINER_DELETE_TOKEN }}
    steps:
      - name: Clean untagged images
        if: "${{ env.TOKEN != '' }}"
        uses: stumpylog/image-cleaner-action/untagged@v0.10.0
        with:
.github/workflows/codeql-analysis.yml (38 lines changed)

@@ -10,16 +10,14 @@
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
-   branches: [ main, dev ]
+   branches: [main, dev]
  pull_request:
    # The branches below must be a subset of the branches above
-   branches: [ dev ]
+   branches: [dev]
  schedule:
    - cron: '28 13 * * 5'

jobs:
  analyze:
    name: Analyze

@@ -28,27 +26,23 @@ jobs:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
-       language: [ 'javascript', 'python' ]
+       language: ['javascript', 'python']
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
        # Learn more about CodeQL language support at https://git.io/codeql-language-support

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          # queries: ./path/to/local/query, your-org/your-repo/queries@main
      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3
.github/workflows/crowdin.yml (39 lines changed)

@@ -1,35 +1,28 @@
name: Crowdin Action

on:
  workflow_dispatch:
  schedule:
    - cron: '2 */12 * * *'
  push:
-   paths: [
-     'src/locale/**',
-     'src-ui/messages.xlf',
-     'src-ui/src/locale/**'
-   ]
-   branches: [ dev ]
+   paths: ['src/locale/**', 'src-ui/messages.xlf', 'src-ui/src/locale/**']
+   branches: [dev]

jobs:
  synchronize-with-crowdin:
    name: Crowdin Sync
    if: github.repository_owner == 'paperless-ngx'
    runs-on: ubuntu-24.04

    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: crowdin action
        uses: crowdin/github-action@v2
        with:
          upload_translations: false
          download_translations: true
          crowdin_branch_name: 'dev'
          localization_branch_name: l10n_dev
          pull_request_labels: 'skip-changelog, translation'
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          CROWDIN_PROJECT_ID: ${{ secrets.CROWDIN_PROJECT_ID }}
          CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}
.github/workflows/pr-bot.yml (new file, 86 lines)

@@ -0,0 +1,86 @@
name: PR Bot

on:
  pull_request_target:
    types: [opened]

permissions:
  contents: read
  pull-requests: write

jobs:
  pr-bot:
    name: Automated PR Bot
    runs-on: ubuntu-latest
    steps:
      - name: Label by file path
        uses: actions/labeler@v5
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
      - name: Label by size
        uses: Gascon1/pr-size-labeler@v1.3.0
        with:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          xs_label: 'small-change'
          xs_diff: '9'
          s_label: 'non-trivial'
          s_diff: '99999'
          fail_if_xl: 'false'
          excluded_files: /\.lock$/ /\.txt$/ ^src-ui/pnpm-lock\.yaml$ ^src-ui/messages\.xlf$ ^src/locale/en_US/LC_MESSAGES/django\.po$
      - name: Label bot-generated PRs
        if: ${{ contains(github.actor, 'dependabot') || contains(github.actor, 'crowdin-bot') }}
        uses: actions/github-script@v7
        with:
          script: |
            const pr = context.payload.pull_request;
            const user = pr.user.login.toLowerCase();
            const labels = [];

            if (user.includes('dependabot')) {
              labels.push('dependencies');
            }

            if (user.includes('crowdin-bot')) {
              labels.push('translation', 'skip-changelog');
            }

            if (labels.length) {
              await github.rest.issues.addLabels({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: pr.number,
                labels,
              });
            }
      - name: Welcome comment
        if: ${{ !contains(github.actor, 'bot') }}
        uses: actions/github-script@v7
        with:
          script: |
            const pr = context.payload.pull_request;
            const user = pr.user.login;

            const { data: members } = await github.rest.orgs.listMembers({
              org: 'paperless-ngx',
            });

            const memberLogins = members.map(m => m.login.toLowerCase());
            if (memberLogins.includes(user.toLowerCase())) {
              core.info('Skipping comment: user is org member');
              return;
            }

            const body =
              "Hello @" + user + ",\n\n" +
              "Thank you very much for submitting this PR to us!\n\n" +
              "This is what will happen next:\n\n" +
              "1. CI tests will run against your PR to ensure quality and consistency.\n" +
              "2. Next, human contributors from paperless-ngx review your changes.\n" +
              "3. Please address any issues that come up during the review as soon as you are able to.\n" +
              "4. If accepted, your pull request will be merged into the `dev` branch and changes there will be tested further.\n" +
              "5. Eventually, changes from you and other contributors will be merged into `main` and a new release will be made.\n\n" +
              "You'll be hearing from us soon, and thank you again for contributing to our project.";

            await github.rest.issues.createComment({
              issue_number: pr.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body,
            });
.github/workflows/project-actions.yml (3 lines changed)

@@ -1,5 +1,4 @@
name: Project Automations

on:
  pull_request_target: #_target allows access to secrets
    types:

@@ -8,10 +7,8 @@ on:
    branches:
      - main
      - dev

permissions:
  contents: read

jobs:
  pr_opened_or_reopened:
    name: pr_opened_or_reopened
.github/workflows/repo-maintenance.yml (27 lines changed)

@@ -1,18 +1,14 @@
name: 'Repository Maintenance'

on:
  schedule:
    - cron: '0 3 * * *'
  workflow_dispatch:

permissions:
  issues: write
  pull-requests: write
  discussions: write

concurrency:
  group: lock

jobs:
  stale:
    name: 'Stale'

@@ -27,9 +23,8 @@ jobs:
          stale-issue-label: stale
          stale-pr-label: stale
          stale-issue-message: >
            This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions. See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
  lock-threads:
    name: 'Lock Old Threads'
    if: github.repository_owner == 'paperless-ngx'

@@ -42,20 +37,14 @@ jobs:
          discussion-inactive-days: '30'
          log-output: true
          issue-comment: >
            This issue has been automatically locked since there has not been any recent activity after it was closed. Please open a new discussion or issue for related concerns. See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
          pr-comment: >
            This pull request has been automatically locked since there has not been any recent activity after it was closed. Please open a new discussion or issue for related concerns. See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
          discussion-comment: >
            This discussion has been automatically locked since there has not been any recent activity after it was closed. Please open a new discussion for related concerns. See our [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/dev/CONTRIBUTING.md#automatic-repository-maintenance) for more details.
  close-answered-discussions:
    name: 'Close Answered Discussions'
    if: github.repository_owner == 'paperless-ngx'
.github/workflows/translate-strings.yml (new file, 69 lines)

@@ -0,0 +1,69 @@
name: Generate Translation Strings

on:
  push:
    branches:
      - dev

jobs:
  generate-translate-strings:
    name: Generate Translation Strings
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          token: ${{ secrets.PNGX_BOT_PAT }}
          ref: ${{ github.head_ref }}
      - name: Set up Python
        id: setup-python
        uses: actions/setup-python@v5
      - name: Install system dependencies
        run: |
          sudo apt-get update -qq
          sudo apt-get install -qq --no-install-recommends gettext
      - name: Install uv
        uses: astral-sh/setup-uv@v6
        with:
          enable-cache: true
      - name: Install backend python dependencies
        run: |
          uv sync \
            --group dev \
            --frozen
      - name: Generate backend translation strings
        run: cd src/ && uv run manage.py makemessages -l en_US -i "samples*"
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Use Node.js 20
        uses: actions/setup-node@v4
        with:
          node-version: 20.x
          cache: 'pnpm'
          cache-dependency-path: 'src-ui/pnpm-lock.yaml'
      - name: Cache frontend dependencies
        id: cache-frontend-deps
        uses: actions/cache@v4
        with:
          path: |
            ~/.pnpm-store
            ~/.cache
          key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
      - name: Install frontend dependencies
        if: steps.cache-frontend-deps.outputs.cache-hit != 'true'
        run: cd src-ui && pnpm install
      - name: Re-link Angular cli
        run: cd src-ui && pnpm link @angular/cli
      - name: Generate frontend translation strings
        run: |
          cd src-ui
          pnpm run ng extract-i18n
      - name: Commit changes
        uses: stefanzweifel/git-auto-commit-action@v5
        with:
          file_pattern: 'src-ui/messages.xlf src/locale/en_US/LC_MESSAGES/django.po'
          commit_message: "Auto translate strings"
          commit_user_name: "GitHub Actions"
          commit_author: "GitHub Actions <41898282+github-actions[bot]@users.noreply.github.com>"
@ -76,3 +76,8 @@ repos:
    rev: "v0.10.0.1"
    hooks:
      - id: shellcheck
+  - repo: https://github.com/google/yamlfmt
+    rev: v0.14.0
+    hooks:
+      - id: yamlfmt
+        exclude: "^src-ui/pnpm-lock.yaml"
@ -32,7 +32,7 @@ RUN set -eux \
# Purpose: Installs s6-overlay and rootfs
# Comments:
#  - Don't leave anything extra in here either
-FROM ghcr.io/astral-sh/uv:0.6.13-python3.12-bookworm-slim AS s6-overlay-base
+FROM ghcr.io/astral-sh/uv:0.6.16-python3.12-bookworm-slim AS s6-overlay-base

WORKDIR /usr/src/s6

@ -47,7 +47,7 @@ ENV \
ARG TARGETARCH
ARG TARGETVARIANT
# Lock this version
-ARG S6_OVERLAY_VERSION=3.2.0.2
+ARG S6_OVERLAY_VERSION=3.2.1.0

ARG S6_BUILD_TIME_PKGS="curl \
    xz-utils"
@ -5,7 +5,7 @@

services:
  gotenberg:
-    image: docker.io/gotenberg/gotenberg:8.19
+    image: docker.io/gotenberg/gotenberg:8.20
    hostname: gotenberg
    container_name: gotenberg
    network_mode: host
@ -32,11 +32,10 @@

services:
  broker:
-    image: docker.io/library/redis:7
+    image: docker.io/library/redis:8
    restart: unless-stopped
    volumes:
      - redisdata:/data

  db:
    image: docker.io/library/mariadb:11
    restart: unless-stopped
@ -48,7 +47,6 @@ services:
      MARIADB_USER: paperless
      MARIADB_PASSWORD: paperless
      MARIADB_ROOT_PASSWORD: paperless

  webserver:
    image: ghcr.io/paperless-ngx/paperless-ngx:latest
    restart: unless-stopped
@ -75,9 +73,8 @@ services:
      PAPERLESS_TIKA_ENABLED: 1
      PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
      PAPERLESS_TIKA_ENDPOINT: http://tika:9998

  gotenberg:
-    image: docker.io/gotenberg/gotenberg:8.19
+    image: docker.io/gotenberg/gotenberg:8.20
    restart: unless-stopped
    # The gotenberg chromium route is used to convert .eml files. We do not
    # want to allow external content like tracking pixels or even javascript.
@ -85,11 +82,9 @@ services:
      - "gotenberg"
      - "--chromium-disable-javascript=true"
      - "--chromium-allow-list=file:///tmp/.*"

  tika:
    image: docker.io/apache/tika:latest
    restart: unless-stopped

volumes:
  data:
  media:
@ -27,11 +27,10 @@

services:
  broker:
-    image: docker.io/library/redis:7
+    image: docker.io/library/redis:8
    restart: unless-stopped
    volumes:
      - redisdata:/data

  db:
    image: docker.io/library/mariadb:11
    restart: unless-stopped
@ -43,7 +42,6 @@ services:
      MARIADB_USER: paperless
      MARIADB_PASSWORD: paperless
      MARIADB_ROOT_PASSWORD: paperless

  webserver:
    image: ghcr.io/paperless-ngx/paperless-ngx:latest
    restart: unless-stopped
@ -65,7 +63,6 @@ services:
      PAPERLESS_DBUSER: paperless # only needed if non-default username
      PAPERLESS_DBPASS: paperless # only needed if non-default password
      PAPERLESS_DBPORT: 3306

volumes:
  data:
  media:
@ -28,11 +28,10 @@

services:
  broker:
-    image: docker.io/library/redis:7
+    image: docker.io/library/redis:8
    restart: unless-stopped
    volumes:
      - redisdata:/data

  db:
    image: docker.io/library/postgres:17
    restart: unless-stopped
@ -42,7 +41,6 @@ services:
      POSTGRES_DB: paperless
      POSTGRES_USER: paperless
      POSTGRES_PASSWORD: paperless

  webserver:
    image: ghcr.io/paperless-ngx/paperless-ngx:latest
    restart: unless-stopped
@ -61,7 +59,6 @@ services:
      PAPERLESS_DBHOST: db
    env_file:
      - stack.env

volumes:
  data:
  media:
@ -31,11 +31,10 @@

services:
  broker:
-    image: docker.io/library/redis:7
+    image: docker.io/library/redis:8
    restart: unless-stopped
    volumes:
      - redisdata:/data

  db:
    image: docker.io/library/postgres:17
    restart: unless-stopped
@ -45,7 +44,6 @@ services:
      POSTGRES_DB: paperless
      POSTGRES_USER: paperless
      POSTGRES_PASSWORD: paperless

  webserver:
    image: ghcr.io/paperless-ngx/paperless-ngx:latest
    restart: unless-stopped
@ -68,22 +66,18 @@ services:
      PAPERLESS_TIKA_ENABLED: 1
      PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
      PAPERLESS_TIKA_ENDPOINT: http://tika:9998

  gotenberg:
-    image: docker.io/gotenberg/gotenberg:8.19
+    image: docker.io/gotenberg/gotenberg:8.20
    restart: unless-stopped

    # The gotenberg chromium route is used to convert .eml files. We do not
    # want to allow external content like tracking pixels or even javascript.
    command:
      - "gotenberg"
      - "--chromium-disable-javascript=true"
      - "--chromium-allow-list=file:///tmp/.*"

  tika:
    image: docker.io/apache/tika:latest
    restart: unless-stopped

volumes:
  data:
  media:
@ -27,11 +27,10 @@

services:
  broker:
-    image: docker.io/library/redis:7
+    image: docker.io/library/redis:8
    restart: unless-stopped
    volumes:
      - redisdata:/data

  db:
    image: docker.io/library/postgres:17
    restart: unless-stopped
@ -41,7 +40,6 @@ services:
      POSTGRES_DB: paperless
      POSTGRES_USER: paperless
      POSTGRES_PASSWORD: paperless

  webserver:
    image: ghcr.io/paperless-ngx/paperless-ngx:latest
    restart: unless-stopped
@ -59,7 +57,6 @@ services:
    environment:
      PAPERLESS_REDIS: redis://broker:6379
      PAPERLESS_DBHOST: db

volumes:
  data:
  media:
@ -31,11 +31,10 @@

services:
  broker:
-    image: docker.io/library/redis:7
+    image: docker.io/library/redis:8
    restart: unless-stopped
    volumes:
      - redisdata:/data

  webserver:
    image: ghcr.io/paperless-ngx/paperless-ngx:latest
    restart: unless-stopped
@ -56,22 +55,18 @@ services:
      PAPERLESS_TIKA_ENABLED: 1
      PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
      PAPERLESS_TIKA_ENDPOINT: http://tika:9998

  gotenberg:
-    image: docker.io/gotenberg/gotenberg:8.19
+    image: docker.io/gotenberg/gotenberg:8.20
    restart: unless-stopped

    # The gotenberg chromium route is used to convert .eml files. We do not
    # want to allow external content like tracking pixels or even javascript.
    command:
      - "gotenberg"
      - "--chromium-disable-javascript=true"
      - "--chromium-allow-list=file:///tmp/.*"

  tika:
    image: docker.io/apache/tika:latest
    restart: unless-stopped

volumes:
  data:
  media:
@ -24,11 +24,10 @@

services:
  broker:
-    image: docker.io/library/redis:7
+    image: docker.io/library/redis:8
    restart: unless-stopped
    volumes:
      - redisdata:/data

  webserver:
    image: ghcr.io/paperless-ngx/paperless-ngx:latest
    restart: unless-stopped
@ -44,7 +43,6 @@ services:
    env_file: docker-compose.env
    environment:
      PAPERLESS_REDIS: redis://broker:6379

volumes:
  data:
  media:
@ -9,7 +9,6 @@ if find /run/s6/container_environment/*"_FILE" -maxdepth 1 > /dev/null 2>&1; the
  for FILENAME in /run/s6/container_environment/*; do
    if [[ "${FILENAME##*/}" == PAPERLESS_*_FILE ]]; then
      # This should have been named different..
-      if [[ ${FILENAME} == "PAPERLESS_OCR_SKIP_ARCHIVE_FILE" || ${FILENAME} == "PAPERLESS_MODEL_FILE" ]]; then
+      if [[ "${FILENAME##*/}" == "PAPERLESS_OCR_SKIP_ARCHIVE_FILE" || "${FILENAME##*/}" == "PAPERLESS_MODEL_FILE" ]]; then
        continue
      fi
      SECRETFILE=$(cat "${FILENAME}")
@ -418,3 +418,9 @@ Initial API version.

- The user field of document notes now returns a simplified user object
  rather than just the user ID.
+
+#### Version 9
+
+- The document `created` field is now a date, not a datetime. The
+  `created_date` field is considered deprecated and will be removed in a
+  future version.
@ -629,7 +629,13 @@ If both the [PAPERLESS_ACCOUNT_DEFAULT_GROUPS](#PAPERLESS_ACCOUNT_DEFAULT_GROUPS

    !!! note

-        If you do not have a working email server set up you should set this to 'none'.
+        If you do not have a working email server set up this will be set to 'none'.
+
+#### [`PAPERLESS_ACCOUNT_EMAIL_UNKNOWN_ACCOUNTS=<bool>`](#PAPERLESS_ACCOUNT_EMAIL_UNKNOWN_ACCOUNTS) {#PAPERLESS_ACCOUNT_EMAIL_UNKNOWN_ACCOUNTS}
+
+: See the relevant [django-allauth documentation](https://docs.allauth.org/en/latest/account/configuration.html)
+
+    Defaults to True (from allauth)

#### [`PAPERLESS_DISABLE_REGULAR_LOGIN=<bool>`](#PAPERLESS_DISABLE_REGULAR_LOGIN) {#PAPERLESS_DISABLE_REGULAR_LOGIN}
|
@ -407,7 +407,8 @@ Currently, there are three events that correspond to workflow trigger 'types':
|
|||||||
3. **Document Updated**: when a document is updated. Similar to 'added' events, triggers can include filtering by content matching,
|
3. **Document Updated**: when a document is updated. Similar to 'added' events, triggers can include filtering by content matching,
|
||||||
tags, doc type, or correspondent.
|
tags, doc type, or correspondent.
|
||||||
4. **Scheduled**: a scheduled trigger that can be used to run workflows at a specific time. The date used can be either the document
|
4. **Scheduled**: a scheduled trigger that can be used to run workflows at a specific time. The date used can be either the document
|
||||||
added, created, updated date or you can specify a (date) custom field. You can also specify a day offset from the date.
|
added, created, updated date or you can specify a (date) custom field. You can also specify a day offset from the date (positive
|
||||||
|
offsets will trigger before the date, negative offsets will trigger after).
|
||||||
|
|
||||||
The following flow diagram illustrates the three document trigger types:
|
The following flow diagram illustrates the three document trigger types:
|
||||||
|
|
||||||
|
mkdocs.yml (28 lines)
@ -11,14 +11,12 @@ theme:
        toggle:
          icon: material/brightness-auto
          name: Switch to light mode

      # Palette toggle for light mode
      - media: "(prefers-color-scheme: light)"
        scheme: default
        toggle:
          icon: material/brightness-7
          name: Switch to dark mode

      # Palette toggle for dark mode
      - media: "(prefers-color-scheme: dark)"
        scheme: slate
@ -60,17 +58,17 @@ markdown_extensions:
      emoji_generator: !!python/name:material.extensions.emoji.to_svg
strict: true
nav:
  - index.md
  - setup.md
  - 'Basic Usage': usage.md
  - configuration.md
  - administration.md
  - advanced_usage.md
  - 'REST API': api.md
  - development.md
  - 'FAQs': faq.md
  - troubleshooting.md
  - changelog.md
copyright: Copyright © 2016 - 2023 Daniel Quinn, Jonas Winkler, and the Paperless-ngx team
extra:
  social:
@ -83,5 +81,5 @@ extra:
plugins:
  - search
  - glightbox:
      skip_classes:
        - no-lightbox
@ -23,13 +23,13 @@ dependencies = [
  "dateparser~=1.2",
  # WARNING: django does not use semver.
  # Only patch versions are guaranteed to not introduce breaking changes.
-  "django~=5.1.7",
+  "django~=5.2.1",
  "django-allauth[socialaccount,mfa]~=65.4.0",
-  "django-auditlog~=3.0.0",
+  "django-auditlog~=3.1.2",
-  "django-celery-results~=2.5.1",
+  "django-celery-results~=2.6.0",
  "django-compression-middleware~=0.5.0",
  "django-cors-headers~=4.7.0",
-  "django-extensions~=3.2.3",
+  "django-extensions~=4.1",
  "django-filter~=25.1",
  "django-guardian~=2.4.0",
  "django-multiselectfield~=0.1.13",
@ -37,11 +37,11 @@ dependencies = [
  "djangorestframework~=3.15",
  "djangorestframework-guardian~=0.3.0",
  "drf-spectacular~=0.28",
-  "drf-spectacular-sidecar~=2025.3.1",
+  "drf-spectacular-sidecar~=2025.5.1",
  "drf-writable-nested~=0.7.1",
-  "filelock~=3.17.0",
+  "filelock~=3.18.0",
  "flower~=2.0.1",
-  "gotenberg-client~=0.9.0",
+  "gotenberg-client~=0.10.0",
  "httpx-oauth~=0.16",
  "imap-tools~=1.10.0",
  "inotifyrecursive~=0.3",
@ -52,12 +52,12 @@ dependencies = [
  "pathvalidate~=3.2.3",
  "pdf2image~=1.17.0",
  "python-dateutil~=2.9.0",
-  "python-dotenv~=1.0.1",
+  "python-dotenv~=1.1.0",
  "python-gnupg~=0.5.4",
  "python-ipware~=3.0.0",
  "python-magic~=0.4.27",
  "pyzbar~=0.1.9",
-  "rapidfuzz~=3.12.1",
+  "rapidfuzz~=3.13.0",
  "redis[hiredis]~=5.2.1",
  "scikit-learn~=1.6.1",
  "setproctitle~=1.3.4",
@ -227,27 +227,9 @@ lint.per-file-ignores."src/documents/tests/test_consumer.py" = [
lint.per-file-ignores."src/documents/tests/test_file_handling.py" = [
  "PTH",
] # TODO Enable & remove
-lint.per-file-ignores."src/documents/tests/test_management.py" = [
-  "PTH",
-] # TODO Enable & remove
-lint.per-file-ignores."src/documents/tests/test_management_consumer.py" = [
-  "PTH",
-] # TODO Enable & remove
-lint.per-file-ignores."src/documents/tests/test_management_exporter.py" = [
-  "PTH",
-] # TODO Enable & remove
lint.per-file-ignores."src/documents/tests/test_migration_archive_files.py" = [
  "PTH",
] # TODO Enable & remove
-lint.per-file-ignores."src/documents/tests/test_migration_document_pages_count.py" = [
-  "PTH",
-] # TODO Enable & remove
-lint.per-file-ignores."src/documents/tests/test_migration_mime_type.py" = [
-  "PTH",
-] # TODO Enable & remove
-lint.per-file-ignores."src/documents/tests/test_sanity_check.py" = [
-  "PTH",
-] # TODO Enable & remove
lint.per-file-ignores."src/documents/views.py" = [
  "PTH",
] # TODO Enable & remove
src-ui/__mocks__/pdfjs-dist.ts (new file, 13 lines)
@ -0,0 +1,13 @@
export const getDocument = jest.fn(() => ({
  promise: Promise.resolve({ numPages: 3 }),
}))

export const GlobalWorkerOptions = { workerSrc: '' }
export const VerbosityLevel = { ERRORS: 0 }

globalThis.pdfjsLib = {
  getDocument,
  GlobalWorkerOptions,
  VerbosityLevel,
  AbortException: class AbortException extends Error {},
}

(File diff suppressed because it is too large.)
@ -7,22 +7,21 @@
    "start": "ng serve",
    "build": "ng build",
    "test": "ng test --no-watch --coverage",
-    "lint": "ng lint",
-    "postinstall": "patch-package"
+    "lint": "ng lint"
  },
  "private": true,
  "dependencies": {
-    "@angular/cdk": "^19.2.7",
+    "@angular/cdk": "^19.2.14",
-    "@angular/common": "~19.2.4",
+    "@angular/common": "~19.2.9",
-    "@angular/compiler": "~19.2.4",
+    "@angular/compiler": "~19.2.9",
-    "@angular/core": "~19.2.4",
+    "@angular/core": "~19.2.9",
-    "@angular/forms": "~19.2.4",
+    "@angular/forms": "~19.2.9",
-    "@angular/localize": "~19.2.4",
+    "@angular/localize": "~19.2.9",
-    "@angular/platform-browser": "~19.2.4",
+    "@angular/platform-browser": "~19.2.9",
-    "@angular/platform-browser-dynamic": "~19.2.4",
+    "@angular/platform-browser-dynamic": "~19.2.9",
-    "@angular/router": "~19.2.4",
+    "@angular/router": "~19.2.9",
    "@ng-bootstrap/ng-bootstrap": "^18.0.0",
-    "@ng-select/ng-select": "^14.2.6",
+    "@ng-select/ng-select": "^14.7.0",
    "@ngneat/dirty-check-forms": "^3.0.3",
    "@popperjs/core": "^2.11.8",
    "bootstrap": "^5.3.3",
@ -33,7 +32,6 @@
    "ngx-color": "^10.0.0",
    "ngx-cookie-service": "^19.1.2",
    "ngx-device-detector": "^9.0.0",
-    "ngx-file-drop": "^16.0.0",
    "ngx-ui-tour-ng-bootstrap": "^16.0.0",
    "rxjs": "^7.8.2",
    "tslib": "^2.8.1",
@ -42,32 +40,31 @@
    "zone.js": "^0.15.0"
  },
  "devDependencies": {
-    "@angular-builders/custom-webpack": "^19.0.0",
+    "@angular-builders/custom-webpack": "^19.0.1",
-    "@angular-builders/jest": "^19.0.0",
+    "@angular-builders/jest": "^19.0.1",
-    "@angular-devkit/build-angular": "^19.2.5",
+    "@angular-devkit/build-angular": "^19.2.10",
-    "@angular-devkit/core": "^19.2.5",
+    "@angular-devkit/core": "^19.2.10",
-    "@angular-devkit/schematics": "^19.2.5",
+    "@angular-devkit/schematics": "^19.2.10",
    "@angular-eslint/builder": "19.3.0",
    "@angular-eslint/eslint-plugin": "19.3.0",
    "@angular-eslint/eslint-plugin-template": "19.3.0",
    "@angular-eslint/schematics": "19.3.0",
    "@angular-eslint/template-parser": "19.3.0",
-    "@angular/cli": "~19.2.5",
+    "@angular/cli": "~19.2.10",
-    "@angular/compiler-cli": "~19.2.4",
+    "@angular/compiler-cli": "~19.2.9",
    "@codecov/webpack-plugin": "^1.9.0",
    "@playwright/test": "^1.51.1",
    "@types/jest": "^29.5.14",
-    "@types/node": "^22.13.17",
+    "@types/node": "^22.15.3",
-    "@typescript-eslint/eslint-plugin": "^8.29.0",
+    "@typescript-eslint/eslint-plugin": "^8.31.1",
-    "@typescript-eslint/parser": "^8.29.0",
+    "@typescript-eslint/parser": "^8.31.1",
-    "@typescript-eslint/utils": "^8.29.0",
+    "@typescript-eslint/utils": "^8.31.1",
-    "eslint": "^9.23.0",
+    "eslint": "^9.25.1",
    "jest": "29.7.0",
    "jest-environment-jsdom": "^29.7.0",
    "jest-junit": "^16.0.0",
-    "jest-preset-angular": "^14.5.4",
+    "jest-preset-angular": "^14.5.5",
    "jest-websocket-mock": "^2.5.0",
-    "patch-package": "^8.0.0",
    "prettier-plugin-organize-imports": "^4.1.0",
    "ts-node": "~10.9.1",
    "typescript": "^5.5.4"
(File diff suppressed because one or more lines are too long.)

src-ui/pnpm-lock.yaml (generated, 2936 lines)
(File diff suppressed because it is too large.)
@ -121,19 +121,4 @@ HTMLCanvasElement.prototype.getContext = <
  typeof HTMLCanvasElement.prototype.getContext
>jest.fn()

-// pdfjs
-jest.mock('pdfjs-dist', () => ({
-  getDocument: jest.fn(() => ({
-    promise: Promise.resolve({ numPages: 3 }),
-  })),
-  GlobalWorkerOptions: { workerSrc: '' },
-  VerbosityLevel: { ERRORS: 0 },
-  globalThis: {
-    pdfjsLib: {
-      GlobalWorkerOptions: {
-        workerSrc: '',
-      },
-    },
-  },
-}))
-jest.mock('pdfjs-dist/web/pdf_viewer', () => ({}))
+jest.mock('pdfjs-dist')
@ -9,7 +9,6 @@ import {
import { Router, RouterModule } from '@angular/router'
import { NgbModalModule } from '@ng-bootstrap/ng-bootstrap'
import { allIcons, NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
-import { NgxFileDropModule } from 'ngx-file-drop'
import { TourNgBootstrapModule, TourService } from 'ngx-ui-tour-ng-bootstrap'
import { Subject } from 'rxjs'
import { routes } from './app-routing.module'
@ -43,7 +42,6 @@ describe('AppComponent', () => {
      imports: [
        TourNgBootstrapModule,
        RouterModule.forRoot(routes),
-        NgxFileDropModule,
        NgbModalModule,
        AppComponent,
        ToastsComponent,
@ -105,9 +105,9 @@ describe('ConfigComponent', () => {

  it('should support JSON validation for e.g. user_args', () => {
    component.configForm.patchValue({ user_args: '{ foo bar }' })
-    expect(component.errors).toEqual({ user_args: 'Invalid JSON' })
+    expect(component.errors['user_args']).toEqual('Invalid JSON')
    component.configForm.patchValue({ user_args: '{ "foo": "bar" }' })
-    expect(component.errors).toEqual({ user_args: null })
+    expect(component.errors['user_args']).toBeNull()
  })

  it('should upload file, show error if necessary', () => {
@ -405,7 +405,7 @@ describe('GlobalSearchComponent', () => {
    expect(toastErrorSpy).toHaveBeenCalled()

    // succeed
-    editDialog.succeeded.emit(true)
+    editDialog.succeeded.emit(object as any)
    expect(toastInfoSpy).toHaveBeenCalled()
  })

@ -456,7 +456,7 @@ describe('GlobalSearchComponent', () => {
    expect(toastErrorSpy).toHaveBeenCalled()

    // succeed
-    editDialog.succeeded.emit(true)
+    editDialog.succeeded.emit(searchResults.tags[0] as any)
    expect(toastInfoSpy).toHaveBeenCalled()
  })

@ -47,7 +47,7 @@ export abstract class EditDialogComponent<
  object: T

  @Output()
-  succeeded = new EventEmitter()
+  succeeded = new EventEmitter<T>()

  @Output()
  failed = new EventEmitter()
@ -123,7 +123,15 @@
<p class="small" i18n>Set scheduled trigger offset and which date field to use.</p>
<div class="row">
  <div class="col-4">
-    <pngx-input-number i18n-title title="Offset days" formControlName="schedule_offset_days" [showAdd]="false" [error]="error?.schedule_offset_days"></pngx-input-number>
+    <pngx-input-number
+      i18n-title
+      title="Offset days"
+      formControlName="schedule_offset_days"
+      [showAdd]="false"
+      [error]="error?.schedule_offset_days"
+      hint="Positive values will trigger the workflow before the date, negative values after."
+      i18n-hint
+    ></pngx-input-number>
  </div>
  <div class="col-4">
    <pngx-input-select i18n-title title="Relative to" formControlName="schedule_date_field" [items]="scheduleDateFieldOptions" [error]="error?.schedule_date_field"></pngx-input-select>
@ -586,6 +586,8 @@ export class FilterableDropdownComponent
        this.selectionModel.reset()
        this.modelIsDirty = false
      }
+      this.selectionModel.singleSelect =
+        this.editing && !this.selectionModel.manyToOne
      this.opened.next(this)
    } else {
      if (this.creating) {
@ -33,7 +33,7 @@
  </ng-template>
</ng-select>
@if (allowCreate && !hideAddButton) {
-  <button class="btn btn-outline-secondary" type="button" (click)="createTag()" [disabled]="disabled">
+  <button class="btn btn-outline-secondary" type="button" (click)="createTag(null, true)" [disabled]="disabled">
    <i-bs width="1.2em" height="1.2em" name="plus"></i-bs>
  </button>
}
@ -130,7 +130,7 @@ export class TagsComponent implements OnInit, ControlValueAccessor {
    }
  }

-  createTag(name: string = null) {
+  createTag(name: string = null, add: boolean = false) {
    var modal = this.modalService.open(TagEditDialogComponent, {
      backdrop: 'static',
    })
@ -143,9 +143,10 @@ export class TagsComponent implements OnInit, ControlValueAccessor {
    return firstValueFrom(
      (modal.componentInstance as TagEditDialogComponent).succeeded.pipe(
        first(),
-        tap(() => {
+        tap((newTag) => {
          this.tagService.listAll().subscribe((tags) => {
            this.tags = tags.results
+            add && this.addTag(newTag.id)
          })
        })
      )
@ -43,7 +43,7 @@
  <a routerLink="/documents/{{doc.id}}" class="btn-link text-dark text-decoration-none py-2 py-md-3" title="Open document" i18n-title>{{doc.added | customDate}}</a>
}
@case (DisplayField.CREATED) {
-  <a routerLink="/documents/{{doc.id}}" class="btn-link text-dark text-decoration-none py-2 py-md-3" title="Open document" i18n-title>{{doc.created_date | customDate}}</a>
+  <a routerLink="/documents/{{doc.id}}" class="btn-link text-dark text-decoration-none py-2 py-md-3" title="Open document" i18n-title>{{doc.created | customDate}}</a>
}
@case (DisplayField.TITLE) {
  <a routerLink="/documents/{{doc.id}}" title="Open document" i18n-title class="btn-link text-dark text-decoration-none py-2 py-md-3">{{doc.title | documentTitle}}</a>
@ -82,10 +82,20 @@ describe('UploadFileWidgetComponent', () => {
  })

  it('should upload files', () => {
-    const uploadSpy = jest.spyOn(uploadDocumentsService, 'uploadFiles')
-    fixture.debugElement
-      .query(By.css('input'))
-      .nativeElement.dispatchEvent(new Event('change'))
+    const uploadSpy = jest.spyOn(uploadDocumentsService, 'uploadFile')
+    const file = new File(
+      [new Blob(['testing'], { type: 'application/pdf' })],
+      'file.pdf'
+    )
+    const fileInput = fixture.debugElement.query(By.css('input'))
+    jest.spyOn(fileInput.nativeElement, 'files', 'get').mockReturnValue({
+      item: () => file,
+      length: 1,
+      [Symbol.iterator]: () => ({
+        next: () => ({ done: false, value: file }),
+      }),
+    } as any)
+    fileInput.nativeElement.dispatchEvent(new Event('change'))
    expect(uploadSpy).toHaveBeenCalled()
  })

@ -134,9 +134,11 @@ export class UploadFileWidgetComponent extends ComponentWithPermissions {
  }

  public onFileSelected(event: Event) {
-    this.uploadDocumentsService.uploadFiles(
-      (event.target as HTMLInputElement).files
-    )
+    const files = (event.target as HTMLInputElement).files
+    for (let i = 0; i < files?.length; i++) {
+      const file = files.item(i)
+      file && this.uploadDocumentsService.uploadFile(file)
+    }
  }

  get slimSidebarEnabled(): boolean {
@ -9,9 +9,9 @@
}
<div class="input-group input-group-sm me-md-5 d-none d-md-flex">
  <button class="btn btn-outline-secondary" (click)="decreaseZoom()" i18n>-</button>
-  <select class="form-select" (change)="setZoom($event.target.value)">
+  <select class="form-select" (change)="setZoom($event.target.value)" [ngModel]="currentZoom">
    @for (setting of zoomSettings; track setting) {
-      <option [value]="setting" [attr.selected]="isZoomSelected(setting) ? 'selected' : null">
+      <option [value]="setting">
        {{ getZoomSettingTitle(setting) }}
      </option>
    }
@ -129,8 +129,8 @@
<div>
  <pngx-input-text #inputTitle i18n-title title="Title" formControlName="title" [horizontal]="true" (keyup)="titleKeyUp($event)" [error]="error?.title"></pngx-input-text>
  <pngx-input-number i18n-title title="Archive serial number" [error]="error?.archive_serial_number" [horizontal]="true" formControlName='archive_serial_number'></pngx-input-number>
-  <pngx-input-date i18n-title title="Date created" formControlName="created_date" [suggestions]="suggestions?.dates" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event)"
-    [error]="error?.created_date"></pngx-input-date>
+  <pngx-input-date i18n-title title="Date created" formControlName="created" [suggestions]="suggestions?.dates" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event)"
+    [error]="error?.created"></pngx-input-date>
  <pngx-input-select [items]="correspondents" i18n-title title="Correspondent" formControlName="correspondent" [allowNull]="true" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.Correspondent)"
    (createNew)="createCorrespondent($event)" [hideAddButton]="createDisabled(DataType.Correspondent)" [suggestions]="suggestions?.correspondents" *pngxIfPermissions="{ action: PermissionAction.View, type: PermissionType.Correspondent }"></pngx-input-select>
  <pngx-input-select [items]="documentTypes" i18n-title title="Document type" formControlName="document_type" [allowNull]="true" [showFilter]="true" [horizontal]="true" (filterDocuments)="filterDocuments($event, DataType.DocumentType)"
@ -456,11 +456,11 @@ describe('DocumentDetailComponent', () => {
    initNormally()
    component.title = 'Foo Bar'
    const closeSpy = jest.spyOn(component, 'close')
-    const updateSpy = jest.spyOn(documentService, 'update')
+    const patchSpy = jest.spyOn(documentService, 'patch')
    const toastSpy = jest.spyOn(toastService, 'showInfo')
-    updateSpy.mockImplementation((o) => of(doc))
+    patchSpy.mockImplementation((o) => of(doc))
    component.save(true)
-    expect(updateSpy).toHaveBeenCalled()
+    expect(patchSpy).toHaveBeenCalled()
    expect(closeSpy).toHaveBeenCalled()
    expect(toastSpy).toHaveBeenCalledWith(
      'Document "Doc 3" saved successfully.'
|
|||||||
initNormally()
|
initNormally()
|
||||||
component.title = 'Foo Bar'
|
component.title = 'Foo Bar'
|
||||||
const closeSpy = jest.spyOn(component, 'close')
|
const closeSpy = jest.spyOn(component, 'close')
|
||||||
const updateSpy = jest.spyOn(documentService, 'update')
|
const patchSpy = jest.spyOn(documentService, 'patch')
|
||||||
const toastSpy = jest.spyOn(toastService, 'showInfo')
|
const toastSpy = jest.spyOn(toastService, 'showInfo')
|
||||||
updateSpy.mockImplementation((o) => of(doc))
|
patchSpy.mockImplementation((o) => of(doc))
|
||||||
component.save()
|
component.save()
|
||||||
expect(updateSpy).toHaveBeenCalled()
|
expect(patchSpy).toHaveBeenCalled()
|
||||||
expect(closeSpy).not.toHaveBeenCalled()
|
expect(closeSpy).not.toHaveBeenCalled()
|
||||||
expect(toastSpy).toHaveBeenCalledWith(
|
expect(toastSpy).toHaveBeenCalledWith(
|
||||||
'Document "Doc 3" saved successfully.'
|
'Document "Doc 3" saved successfully.'
|
||||||
@ -487,12 +487,12 @@ describe('DocumentDetailComponent', () => {
    initNormally()
    component.title = 'Foo Bar'
    const closeSpy = jest.spyOn(component, 'close')
-    const updateSpy = jest.spyOn(documentService, 'update')
+    const patchSpy = jest.spyOn(documentService, 'patch')
    const toastSpy = jest.spyOn(toastService, 'showError')
    const error = new Error('failed to save')
-    updateSpy.mockImplementation(() => throwError(() => error))
+    patchSpy.mockImplementation(() => throwError(() => error))
    component.save()
-    expect(updateSpy).toHaveBeenCalled()
+    expect(patchSpy).toHaveBeenCalled()
    expect(closeSpy).not.toHaveBeenCalled()
    expect(toastSpy).toHaveBeenCalledWith(
      'Error saving document "Doc 3"',
|
|||||||
initNormally()
|
initNormally()
|
||||||
component.title = 'Foo Bar'
|
component.title = 'Foo Bar'
|
||||||
const closeSpy = jest.spyOn(component, 'close')
|
const closeSpy = jest.spyOn(component, 'close')
|
||||||
const updateSpy = jest.spyOn(documentService, 'update')
|
const patchSpy = jest.spyOn(documentService, 'patch')
|
||||||
const toastSpy = jest.spyOn(toastService, 'showInfo')
|
const toastSpy = jest.spyOn(toastService, 'showInfo')
|
||||||
updateSpy.mockImplementation(() =>
|
patchSpy.mockImplementation(() =>
|
||||||
throwError(() => new Error('failed to save'))
|
throwError(() => new Error('failed to save'))
|
||||||
)
|
)
|
||||||
component.save(true)
|
component.save(true)
|
||||||
expect(updateSpy).toHaveBeenCalled()
|
expect(patchSpy).toHaveBeenCalled()
|
||||||
expect(closeSpy).toHaveBeenCalled()
|
expect(closeSpy).toHaveBeenCalled()
|
||||||
expect(toastSpy).toHaveBeenCalledWith(
|
expect(toastSpy).toHaveBeenCalledWith(
|
||||||
'Document "Doc 3" saved successfully.'
|
'Document "Doc 3" saved successfully.'
|
||||||
@ -522,8 +522,8 @@ describe('DocumentDetailComponent', () => {
|
|||||||
initNormally()
|
initNormally()
|
||||||
const nextDocId = 100
|
const nextDocId = 100
|
||||||
component.title = 'Foo Bar'
|
component.title = 'Foo Bar'
|
||||||
const updateSpy = jest.spyOn(documentService, 'update')
|
const patchSpy = jest.spyOn(documentService, 'patch')
|
||||||
updateSpy.mockReturnValue(of(doc))
|
patchSpy.mockReturnValue(of(doc))
|
||||||
const nextSpy = jest.spyOn(documentListViewService, 'getNext')
|
const nextSpy = jest.spyOn(documentListViewService, 'getNext')
|
||||||
nextSpy.mockReturnValue(of(nextDocId))
|
nextSpy.mockReturnValue(of(nextDocId))
|
||||||
const closeSpy = jest.spyOn(openDocumentsService, 'closeDocument')
|
const closeSpy = jest.spyOn(openDocumentsService, 'closeDocument')
|
||||||
@ -531,7 +531,7 @@ describe('DocumentDetailComponent', () => {
|
|||||||
const navigateSpy = jest.spyOn(router, 'navigate')
|
const navigateSpy = jest.spyOn(router, 'navigate')
|
||||||
|
|
||||||
component.saveEditNext()
|
component.saveEditNext()
|
||||||
expect(updateSpy).toHaveBeenCalled()
|
expect(patchSpy).toHaveBeenCalled()
|
||||||
expect(navigateSpy).toHaveBeenCalledWith(['documents', nextDocId])
|
expect(navigateSpy).toHaveBeenCalledWith(['documents', nextDocId])
|
||||||
expect
|
expect
|
||||||
})
|
})
|
||||||
@ -541,12 +541,12 @@ describe('DocumentDetailComponent', () => {
|
|||||||
initNormally()
|
initNormally()
|
||||||
component.title = 'Foo Bar'
|
component.title = 'Foo Bar'
|
||||||
const closeSpy = jest.spyOn(component, 'close')
|
const closeSpy = jest.spyOn(component, 'close')
|
||||||
const updateSpy = jest.spyOn(documentService, 'update')
|
const patchSpy = jest.spyOn(documentService, 'patch')
|
||||||
const toastSpy = jest.spyOn(toastService, 'showError')
|
const toastSpy = jest.spyOn(toastService, 'showError')
|
||||||
const error = new Error('failed to save')
|
const error = new Error('failed to save')
|
||||||
updateSpy.mockImplementation(() => throwError(() => error))
|
patchSpy.mockImplementation(() => throwError(() => error))
|
||||||
component.saveEditNext()
|
component.saveEditNext()
|
||||||
expect(updateSpy).toHaveBeenCalled()
|
expect(patchSpy).toHaveBeenCalled()
|
||||||
expect(closeSpy).not.toHaveBeenCalled()
|
expect(closeSpy).not.toHaveBeenCalled()
|
||||||
expect(toastSpy).toHaveBeenCalledWith('Error saving document', error)
|
expect(toastSpy).toHaveBeenCalledWith('Error saving document', error)
|
||||||
})
|
})
|
||||||
@ -791,14 +791,9 @@ describe('DocumentDetailComponent', () => {
|
|||||||
it('should select correct zoom setting in dropdown', () => {
|
it('should select correct zoom setting in dropdown', () => {
|
||||||
initNormally()
|
initNormally()
|
||||||
component.setZoom(ZoomSetting.PageFit)
|
component.setZoom(ZoomSetting.PageFit)
|
||||||
expect(component.isZoomSelected(ZoomSetting.PageFit)).toBeTruthy()
|
expect(component.currentZoom).toEqual(ZoomSetting.PageFit)
|
||||||
expect(component.isZoomSelected(ZoomSetting.One)).toBeFalsy()
|
|
||||||
component.setZoom(ZoomSetting.PageWidth)
|
|
||||||
expect(component.isZoomSelected(ZoomSetting.One)).toBeTruthy()
|
|
||||||
expect(component.isZoomSelected(ZoomSetting.PageFit)).toBeFalsy()
|
|
||||||
component.setZoom(ZoomSetting.Quarter)
|
component.setZoom(ZoomSetting.Quarter)
|
||||||
expect(component.isZoomSelected(ZoomSetting.Quarter)).toBeTruthy()
|
expect(component.currentZoom).toEqual(ZoomSetting.Quarter)
|
||||||
expect(component.isZoomSelected(ZoomSetting.PageFit)).toBeFalsy()
|
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should support updating notes dynamically', () => {
|
it('should support updating notes dynamically', () => {
|
||||||
@ -970,10 +965,10 @@ describe('DocumentDetailComponent', () => {
|
|||||||
expect(fixture.debugElement.nativeElement.textContent).toContain(
|
expect(fixture.debugElement.nativeElement.textContent).toContain(
|
||||||
customFields[1].name
|
customFields[1].name
|
||||||
)
|
)
|
||||||
const updateSpy = jest.spyOn(documentService, 'update')
|
const patchSpy = jest.spyOn(documentService, 'patch')
|
||||||
component.save(true)
|
component.save(true)
|
||||||
expect(updateSpy.mock.lastCall[0].custom_fields).toHaveLength(2)
|
expect(patchSpy.mock.lastCall[0].custom_fields).toHaveLength(2)
|
||||||
expect(updateSpy.mock.lastCall[0].custom_fields[1]).toEqual({
|
expect(patchSpy.mock.lastCall[0].custom_fields[1]).toEqual({
|
||||||
field: customFields[1].id,
|
field: customFields[1].id,
|
||||||
value: null,
|
value: null,
|
||||||
})
|
})
|
||||||
@ -990,13 +985,51 @@ describe('DocumentDetailComponent', () => {
|
|||||||
expect(
|
expect(
|
||||||
fixture.debugElement.query(By.css('form')).nativeElement.textContent
|
fixture.debugElement.query(By.css('form')).nativeElement.textContent
|
||||||
).not.toContain('Field 1')
|
).not.toContain('Field 1')
|
||||||
const updateSpy = jest.spyOn(documentService, 'update')
|
const patchSpy = jest.spyOn(documentService, 'patch')
|
||||||
component.save(true)
|
component.save(true)
|
||||||
expect(updateSpy.mock.lastCall[0].custom_fields).toHaveLength(
|
expect(patchSpy.mock.lastCall[0].custom_fields).toHaveLength(
|
||||||
initialLength - 1
|
initialLength - 1
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
it('should correctly determine changed fields', () => {
|
||||||
|
initNormally()
|
||||||
|
expect(component['getChangedFields']()).toEqual({
|
||||||
|
id: doc.id,
|
||||||
|
})
|
||||||
|
component.documentForm.get('title').setValue('Foo Bar')
|
||||||
|
component.documentForm.get('permissions_form').setValue({
|
||||||
|
owner: 1,
|
||||||
|
set_permissions: {
|
||||||
|
view: {
|
||||||
|
users: [2],
|
||||||
|
groups: [],
|
||||||
|
},
|
||||||
|
change: {
|
||||||
|
users: [3],
|
||||||
|
groups: [],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
component.documentForm.get('title').markAsDirty()
|
||||||
|
component.documentForm.get('permissions_form').markAsDirty()
|
||||||
|
expect(component['getChangedFields']()).toEqual({
|
||||||
|
id: doc.id,
|
||||||
|
title: 'Foo Bar',
|
||||||
|
owner: 1,
|
||||||
|
set_permissions: {
|
||||||
|
view: {
|
||||||
|
users: [2],
|
||||||
|
groups: [],
|
||||||
|
},
|
||||||
|
change: {
|
||||||
|
users: [3],
|
||||||
|
groups: [],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
it('should show custom field errors', () => {
|
it('should show custom field errors', () => {
|
||||||
initNormally()
|
initNormally()
|
||||||
component.error = {
|
component.error = {
|
||||||
|
@ -208,7 +208,7 @@ export class DocumentDetailComponent
  documentForm: FormGroup = new FormGroup({
    title: new FormControl(''),
    content: new FormControl(''),
-    created_date: new FormControl(),
+    created: new FormControl(),
    correspondent: new FormControl(),
    document_type: new FormControl(),
    storage_path: new FormControl(),
|
|||||||
this.store = new BehaviorSubject({
|
this.store = new BehaviorSubject({
|
||||||
title: doc.title,
|
title: doc.title,
|
||||||
content: doc.content,
|
content: doc.content,
|
||||||
created_date: doc.created_date,
|
created: doc.created,
|
||||||
correspondent: doc.correspondent,
|
correspondent: doc.correspondent,
|
||||||
document_type: doc.document_type,
|
document_type: doc.document_type,
|
||||||
storage_path: doc.storage_path,
|
storage_path: doc.storage_path,
|
||||||
@ -784,6 +784,7 @@ export class DocumentDetailComponent
|
|||||||
this.title = doc.title
|
this.title = doc.title
|
||||||
this.updateFormForCustomFields()
|
this.updateFormForCustomFields()
|
||||||
this.documentForm.patchValue(doc)
|
this.documentForm.patchValue(doc)
|
||||||
|
this.documentForm.markAsPristine()
|
||||||
this.openDocumentService.setDirty(doc, false)
|
this.openDocumentService.setDirty(doc, false)
|
||||||
},
|
},
|
||||||
error: () => {
|
error: () => {
|
||||||
@ -794,11 +795,30 @@ export class DocumentDetailComponent
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private getChangedFields(): any {
|
||||||
|
const changes = {
|
||||||
|
id: this.document.id,
|
||||||
|
}
|
||||||
|
Object.keys(this.documentForm.controls).forEach((key) => {
|
||||||
|
if (this.documentForm.get(key).dirty) {
|
||||||
|
if (key === 'permissions_form') {
|
||||||
|
changes['owner'] =
|
||||||
|
this.documentForm.get('permissions_form').value['owner']
|
||||||
|
changes['set_permissions'] =
|
||||||
|
this.documentForm.get('permissions_form').value['set_permissions']
|
||||||
|
} else {
|
||||||
|
changes[key] = this.documentForm.get(key).value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
return changes
|
||||||
|
}
|
||||||
|
|
||||||
save(close: boolean = false) {
|
save(close: boolean = false) {
|
||||||
this.networkActive = true
|
this.networkActive = true
|
||||||
;(document.activeElement as HTMLElement)?.dispatchEvent(new Event('change'))
|
;(document.activeElement as HTMLElement)?.dispatchEvent(new Event('change'))
|
||||||
this.documentsService
|
this.documentsService
|
||||||
.update(this.document)
|
.patch(this.getChangedFields())
|
||||||
.pipe(first())
|
.pipe(first())
|
||||||
.subscribe({
|
.subscribe({
|
||||||
next: (docValues) => {
|
next: (docValues) => {
|
||||||
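Editorial aside (not part of the commit; values hypothetical): with the change above, saving a document whose only dirty control is `title` sends a partial payload instead of the full form value, roughly:

```typescript
// Hypothetical sketch of the payload produced by getChangedFields() when only
// the title control has been edited; the id is always included so the backend
// knows which document the PATCH applies to.
const changes = {
  id: 1204,
  title: 'Foo Bar',
}
// documentsService.patch(changes) then issues an HTTP PATCH with just these
// fields, instead of the full-document update that .update() performed before.
```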
@@ -852,7 +872,7 @@ export class DocumentDetailComponent
     this.networkActive = true
     this.store.next(this.documentForm.value)
     this.documentsService
-      .update(this.document)
+      .patch(this.getChangedFields())
       .pipe(
         switchMap((updateResult) => {
           return this.documentListViewService
@@ -1099,12 +1119,10 @@ export class DocumentDetailComponent
     )
   }

-  isZoomSelected(setting: ZoomSetting): boolean {
+  get currentZoom() {
     if (this.previewZoomScale === ZoomSetting.PageFit) {
-      return setting === ZoomSetting.PageFit
-    }
-
-    return this.previewZoomSetting === setting
+      return ZoomSetting.PageFit
+    } else return this.previewZoomSetting
   }

   getZoomSettingTitle(setting: ZoomSetting): string {
@@ -1305,6 +1323,8 @@ export class DocumentDetailComponent
       created: new Date(),
     })
     this.updateFormForCustomFields(true)
+    this.documentForm.get('custom_fields').markAsDirty()
+    this.documentForm.updateValueAndValidity()
   }

   public removeField(fieldInstance: CustomFieldInstance) {
@@ -1313,6 +1333,7 @@ export class DocumentDetailComponent
       1
     )
     this.updateFormForCustomFields(true)
+    this.documentForm.get('custom_fields').markAsDirty()
     this.documentForm.updateValueAndValidity()
   }

@@ -112,14 +112,14 @@
   @if (displayFields.includes(DisplayField.CREATED) || displayFields.includes(DisplayField.ADDED)) {
     <ng-template #dateTooltip>
       <div class="d-flex flex-column text-light">
-        <span i18n>Created: {{ document.created_date | customDate }}</span>
+        <span i18n>Created: {{ document.created | customDate }}</span>
         <span i18n>Added: {{ document.added | customDate }}</span>
         <span i18n>Modified: {{ document.modified | customDate }}</span>
       </div>
     </ng-template>
     @if (displayFields.includes(DisplayField.CREATED)) {
       <div class="list-group-item bg-light text-dark p-1 border-0 d-flex align-items-center" [ngbTooltip]="dateTooltip">
-        <i-bs width=".9em" height=".9em" class="me-2 text-muted" name="calendar-event"></i-bs><small>{{document.created_date | customDate:'mediumDate'}}</small>
+        <i-bs width=".9em" height=".9em" class="me-2 text-muted" name="calendar-event"></i-bs><small>{{document.created | customDate:'mediumDate'}}</small>
       </div>
     }
     @if (displayFields.includes(DisplayField.ADDED)) {
@@ -73,14 +73,14 @@
   <div class="list-group-item bg-transparent p-0 border-0 d-flex flex-wrap-reverse justify-content-between">
     <ng-template #dateTooltip>
       <div class="d-flex flex-column text-light">
-        <span i18n>Created: {{ document.created_date | customDate }}</span>
+        <span i18n>Created: {{ document.created | customDate }}</span>
         <span i18n>Added: {{ document.added | customDate }}</span>
         <span i18n>Modified: {{ document.modified | customDate }}</span>
       </div>
     </ng-template>
     <div class="ps-0 p-1" placement="top" [ngbTooltip]="dateTooltip">
       <i-bs width="1em" height="1em" class="me-2 text-muted" name="calendar-event"></i-bs>
-      <small>{{document.created_date | customDate:'mediumDate'}}</small>
+      <small>{{document.created | customDate:'mediumDate'}}</small>
     </div>
   </div>
 }
@@ -348,7 +348,7 @@
   }
   @if (activeDisplayFields.includes(DisplayField.CREATED)) {
     <td>
-      {{d.created_date | customDate}}
+      {{d.created | customDate}}
     </td>
   }
   @if (activeDisplayFields.includes(DisplayField.ADDED)) {
@@ -2,13 +2,6 @@
   <ng-content select="[content]"></ng-content>
 </div>

-<div class="global-dropzone-overlay position-fixed top-0 start-0 bottom-0 end-0 text-center pe-none fade" [class.show]="fileIsOver" [class.hide]="hidden">
+<div class="global-dropzone-overlay position-fixed top-0 start-0 bottom-0 end-0 text-center pe-none" [class.active]="fileIsOver && !hidden">
   <h2 class="pe-none position-absolute top-50 start-50 translate-middle" i18n>Drop files to begin upload</h2>
 </div>
-
-<ngx-file-drop
-  dropZoneClassName="visually-hidden"
-  contentClassName="visually-hidden"
-  (onFileDrop)="dropped($event)"
-  #ngxFileDrop>
-</ngx-file-drop>
@@ -1,8 +1,14 @@
 .global-dropzone-overlay {
+  opacity: 0;
+  transition: opacity 0.25s ease-in-out;
   background-color: hsla(var(--pngx-primary), var(--pngx-primary-lightness), .8);
   z-index: 1200;

   h2 {
     color: var(--pngx-primary-text-contrast)
   }
+
+  &.active {
+    opacity: 1;
+  }
 }
@@ -9,7 +9,6 @@ import {
   tick,
 } from '@angular/core/testing'
 import { By } from '@angular/platform-browser'
-import { NgxFileDropEntry, NgxFileDropModule } from 'ngx-file-drop'
 import { PermissionsService } from 'src/app/services/permissions.service'
 import { SettingsService } from 'src/app/services/settings.service'
 import { ToastService } from 'src/app/services/toast.service'
@@ -27,7 +26,7 @@ describe('FileDropComponent', () => {

   beforeEach(() => {
     TestBed.configureTestingModule({
-      imports: [NgxFileDropModule, FileDropComponent, ToastsComponent],
+      imports: [FileDropComponent, ToastsComponent],
       providers: [
         provideHttpClient(withInterceptorsFromDi()),
         provideHttpClientTesting(),
@@ -66,12 +65,12 @@ describe('FileDropComponent', () => {
     const dropzone = fixture.debugElement.query(
       By.css('.global-dropzone-overlay')
     )
-    expect(dropzone.classes['hide']).toBeTruthy()
+    expect(dropzone.classes['active']).toBeFalsy()
     component.onDragLeave(new Event('dragleave') as DragEvent)
     tick(700)
     fixture.detectChanges()
     // drop
-    const uploadSpy = jest.spyOn(uploadDocumentsService, 'uploadFiles')
+    const uploadSpy = jest.spyOn(uploadDocumentsService, 'uploadFile')
     const dragEvent = new Event('drop')
     dragEvent['dataTransfer'] = {
       files: {
@@ -93,53 +92,209 @@ describe('FileDropComponent', () => {
     tick(1)
     fixture.detectChanges()
     expect(component.fileIsOver).toBeTruthy()
-    const dropzone = fixture.debugElement.query(
-      By.css('.global-dropzone-overlay')
-    )
     component.onDragLeave(new Event('dragleave') as DragEvent)
     tick(700)
     fixture.detectChanges()
-    expect(dropzone.classes['hide']).toBeTruthy()
     // drop
     const toastSpy = jest.spyOn(toastService, 'show')
-    const uploadSpy = jest.spyOn(
-      UploadDocumentsService.prototype as any,
-      'uploadFile'
+    const uploadSpy = jest.spyOn(uploadDocumentsService, 'uploadFile')
+    const file = new File(
+      [new Blob(['testing'], { type: 'application/pdf' })],
+      'file.pdf'
     )
     const dragEvent = new Event('drop')
     dragEvent['dataTransfer'] = {
-      files: {
-        item: () => {
-          return new File(
-            [new Blob(['testing'], { type: 'application/pdf' })],
-            'file.pdf'
-          )
+      items: [
+        {
+          kind: 'file',
+          type: 'application/pdf',
+          getAsFile: () => file,
         },
-        length: 1,
-      } as unknown as FileList,
+      ],
     }
     component.onDrop(dragEvent as DragEvent)
-    component.dropped([
-      {
-        fileEntry: {
-          isFile: true,
-          file: (callback) => {
-            callback(
-              new File(
-                [new Blob(['testing'], { type: 'application/pdf' })],
-                'file.pdf'
-              )
-            )
-          },
-        },
-      } as unknown as NgxFileDropEntry,
-    ])
     tick(3000)
     expect(toastSpy).toHaveBeenCalled()
     expect(uploadSpy).toHaveBeenCalled()
     discardPeriodicTasks()
   }))
+
+  it('should support drag drop, initiate upload with webkitGetAsEntry', fakeAsync(() => {
+    jest.spyOn(permissionsService, 'currentUserCan').mockReturnValue(true)
+    expect(component.fileIsOver).toBeFalsy()
+    const overEvent = new Event('dragover') as DragEvent
+    ;(overEvent as any).dataTransfer = { types: ['Files'] }
+    component.onDragOver(overEvent)
+    tick(1)
+    fixture.detectChanges()
+    expect(component.fileIsOver).toBeTruthy()
+    component.onDragLeave(new Event('dragleave') as DragEvent)
+    tick(700)
+    fixture.detectChanges()
+    // drop
+    const toastSpy = jest.spyOn(toastService, 'show')
+    const uploadSpy = jest.spyOn(uploadDocumentsService, 'uploadFile')
+    const file = new File(
+      [new Blob(['testing'], { type: 'application/pdf' })],
+      'file.pdf'
+    )
+    const dragEvent = new Event('drop')
+    dragEvent['dataTransfer'] = {
+      items: [
+        {
+          kind: 'file',
+          type: 'application/pdf',
+          webkitGetAsEntry: () => ({
+            isFile: true,
+            isDirectory: false,
+            file: (cb: (file: File) => void) => cb(file),
+          }),
+        },
+      ],
+      files: [],
+    }
+    component.onDrop(dragEvent as DragEvent)
+    tick(3000)
+    expect(toastSpy).toHaveBeenCalled()
+    expect(uploadSpy).toHaveBeenCalled()
+    discardPeriodicTasks()
+  }))
+
+  it('should show an error on traverseFileTree error', fakeAsync(() => {
+    jest.spyOn(permissionsService, 'currentUserCan').mockReturnValue(true)
+    const toastSpy = jest.spyOn(toastService, 'showError')
+    const traverseSpy = jest
+      .spyOn(component as any, 'traverseFileTree')
+      .mockReturnValue(Promise.reject(new Error('Error traversing file tree')))
+    fixture.detectChanges()
+
+    // Simulate a drop with a directory entry
+    const mockEntry = {
+      isDirectory: true,
+      isFile: false,
+      createReader: () => ({ readEntries: jest.fn() }),
+    } as unknown as FileSystemDirectoryEntry
+
+    const event = {
+      preventDefault: () => {},
+      stopImmediatePropagation: () => {},
+      dataTransfer: {
+        items: [
+          {
+            kind: 'file',
+            webkitGetAsEntry: () => mockEntry,
+          },
+        ],
+      },
+    } as unknown as DragEvent
+
+    component.onDrop(event)
+
+    tick() // flush microtasks (e.g., Promise.reject)
+
+    expect(traverseSpy).toHaveBeenCalled()
+    expect(toastSpy).toHaveBeenCalledWith(
+      $localize`Failed to read dropped items: Error traversing file tree`
+    )
+
+    discardPeriodicTasks()
+  }))
+
+  it('should support drag drop, initiate upload without DataTransfer API support', fakeAsync(() => {
+    jest.spyOn(permissionsService, 'currentUserCan').mockReturnValue(true)
+    expect(component.fileIsOver).toBeFalsy()
+    const overEvent = new Event('dragover') as DragEvent
+    ;(overEvent as any).dataTransfer = { types: ['Files'] }
+    component.onDragOver(overEvent)
+    tick(1)
+    fixture.detectChanges()
+    expect(component.fileIsOver).toBeTruthy()
+    component.onDragLeave(new Event('dragleave') as DragEvent)
+    tick(700)
+    fixture.detectChanges()
+    // drop
+    const toastSpy = jest.spyOn(toastService, 'show')
+    const uploadSpy = jest.spyOn(uploadDocumentsService, 'uploadFile')
+    const file = new File(
+      [new Blob(['testing'], { type: 'application/pdf' })],
+      'file.pdf'
+    )
+    const dragEvent = new Event('drop')
+    dragEvent['dataTransfer'] = {
+      items: [],
+      files: [file],
+    }
+    component.onDrop(dragEvent as DragEvent)
+    tick(3000)
+    expect(toastSpy).toHaveBeenCalled()
+    expect(uploadSpy).toHaveBeenCalled()
+    discardPeriodicTasks()
+  }))
+
+  it('should resolve a single file when entry isFile', () => {
+    const mockFile = new File(['data'], 'test.txt', { type: 'text/plain' })
+    const mockEntry = {
+      isFile: true,
+      isDirectory: false,
+      file: (cb: (f: File) => void) => cb(mockFile),
+    } as unknown as FileSystemFileEntry
+
+    return (component as any)
+      .traverseFileTree(mockEntry)
+      .then((result: File[]) => {
+        expect(result).toEqual([mockFile])
+      })
+  })
+
+  it('should resolve all files in a flat directory', async () => {
+    const file1 = new File(['data'], 'file1.txt')
+    const file2 = new File(['data'], 'file2.txt')
+
+    const mockFileEntry1 = {
+      isFile: true,
+      isDirectory: false,
+      file: (cb: (f: File) => void) => cb(file1),
+    } as unknown as FileSystemFileEntry
+
+    const mockFileEntry2 = {
+      isFile: true,
+      isDirectory: false,
+      file: (cb: (f: File) => void) => cb(file2),
+    } as unknown as FileSystemFileEntry
+
+    let callCount = 0
+
+    const mockDirEntry = {
+      isFile: false,
+      isDirectory: true,
+      createReader: () => ({
+        readEntries: (cb: (batch: FileSystemEntry[]) => void) => {
+          if (callCount++ === 0) {
+            cb([mockFileEntry1, mockFileEntry2])
+          } else {
+            cb([]) // second call: signal EOF
+          }
+        },
+      }),
+    } as unknown as FileSystemDirectoryEntry
+
+    const result = await (component as any).traverseFileTree(mockDirEntry)
+    expect(result).toEqual([file1, file2])
+  })
+
+  it('should resolve a non-file non-directory entry as an empty array', () => {
+    const mockEntry = {
+      isFile: false,
+      isDirectory: false,
+      file: (cb: (f: File) => void) => cb(new File([], '')),
+    } as unknown as FileSystemEntry
+    return (component as any)
+      .traverseFileTree(mockEntry)
+      .then((result: File[]) => {
+        expect(result).toEqual([])
+      })
+  })
+
   it('should ignore events if disabled', fakeAsync(() => {
     settingsService.globalDropzoneEnabled = false
     expect(settingsService.globalDropzoneActive).toBeFalsy()
@@ -1,9 +1,4 @@
-import { Component, HostListener, ViewChild } from '@angular/core'
-import {
-  NgxFileDropComponent,
-  NgxFileDropEntry,
-  NgxFileDropModule,
-} from 'ngx-file-drop'
+import { Component, HostListener } from '@angular/core'
 import {
   PermissionAction,
   PermissionsService,
@@ -17,7 +12,7 @@ import { UploadDocumentsService } from 'src/app/services/upload-documents.servic
   selector: 'pngx-file-drop',
   templateUrl: './file-drop.component.html',
   styleUrls: ['./file-drop.component.scss'],
-  imports: [NgxFileDropModule],
+  imports: [],
 })
 export class FileDropComponent {
   private fileLeaveTimeoutID: any
@@ -41,8 +36,6 @@ export class FileDropComponent {
     )
   }

-  @ViewChild('ngxFileDrop') ngxFileDrop: NgxFileDropComponent
-
   @HostListener('document:dragover', ['$event']) onDragOver(event: DragEvent) {
     if (!this.dragDropEnabled || !event.dataTransfer?.types?.includes('Files'))
       return
@@ -78,19 +71,85 @@ export class FileDropComponent {
     }, ms)
   }

+  private traverseFileTree(entry: FileSystemEntry): Promise<File[]> {
+    if (entry.isFile) {
+      return new Promise((resolve, reject) => {
+        ;(entry as FileSystemFileEntry).file(resolve, reject)
+      }).then((file: File) => [file])
+    }
+
+    if (entry.isDirectory) {
+      return new Promise<File[]>((resolve, reject) => {
+        const dirReader = (entry as FileSystemDirectoryEntry).createReader()
+        const allEntries: FileSystemEntry[] = []
+
+        const readEntries = () => {
+          dirReader.readEntries((batch) => {
+            if (batch.length === 0) {
+              const promises = allEntries.map((child) =>
+                this.traverseFileTree(child)
+              )
+              Promise.all(promises)
+                .then((results) => resolve([].concat(...results)))
+                .catch(reject)
+            } else {
+              allEntries.push(...batch)
+              readEntries() // keep reading
+            }
+          }, reject)
+        }
+
+        readEntries()
+      })
+    }
+
+    return Promise.resolve([])
+  }
+
   @HostListener('document:drop', ['$event']) public onDrop(event: DragEvent) {
     if (!this.dragDropEnabled) return
     event.preventDefault()
     event.stopImmediatePropagation()
-    // pass event onto ngx-file-drop to handle files
-    this.ngxFileDrop.dropFiles(event)
-    this.onDragLeave(event, true)
-  }
-
-  public dropped(files: NgxFileDropEntry[]) {
-    this.uploadDocumentsService.onNgxFileDrop(files)
-    if (files.length > 0)
+    const files: File[] = []
+    const entries: FileSystemEntry[] = []
+    if (event.dataTransfer?.items && event.dataTransfer.items.length) {
+      for (const item of Array.from(event.dataTransfer.items)) {
+        if (item.webkitGetAsEntry) {
+          // webkitGetAsEntry not standard, but is widely supported
+          const entry = item.webkitGetAsEntry()
+          if (entry) entries.push(entry)
+        } else if (item.kind === 'file') {
+          const file = item.getAsFile()
+          if (file) files.push(file)
+        }
+      }
+    } else if (event.dataTransfer?.files) {
+      // Fallback for browsers without DataTransferItem API
+      for (const file of Array.from(event.dataTransfer.files)) {
+        files.push(file)
+      }
+    }
+
+    if (entries.length) {
+      const promises = entries.map((entry) => this.traverseFileTree(entry))
+      Promise.all(promises)
+        .then((results) => {
+          files.push(...[].concat(...results))
+          this.toastService.showInfo($localize`Initiating upload...`, 3000)
+          files.forEach((file) => this.uploadDocumentsService.uploadFile(file))
+        })
+        .catch((e) => {
+          this.toastService.showError(
+            $localize`Failed to read dropped items: ${e.message}`
+          )
+        })
+    } else if (files.length) {
       this.toastService.showInfo($localize`Initiating upload...`, 3000)
+      files.forEach((file) => this.uploadDocumentsService.uploadFile(file))
+    }
+
+    this.onDragLeave(event, true)
   }

|
@HostListener('window:blur', ['$event']) public onWindowBlur() {
|
||||||
|
@ -188,7 +188,7 @@ describe('MailComponent', () => {
|
|||||||
const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
|
const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
|
||||||
editDialog.failed.emit()
|
editDialog.failed.emit()
|
||||||
expect(toastErrorSpy).toBeCalled()
|
expect(toastErrorSpy).toBeCalled()
|
||||||
editDialog.succeeded.emit(mailAccounts[0])
|
editDialog.succeeded.emit(mailAccounts[0] as any)
|
||||||
expect(toastInfoSpy).toHaveBeenCalledWith(
|
expect(toastInfoSpy).toHaveBeenCalledWith(
|
||||||
`Saved account "${mailAccounts[0].name}".`
|
`Saved account "${mailAccounts[0].name}".`
|
||||||
)
|
)
|
||||||
@ -246,7 +246,7 @@ describe('MailComponent', () => {
|
|||||||
const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
|
const toastInfoSpy = jest.spyOn(toastService, 'showInfo')
|
||||||
editDialog.failed.emit()
|
editDialog.failed.emit()
|
||||||
expect(toastErrorSpy).toBeCalled()
|
expect(toastErrorSpy).toBeCalled()
|
||||||
editDialog.succeeded.emit(mailRules[0])
|
editDialog.succeeded.emit(mailRules[0] as any)
|
||||||
expect(toastInfoSpy).toHaveBeenCalledWith(
|
expect(toastInfoSpy).toHaveBeenCalledWith(
|
||||||
`Saved rule "${mailRules[0].name}".`
|
`Saved rule "${mailRules[0].name}".`
|
||||||
)
|
)
|
||||||
@@ -130,9 +130,6 @@ export interface Document extends ObjectWithPermissions {
   // UTC
   created?: Date

-  // localized date
-  created_date?: Date
-
   modified?: Date

   added?: Date
@@ -49,6 +49,7 @@ export enum ConfigOptionType {
 export const ConfigCategory = {
   General: $localize`General Settings`,
   OCR: $localize`OCR Settings`,
+  Barcode: $localize`Barcode Settings`,
 }

 export interface ConfigOption {
@@ -180,6 +181,83 @@ export const PaperlessConfigOptions: ConfigOption[] = [
     config_key: 'PAPERLESS_APP_TITLE',
     category: ConfigCategory.General,
   },
+  {
+    key: 'barcodes_enabled',
+    title: $localize`Enable Barcodes`,
+    type: ConfigOptionType.Boolean,
+    config_key: 'PAPERLESS_CONSUMER_ENABLE_BARCODES',
+    category: ConfigCategory.Barcode,
+  },
+  {
+    key: 'barcode_enable_tiff_support',
+    title: $localize`Enable TIFF Support`,
+    type: ConfigOptionType.Boolean,
+    config_key: 'PAPERLESS_CONSUMER_BARCODE_TIFF_SUPPORT',
+    category: ConfigCategory.Barcode,
+  },
+  {
+    key: 'barcode_string',
+    title: $localize`Barcode String`,
+    type: ConfigOptionType.String,
+    config_key: 'PAPERLESS_CONSUMER_BARCODE_STRING',
+    category: ConfigCategory.Barcode,
+  },
+  {
+    key: 'barcode_retain_split_pages',
+    title: $localize`Retain Split Pages`,
+    type: ConfigOptionType.Boolean,
+    config_key: 'PAPERLESS_CONSUMER_BARCODE_RETAIN_SPLIT_PAGES',
+    category: ConfigCategory.Barcode,
+  },
+  {
+    key: 'barcode_enable_asn',
+    title: $localize`Enable ASN`,
+    type: ConfigOptionType.Boolean,
+    config_key: 'PAPERLESS_CONSUMER_ENABLE_ASN_BARCODE',
+    category: ConfigCategory.Barcode,
+  },
+  {
+    key: 'barcode_asn_prefix',
+    title: $localize`ASN Prefix`,
+    type: ConfigOptionType.String,
+    config_key: 'PAPERLESS_CONSUMER_ASN_BARCODE_PREFIX',
+    category: ConfigCategory.Barcode,
+  },
+  {
+    key: 'barcode_upscale',
+    title: $localize`Upscale`,
+    type: ConfigOptionType.Number,
+    config_key: 'PAPERLESS_CONSUMER_BARCODE_UPSCALE',
+    category: ConfigCategory.Barcode,
+  },
+  {
+    key: 'barcode_dpi',
+    title: $localize`DPI`,
+    type: ConfigOptionType.Number,
+    config_key: 'PAPERLESS_CONSUMER_BARCODE_DPI',
+    category: ConfigCategory.Barcode,
+  },
+  {
+    key: 'barcode_max_pages',
+    title: $localize`Max Pages`,
+    type: ConfigOptionType.Number,
+    config_key: 'PAPERLESS_CONSUMER_BARCODE_MAX_PAGES',
+    category: ConfigCategory.Barcode,
+  },
+  {
+    key: 'barcode_enable_tag',
+    title: $localize`Enable Tag Detection`,
+    type: ConfigOptionType.Boolean,
+    config_key: 'PAPERLESS_CONSUMER_ENABLE_TAG_BARCODE',
+    category: ConfigCategory.Barcode,
+  },
+  {
+    key: 'barcode_tag_mapping',
+    title: $localize`Tag Mapping`,
+    type: ConfigOptionType.JSON,
+    config_key: 'PAPERLESS_CONSUMER_TAG_BARCODE_MAPPING',
+    category: ConfigCategory.Barcode,
+  },
 ]

 export interface PaperlessConfig extends ObjectWithId {
@@ -198,4 +276,15 @@ export interface PaperlessConfig extends ObjectWithId {
   user_args: object
   app_logo: string
   app_title: string
+  barcodes_enabled: boolean
+  barcode_enable_tiff_support: boolean
+  barcode_string: string
+  barcode_retain_split_pages: boolean
+  barcode_enable_asn: boolean
+  barcode_asn_prefix: string
+  barcode_upscale: number
+  barcode_dpi: number
+  barcode_max_pages: number
+  barcode_enable_tag: boolean
+  barcode_tag_mapping: object
 }
@@ -7,4 +7,6 @@ export interface WebsocketProgressMessage {
   message?: string
   document_id: number
   owner_id?: number
+  users_can_view?: number[]
+  groups_can_view?: number[]
 }
@@ -268,15 +268,15 @@ describe(`DocumentService`, () => {
     expect(req.request.method).toEqual('GET')
   })

-  it('should pass remove_inbox_tags setting to update', () => {
-    subscription = service.update(documents[0]).subscribe()
+  it('should pass remove_inbox_tags setting to patch', () => {
+    subscription = service.patch(documents[0]).subscribe()
     let req = httpTestingController.expectOne(
       `${environment.apiBaseUrl}${endpoint}/${documents[0].id}/`
     )
     expect(req.request.body.remove_inbox_tags).toEqual(false)

     settingsService.set(SETTINGS_KEYS.DOCUMENT_EDITING_REMOVE_INBOX_TAGS, true)
-    subscription = service.update(documents[0]).subscribe()
+    subscription = service.patch(documents[0]).subscribe()
     req = httpTestingController.expectOne(
       `${environment.apiBaseUrl}${endpoint}/${documents[0].id}/`
     )
@@ -189,13 +189,11 @@ export class DocumentService extends AbstractPaperlessService<Document> {
     return this.http.get<number>(this.getResourceUrl(null, 'next_asn'))
   }

-  update(o: Document): Observable<Document> {
-    // we want to only set created_date
-    o.created = undefined
+  patch(o: Document): Observable<Document> {
     o.remove_inbox_tags = !!this.settingsService.get(
       SETTINGS_KEYS.DOCUMENT_EDITING_REMOVE_INBOX_TAGS
     )
-    return super.update(o)
+    return super.patch(o)
   }

   uploadDocument(formData) {
@@ -15,33 +15,6 @@ import {
   WebsocketStatusService,
 } from './websocket-status.service'

-const files = [
-  {
-    lastModified: 1693349892540,
-    lastModifiedDate: new Date(),
-    name: 'file1.pdf',
-    size: 386,
-    type: 'application/pdf',
-  },
-  {
-    lastModified: 1695618533892,
-    lastModifiedDate: new Date(),
-    name: 'file2.pdf',
-    size: 358265,
-    type: 'application/pdf',
-  },
-]
-
-const fileList = {
-  item: (x) => {
-    return new File(
-      [new Blob(['testing'], { type: files[x].type })],
-      files[x].name
-    )
-  },
-  length: files.length,
-} as unknown as FileList
-
 describe('UploadDocumentsService', () => {
   let httpTestingController: HttpTestingController
   let uploadDocumentsService: UploadDocumentsService
@@ -68,7 +41,11 @@ describe('UploadDocumentsService', () => {
   })

   it('calls post_document api endpoint on upload', () => {
-    uploadDocumentsService.uploadFiles(fileList)
+    const file = new File(
+      [new Blob(['testing'], { type: 'application/pdf' })],
+      'file.pdf'
+    )
+    uploadDocumentsService.uploadFile(file)
     const req = httpTestingController.match(
       `${environment.apiBaseUrl}documents/post_document/`
     )
@@ -78,7 +55,16 @@ describe('UploadDocumentsService', () => {
   })

   it('updates progress during upload and failure', () => {
-    uploadDocumentsService.uploadFiles(fileList)
+    const file = new File(
+      [new Blob(['testing'], { type: 'application/pdf' })],
+      'file.pdf'
+    )
+    const file2 = new File(
+      [new Blob(['testing'], { type: 'application/pdf' })],
+      'file2.pdf'
+    )
+    uploadDocumentsService.uploadFile(file)
+    uploadDocumentsService.uploadFile(file2)
+
     expect(websocketStatusService.getConsumerStatusNotCompleted()).toHaveLength(
       2
@@ -103,7 +89,11 @@ describe('UploadDocumentsService', () => {
   })

   it('updates progress on failure', () => {
-    uploadDocumentsService.uploadFiles(fileList)
+    const file = new File(
+      [new Blob(['testing'], { type: 'application/pdf' })],
+      'file.pdf'
+    )
+    uploadDocumentsService.uploadFile(file)
+
     let req = httpTestingController.match(
       `${environment.apiBaseUrl}documents/post_document/`
@@ -125,7 +115,7 @@ describe('UploadDocumentsService', () => {
       websocketStatusService.getConsumerStatus(FileStatusPhase.FAILED)
     ).toHaveLength(1)

-    uploadDocumentsService.uploadFiles(fileList)
+    uploadDocumentsService.uploadFile(file)

     req = httpTestingController.match(
       `${environment.apiBaseUrl}documents/post_document/`
@@ -143,35 +133,4 @@ describe('UploadDocumentsService', () => {
       websocketStatusService.getConsumerStatus(FileStatusPhase.FAILED)
     ).toHaveLength(2)
   })
-
-  it('accepts files via drag and drop', () => {
-    const uploadSpy = jest.spyOn(
-      UploadDocumentsService.prototype as any,
-      'uploadFile'
-    )
-    const fileEntry = {
-      name: 'file.pdf',
-      isDirectory: false,
-      isFile: true,
-      file: (callback) => {
-        return callback(
-          new File(
-            [new Blob(['testing'], { type: 'application/pdf' })],
-            'file.pdf'
-          )
-        )
-      },
-    }
-    uploadDocumentsService.onNgxFileDrop([
-      {
-        relativePath: 'path/to/file.pdf',
-        fileEntry,
-      },
-    ])
-    expect(uploadSpy).toHaveBeenCalled()
-
-    let req = httpTestingController.match(
-      `${environment.apiBaseUrl}documents/post_document/`
-    )
-  })
 })
@@ -1,6 +1,5 @@
 import { HttpEventType } from '@angular/common/http'
 import { Injectable } from '@angular/core'
-import { FileSystemFileEntry, NgxFileDropEntry } from 'ngx-file-drop'
 import { Subscription } from 'rxjs'
 import { DocumentService } from './rest/document.service'
 import {
@@ -19,22 +18,7 @@ export class UploadDocumentsService {
     private websocketStatusService: WebsocketStatusService
   ) {}

-  onNgxFileDrop(files: NgxFileDropEntry[]) {
-    for (const droppedFile of files) {
-      if (droppedFile.fileEntry.isFile) {
-        const fileEntry = droppedFile.fileEntry as FileSystemFileEntry
-        fileEntry.file((file: File) => this.uploadFile(file))
-      }
-    }
-  }
-
-  uploadFiles(files: FileList) {
-    for (let index = 0; index < files.length; index++) {
-      this.uploadFile(files.item(index))
-    }
-  }
-
-  private uploadFile(file: File) {
+  public uploadFile(file: File) {
     let formData = new FormData()
     formData.append('document', file, file.name)
     formData.append('from_webui', 'true')
@@ -355,6 +355,50 @@ describe('ConsumerStatusService', () => {
     )
   })

+  it('should notify user if user can view or is in group', () => {
+    settingsService.currentUser = {
+      id: 1,
+      username: 'testuser',
+      is_superuser: false,
+      groups: [1],
+    }
+    websocketStatusService.connect()
+    server.send({
+      type: WebsocketStatusType.STATUS_UPDATE,
+      data: {
+        task_id: '1234',
+        filename: 'file1.pdf',
+        current_progress: 50,
+        max_progress: 100,
+        docuement_id: 12,
+        owner_id: 2,
+        status: 'WORKING',
+        users_can_view: [1],
+        groups_can_view: [],
+      },
+    })
+    expect(websocketStatusService.getConsumerStatusNotCompleted()).toHaveLength(
+      1
+    )
+    server.send({
+      type: WebsocketStatusType.STATUS_UPDATE,
+      data: {
+        task_id: '5678',
+        filename: 'file2.pdf',
+        current_progress: 50,
+        max_progress: 100,
+        docuement_id: 13,
+        owner_id: 2,
+        status: 'WORKING',
+        users_can_view: [],
+        groups_can_view: [1],
+      },
+    })
+    expect(websocketStatusService.getConsumerStatusNotCompleted()).toHaveLength(
+      2
+    )
+  })
+
   it('should trigger deleted subject on document deleted', () => {
     let deleted = false
     websocketStatusService.onDocumentDeleted().subscribe(() => {
@@ -1,6 +1,7 @@
 import { Injectable } from '@angular/core'
 import { Subject } from 'rxjs'
 import { environment } from 'src/environments/environment'
+import { User } from '../data/user'
 import { WebsocketDocumentsDeletedMessage } from '../data/websocket-documents-deleted-message'
 import { WebsocketProgressMessage } from '../data/websocket-progress-message'
 import { SettingsService } from './settings.service'
@@ -173,13 +174,25 @@ export class WebsocketStatusService {
     }
   }

+  private canViewMessage(messageData: WebsocketProgressMessage): boolean {
+    // see paperless.consumers.StatusConsumer._can_view
+    const user: User = this.settingsService.currentUser
+    return (
+      !messageData.owner_id ||
+      user.is_superuser ||
+      (messageData.owner_id && messageData.owner_id === user.id) ||
+      (messageData.users_can_view &&
+        messageData.users_can_view.includes(user.id)) ||
+      (messageData.groups_can_view &&
+        messageData.groups_can_view.some((groupId) =>
+          user.groups?.includes(groupId)
+        ))
+    )
+  }
+
   handleProgressUpdate(messageData: WebsocketProgressMessage) {
     // fallback if backend didn't restrict message
-    if (
-      messageData.owner_id &&
-      messageData.owner_id !== this.settingsService.currentUser?.id &&
-      !this.settingsService.currentUser?.is_superuser
-    ) {
+    if (!this.canViewMessage(messageData)) {
       return
     }

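Editorial illustration of the visibility rules introduced above (hypothetical values, not part of the commit):

```typescript
// Hypothetical example of a progress message evaluated by canViewMessage().
// The message is owned by user 2 but lists group 1 as allowed to view it.
const message = {
  document_id: 13,
  owner_id: 2,
  users_can_view: [],
  groups_can_view: [1],
}
// A non-superuser whose groups include 1 passes the groups_can_view branch,
// so the upload progress is shown; a user who is not the owner and shares no
// listed group is filtered out before any status update is displayed.
```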
@@ -3,7 +3,7 @@ const base_url = new URL(document.baseURI)
 export const environment = {
   production: true,
   apiBaseUrl: document.baseURI + 'api/',
-  apiVersion: '7',
+  apiVersion: '9', // match src/paperless/settings.py
   appTitle: 'Paperless-ngx',
   version: '2.15.3',
   webSocketHost: window.location.host,
(Remaining file diffs suppressed because they are too large; some files were not shown because too many files have changed in this diff.)