Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2025-12-20 01:45:58 -06:00)

Compare commits: fix-11615 ... dependabot (1 commit)

| Author | SHA1 | Date |
|---|---|---|
| | 4f82110742 | |

.github/workflows/ci.yml (vendored, 15 changed lines)
@@ -275,12 +275,8 @@ jobs:
   tests-frontend-e2e:
     name: "Frontend E2E Tests (Node ${{ matrix.node-version }} - ${{ matrix.shard-index }}/${{ matrix.shard-count }})"
     runs-on: ubuntu-24.04
-    container: mcr.microsoft.com/playwright:v1.57.0-noble
     needs:
       - install-frontend-dependencies
-    env:
-      PLAYWRIGHT_BROWSERS_PATH: /ms-playwright
-      PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
     strategy:
       fail-fast: false
       matrix:
@@ -309,8 +305,19 @@ jobs:
           key: ${{ runner.os }}-frontenddeps-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
       - name: Re-link Angular cli
         run: cd src-ui && pnpm link @angular/cli
+      - name: Cache Playwright browsers
+        uses: actions/cache@v4
+        with:
+          path: ~/.cache/ms-playwright
+          key: ${{ runner.os }}-playwright-${{ hashFiles('src-ui/pnpm-lock.yaml') }}
+          restore-keys: |
+            ${{ runner.os }}-playwright-
+      - name: Install Playwright system dependencies
+        run: npx playwright install-deps
       - name: Install dependencies
         run: cd src-ui && pnpm install --no-frozen-lockfile
+      - name: Install Playwright
+        run: cd src-ui && pnpm exec playwright install
       - name: Run Playwright e2e tests
         run: cd src-ui && pnpm exec playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}
   frontend-bundle-analysis:
Dockerfile

@@ -32,7 +32,7 @@ RUN set -eux \
 # Purpose: Installs s6-overlay and rootfs
 # Comments:
 # - Don't leave anything extra in here either
-FROM ghcr.io/astral-sh/uv:0.9.15-python3.12-trixie-slim AS s6-overlay-base
+FROM ghcr.io/astral-sh/uv:0.9.17-python3.12-trixie-slim AS s6-overlay-base
 
 WORKDIR /usr/src/s6
 
src/documents/index.py

@@ -10,7 +10,6 @@ from datetime import time
 from datetime import timedelta
 from datetime import timezone
 from shutil import rmtree
-from time import sleep
 from typing import TYPE_CHECKING
 from typing import Literal
 
@@ -33,7 +32,6 @@ from whoosh.highlight import HtmlFormatter
 from whoosh.idsets import BitSet
 from whoosh.idsets import DocIdSet
 from whoosh.index import FileIndex
-from whoosh.index import LockError
 from whoosh.index import create_in
 from whoosh.index import exists_in
 from whoosh.index import open_dir
@@ -99,33 +97,11 @@ def get_schema() -> Schema:
 
 
 def open_index(*, recreate=False) -> FileIndex:
-    transient_exceptions = (FileNotFoundError, LockError)
-    max_retries = 3
-    retry_delay = 0.1
-
-    for attempt in range(max_retries + 1):
-        try:
-            if exists_in(settings.INDEX_DIR) and not recreate:
-                return open_dir(settings.INDEX_DIR, schema=get_schema())
-            break
-        except transient_exceptions as exc:
-            is_last_attempt = attempt == max_retries or recreate
-            if is_last_attempt:
-                logger.exception(
-                    "Error while opening the index after retries, recreating.",
-                )
-                break
-
-            logger.warning(
-                "Transient error while opening the index (attempt %s/%s): %s. Retrying.",
-                attempt + 1,
-                max_retries + 1,
-                exc,
-            )
-            sleep(retry_delay)
-        except Exception:
-            logger.exception("Error while opening the index, recreating.")
-            break
+    try:
+        if exists_in(settings.INDEX_DIR) and not recreate:
+            return open_dir(settings.INDEX_DIR, schema=get_schema())
+    except Exception:
+        logger.exception("Error while opening the index, recreating.")
 
     # create_in doesn't handle corrupted indexes very well, remove the directory entirely first
     if settings.INDEX_DIR.is_dir():
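For orientation, here is a minimal, self-contained sketch consolidating the retry logic shown on the removed side of the hunk above. The retry parameters and exception handling mirror the diff; the `logger`, `INDEX_DIR`, and `get_schema()` definitions below are placeholder stand-ins for the real module-level objects in `documents/index.py`, and the fallback recreation of the index directory is elided.

```python
# Consolidated sketch (not the module itself) of the retry-based open_index
# from the removed side of the hunk above. logger, INDEX_DIR and get_schema()
# are placeholders; the real values live in paperless-ngx's documents/index.py.
import logging
from pathlib import Path
from time import sleep

from whoosh.fields import TEXT, Schema
from whoosh.index import FileIndex, LockError, exists_in, open_dir

logger = logging.getLogger(__name__)
INDEX_DIR = Path("/tmp/paperless-index")  # placeholder for settings.INDEX_DIR


def get_schema() -> Schema:
    # Placeholder schema; the real schema is defined in documents/index.py.
    return Schema(content=TEXT)


def open_index_sketch(*, recreate: bool = False) -> FileIndex | None:
    # Missing segment files and lock contention are treated as transient and
    # retried a few times before falling back to recreating the index.
    transient_exceptions = (FileNotFoundError, LockError)
    max_retries = 3
    retry_delay = 0.1

    for attempt in range(max_retries + 1):
        try:
            if exists_in(INDEX_DIR) and not recreate:
                return open_dir(INDEX_DIR, schema=get_schema())
            break  # no index yet, or recreate requested: fall through to rebuild
        except transient_exceptions as exc:
            if attempt == max_retries or recreate:
                logger.exception("Error while opening the index after retries, recreating.")
                break
            logger.warning(
                "Transient error while opening the index (attempt %s/%s): %s. Retrying.",
                attempt + 1,
                max_retries + 1,
                exc,
            )
            sleep(retry_delay)
        except Exception:
            logger.exception("Error while opening the index, recreating.")
            break

    # The real function removes INDEX_DIR here and recreates the index with
    # create_in(); that part is unchanged by the hunk and omitted from the sketch.
    return None
```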
@@ -1,7 +1,6 @@
 from datetime import datetime
 from unittest import mock
 
-from django.conf import settings
 from django.contrib.auth.models import User
 from django.test import SimpleTestCase
 from django.test import TestCase
@@ -252,31 +251,3 @@ class TestRewriteNaturalDateKeywords(SimpleTestCase):
         result = self._rewrite_with_now("added:today", fixed_now)
         # Should convert to UTC properly
         self.assertIn("added:[20250719", result)
-
-
-class TestIndexResilience(DirectoriesMixin, SimpleTestCase):
-    def test_transient_missing_segment_does_not_force_recreate(self):
-        file_marker = settings.INDEX_DIR / "file_marker.txt"
-        file_marker.write_text("keep")
-        expected_index = object()
-
-        with (
-            mock.patch("documents.index.exists_in", return_value=True),
-            mock.patch(
-                "documents.index.open_dir",
-                side_effect=[FileNotFoundError("missing"), expected_index],
-            ) as mock_open_dir,
-            mock.patch(
-                "documents.index.create_in",
-            ) as mock_create_in,
-            mock.patch(
-                "documents.index.rmtree",
-            ) as mock_rmtree,
-        ):
-            ix = index.open_index()
-
-            self.assertIs(ix, expected_index)
-            self.assertGreaterEqual(mock_open_dir.call_count, 2)
-            mock_rmtree.assert_not_called()
-            mock_create_in.assert_not_called()
-            self.assertEqual(file_marker.read_text(), "keep")
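The removed `TestIndexResilience` test leans on a standard `unittest.mock` behaviour: when `side_effect` is a list, successive calls to the patched callable consume successive items, and any item that is an exception instance is raised instead of returned, so the patched `open_dir` fails once with `FileNotFoundError` and then yields the expected index. A minimal, runnable illustration of just that mechanism (the `fake_open` name is purely illustrative):

```python
# Demonstrates the side_effect-list behaviour the removed test relies on:
# items are consumed call-by-call, and exception instances are raised.
from unittest import mock

fake_open = mock.Mock(side_effect=[FileNotFoundError("missing"), "opened"])

try:
    fake_open()  # first call raises FileNotFoundError("missing")
except FileNotFoundError as exc:
    print(f"first call raised: {exc}")

print(f"second call returned: {fake_open()}")  # -> opened
print(f"total calls: {fake_open.call_count}")  # -> 2
```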