Merge branch 'dev' into feature-ai

shamoon committed 2025-11-17 18:49:57 -08:00
21 changed files with 276 additions and 156 deletions

View File

@@ -88,7 +88,7 @@ jobs:
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v6
uses: astral-sh/setup-uv@v7
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
@@ -115,7 +115,7 @@ jobs:
--frozen \
mkdocs gh-deploy --force --no-history
- name: Upload artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v5
with:
name: documentation
path: site/
@@ -142,7 +142,7 @@ jobs:
with:
python-version: "${{ matrix.python-version }}"
- name: Install uv
uses: astral-sh/setup-uv@v6
uses: astral-sh/setup-uv@v7
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
@@ -208,7 +208,7 @@ jobs:
with:
version: 10
- name: Use Node.js 20
uses: actions/setup-node@v5
uses: actions/setup-node@v6
with:
node-version: 20.x
cache: 'pnpm'
@@ -241,7 +241,7 @@ jobs:
with:
version: 10
- name: Use Node.js 20
uses: actions/setup-node@v5
uses: actions/setup-node@v6
with:
node-version: 20.x
cache: 'pnpm'
@@ -290,7 +290,7 @@ jobs:
with:
version: 10
- name: Use Node.js 20
uses: actions/setup-node@v5
uses: actions/setup-node@v6
with:
node-version: 20.x
cache: 'pnpm'
@@ -333,7 +333,7 @@ jobs:
with:
version: 10
- name: Use Node.js 20
uses: actions/setup-node@v5
uses: actions/setup-node@v6
with:
node-version: 20.x
cache: 'pnpm'
@@ -459,7 +459,7 @@ jobs:
docker create --name frontend-extract ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
docker cp frontend-extract:/usr/src/paperless/src/documents/static/frontend src/documents/static/frontend/
- name: Upload frontend artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v5
with:
name: frontend-compiled
path: src/documents/static/frontend/
@@ -479,7 +479,7 @@ jobs:
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v6
uses: astral-sh/setup-uv@v7
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true
@@ -492,12 +492,12 @@ jobs:
sudo apt-get update -qq
sudo apt-get install -qq --no-install-recommends gettext liblept5
- name: Download frontend artifact
uses: actions/download-artifact@v5
uses: actions/download-artifact@v6
with:
name: frontend-compiled
path: src/documents/static/frontend/
- name: Download documentation artifact
uses: actions/download-artifact@v5
uses: actions/download-artifact@v6
with:
name: documentation
path: docs/_build/html/
@@ -560,7 +560,7 @@ jobs:
sudo chown -R 1000:1000 paperless-ngx/
tar -cJf paperless-ngx.tar.xz paperless-ngx/
- name: Upload release artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v5
with:
name: release
path: dist/paperless-ngx.tar.xz
@@ -577,7 +577,7 @@ jobs:
if: github.ref_type == 'tag' && (startsWith(github.ref_name, 'v') || contains(github.ref_name, '-beta.rc'))
steps:
- name: Download release artifact
uses: actions/download-artifact@v5
uses: actions/download-artifact@v6
with:
name: release
path: ./
@@ -627,7 +627,7 @@ jobs:
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v6
uses: astral-sh/setup-uv@v7
with:
version: ${{ env.DEFAULT_UV_VERSION }}
enable-cache: true

View File

@@ -27,7 +27,7 @@ jobs:
steps:
- name: Clean temporary images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/ephemeral@v0.11.0
uses: stumpylog/image-cleaner-action/ephemeral@v0.12.0
with:
token: "${{ env.TOKEN }}"
owner: "${{ github.repository_owner }}"
@@ -53,7 +53,7 @@ jobs:
steps:
- name: Clean untagged images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/untagged@v0.11.0
uses: stumpylog/image-cleaner-action/untagged@v0.12.0
with:
token: "${{ env.TOKEN }}"
owner: "${{ github.repository_owner }}"

View File

@@ -37,7 +37,7 @@ jobs:
uses: actions/checkout@v5
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
uses: github/codeql-action/init@v4
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -45,4 +45,4 @@ jobs:
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
uses: github/codeql-action/analyze@v4

View File

@@ -23,7 +23,7 @@ jobs:
sudo apt-get update -qq
sudo apt-get install -qq --no-install-recommends gettext
- name: Install uv
uses: astral-sh/setup-uv@v6
uses: astral-sh/setup-uv@v7
with:
enable-cache: true
- name: Install backend python dependencies
@@ -38,7 +38,7 @@ jobs:
with:
version: 10
- name: Use Node.js 20
uses: actions/setup-node@v5
uses: actions/setup-node@v6
with:
node-version: 20.x
cache: 'pnpm'
@@ -61,7 +61,7 @@ jobs:
cd src-ui
pnpm run ng extract-i18n
- name: Commit changes
uses: stefanzweifel/git-auto-commit-action@v6
uses: stefanzweifel/git-auto-commit-action@v7
with:
file_pattern: 'src-ui/messages.xlf src/locale/en_US/LC_MESSAGES/django.po'
commit_message: "Auto translate strings"

View File

@@ -32,7 +32,7 @@ RUN set -eux \
# Purpose: Installs s6-overlay and rootfs
# Comments:
# - Don't leave anything extra in here either
FROM ghcr.io/astral-sh/uv:0.9.9-python3.12-bookworm-slim AS s6-overlay-base
FROM ghcr.io/astral-sh/uv:0.9.10-python3.12-bookworm-slim AS s6-overlay-base
WORKDIR /usr/src/s6

View File

@@ -1,70 +1,66 @@
#!/command/with-contenv /usr/bin/bash
# shellcheck shell=bash
# vim: set ft=bash ts=4 sw=4 sts=4 et :
declare -r log_prefix="[init-db-wait]"
set -euo pipefail
declare -r LOG_PREFIX="[init-db-wait]"
declare -ri TIMEOUT=60
declare -i ATTEMPT=0
declare -i DELAY=0
declare -i STARTED_AT=${EPOCHSECONDS:?EPOCHSECONDS var unset}
delay_next_attempt() {
local -i elapsed=$(( EPOCHSECONDS - STARTED_AT ))
local -ri remaining=$(( TIMEOUT - elapsed ))
if (( remaining <= 0 )); then
echo "${LOG_PREFIX} Unable to connect after $elapsed seconds."
exit 1
fi
DELAY+=1
# clamp to remaining time
if (( DELAY > remaining )); then
DELAY=$remaining
fi
ATTEMPT+=1
echo "${LOG_PREFIX} Attempt $ATTEMPT failed! Trying again in $DELAY seconds..."
sleep "$DELAY"
}
wait_for_postgres() {
local attempt_num=1
local -r max_attempts=5
echo "${log_prefix} Waiting for PostgreSQL to start..."
echo "${LOG_PREFIX} Waiting for PostgreSQL to start..."
local -r host="${PAPERLESS_DBHOST:-localhost}"
local -r port="${PAPERLESS_DBPORT:-5432}"
local -r user="${PAPERLESS_DBUSER:-paperless}"
# Disable warning, host and port can't have spaces
# shellcheck disable=SC2086
while [ ! "$(pg_isready -h ${host} -p ${port} --username ${user})" ]; do
if [ $attempt_num -eq $max_attempts ]; then
echo "${log_prefix} Unable to connect to database."
exit 1
else
echo "${log_prefix} Attempt $attempt_num failed! Trying again in 5 seconds..."
fi
attempt_num=$(("$attempt_num" + 1))
sleep 5
while ! pg_isready -h "${host}" -p "${port}" --username "${user}"; do
delay_next_attempt
done
# Extra in case this is a first start
sleep 5
echo "Connected to PostgreSQL"
echo "${LOG_PREFIX} Connected to PostgreSQL"
}
wait_for_mariadb() {
echo "${log_prefix} Waiting for MariaDB to start..."
echo "${LOG_PREFIX} Waiting for MariaDB to start..."
local -r host="${PAPERLESS_DBHOST:=localhost}"
local -r port="${PAPERLESS_DBPORT:=3306}"
local -r host="${PAPERLESS_DBHOST:-localhost}"
local -r port="${PAPERLESS_DBPORT:-3306}"
local attempt_num=1
local -r max_attempts=5
# Disable warning, host and port can't have spaces
# shellcheck disable=SC2086
while ! true > /dev/tcp/$host/$port; do
if [ $attempt_num -eq $max_attempts ]; then
echo "${log_prefix} Unable to connect to database."
exit 1
else
echo "${log_prefix} Attempt $attempt_num failed! Trying again in 5 seconds..."
fi
attempt_num=$(("$attempt_num" + 1))
sleep 5
while ! true > "/dev/tcp/$host/$port"; do
delay_next_attempt
done
echo "Connected to MariaDB"
echo "${LOG_PREFIX} Connected to MariaDB"
}
if [[ "${PAPERLESS_DBENGINE}" == "mariadb" ]]; then
echo "${log_prefix} Waiting for MariaDB to report ready"
if [[ "${PAPERLESS_DBENGINE:-}" == "mariadb" ]]; then
wait_for_mariadb
elif [[ -n "${PAPERLESS_DBHOST}" ]]; then
echo "${log_prefix} Waiting for postgresql to report ready"
elif [[ -n "${PAPERLESS_DBHOST:-}" ]]; then
wait_for_postgres
fi
echo "${log_prefix} Database is ready"
echo "${LOG_PREFIX} Database is ready"

View File

@@ -1,5 +1,35 @@
# Changelog
## paperless-ngx 2.19.6
### Bug Fixes
- Chore: include password validation on user edit [@shamoon](https://github.com/shamoon) ([#11308](https://github.com/paperless-ngx/paperless-ngx/pull/11308))
- Fix: include BASE_URL when constructing for workflows [@ebardsley](https://github.com/ebardsley) ([#11360](https://github.com/paperless-ngx/paperless-ngx/pull/11360))
- Fixhancement: refactor email attachment logic [@shamoon](https://github.com/shamoon) ([#11336](https://github.com/paperless-ngx/paperless-ngx/pull/11336))
- Fixhancement: trim whitespace for some text searches [@shamoon](https://github.com/shamoon) ([#11357](https://github.com/paperless-ngx/paperless-ngx/pull/11357))
- Fix: update Outlook refresh token when refreshed [@shamoon](https://github.com/shamoon) ([#11341](https://github.com/paperless-ngx/paperless-ngx/pull/11341))
- Fix: only cache remote version data for version checking [@shamoon](https://github.com/shamoon) ([#11320](https://github.com/paperless-ngx/paperless-ngx/pull/11320))
- Fix: include replace none logic in storage path preview, improve jinja conditionals for empty metadata [@shamoon](https://github.com/shamoon) ([#11315](https://github.com/paperless-ngx/paperless-ngx/pull/11315))
### Dependencies
- docker(deps): bump astral-sh/uv from 0.9.7-python3.12-bookworm-slim to 0.9.9-python3.12-bookworm-slim @[dependabot[bot]](https://github.com/apps/dependabot) ([#11338](https://github.com/paperless-ngx/paperless-ngx/pull/11338))
### All App Changes
<details>
<summary>7 changes</summary>
- Fix: include BASE_URL when constructing for workflows [@ebardsley](https://github.com/ebardsley) ([#11360](https://github.com/paperless-ngx/paperless-ngx/pull/11360))
- Fixhancement: refactor email attachment logic [@shamoon](https://github.com/shamoon) ([#11336](https://github.com/paperless-ngx/paperless-ngx/pull/11336))
- Fixhancement: trim whitespace for some text searches [@shamoon](https://github.com/shamoon) ([#11357](https://github.com/paperless-ngx/paperless-ngx/pull/11357))
- Fix: update Outlook refresh token when refreshed [@shamoon](https://github.com/shamoon) ([#11341](https://github.com/paperless-ngx/paperless-ngx/pull/11341))
- Fix: only cache remote version data for version checking [@shamoon](https://github.com/shamoon) ([#11320](https://github.com/paperless-ngx/paperless-ngx/pull/11320))
- Fix: include replace none logic in storage path preview, improve jinja conditionals for empty metadata [@shamoon](https://github.com/shamoon) ([#11315](https://github.com/paperless-ngx/paperless-ngx/pull/11315))
- Chore: include password validation on user edit [@shamoon](https://github.com/shamoon) ([#11308](https://github.com/paperless-ngx/paperless-ngx/pull/11308))
</details>
## paperless-ngx 2.19.5
### Bug Fixes

View File

@@ -575,6 +575,7 @@ applied. You can use the following placeholders in the template with any trigger
- `{{added_time}}`: added time in HH:MM format
- `{{original_filename}}`: original file name without extension
- `{{filename}}`: current file name without extension
- `{{doc_title}}`: current document title
The following placeholders are only available for "added" or "updated" triggers
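For illustration, a minimal sketch of how the any-trigger placeholders above (including the new `{{doc_title}}`) substitute into an assignment template, assuming Jinja-style rendering; the template and values are made up and this is not the project's actual rendering code:

```python
from jinja2 import Template

# Hypothetical title-assignment template built from the placeholders listed above.
template = Template("{{ doc_title }} - {{ original_filename }} ({{ added_time }})")

# Made-up values; in a real workflow these come from the matched document.
print(
    template.render(
        doc_title="Electricity bill",
        original_filename="scan_0001",
        added_time="18:49",
    )
)
# -> "Electricity bill - scan_0001 (18:49)"
```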

View File

@@ -1,6 +1,6 @@
[project]
name = "paperless-ngx"
version = "2.19.5"
version = "2.19.6"
description = "A community-supported supercharged document management system: scan, index and archive all your physical documents"
readme = "README.md"
requires-python = ">=3.10"
@@ -26,8 +26,8 @@ dependencies = [
# WARNING: django does not use semver.
# Only patch versions are guaranteed to not introduce breaking changes.
"django~=5.2.5",
"django-allauth[mfa,socialaccount]~=65.4.0",
"django-auditlog~=3.2.1",
"django-allauth[mfa,socialaccount]~=65.12.1",
"django-auditlog~=3.3.0",
"django-cachalot~=2.8.0",
"django-celery-results~=2.6.0",
"django-compression-middleware~=0.5.0",
@@ -41,7 +41,7 @@ dependencies = [
"djangorestframework~=3.16",
"djangorestframework-guardian~=0.4.0",
"drf-spectacular~=0.28",
"drf-spectacular-sidecar~=2025.9.1",
"drf-spectacular-sidecar~=2025.10.1",
"drf-writable-nested~=0.7.1",
"faiss-cpu>=1.10",
"filelock~=3.20.0",

View File

@@ -1,6 +1,6 @@
{
"name": "paperless-ngx-ui",
"version": "2.19.5",
"version": "2.19.6",
"scripts": {
"preinstall": "npx only-allow pnpm",
"ng": "ng",

View File

@@ -6,7 +6,7 @@ export const environment = {
apiVersion: '9', // match src/paperless/settings.py
appTitle: 'Paperless-ngx',
tag: 'prod',
version: '2.19.5',
version: '2.19.6',
webSocketHost: window.location.host,
webSocketProtocol: window.location.protocol == 'https:' ? 'wss:' : 'ws:',
webSocketBaseUrl: base_url.pathname + 'ws/',

View File

@@ -287,15 +287,75 @@ class DelayedQuery:
self.first_score = None
self.filter_queryset = filter_queryset
self.suggested_correction = None
self._manual_hits_cache: list | None = None
def __len__(self) -> int:
if self._manual_sort_requested():
manual_hits = self._manual_hits()
return len(manual_hits)
page = self[0:1]
return len(page)
def _manual_sort_requested(self):
ordering = self.query_params.get("ordering", "")
return ordering.lstrip("-").startswith("custom_field_")
def _manual_hits(self):
if self._manual_hits_cache is None:
q, mask, suggested_correction = self._get_query()
self.suggested_correction = suggested_correction
results = self.searcher.search(
q,
mask=mask,
filter=MappedDocIdSet(self.filter_queryset, self.searcher.ixreader),
limit=None,
)
results.fragmenter = highlight.ContextFragmenter(surround=50)
results.formatter = HtmlFormatter(tagname="span", between=" ... ")
if not self.first_score and len(results) > 0:
self.first_score = results[0].score
if self.first_score:
results.top_n = [
(
(hit[0] / self.first_score) if self.first_score else None,
hit[1],
)
for hit in results.top_n
]
hits_by_id = {hit["id"]: hit for hit in results}
matching_ids = list(hits_by_id.keys())
ordered_ids = list(
self.filter_queryset.filter(id__in=matching_ids).values_list(
"id",
flat=True,
),
)
ordered_ids = list(dict.fromkeys(ordered_ids))
self._manual_hits_cache = [
hits_by_id[_id] for _id in ordered_ids if _id in hits_by_id
]
return self._manual_hits_cache
def __getitem__(self, item):
if item.start in self.saved_results:
return self.saved_results[item.start]
if self._manual_sort_requested():
manual_hits = self._manual_hits()
start = 0 if item.start is None else item.start
stop = item.stop
hits = manual_hits[start:stop] if stop is not None else manual_hits[start:]
page = ManualResultsPage(hits)
self.saved_results[start] = page
return page
q, mask, suggested_correction = self._get_query()
self.suggested_correction = suggested_correction
sortedby, reverse = self._get_query_sortedby()
@@ -315,21 +375,33 @@ class DelayedQuery:
if not self.first_score and len(page.results) > 0 and sortedby is None:
self.first_score = page.results[0].score
page.results.top_n = list(
map(
lambda hit: (
(hit[0] / self.first_score) if self.first_score else None,
hit[1],
),
page.results.top_n,
),
)
page.results.top_n = [
(
(hit[0] / self.first_score) if self.first_score else None,
hit[1],
)
for hit in page.results.top_n
]
self.saved_results[item.start] = page
return page
class ManualResultsPage(list):
def __init__(self, hits):
super().__init__(hits)
self.results = ManualResults(hits)
class ManualResults:
def __init__(self, hits):
self._docnums = [hit.docnum for hit in hits]
def docs(self):
return self._docnums
class LocalDateParser(English):
def reverse_timezone_offset(self, d):
return (d.replace(tzinfo=django_timezone.get_current_timezone())).astimezone(
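In short, the new manual-sort path in DelayedQuery activates when the ordering query parameter names a custom field (custom_field_<id>): the full Whoosh result set is fetched once, cached, and then reordered to follow the filtered queryset's ordering by joining on document id. A standalone sketch of that join-and-reorder step, with illustrative names and made-up data:

```python
# Minimal sketch of the reorder step: keep only search hits whose ids survive the
# filtered queryset, in the queryset's (custom-field-sorted) order.

def reorder_hits(hits: list[dict], ordered_ids: list[int]) -> list[dict]:
    """hits: search hits keyed by 'id'; ordered_ids: ids in the DB ordering."""
    hits_by_id = {hit["id"]: hit for hit in hits}
    # dict.fromkeys() drops duplicate ids while preserving order, as in _manual_hits().
    unique_ids = dict.fromkeys(ordered_ids)
    return [hits_by_id[i] for i in unique_ids if i in hits_by_id]


# Example with made-up data: the index matched documents 1-3, the database says the
# custom-field order is 2, 3, 1, so the page is rebuilt in that order.
hits = [{"id": 1, "title": "first"}, {"id": 2, "title": "second"}, {"id": 3, "title": "third"}]
print([h["id"] for h in reorder_hits(hits, [2, 3, 1])])  # [2, 3, 1]
```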

View File

@@ -48,12 +48,13 @@ if settings.AUDIT_LOG_ENABLED:
@contextmanager
def disable_signal(sig, receiver, sender) -> Generator:
def disable_signal(sig, receiver, sender, *, weak: bool | None = None) -> Generator:
try:
sig.disconnect(receiver=receiver, sender=sender)
yield
finally:
sig.connect(receiver=receiver, sender=sender)
kwargs = {"weak": weak} if weak is not None else {}
sig.connect(receiver=receiver, sender=sender, **kwargs)
class Command(CryptMixin, BaseCommand):
@@ -258,16 +259,19 @@ class Command(CryptMixin, BaseCommand):
post_save,
receiver=update_filename_and_move_files,
sender=Document,
weak=False,
),
disable_signal(
m2m_changed,
receiver=update_filename_and_move_files,
sender=Document.tags.through,
weak=False,
),
disable_signal(
post_save,
receiver=update_filename_and_move_files,
sender=CustomFieldInstance,
weak=False,
),
disable_signal(
post_save,

View File

@@ -396,9 +396,9 @@ class CannotMoveFilesException(Exception):
# should be disabled in /src/documents/management/commands/document_importer.py handle
@receiver(models.signals.post_save, sender=CustomFieldInstance)
@receiver(models.signals.m2m_changed, sender=Document.tags.through)
@receiver(models.signals.post_save, sender=Document)
@receiver(models.signals.post_save, sender=CustomFieldInstance, weak=False)
@receiver(models.signals.m2m_changed, sender=Document.tags.through, weak=False)
@receiver(models.signals.post_save, sender=Document, weak=False)
def update_filename_and_move_files(
sender,
instance: Document | CustomFieldInstance,
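Both this hunk and the disable_signal change in the management command above pin the update_filename_and_move_files receivers with weak=False (and disable_signal now reconnects with the same flag). By default Django keeps only a weak reference to a connected receiver, so a receiver with no other strong reference can be garbage-collected and silently stop firing; weak=False makes the registry hold a real reference. A small self-contained sketch of that difference, using a plain django.dispatch.Signal and hypothetical receivers:

```python
import gc
from django.dispatch import Signal

something_happened = Signal()


def make_receiver(label):
    def receiver(sender, **kwargs):
        print(f"{label} receiver fired for {sender!r}")
    return receiver


# Weakly connected receiver: once the local name is gone, only the registry's
# weak reference remains, and garbage collection can drop it.
weak_receiver = make_receiver("weak")
something_happened.connect(weak_receiver, weak=True)
del weak_receiver
gc.collect()

# Strongly connected receiver survives because the registry holds a real reference.
strong_receiver = make_receiver("strong")
something_happened.connect(strong_receiver, weak=False)
del strong_receiver
gc.collect()

something_happened.send(sender="demo")  # only the strong receiver prints
```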

View File

@@ -89,6 +89,65 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
self.assertEqual(len(results), 0)
self.assertCountEqual(response.data["all"], [])
def test_search_custom_field_ordering(self):
custom_field = CustomField.objects.create(
name="Sortable field",
data_type=CustomField.FieldDataType.INT,
)
d1 = Document.objects.create(
title="first",
content="match",
checksum="A1",
)
d2 = Document.objects.create(
title="second",
content="match",
checksum="B2",
)
d3 = Document.objects.create(
title="third",
content="match",
checksum="C3",
)
CustomFieldInstance.objects.create(
document=d1,
field=custom_field,
value_int=30,
)
CustomFieldInstance.objects.create(
document=d2,
field=custom_field,
value_int=10,
)
CustomFieldInstance.objects.create(
document=d3,
field=custom_field,
value_int=20,
)
with AsyncWriter(index.open_index()) as writer:
index.update_document(writer, d1)
index.update_document(writer, d2)
index.update_document(writer, d3)
response = self.client.get(
f"/api/documents/?query=match&ordering=custom_field_{custom_field.pk}",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(
[doc["id"] for doc in response.data["results"]],
[d2.id, d3.id, d1.id],
)
response = self.client.get(
f"/api/documents/?query=match&ordering=-custom_field_{custom_field.pk}",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(
[doc["id"] for doc in response.data["results"]],
[d1.id, d3.id, d2.id],
)
def test_search_multi_page(self):
with AsyncWriter(index.open_index()) as writer:
for i in range(55):

View File

@@ -54,8 +54,8 @@ class TestCustomAccountAdapter(TestCase):
# False because request host is not in allowed hosts
self.assertFalse(adapter.is_safe_url(url))
@mock.patch("allauth.core.ratelimit._consume_rate", return_value=True)
def test_pre_authenticate(self, mock_consume_rate):
@mock.patch("allauth.core.internal.ratelimit.consume", return_value=True)
def test_pre_authenticate(self, mock_consume):
adapter = get_adapter()
request = HttpRequest()
request.get_host = mock.Mock(return_value="example.com")

View File

@@ -1,6 +1,6 @@
from typing import Final
__version__: Final[tuple[int, int, int]] = (2, 19, 5)
__version__: Final[tuple[int, int, int]] = (2, 19, 6)
# Version string like X.Y.Z
__full_version_str__: Final[str] = ".".join(map(str, __version__))
# Version string like X.Y

View File

@@ -55,7 +55,7 @@ Content-Transfer-Encoding: 7bit
<p>Some Text</p>
<p>
<img src="cid:part1.pNdUSz0s.D3NqVtPg@example.de" alt="Has to be rewritten to work..">
<img src="https://upload.wikimedia.org/wikipedia/en/f/f7/RickRoll.png" alt="This image should not be shown.">
<img src="https://docs.paperless-ngx.com/assets/logo_full_white.svg" alt="This image should not be shown.">
</p>
<p>and an embedded image.<br>

View File

@@ -6,7 +6,7 @@
<p>Some Text</p>
<p>
<img src="cid:part1.pNdUSz0s.D3NqVtPg@example.de" alt="Has to be rewritten to work..">
<img src="https://upload.wikimedia.org/wikipedia/en/f/f7/RickRoll.png" alt="This image should not be shown.">
<img src="https://docs.paperless-ngx.com/assets/logo_full_white.svg" alt="This image should not be shown.">
</p>
<p>and an embedded image.<br>

View File

@@ -2,7 +2,6 @@ import os
import shutil
import subprocess
import tempfile
import time
from pathlib import Path
import httpx
@@ -54,34 +53,6 @@ class TestUrlCanary:
Verify certain URLs are still available so testing is valid still
"""
@classmethod
def _fetch_wikimedia(cls, url: str) -> httpx.Response:
"""
Wikimedia occasionally throttles automated requests (HTTP 429). Retry a few
times with a short backoff so the tests stay stable, and skip if throttling
persists.
"""
last_resp: httpx.Response | None = None
# Wikimedia rejects requests without a browser-like User-Agent header and returns 403.
headers = {
"User-Agent": (
"Mozilla/5.0 (X11; Linux x86_64) "
"AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/123.0.0.0 Safari/537.36"
),
}
for delay in (0, 1, 2):
resp = httpx.get(url, headers=headers, timeout=30.0)
if resp.status_code != httpx.codes.TOO_MANY_REQUESTS:
return resp
last_resp = resp
time.sleep(delay)
pytest.skip(
"Wikimedia throttled the canary request with HTTP 429; try rerunning later.",
)
return last_resp # pragma: no cover
def test_online_image_exception_on_not_available(self):
"""
GIVEN:
@@ -96,8 +67,8 @@ class TestUrlCanary:
whether this image stays online forever, so here we check if we can detect if is not
available anymore.
"""
resp = self._fetch_wikimedia(
"https://upload.wikimedia.org/wikipedia/en/f/f7/nonexistent.png",
resp = httpx.get(
"https://docs.paperless-ngx.com/assets/non-existent.png",
)
with pytest.raises(httpx.HTTPStatusError) as exec_info:
resp.raise_for_status()
@@ -119,8 +90,8 @@ class TestUrlCanary:
"""
# Now check the URL used in samples/sample.html
resp = self._fetch_wikimedia(
"https://upload.wikimedia.org/wikipedia/en/f/f7/RickRoll.png",
resp = httpx.get(
"https://docs.paperless-ngx.com/assets/logo_full_white.svg",
)
resp.raise_for_status()

uv.lock (generated)
View File

@@ -842,13 +842,13 @@ wheels = [
[[package]]
name = "django-allauth"
version = "65.4.1"
version = "65.12.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "asgiref", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "django", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b1/e7/b3232c27da9f43e3db72d16addd90891ee233fa058ddd0588bafcded2ea7/django_allauth-65.4.1.tar.gz", hash = "sha256:60b32aef7dbbcc213319aa4fd8f570e985266ea1162ae6ef7a26a24efca85c8c", size = 1558220, upload-time = "2025-02-07T09:35:18.359Z" }
sdist = { url = "https://files.pythonhosted.org/packages/52/94/75d7f8c59e061d1b66a6d917b287817fe02d2671c9e6376a4ddfb3954989/django_allauth-65.12.1.tar.gz", hash = "sha256:662666ff2d5c71766f66b1629ac7345c30796813221184e13e11ed7460940c6a", size = 1967971, upload-time = "2025-10-16T16:39:58.342Z" }
[package.optional-dependencies]
mfa = [
@@ -856,22 +856,22 @@ mfa = [
{ name = "qrcode", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
socialaccount = [
{ name = "oauthlib", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "pyjwt", extra = ["crypto"], marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "requests-oauthlib", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
[[package]]
name = "django-auditlog"
version = "3.2.1"
version = "3.3.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "django", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "python-dateutil", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/e1/46/9da1d94493832fa18d2f6324a76d387fa232001593866987a96047709f4e/django_auditlog-3.2.1.tar.gz", hash = "sha256:63a4c9f7793e94eed804bc31a04d9b0b58244b1d280e2ed273c8b406bff1f779", size = 72926, upload-time = "2025-07-03T20:08:17.734Z" }
sdist = { url = "https://files.pythonhosted.org/packages/37/d8/ddd1c653ffb7ed1984596420982e32a0b163a0be316721a801a54dcbf016/django_auditlog-3.3.0.tar.gz", hash = "sha256:01331a0e7bb1a8ff7573311b486c88f3d0c431c388f5a1e4a9b6b26911dd79b8", size = 85941, upload-time = "2025-10-02T17:16:27.591Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a7/06/67296d050a72dcd76f57f220df621cb27e5b9282ba7ad0f5f74870dce241/django_auditlog-3.2.1-py3-none-any.whl", hash = "sha256:99603ca9d015f7e9b062b1c34f3e0826a3ce6ae6e5950c81bb7e663f7802a899", size = 38330, upload-time = "2025-07-03T20:07:51.735Z" },
{ url = "https://files.pythonhosted.org/packages/f3/bc/6e1b503d1755ab09cff6480cb088def073f1303165ab59b1a09247a2e756/django_auditlog-3.3.0-py3-none-any.whl", hash = "sha256:ab0f0f556a7107ac01c8fa87137bdfbb2b6f0debf70f7753169d9a40673d2636", size = 39676, upload-time = "2025-10-02T17:15:42.922Z" },
]
[[package]]
@@ -940,14 +940,14 @@ wheels = [
[[package]]
name = "django-filter"
version = "25.1"
version = "25.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "django", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b5/40/c702a6fe8cccac9bf426b55724ebdf57d10a132bae80a17691d0cf0b9bac/django_filter-25.1.tar.gz", hash = "sha256:1ec9eef48fa8da1c0ac9b411744b16c3f4c31176c867886e4c48da369c407153", size = 143021, upload-time = "2025-02-14T16:30:53.238Z" }
sdist = { url = "https://files.pythonhosted.org/packages/2c/e4/465d2699cd388c0005fb8d6ae6709f239917c6d8790ac35719676fffdcf3/django_filter-25.2.tar.gz", hash = "sha256:760e984a931f4468d096f5541787efb8998c61217b73006163bf2f9523fe8f23", size = 143818, upload-time = "2025-10-05T09:51:31.521Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/07/a6/70dcd68537c434ba7cb9277d403c5c829caf04f35baf5eb9458be251e382/django_filter-25.1-py3-none-any.whl", hash = "sha256:4fa48677cf5857b9b1347fed23e355ea792464e0fe07244d1fdfb8a806215b80", size = 94114, upload-time = "2025-02-14T16:30:50.435Z" },
{ url = "https://files.pythonhosted.org/packages/c1/40/6a02495c5658beb1f31eb09952d8aa12ef3c2a66342331ce3a35f7132439/django_filter-25.2-py3-none-any.whl", hash = "sha256:9c0f8609057309bba611062fe1b720b4a873652541192d232dd28970383633e3", size = 94145, upload-time = "2025-10-05T09:51:29.728Z" },
]
[[package]]
@@ -1112,14 +1112,14 @@ wheels = [
[[package]]
name = "drf-spectacular-sidecar"
version = "2025.9.1"
version = "2025.10.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "django", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/51/e2/85a0b8dbed8631165a6b49b2aee57636da8e4e710c444566636ffd972a7b/drf_spectacular_sidecar-2025.9.1.tar.gz", hash = "sha256:da2aa45da48fff76de7a1e357b84d1eb0b9df40ca89ec19d5fe94ad1037bb3c8", size = 2420902, upload-time = "2025-09-01T11:23:24.156Z" }
sdist = { url = "https://files.pythonhosted.org/packages/c3/e4/99cd1b1c8c69788bd6cb6a2459674f8c75728e79df23ac7beddd094bf805/drf_spectacular_sidecar-2025.10.1.tar.gz", hash = "sha256:506a5a21ce1ad7211c28acb4e2112e213f6dc095a2052ee6ed6db1ffe8eb5a7b", size = 2420998, upload-time = "2025-10-01T11:23:27.092Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/96/24/db59146ba89491fe1d44ca8aef239c94bf3c7fd41523976090f099430312/drf_spectacular_sidecar-2025.9.1-py3-none-any.whl", hash = "sha256:8e80625209b8a23ff27616db305b9ab71c2e2d1069dacd99720a9c11e429af50", size = 2440255, upload-time = "2025-09-01T11:23:22.822Z" },
{ url = "https://files.pythonhosted.org/packages/ab/87/70c67391e4ce68715d4dfae8dd33caeda2552af22f436ba55b8867a040fe/drf_spectacular_sidecar-2025.10.1-py3-none-any.whl", hash = "sha256:f1de343184d1a938179ce363d318258fe1e5f02f2f774625272364835f1c42bd", size = 2440241, upload-time = "2025-10-01T11:23:25.743Z" },
]
[[package]]
@@ -2924,7 +2924,7 @@ wheels = [
[[package]]
name = "paperless-ngx"
version = "2.19.5"
version = "2.19.6"
source = { virtual = "." }
dependencies = [
{ name = "babel", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -3080,8 +3080,8 @@ requires-dist = [
{ name = "concurrent-log-handler", specifier = "~=0.9.25" },
{ name = "dateparser", specifier = "~=1.2" },
{ name = "django", specifier = "~=5.2.5" },
{ name = "django-allauth", extras = ["mfa", "socialaccount"], specifier = "~=65.4.0" },
{ name = "django-auditlog", specifier = "~=3.2.1" },
{ name = "django-allauth", extras = ["mfa", "socialaccount"], specifier = "~=65.12.1" },
{ name = "django-auditlog", specifier = "~=3.3.0" },
{ name = "django-cachalot", specifier = "~=2.8.0" },
{ name = "django-celery-results", specifier = "~=2.6.0" },
{ name = "django-compression-middleware", specifier = "~=0.5.0" },
@@ -3095,7 +3095,7 @@ requires-dist = [
{ name = "djangorestframework", specifier = "~=3.16" },
{ name = "djangorestframework-guardian", specifier = "~=0.4.0" },
{ name = "drf-spectacular", specifier = "~=0.28" },
{ name = "drf-spectacular-sidecar", specifier = "~=2025.9.1" },
{ name = "drf-spectacular-sidecar", specifier = "~=2025.10.1" },
{ name = "drf-writable-nested", specifier = "~=0.7.1" },
{ name = "faiss-cpu", specifier = ">=1.10" },
{ name = "filelock", specifier = "~=3.20.0" },
@@ -4377,19 +4377,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
]
[[package]]
name = "requests-oauthlib"
version = "2.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "oauthlib", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/42/f2/05f29bc3913aea15eb670be136045bf5c5bbf4b99ecb839da9b422bb2c85/requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9", size = 55650, upload-time = "2024-03-22T20:32:29.939Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3b/5d/63d4ae3b9daea098d5d6f5da83984853c1bbacd5dc826764b249fe119d24/requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36", size = 24179, upload-time = "2024-03-22T20:32:28.055Z" },
]
[[package]]
name = "rich"
version = "14.1.0"