Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2026-02-01 23:19:00 -06:00)

Compare commits: feature-as...feature-mi (39 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 38df71b71a |  |
|  | 7bae6b7f6d |  |
|  | 1c99e55069 |  |
|  | b44eea6508 |  |
|  | b8af971652 |  |
|  | 66593ec660 |  |
|  | e1655045ca |  |
|  | 1a638d8cc0 |  |
|  | b21ff75a30 |  |
|  | 58f1a186d4 |  |
|  | 2a1c06c047 |  |
|  | 770dc02833 |  |
|  | af9d75dfcf |  |
|  | 7b23cdc0c1 |  |
|  | 09892809f9 |  |
|  | 94c6108006 |  |
|  | 33c5d5bab0 |  |
|  | 9beb508f1d |  |
|  | a290fcfe6f |  |
|  | 0846fe9845 |  |
|  | 910d16374b |  |
|  | 35d77b144d |  |
|  | 5987e35101 |  |
|  | 96259ce441 |  |
|  | 283afb265d |  |
|  | 67564dd573 |  |
|  | 046d65c2ba |  |
|  | 8761816635 |  |
|  | a1cdc45f1a |  |
|  | 190e42e722 |  |
|  | 75c6ffe01f |  |
|  | 2964b4b256 |  |
|  | f52f9dd325 |  |
|  | 5827a0ec25 |  |
|  | 990ef05d99 |  |
|  | 9f48b8e6e1 |  |
|  | 42689070b3 |  |
|  | 09f3cfdb93 |  |
|  | 84f408fa43 |  |
@@ -37,7 +37,7 @@ repos:
          - json
  # See https://github.com/prettier/prettier/issues/15742 for the fork reason
  - repo: https://github.com/rbubley/mirrors-prettier
    rev: 'v3.6.2'
    rev: 'v3.8.1'
    hooks:
      - id: prettier
        types_or:
@@ -49,7 +49,7 @@ repos:
          - 'prettier-plugin-organize-imports@4.1.0'
  # Python hooks
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.14.5
    rev: v0.14.14
    hooks:
      - id: ruff-check
      - id: ruff-format
@@ -76,7 +76,7 @@ repos:
    hooks:
      - id: shellcheck
  - repo: https://github.com/google/yamlfmt
    rev: v0.20.0
    rev: v0.21.0
    hooks:
      - id: yamlfmt
        exclude: "^src-ui/pnpm-lock.yaml"

@@ -8,6 +8,11 @@ echo "${log_prefix} Apply database migrations..."

cd "${PAPERLESS_SRC_DIR}"

if [[ "${PAPERLESS_MIGRATION_MODE:-0}" == "1" ]]; then
    echo "${log_prefix} Migration mode enabled, skipping migrations."
    exit 0
fi

# The whole migrate, with flock, needs to run as the right user
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
    exec s6-setlock -n "${data_dir}/migration_lock" python3 manage.py migrate --skip-checks --no-input

@@ -9,7 +9,15 @@ echo "${log_prefix} Running Django checks"
cd "${PAPERLESS_SRC_DIR}"

if [[ -n "${USER_IS_NON_ROOT}" ]]; then
    python3 manage.py check
    if [[ "${PAPERLESS_MIGRATION_MODE:-0}" == "1" ]]; then
        python3 manage_migration.py check
    else
        python3 manage.py check
    fi
else
    s6-setuidgid paperless python3 manage.py check
    if [[ "${PAPERLESS_MIGRATION_MODE:-0}" == "1" ]]; then
        s6-setuidgid paperless python3 manage_migration.py check
    else
        s6-setuidgid paperless python3 manage.py check
    fi
fi

@@ -13,8 +13,14 @@ if [[ -n "${PAPERLESS_FORCE_SCRIPT_NAME}" ]]; then
    export GRANIAN_URL_PATH_PREFIX=${PAPERLESS_FORCE_SCRIPT_NAME}
fi

if [[ -n "${USER_IS_NON_ROOT}" ]]; then
    exec granian --interface asginl --ws --loop uvloop "paperless.asgi:application"
if [[ "${PAPERLESS_MIGRATION_MODE:-0}" == "1" ]]; then
    app_module="paperless.migration_asgi:application"
else
    exec s6-setuidgid paperless granian --interface asginl --ws --loop uvloop "paperless.asgi:application"
    app_module="paperless.asgi:application"
fi

if [[ -n "${USER_IS_NON_ROOT}" ]]; then
    exec granian --interface asginl --ws --loop uvloop "${app_module}"
else
    exec s6-setuidgid paperless granian --interface asginl --ws --loop uvloop "${app_module}"
fi

@@ -16,18 +16,17 @@ classifiers = [
# This will allow testing to not install a webserver, mysql, etc

dependencies = [
  "adrf~=0.1.12",
  "azure-ai-documentintelligence>=1.0.2",
  "babel>=2.17",
  "bleach~=6.3.0",
  "celery[redis]~=5.5.1",
  "celery[redis]~=5.6.2",
  "channels~=4.2",
  "channels-redis~=4.2",
  "concurrent-log-handler~=0.9.25",
  "dateparser~=1.2",
  # WARNING: django does not use semver.
  # Only patch versions are guaranteed to not introduce breaking changes.
  "django~=5.2.5",
  "django~=5.2.10",
  "django-allauth[mfa,socialaccount]~=65.13.1",
  "django-auditlog~=3.4.1",
  "django-cachalot~=2.8.0",
@@ -50,6 +49,8 @@ dependencies = [
  "flower~=2.0.1",
  "gotenberg-client~=0.13.1",
  "httpx-oauth~=0.16",
  "ijson",
  "ijson~=3.3",
  "imap-tools~=1.11.0",
  "jinja2~=3.1.5",
  "langdetect~=1.0.9",
@@ -73,6 +74,7 @@ dependencies = [
  "rapidfuzz~=3.14.0",
  "redis[hiredis]~=5.2.1",
  "regex>=2025.9.18",
  "rich~=14.1.0",
  "scikit-learn~=1.7.0",
  "sentence-transformers>=4.1",
  "setproctitle~=1.3.4",
@@ -80,7 +82,7 @@ dependencies = [
  "torch~=2.9.1",
  "tqdm~=4.67.1",
  "watchfiles>=1.1.1",
  "whitenoise~=6.9",
  "whitenoise~=6.11",
  "whoosh-reloaded>=2.7.5",
  "zxing-cpp~=2.3.0",
]
@@ -89,13 +91,13 @@ optional-dependencies.mariadb = [
  "mysqlclient~=2.2.7",
]
optional-dependencies.postgres = [
  "psycopg[c,pool]==3.2.12",
  "psycopg[c,pool]==3.3",
  # Direct dependency for proper resolution of the pre-built wheels
  "psycopg-c==3.2.12",
  "psycopg-c==3.3",
  "psycopg-pool==3.3",
]
optional-dependencies.webserver = [
  "granian[uvloop]~=2.5.1",
  "granian[uvloop]~=2.6.0",
]

[dependency-groups]
@@ -153,7 +155,7 @@ typing = [
]

[tool.uv]
required-version = ">=0.5.14"
required-version = ">=0.9.0"
package = false
environments = [
  "sys_platform == 'darwin'",
@@ -163,8 +165,8 @@ environments = [
[tool.uv.sources]
# Markers are chosen to select these almost exclusively when building the Docker image
psycopg-c = [
  { url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-bookworm-3.2.12/psycopg_c-3.2.12-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
  { url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-bookworm-3.2.12/psycopg_c-3.2.12-cp312-cp312-linux_aarch64.whl", marker = "sys_platform == 'linux' and platform_machine == 'aarch64' and python_version == '3.12'" },
  { url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-trixie-3.3.0/psycopg_c-3.3.0-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },
  { url = "https://github.com/paperless-ngx/builder/releases/download/psycopg-trixie-3.3.0/psycopg_c-3.3.0-cp312-cp312-linux_aarch64.whl", marker = "sys_platform == 'linux' and platform_machine == 'aarch64' and python_version == '3.12'" },
]
zxing-cpp = [
  { url = "https://github.com/paperless-ngx/builder/releases/download/zxing-2.3.0/zxing_cpp-2.3.0-cp312-cp312-linux_x86_64.whl", marker = "sys_platform == 'linux' and platform_machine == 'x86_64' and python_version == '3.12'" },

@@ -3,7 +3,12 @@ import os
import sys

if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless.settings")
    try:
        from paperless_migration.detect import choose_settings_module

        os.environ.setdefault("DJANGO_SETTINGS_MODULE", choose_settings_module())
    except Exception:
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless.settings")

    from django.core.management import execute_from_command_line

src/manage_migration.py (new executable file, 13 lines)
@@ -0,0 +1,13 @@
#!/usr/bin/env python3
import os
import sys

if __name__ == "__main__":
    os.environ.setdefault(
        "DJANGO_SETTINGS_MODULE",
        "paperless_migration.settings",
    )

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)

@@ -1,12 +1,18 @@
import os

try:
    from paperless_migration.detect import choose_settings_module

    os.environ.setdefault("DJANGO_SETTINGS_MODULE", choose_settings_module())
except Exception:
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless.settings")

from django.core.asgi import get_asgi_application

# Fetch Django ASGI application early to ensure AppRegistry is populated
# before importing consumers and AuthMiddlewareStack that may import ORM
# models.

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless.settings")
django_asgi_app = get_asgi_application()

from channels.auth import AuthMiddlewareStack  # noqa: E402

src/paperless/migration_asgi.py (new file, 7 lines)
@@ -0,0 +1,7 @@
import os

from django.core.asgi import get_asgi_application

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless_migration.settings")

application = get_asgi_application()

@@ -9,9 +9,14 @@ https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/

import os

from django.core.wsgi import get_wsgi_application
try:
    from paperless_migration.detect import choose_settings_module

    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless.settings")
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", choose_settings_module())
except Exception:
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless.settings")

from django.core.wsgi import get_wsgi_application

application = get_wsgi_application()

@@ -1,12 +1,7 @@
|
||||
import datetime
|
||||
import logging
|
||||
from datetime import timedelta
|
||||
from typing import Any
|
||||
|
||||
from adrf.views import APIView
|
||||
from adrf.viewsets import ModelViewSet
|
||||
from adrf.viewsets import ReadOnlyModelViewSet
|
||||
from asgiref.sync import sync_to_async
|
||||
from django.http import HttpResponseBadRequest
|
||||
from django.http import HttpResponseForbidden
|
||||
from django.http import HttpResponseRedirect
|
||||
@@ -20,9 +15,11 @@ from httpx_oauth.oauth2 import GetAccessTokenError
|
||||
from rest_framework import serializers
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.filters import OrderingFilter
|
||||
from rest_framework.generics import GenericAPIView
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
from rest_framework.viewsets import ReadOnlyModelViewSet
|
||||
|
||||
from documents.filters import ObjectOwnedOrGrantedPermissionsFilter
|
||||
from documents.permissions import PaperlessObjectPermissions
|
||||
@@ -42,8 +39,6 @@ from paperless_mail.serialisers import MailRuleSerializer
|
||||
from paperless_mail.serialisers import ProcessedMailSerializer
|
||||
from paperless_mail.tasks import process_mail_accounts
|
||||
|
||||
logger: logging.Logger = logging.getLogger("paperless_mail")
|
||||
|
||||
|
||||
@extend_schema_view(
|
||||
test=extend_schema(
|
||||
@@ -71,75 +66,71 @@ logger: logging.Logger = logging.getLogger("paperless_mail")
|
||||
),
|
||||
)
|
||||
class MailAccountViewSet(ModelViewSet, PassUserMixin):
|
||||
model = MailAccount
|
||||
|
||||
queryset = MailAccount.objects.all().order_by("pk")
|
||||
serializer_class = MailAccountSerializer
|
||||
pagination_class = StandardPagination
|
||||
permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
|
||||
filter_backends = (ObjectOwnedOrGrantedPermissionsFilter,)
|
||||
|
||||
def get_permissions(self) -> list[Any]:
|
||||
def get_permissions(self):
|
||||
if self.action == "test":
|
||||
return [IsAuthenticated()]
|
||||
# Test action does not require object level permissions
|
||||
self.permission_classes = (IsAuthenticated,)
|
||||
return super().get_permissions()
|
||||
|
||||
@action(methods=["post"], detail=False)
|
||||
async def test(self, request: Request) -> Response | HttpResponseBadRequest:
|
||||
def test(self, request):
|
||||
logger = logging.getLogger("paperless_mail")
|
||||
request.data["name"] = datetime.datetime.now().isoformat()
|
||||
serializer = self.get_serializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
# Validation must be wrapped because of sync DB validators
|
||||
await sync_to_async(serializer.is_valid)(raise_exception=True)
|
||||
|
||||
validated_data: dict[str, Any] = serializer.validated_data
|
||||
|
||||
# account exists, use the password from there instead of *** and refresh_token / expiration
|
||||
if (
|
||||
len(str(validated_data.get("password", "")).replace("*", "")) == 0
|
||||
and request.data.get("id") is not None
|
||||
len(serializer.validated_data.get("password").replace("*", "")) == 0
|
||||
and request.data["id"] is not None
|
||||
):
|
||||
existing_account = await MailAccount.objects.aget(pk=request.data["id"])
|
||||
validated_data.update(
|
||||
{
|
||||
"password": existing_account.password,
|
||||
"account_type": existing_account.account_type,
|
||||
"refresh_token": existing_account.refresh_token,
|
||||
"expiration": existing_account.expiration,
|
||||
},
|
||||
)
|
||||
existing_account = MailAccount.objects.get(pk=request.data["id"])
|
||||
serializer.validated_data["password"] = existing_account.password
|
||||
serializer.validated_data["account_type"] = existing_account.account_type
|
||||
serializer.validated_data["refresh_token"] = existing_account.refresh_token
|
||||
serializer.validated_data["expiration"] = existing_account.expiration
|
||||
|
||||
account = MailAccount(**validated_data)
|
||||
|
||||
def _blocking_imap_test() -> bool:
|
||||
with get_mailbox(
|
||||
account.imap_server,
|
||||
account.imap_port,
|
||||
account.imap_security,
|
||||
) as m_box:
|
||||
account = MailAccount(**serializer.validated_data)
|
||||
with get_mailbox(
|
||||
account.imap_server,
|
||||
account.imap_port,
|
||||
account.imap_security,
|
||||
) as M:
|
||||
try:
|
||||
if (
|
||||
account.is_token
|
||||
and account.expiration
|
||||
and account.expiration is not None
|
||||
and account.expiration < timezone.now()
|
||||
):
|
||||
oauth_manager = PaperlessMailOAuth2Manager()
|
||||
if oauth_manager.refresh_account_oauth_token(existing_account):
|
||||
# User is not changing password and token needs to be refreshed
|
||||
existing_account.refresh_from_db()
|
||||
account.password = existing_account.password
|
||||
else:
|
||||
raise MailError("Unable to refresh oauth token")
|
||||
mailbox_login(m_box, account)
|
||||
return True
|
||||
|
||||
try:
|
||||
await sync_to_async(_blocking_imap_test, thread_sensitive=False)()
|
||||
return Response({"success": True})
|
||||
except MailError as e:
|
||||
logger.error(f"Mail account {account} test failed: {e}")
|
||||
return HttpResponseBadRequest("Unable to connect to server")
|
||||
mailbox_login(M, account)
|
||||
return Response({"success": True})
|
||||
except MailError as e:
|
||||
logger.error(
|
||||
f"Mail account {account} test failed: {e}",
|
||||
)
|
||||
return HttpResponseBadRequest("Unable to connect to server")
|
||||
|
||||
@action(methods=["post"], detail=True)
|
||||
async def process(self, request: Request, pk: int | None = None) -> Response:
|
||||
# FIX: Use aget_object() provided by adrf to avoid SynchronousOnlyOperation
|
||||
account = await self.aget_object()
|
||||
def process(self, request, pk=None):
|
||||
account = self.get_object()
|
||||
process_mail_accounts.delay([account.pk])
|
||||
|
||||
return Response({"result": "OK"})
|
||||
|
||||
|
||||
@@ -153,38 +144,21 @@ class ProcessedMailViewSet(ReadOnlyModelViewSet, PassUserMixin):
|
||||
ObjectOwnedOrGrantedPermissionsFilter,
|
||||
)
|
||||
filterset_class = ProcessedMailFilterSet
|
||||
|
||||
queryset = ProcessedMail.objects.all().order_by("-processed")
|
||||
|
||||
@action(methods=["post"], detail=False)
|
||||
async def bulk_delete(
|
||||
self,
|
||||
request: Request,
|
||||
) -> Response | HttpResponseBadRequest | HttpResponseForbidden:
|
||||
mail_ids: list[int] = request.data.get("mail_ids", [])
|
||||
def bulk_delete(self, request):
|
||||
mail_ids = request.data.get("mail_ids", [])
|
||||
if not isinstance(mail_ids, list) or not all(
|
||||
isinstance(i, int) for i in mail_ids
|
||||
):
|
||||
return HttpResponseBadRequest("mail_ids must be a list of integers")
|
||||
|
||||
# Store objects to delete after verification
|
||||
to_delete: list[ProcessedMail] = []
|
||||
|
||||
# We must verify permissions for every requested ID
|
||||
async for mail in ProcessedMail.objects.filter(id__in=mail_ids):
|
||||
can_delete = await sync_to_async(has_perms_owner_aware)(
|
||||
request.user,
|
||||
"delete_processedmail",
|
||||
mail,
|
||||
)
|
||||
if not can_delete:
|
||||
# This is what the test is looking for: 403 on permission failure
|
||||
mails = ProcessedMail.objects.filter(id__in=mail_ids)
|
||||
for mail in mails:
|
||||
if not has_perms_owner_aware(request.user, "delete_processedmail", mail):
|
||||
return HttpResponseForbidden("Insufficient permissions")
|
||||
to_delete.append(mail)
|
||||
|
||||
# Only perform deletions if all items passed the permission check
|
||||
for mail in to_delete:
|
||||
await mail.adelete()
|
||||
|
||||
mail.delete()
|
||||
return Response({"result": "OK", "deleted_mail_ids": mail_ids})
|
||||
|
||||
|
||||
@@ -204,74 +178,77 @@ class MailRuleViewSet(ModelViewSet, PassUserMixin):
|
||||
responses={200: None},
|
||||
),
|
||||
)
|
||||
class OauthCallbackView(APIView):
|
||||
class OauthCallbackView(GenericAPIView):
|
||||
permission_classes = (IsAuthenticated,)
|
||||
|
||||
async def get(
|
||||
self,
|
||||
request: Request,
|
||||
) -> Response | HttpResponseBadRequest | HttpResponseRedirect:
|
||||
has_perm = await sync_to_async(request.user.has_perm)(
|
||||
"paperless_mail.add_mailaccount",
|
||||
)
|
||||
if not has_perm:
|
||||
def get(self, request, format=None):
|
||||
if not (
|
||||
request.user and request.user.has_perms(["paperless_mail.add_mailaccount"])
|
||||
):
|
||||
return HttpResponseBadRequest(
|
||||
"You do not have permission to add mail accounts",
|
||||
)
|
||||
|
||||
code: str | None = request.query_params.get("code")
|
||||
state: str | None = request.query_params.get("state")
|
||||
scope: str | None = request.query_params.get("scope")
|
||||
logger = logging.getLogger("paperless_mail")
|
||||
code = request.query_params.get("code")
|
||||
# Gmail passes scope as a query param, Outlook does not
|
||||
scope = request.query_params.get("scope")
|
||||
|
||||
if not code or not state:
|
||||
return HttpResponseBadRequest("Invalid request parameters")
|
||||
if code is None:
|
||||
logger.error(
|
||||
f"Invalid oauth callback request, code: {code}, scope: {scope}",
|
||||
)
|
||||
return HttpResponseBadRequest("Invalid request, see logs for more detail")
|
||||
|
||||
oauth_manager = PaperlessMailOAuth2Manager(
|
||||
state=request.session.get("oauth_state"),
|
||||
)
|
||||
|
||||
state = request.query_params.get("state", "")
|
||||
if not oauth_manager.validate_state(state):
|
||||
return HttpResponseBadRequest("Invalid OAuth state")
|
||||
logger.error(
|
||||
f"Invalid oauth callback request received state: {state}, expected: {oauth_manager.state}",
|
||||
)
|
||||
return HttpResponseBadRequest("Invalid request, see logs for more detail")
|
||||
|
||||
try:
|
||||
defaults: dict[str, Any] = {
|
||||
"username": "",
|
||||
"imap_security": MailAccount.ImapSecurity.SSL,
|
||||
"imap_port": 993,
|
||||
}
|
||||
|
||||
if scope and "google" in scope:
|
||||
if scope is not None and "google" in scope:
|
||||
# Google
|
||||
account_type = MailAccount.MailAccountType.GMAIL_OAUTH
|
||||
imap_server = "imap.gmail.com"
|
||||
defaults.update(
|
||||
{
|
||||
"name": f"Gmail OAuth {timezone.now()}",
|
||||
"account_type": account_type,
|
||||
},
|
||||
)
|
||||
result = await sync_to_async(oauth_manager.get_gmail_access_token)(code)
|
||||
else:
|
||||
defaults = {
|
||||
"name": f"Gmail OAuth {timezone.now()}",
|
||||
"username": "",
|
||||
"imap_security": MailAccount.ImapSecurity.SSL,
|
||||
"imap_port": 993,
|
||||
"account_type": account_type,
|
||||
}
|
||||
result = oauth_manager.get_gmail_access_token(code)
|
||||
|
||||
elif scope is None:
|
||||
# Outlook
|
||||
account_type = MailAccount.MailAccountType.OUTLOOK_OAUTH
|
||||
imap_server = "outlook.office365.com"
|
||||
defaults.update(
|
||||
{
|
||||
"name": f"Outlook OAuth {timezone.now()}",
|
||||
"account_type": account_type,
|
||||
},
|
||||
)
|
||||
result = await sync_to_async(oauth_manager.get_outlook_access_token)(
|
||||
code,
|
||||
)
|
||||
defaults = {
|
||||
"name": f"Outlook OAuth {timezone.now()}",
|
||||
"username": "",
|
||||
"imap_security": MailAccount.ImapSecurity.SSL,
|
||||
"imap_port": 993,
|
||||
"account_type": account_type,
|
||||
}
|
||||
|
||||
account, _ = await MailAccount.objects.aupdate_or_create(
|
||||
result = oauth_manager.get_outlook_access_token(code)
|
||||
|
||||
access_token = result["access_token"]
|
||||
refresh_token = result["refresh_token"]
|
||||
expires_in = result["expires_in"]
|
||||
account, _ = MailAccount.objects.update_or_create(
|
||||
password=access_token,
|
||||
is_token=True,
|
||||
imap_server=imap_server,
|
||||
refresh_token=result["refresh_token"],
|
||||
defaults={
|
||||
**defaults,
|
||||
"password": result["access_token"],
|
||||
"is_token": True,
|
||||
"expiration": timezone.now()
|
||||
+ timedelta(seconds=result["expires_in"]),
|
||||
},
|
||||
refresh_token=refresh_token,
|
||||
expiration=timezone.now() + timedelta(seconds=expires_in),
|
||||
defaults=defaults,
|
||||
)
|
||||
return HttpResponseRedirect(
|
||||
f"{oauth_manager.oauth_redirect_url}?oauth_success=1&account_id={account.pk}",
|
||||
|
||||
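One side of this hunk keeps the mail-account endpoints as async adrf views, where every blocking call (ORM validation, the IMAP login) is pushed through asgiref's sync_to_async so the event loop is never blocked. A minimal hedged sketch of that pattern; the helper below is a stand-in, not the real paperless_mail API:

```python
# Sketch of the async-view pattern used in the hunk above.
from asgiref.sync import sync_to_async


def blocking_imap_check(server: str, port: int) -> bool:
    # Placeholder for the real get_mailbox()/mailbox_login() calls.
    return True


async def test_account(server: str, port: int) -> dict:
    # thread_sensitive=False runs the check in a worker thread, mirroring
    # the _blocking_imap_test wrapper shown in the view.
    ok = await sync_to_async(blocking_imap_check, thread_sensitive=False)(server, port)
    return {"success": ok}
```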
src/paperless_migration/__init__.py (new empty file)

src/paperless_migration/apps.py (new file, 6 lines)
@@ -0,0 +1,6 @@
from django.apps import AppConfig


class PaperlessMigrationConfig(AppConfig):
    default_auto_field = "django.db.models.BigAutoField"
    name = "paperless_migration"

src/paperless_migration/asgi.py (new file, 28 lines)
@@ -0,0 +1,28 @@
"""ASGI application for migration mode with WebSocket support."""

from __future__ import annotations

import os

from channels.auth import AuthMiddlewareStack
from channels.routing import ProtocolTypeRouter
from channels.routing import URLRouter
from channels.security.websocket import AllowedHostsOriginValidator
from django.core.asgi import get_asgi_application

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless_migration.settings")

# Initialize Django ASGI application early to ensure settings are loaded
django_asgi_app = get_asgi_application()

# Import routing after Django is initialized
from paperless_migration.routing import websocket_urlpatterns  # noqa: E402

application = ProtocolTypeRouter(
    {
        "http": django_asgi_app,
        "websocket": AllowedHostsOriginValidator(
            AuthMiddlewareStack(URLRouter(websocket_urlpatterns)),
        ),
    },
)

src/paperless_migration/consumers.py (new file, 245 lines)
@@ -0,0 +1,245 @@
|
||||
"""WebSocket consumers for migration operations."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from channels.generic.websocket import AsyncWebsocketConsumer
|
||||
from django.conf import settings
|
||||
|
||||
from paperless_migration.services.importer import ImportService
|
||||
from paperless_migration.services.transform import TransformService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MigrationConsumerBase(AsyncWebsocketConsumer):
|
||||
"""Base consumer with common authentication and messaging logic."""
|
||||
|
||||
async def connect(self) -> None:
|
||||
"""Authenticate and accept or reject the connection."""
|
||||
user = self.scope.get("user")
|
||||
session = self.scope.get("session", {})
|
||||
|
||||
if not user or not user.is_authenticated:
|
||||
logger.warning("WebSocket connection rejected: not authenticated")
|
||||
await self.close(code=4001)
|
||||
return
|
||||
|
||||
if not user.is_superuser:
|
||||
logger.warning("WebSocket connection rejected: not superuser")
|
||||
await self.close(code=4003)
|
||||
return
|
||||
|
||||
if not session.get("migration_code_ok"):
|
||||
logger.warning("WebSocket connection rejected: migration code not verified")
|
||||
await self.close(code=4002)
|
||||
return
|
||||
|
||||
await self.accept()
|
||||
logger.info("WebSocket connection accepted for user: %s", user.username)
|
||||
|
||||
async def disconnect(self, close_code: int) -> None:
|
||||
"""Handle disconnection."""
|
||||
logger.debug("WebSocket disconnected with code: %d", close_code)
|
||||
|
||||
async def receive(self, text_data: str | None = None, **kwargs: Any) -> None:
|
||||
"""Handle incoming messages - triggers the operation."""
|
||||
if text_data is None:
|
||||
return
|
||||
|
||||
try:
|
||||
data = json.loads(text_data)
|
||||
except json.JSONDecodeError:
|
||||
await self.send_error("Invalid JSON message")
|
||||
return
|
||||
|
||||
action = data.get("action")
|
||||
if action == "start":
|
||||
await self.run_operation()
|
||||
else:
|
||||
await self.send_error(f"Unknown action: {action}")
|
||||
|
||||
async def run_operation(self) -> None:
|
||||
"""Override in subclasses to run the specific operation."""
|
||||
raise NotImplementedError
|
||||
|
||||
async def send_message(self, msg_type: str, **kwargs: Any) -> None:
|
||||
"""Send a typed JSON message to the client."""
|
||||
await self.send(text_data=json.dumps({"type": msg_type, **kwargs}))
|
||||
|
||||
async def send_log(self, message: str, level: str = "info") -> None:
|
||||
"""Send a log message."""
|
||||
await self.send_message("log", message=message, level=level)
|
||||
|
||||
async def send_progress(
|
||||
self,
|
||||
current: int,
|
||||
total: int | None = None,
|
||||
label: str = "",
|
||||
) -> None:
|
||||
"""Send a progress update."""
|
||||
await self.send_message(
|
||||
"progress",
|
||||
current=current,
|
||||
total=total,
|
||||
label=label,
|
||||
)
|
||||
|
||||
async def send_stats(self, stats: dict[str, Any]) -> None:
|
||||
"""Send statistics update."""
|
||||
await self.send_message("stats", **stats)
|
||||
|
||||
async def send_complete(
|
||||
self,
|
||||
duration: float,
|
||||
*,
|
||||
success: bool,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
"""Send completion message."""
|
||||
await self.send_message(
|
||||
"complete",
|
||||
success=success,
|
||||
duration=duration,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
async def send_error(self, message: str) -> None:
|
||||
"""Send an error message."""
|
||||
await self.send_message("error", message=message)
|
||||
|
||||
|
||||
class TransformConsumer(MigrationConsumerBase):
|
||||
"""WebSocket consumer for transform operations."""
|
||||
|
||||
async def run_operation(self) -> None:
|
||||
"""Run the transform operation."""
|
||||
input_path = Path(settings.MIGRATION_EXPORT_PATH)
|
||||
output_path = Path(settings.MIGRATION_TRANSFORMED_PATH)
|
||||
frequency = settings.MIGRATION_PROGRESS_FREQUENCY
|
||||
|
||||
if not input_path.exists():
|
||||
await self.send_error(f"Export file not found: {input_path}")
|
||||
return
|
||||
|
||||
if output_path.exists():
|
||||
await self.send_error(
|
||||
f"Output file already exists: {output_path}. "
|
||||
"Delete it first to re-run transform.",
|
||||
)
|
||||
return
|
||||
|
||||
await self.send_log("Starting transform operation...")
|
||||
|
||||
service = TransformService(
|
||||
input_path=input_path,
|
||||
output_path=output_path,
|
||||
update_frequency=frequency,
|
||||
)
|
||||
|
||||
try:
|
||||
async for update in service.run_async():
|
||||
match update["type"]:
|
||||
case "progress":
|
||||
await self.send_progress(
|
||||
current=update["completed"],
|
||||
label=f"{update['completed']:,} rows processed",
|
||||
)
|
||||
if update.get("stats"):
|
||||
await self.send_stats({"transformed": update["stats"]})
|
||||
case "complete":
|
||||
await self.send_complete(
|
||||
success=True,
|
||||
duration=update["duration"],
|
||||
total_processed=update["total_processed"],
|
||||
stats=update["stats"],
|
||||
speed=update["speed"],
|
||||
)
|
||||
case "error":
|
||||
await self.send_error(update["message"])
|
||||
case "log":
|
||||
await self.send_log(
|
||||
update["message"],
|
||||
update.get("level", "info"),
|
||||
)
|
||||
except Exception as exc:
|
||||
logger.exception("Transform operation failed")
|
||||
await self.send_error(f"Transform failed: {exc}")
|
||||
|
||||
|
||||
class ImportConsumer(MigrationConsumerBase):
|
||||
"""WebSocket consumer for import operations."""
|
||||
|
||||
async def run_operation(self) -> None:
|
||||
"""Run the import operation (wipe, migrate, import)."""
|
||||
export_path = Path(settings.MIGRATION_EXPORT_PATH)
|
||||
transformed_path = Path(settings.MIGRATION_TRANSFORMED_PATH)
|
||||
imported_marker = Path(settings.MIGRATION_IMPORTED_PATH)
|
||||
source_dir = export_path.parent
|
||||
|
||||
if not export_path.exists():
|
||||
await self.send_error("Export file not found. Upload or re-check export.")
|
||||
return
|
||||
|
||||
if not transformed_path.exists():
|
||||
await self.send_error("Transformed file not found. Run transform first.")
|
||||
return
|
||||
|
||||
await self.send_log("Preparing import operation...")
|
||||
|
||||
# Backup original manifest and swap in transformed version
|
||||
backup_path: Path | None = None
|
||||
try:
|
||||
backup_fd, backup_name = tempfile.mkstemp(
|
||||
prefix="manifest.v2.",
|
||||
suffix=".json",
|
||||
dir=source_dir,
|
||||
)
|
||||
os.close(backup_fd)
|
||||
backup_path = Path(backup_name)
|
||||
shutil.copy2(export_path, backup_path)
|
||||
shutil.copy2(transformed_path, export_path)
|
||||
await self.send_log("Manifest files prepared")
|
||||
except Exception as exc:
|
||||
await self.send_error(f"Failed to prepare import manifest: {exc}")
|
||||
return
|
||||
|
||||
service = ImportService(
|
||||
source_dir=source_dir,
|
||||
imported_marker=imported_marker,
|
||||
)
|
||||
|
||||
try:
|
||||
async for update in service.run_async():
|
||||
match update["type"]:
|
||||
case "phase":
|
||||
await self.send_log(f"Phase: {update['phase']}", level="info")
|
||||
case "log":
|
||||
await self.send_log(
|
||||
update["message"],
|
||||
update.get("level", "info"),
|
||||
)
|
||||
case "complete":
|
||||
await self.send_complete(
|
||||
success=update["success"],
|
||||
duration=update["duration"],
|
||||
)
|
||||
case "error":
|
||||
await self.send_error(update["message"])
|
||||
except Exception as exc:
|
||||
logger.exception("Import operation failed")
|
||||
await self.send_error(f"Import failed: {exc}")
|
||||
finally:
|
||||
# Restore original manifest
|
||||
if backup_path and backup_path.exists():
|
||||
try:
|
||||
shutil.move(str(backup_path), str(export_path))
|
||||
except Exception as exc:
|
||||
logger.warning("Failed to restore backup manifest: %s", exc)
|
||||
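The consumers above speak a small JSON protocol: the client opens the socket, sends {"action": "start"}, and then receives typed messages (log, progress, stats, complete, error) until the operation finishes. A hedged client sketch, assuming the routes registered in paperless_migration.routing; it ignores the superuser and migration-code checks that connect() enforces, so a real client would need an authenticated session cookie:

```python
# Illustrative client for the migration WebSocket protocol above.
import asyncio
import json

import websockets


async def run_transform(url: str = "ws://localhost:8000/ws/migration/transform/") -> None:
    async with websockets.connect(url) as ws:
        # The consumer only reacts to {"action": "start"}.
        await ws.send(json.dumps({"action": "start"}))
        async for raw in ws:
            msg = json.loads(raw)
            # Message types: log, progress, stats, complete, error.
            if msg["type"] in ("complete", "error"):
                print(msg)
                break
            print(msg.get("message") or msg)


asyncio.run(run_transform())
```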
src/paperless_migration/detect.py (new file, 150 lines)
@@ -0,0 +1,150 @@
|
||||
"""Lightweight detection to decide if we should boot migration mode."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sqlite3
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
BASE_DIR = Path(__file__).resolve().parent.parent
|
||||
|
||||
_DOC_EXISTS_QUERY = "SELECT 1 FROM documents_document LIMIT 1;"
|
||||
|
||||
|
||||
def _get_db_config() -> dict[str, Any]:
|
||||
data_dir = Path(os.getenv("PAPERLESS_DATA_DIR", BASE_DIR.parent / "data")).resolve()
|
||||
if not os.getenv("PAPERLESS_DBHOST"):
|
||||
return {
|
||||
"ENGINE": "sqlite",
|
||||
"NAME": data_dir / "db.sqlite3",
|
||||
}
|
||||
|
||||
engine = "mariadb" if os.getenv("PAPERLESS_DBENGINE") == "mariadb" else "postgres"
|
||||
cfg = {
|
||||
"ENGINE": engine,
|
||||
"HOST": os.getenv("PAPERLESS_DBHOST"),
|
||||
"PORT": os.getenv("PAPERLESS_DBPORT"),
|
||||
"NAME": os.getenv("PAPERLESS_DBNAME", "paperless"),
|
||||
"USER": os.getenv("PAPERLESS_DBUSER", "paperless"),
|
||||
"PASSWORD": os.getenv("PAPERLESS_DBPASS", "paperless"),
|
||||
}
|
||||
return cfg
|
||||
|
||||
|
||||
def _probe_sqlite(path: Path) -> bool:
|
||||
if not path.exists():
|
||||
return False
|
||||
try:
|
||||
conn = sqlite3.connect(path, timeout=1)
|
||||
cur = conn.cursor()
|
||||
cur.execute(_DOC_EXISTS_QUERY)
|
||||
cur.fetchone()
|
||||
return True
|
||||
except sqlite3.Error:
|
||||
return False
|
||||
finally:
|
||||
try:
|
||||
conn.close()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
def _probe_postgres(cfg: dict[str, Any]) -> bool:
|
||||
try:
|
||||
import psycopg
|
||||
except ImportError: # pragma: no cover
|
||||
logger.debug("psycopg not installed; skipping postgres probe")
|
||||
return False
|
||||
|
||||
try:
|
||||
conn = psycopg.connect(
|
||||
host=cfg["HOST"],
|
||||
port=cfg["PORT"],
|
||||
dbname=cfg["NAME"],
|
||||
user=cfg["USER"],
|
||||
password=cfg["PASSWORD"],
|
||||
connect_timeout=2,
|
||||
)
|
||||
with conn, conn.cursor() as cur:
|
||||
cur.execute(_DOC_EXISTS_QUERY)
|
||||
cur.fetchone()
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
finally:
|
||||
try:
|
||||
conn.close()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
def _probe_mariadb(cfg: dict[str, Any]) -> bool:
|
||||
try:
|
||||
import MySQLdb # type: ignore
|
||||
except ImportError: # pragma: no cover
|
||||
logger.debug("mysqlclient not installed; skipping mariadb probe")
|
||||
return False
|
||||
|
||||
try:
|
||||
conn = MySQLdb.connect(
|
||||
host=cfg["HOST"],
|
||||
port=int(cfg["PORT"] or 3306),
|
||||
user=cfg["USER"],
|
||||
passwd=cfg["PASSWORD"],
|
||||
db=cfg["NAME"],
|
||||
connect_timeout=2,
|
||||
)
|
||||
cur = conn.cursor()
|
||||
cur.execute("SELECT 1 FROM documents_document LIMIT 1;")
|
||||
cur.fetchone()
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
finally:
|
||||
try:
|
||||
conn.close()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
def is_v2_database() -> bool:
|
||||
cfg = _get_db_config()
|
||||
if cfg["ENGINE"] == "sqlite":
|
||||
return _probe_sqlite(cfg["NAME"])
|
||||
if cfg["ENGINE"] == "postgres":
|
||||
return _probe_postgres(cfg)
|
||||
if cfg["ENGINE"] == "mariadb":
|
||||
return _probe_mariadb(cfg)
|
||||
return False
|
||||
|
||||
|
||||
def choose_settings_module() -> str:
|
||||
# ENV override
|
||||
toggle = os.getenv("PAPERLESS_MIGRATION_MODE")
|
||||
if toggle is not None:
|
||||
chosen = (
|
||||
"paperless_migration.settings"
|
||||
if str(toggle).lower() in ("1", "true", "yes", "on")
|
||||
else "paperless.settings"
|
||||
)
|
||||
os.environ["PAPERLESS_MIGRATION_MODE"] = "1" if "migration" in chosen else "0"
|
||||
return chosen
|
||||
|
||||
# Auto-detect via DB probe
|
||||
if is_v2_database():
|
||||
logger.warning("Detected v2 schema; booting migration mode.")
|
||||
os.environ["PAPERLESS_MIGRATION_MODE"] = "1"
|
||||
return "paperless_migration.settings"
|
||||
|
||||
os.environ["PAPERLESS_MIGRATION_MODE"] = "0"
|
||||
return "paperless.settings"
|
||||
|
||||
|
||||
if __name__ == "__main__": # pragma: no cover
|
||||
logger.info(
|
||||
"v2 database detected" if is_v2_database() else "v2 database not detected",
|
||||
)
|
||||
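choose_settings_module() honours an explicit PAPERLESS_MIGRATION_MODE override before falling back to the database probe. A minimal sketch of that behaviour, assuming the paperless_migration package is importable:

```python
# Sketch of the env-var override path in choose_settings_module() above.
import os

from paperless_migration.detect import choose_settings_module

# "1"/"true"/"yes"/"on" force migration mode, any other value forces
# normal mode; when the variable is unset the DB probe decides instead.
os.environ["PAPERLESS_MIGRATION_MODE"] = "1"
assert choose_settings_module() == "paperless_migration.settings"

os.environ["PAPERLESS_MIGRATION_MODE"] = "0"
assert choose_settings_module() == "paperless.settings"
```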
src/paperless_migration/routing.py (new file, 13 lines)
@@ -0,0 +1,13 @@
"""WebSocket URL routing for migration operations."""

from __future__ import annotations

from django.urls import path

from paperless_migration.consumers import ImportConsumer
from paperless_migration.consumers import TransformConsumer

websocket_urlpatterns = [
    path("ws/migration/transform/", TransformConsumer.as_asgi()),
    path("ws/migration/import/", ImportConsumer.as_asgi()),
]

src/paperless_migration/services/__init__.py (new empty file)

src/paperless_migration/services/importer.py (new file, 186 lines)
@@ -0,0 +1,186 @@
|
||||
"""Import service for loading transformed data into v3 database."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import TypedDict
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import AsyncGenerator
|
||||
from collections.abc import Generator
|
||||
|
||||
|
||||
class ProgressUpdate(TypedDict, total=False):
|
||||
"""Progress update message structure."""
|
||||
|
||||
type: str
|
||||
phase: str
|
||||
message: str
|
||||
level: str
|
||||
success: bool
|
||||
duration: float
|
||||
return_code: int
|
||||
|
||||
|
||||
@dataclass
|
||||
class ImportService:
|
||||
"""Service for importing transformed data into v3 database.
|
||||
|
||||
This service orchestrates the three-phase import process:
|
||||
1. Wipe the existing database
|
||||
2. Run Django migrations for v3 schema
|
||||
3. Import the transformed data
|
||||
"""
|
||||
|
||||
source_dir: Path
|
||||
imported_marker: Path
|
||||
manage_path: Path | None = None
|
||||
|
||||
def __post_init__(self) -> None:
|
||||
if self.manage_path is None:
|
||||
# Default to manage.py in the src directory
|
||||
self.manage_path = (
|
||||
Path(__file__).resolve().parent.parent.parent / "manage.py"
|
||||
)
|
||||
|
||||
def _get_env(self) -> dict[str, str]:
|
||||
"""Get environment variables for subprocess calls."""
|
||||
import os
|
||||
|
||||
env = os.environ.copy()
|
||||
env["DJANGO_SETTINGS_MODULE"] = "paperless.settings"
|
||||
env["PAPERLESS_MIGRATION_MODE"] = "0"
|
||||
return env
|
||||
|
||||
def _run_command(
|
||||
self,
|
||||
args: list[str],
|
||||
label: str,
|
||||
) -> Generator[ProgressUpdate, None, int]:
|
||||
"""Run a command and yield log lines. Returns the return code."""
|
||||
yield {"type": "log", "message": f"Running: {label}", "level": "info"}
|
||||
|
||||
process = subprocess.Popen(
|
||||
args,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT,
|
||||
bufsize=1,
|
||||
text=True,
|
||||
env=self._get_env(),
|
||||
)
|
||||
|
||||
try:
|
||||
if process.stdout:
|
||||
for line in process.stdout:
|
||||
yield {
|
||||
"type": "log",
|
||||
"message": line.rstrip(),
|
||||
"level": "info",
|
||||
}
|
||||
process.wait()
|
||||
return process.returncode
|
||||
finally:
|
||||
if process.poll() is None:
|
||||
process.kill()
|
||||
|
||||
def run_sync(self) -> Generator[ProgressUpdate, None, None]:
|
||||
"""Run the import synchronously, yielding progress updates.
|
||||
|
||||
This orchestrates:
|
||||
1. Database wipe
|
||||
2. Django migrations
|
||||
3. Document import
|
||||
"""
|
||||
start_time = time.perf_counter()
|
||||
|
||||
# Phase 1: Wipe database
|
||||
yield {"type": "phase", "phase": "wipe"}
|
||||
wipe_cmd = [
|
||||
sys.executable,
|
||||
"-m",
|
||||
"paperless_migration.services.wipe_db",
|
||||
]
|
||||
wipe_code = yield from self._run_command(wipe_cmd, "Database wipe")
|
||||
|
||||
if wipe_code != 0:
|
||||
yield {
|
||||
"type": "error",
|
||||
"message": f"Database wipe failed with code {wipe_code}",
|
||||
}
|
||||
return
|
||||
|
||||
yield {"type": "log", "message": "Database wipe complete", "level": "info"}
|
||||
|
||||
# Phase 2: Run migrations
|
||||
yield {"type": "phase", "phase": "migrate"}
|
||||
migrate_cmd = [
|
||||
sys.executable,
|
||||
str(self.manage_path),
|
||||
"migrate",
|
||||
"--noinput",
|
||||
]
|
||||
migrate_code = yield from self._run_command(migrate_cmd, "Django migrations")
|
||||
|
||||
if migrate_code != 0:
|
||||
yield {
|
||||
"type": "error",
|
||||
"message": f"Migrations failed with code {migrate_code}",
|
||||
}
|
||||
return
|
||||
|
||||
yield {"type": "log", "message": "Migrations complete", "level": "info"}
|
||||
|
||||
# Phase 3: Import data
|
||||
yield {"type": "phase", "phase": "import"}
|
||||
import_cmd = [
|
||||
sys.executable,
|
||||
str(self.manage_path),
|
||||
"document_importer",
|
||||
str(self.source_dir),
|
||||
"--data-only",
|
||||
]
|
||||
import_code = yield from self._run_command(import_cmd, "Document import")
|
||||
|
||||
if import_code != 0:
|
||||
yield {
|
||||
"type": "error",
|
||||
"message": f"Import failed with code {import_code}",
|
||||
}
|
||||
return
|
||||
|
||||
# Mark import as complete
|
||||
try:
|
||||
self.imported_marker.parent.mkdir(parents=True, exist_ok=True)
|
||||
self.imported_marker.write_text("ok\n", encoding="utf-8")
|
||||
except Exception as exc:
|
||||
yield {
|
||||
"type": "log",
|
||||
"message": f"Warning: Could not write import marker: {exc}",
|
||||
"level": "warning",
|
||||
}
|
||||
|
||||
end_time = time.perf_counter()
|
||||
duration = end_time - start_time
|
||||
|
||||
yield {
|
||||
"type": "complete",
|
||||
"success": True,
|
||||
"duration": duration,
|
||||
}
|
||||
|
||||
async def run_async(self) -> AsyncGenerator[ProgressUpdate, None]:
|
||||
"""Run the import asynchronously, yielding progress updates.
|
||||
|
||||
This wraps the synchronous implementation to work with async consumers.
|
||||
"""
|
||||
import asyncio
|
||||
|
||||
for update in self.run_sync():
|
||||
yield update
|
||||
# Yield control to the event loop
|
||||
await asyncio.sleep(0)
|
||||
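ImportService.run_sync() drives the three phases (database wipe, Django migrations, document_importer) as subprocesses and yields typed progress dicts; run_async() only wraps it for the WebSocket consumer. A hedged sketch of driving it directly, with placeholder paths standing in for the MIGRATION_* settings used above:

```python
# Illustrative direct use of ImportService outside the consumer.
from pathlib import Path

from paperless_migration.services.importer import ImportService

service = ImportService(
    source_dir=Path("/usr/src/paperless/export"),  # assumed export directory
    imported_marker=Path("/usr/src/paperless/export/import.completed"),
)

# run_sync() yields dicts with types: phase, log, error, complete.
for update in service.run_sync():
    if update["type"] == "error":
        raise RuntimeError(update["message"])
    print(update)
```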
src/paperless_migration/services/transform.py (new file, 173 lines)
@@ -0,0 +1,173 @@
|
||||
"""Transform service for converting v2 exports to v3 format."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import time
|
||||
from collections import Counter
|
||||
from collections.abc import AsyncGenerator
|
||||
from collections.abc import Callable
|
||||
from collections.abc import Generator
|
||||
from dataclasses import dataclass
|
||||
from dataclasses import field
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Any
|
||||
from typing import TypedDict
|
||||
|
||||
import ijson
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
class FixtureObject(TypedDict):
|
||||
"""Structure of a Django fixture object."""
|
||||
|
||||
model: str
|
||||
pk: int
|
||||
fields: dict[str, Any]
|
||||
|
||||
|
||||
class ProgressUpdate(TypedDict, total=False):
|
||||
"""Progress update message structure."""
|
||||
|
||||
type: str
|
||||
completed: int
|
||||
stats: dict[str, int]
|
||||
message: str
|
||||
level: str
|
||||
duration: float
|
||||
total_processed: int
|
||||
speed: float
|
||||
|
||||
|
||||
TransformFn = Callable[[FixtureObject], FixtureObject]
|
||||
|
||||
|
||||
def transform_documents_document(obj: FixtureObject) -> FixtureObject:
|
||||
"""Transform a documents.document fixture object for v3 schema."""
|
||||
fields: dict[str, Any] = obj["fields"]
|
||||
fields.pop("storage_type", None)
|
||||
content: Any = fields.get("content")
|
||||
fields["content_length"] = len(content) if isinstance(content, str) else 0
|
||||
return obj
|
||||
|
||||
|
||||
# Registry of model-specific transforms
|
||||
TRANSFORMS: dict[str, TransformFn] = {
|
||||
"documents.document": transform_documents_document,
|
||||
}
|
||||
|
||||
|
||||
@dataclass
|
||||
class TransformService:
|
||||
"""Service for transforming v2 exports to v3 format.
|
||||
|
||||
This service processes JSON fixtures incrementally using ijson for
|
||||
memory-efficient streaming, and yields progress updates suitable
|
||||
for WebSocket transmission.
|
||||
"""
|
||||
|
||||
input_path: Path
|
||||
output_path: Path
|
||||
update_frequency: int = 100
|
||||
_stats: Counter[str] = field(default_factory=Counter, init=False)
|
||||
_total_processed: int = field(default=0, init=False)
|
||||
|
||||
def validate(self) -> str | None:
|
||||
"""Validate preconditions for transform. Returns error message or None."""
|
||||
if not self.input_path.exists():
|
||||
return f"Input file not found: {self.input_path}"
|
||||
if self.output_path.exists():
|
||||
return f"Output file already exists: {self.output_path}"
|
||||
if self.input_path.resolve() == self.output_path.resolve():
|
||||
return "Input and output paths cannot be the same file"
|
||||
return None
|
||||
|
||||
def _process_fixture(self, obj: FixtureObject) -> FixtureObject:
|
||||
"""Apply any registered transforms to a fixture object."""
|
||||
model: str = obj["model"]
|
||||
transform: TransformFn | None = TRANSFORMS.get(model)
|
||||
if transform:
|
||||
obj = transform(obj)
|
||||
self._stats[model] += 1
|
||||
return obj
|
||||
|
||||
def run_sync(self) -> Generator[ProgressUpdate, None, None]:
|
||||
"""Run the transform synchronously, yielding progress updates.
|
||||
|
||||
This is the core implementation that processes the JSON file
|
||||
and yields progress updates at regular intervals.
|
||||
"""
|
||||
error = self.validate()
|
||||
if error:
|
||||
yield {"type": "error", "message": error}
|
||||
return
|
||||
|
||||
self._stats.clear()
|
||||
self._total_processed = 0
|
||||
start_time = time.perf_counter()
|
||||
|
||||
yield {"type": "log", "message": "Opening input file...", "level": "info"}
|
||||
|
||||
try:
|
||||
with (
|
||||
self.input_path.open("rb") as infile,
|
||||
self.output_path.open("w", encoding="utf-8") as outfile,
|
||||
):
|
||||
outfile.write("[\n")
|
||||
first = True
|
||||
|
||||
for i, obj in enumerate(ijson.items(infile, "item")):
|
||||
fixture: FixtureObject = obj
|
||||
fixture = self._process_fixture(fixture)
|
||||
self._total_processed += 1
|
||||
|
||||
if not first:
|
||||
outfile.write(",\n")
|
||||
first = False
|
||||
|
||||
json.dump(fixture, outfile, ensure_ascii=False)
|
||||
|
||||
# Yield progress at configured frequency
|
||||
if i > 0 and i % self.update_frequency == 0:
|
||||
yield {
|
||||
"type": "progress",
|
||||
"completed": self._total_processed,
|
||||
"stats": dict(self._stats),
|
||||
}
|
||||
|
||||
outfile.write("\n]\n")
|
||||
|
||||
except Exception as exc:
|
||||
# Clean up partial output on error
|
||||
if self.output_path.exists():
|
||||
self.output_path.unlink()
|
||||
yield {"type": "error", "message": str(exc)}
|
||||
return
|
||||
|
||||
end_time = time.perf_counter()
|
||||
duration = end_time - start_time
|
||||
speed = self._total_processed / duration if duration > 0 else 0
|
||||
|
||||
yield {
|
||||
"type": "complete",
|
||||
"duration": duration,
|
||||
"total_processed": self._total_processed,
|
||||
"stats": dict(self._stats),
|
||||
"speed": speed,
|
||||
}
|
||||
|
||||
async def run_async(self) -> AsyncGenerator[ProgressUpdate, None]:
|
||||
"""Run the transform asynchronously, yielding progress updates.
|
||||
|
||||
This wraps the synchronous implementation to work with async consumers.
|
||||
The actual I/O is done synchronously since ijson doesn't support async,
|
||||
but we yield control periodically to keep the event loop responsive.
|
||||
"""
|
||||
import asyncio
|
||||
|
||||
for update in self.run_sync():
|
||||
yield update
|
||||
# Yield control to the event loop periodically
|
||||
await asyncio.sleep(0)
|
||||
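transform_documents_document() is the only registered transform: it drops storage_type and derives content_length from the content string. A small worked example with invented sample values:

```python
# Worked example of the documents.document transform registered above.
from paperless_migration.services.transform import transform_documents_document

fixture = {
    "model": "documents.document",
    "pk": 1,
    "fields": {"title": "Invoice", "storage_type": "unencrypted", "content": "hello"},
}

out = transform_documents_document(fixture)
assert "storage_type" not in out["fields"]
assert out["fields"]["content_length"] == 5  # len("hello")
```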
src/paperless_migration/services/wipe_db.py (new file, 115 lines)
@@ -0,0 +1,115 @@
|
||||
"""Database wipe service for migration import process.
|
||||
|
||||
This module can be run as a script via:
|
||||
python -m paperless_migration.services.wipe_db
|
||||
|
||||
It uses the paperless_migration settings to wipe all tables
|
||||
before running v3 migrations.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import sys
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from django.db.backends.base.base import BaseDatabaseWrapper
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _get_target_tables(connection: BaseDatabaseWrapper) -> list[str]:
|
||||
"""Get list of tables to drop that exist in the database."""
|
||||
from django.apps import apps
|
||||
from django.db.migrations.recorder import MigrationRecorder
|
||||
|
||||
model_tables = {
|
||||
model._meta.db_table for model in apps.get_models(include_auto_created=True)
|
||||
}
|
||||
model_tables.add(MigrationRecorder.Migration._meta.db_table)
|
||||
existing_tables = set(connection.introspection.table_names())
|
||||
return sorted(model_tables & existing_tables)
|
||||
|
||||
|
||||
def _drop_sqlite_tables(connection: BaseDatabaseWrapper) -> int:
|
||||
"""Drop tables for SQLite database. Returns count of tables dropped."""
|
||||
tables = _get_target_tables(connection)
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute("PRAGMA foreign_keys=OFF;")
|
||||
for table in tables:
|
||||
cursor.execute(f'DROP TABLE IF EXISTS "{table}";')
|
||||
cursor.execute("PRAGMA foreign_keys=ON;")
|
||||
return len(tables)
|
||||
|
||||
|
||||
def _drop_postgres_tables(connection: BaseDatabaseWrapper) -> int:
|
||||
"""Drop tables for PostgreSQL database. Returns count of tables dropped."""
|
||||
tables = _get_target_tables(connection)
|
||||
if not tables:
|
||||
return 0
|
||||
with connection.cursor() as cursor:
|
||||
for table in tables:
|
||||
cursor.execute(f'DROP TABLE IF EXISTS "{table}" CASCADE;')
|
||||
return len(tables)
|
||||
|
||||
|
||||
def _drop_mysql_tables(connection: BaseDatabaseWrapper) -> int:
|
||||
"""Drop tables for MySQL/MariaDB database. Returns count of tables dropped."""
|
||||
tables = _get_target_tables(connection)
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute("SET FOREIGN_KEY_CHECKS=0;")
|
||||
for table in tables:
|
||||
cursor.execute(f"DROP TABLE IF EXISTS `{table}`;")
|
||||
cursor.execute("SET FOREIGN_KEY_CHECKS=1;")
|
||||
return len(tables)
|
||||
|
||||
|
||||
def wipe_database() -> tuple[bool, str]:
|
||||
"""Wipe all application tables from the database.
|
||||
|
||||
Returns:
|
||||
Tuple of (success: bool, message: str)
|
||||
"""
|
||||
from django.db import connection
|
||||
|
||||
vendor = connection.vendor
|
||||
logger.info("Wiping database for vendor: %s", vendor)
|
||||
|
||||
try:
|
||||
match vendor:
|
||||
case "sqlite":
|
||||
count = _drop_sqlite_tables(connection)
|
||||
case "postgresql":
|
||||
count = _drop_postgres_tables(connection)
|
||||
case "mysql":
|
||||
count = _drop_mysql_tables(connection)
|
||||
case _:
|
||||
return False, f"Unsupported database vendor: {vendor}"
|
||||
|
||||
message = f"Dropped {count} tables from {vendor} database"
|
||||
logger.info(message)
|
||||
return True, message
|
||||
|
||||
except Exception as exc:
|
||||
message = f"Failed to wipe database: {exc}"
|
||||
logger.exception(message)
|
||||
return False, message
|
||||
|
||||
|
||||
def main() -> int:
|
||||
"""Entry point when run as a script."""
|
||||
import os
|
||||
|
||||
import django
|
||||
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless_migration.settings")
|
||||
django.setup()
|
||||
|
||||
success, message = wipe_database()
|
||||
print(message) # noqa: T201
|
||||
return 0 if success else 1
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
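wipe_database() dispatches on connection.vendor and requires Django to be configured with the migration settings first, which is exactly what main() does when the module is run with python -m. A hedged sketch of the same steps done programmatically:

```python
# Illustrative programmatic use of the wipe service above, mirroring main().
import os

import django

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless_migration.settings")
django.setup()

from paperless_migration.services.wipe_db import wipe_database  # noqa: E402

success, message = wipe_database()
print(message)
```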
src/paperless_migration/settings.py (new file, 245 lines)
@@ -0,0 +1,245 @@
|
||||
"""Settings for migration-mode Django instance."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from dotenv import load_dotenv
|
||||
|
||||
BASE_DIR = Path(__file__).resolve().parent.parent
|
||||
|
||||
DEBUG = os.getenv("PAPERLESS_DEBUG", "false").lower() == "true"
|
||||
|
||||
ALLOWED_HOSTS = ["*"]
|
||||
|
||||
# Tap paperless.conf if it's available
|
||||
for path in [
|
||||
os.getenv("PAPERLESS_CONFIGURATION_PATH"),
|
||||
"../paperless.conf",
|
||||
"/etc/paperless.conf",
|
||||
"/usr/local/etc/paperless.conf",
|
||||
]:
|
||||
if path and Path(path).exists():
|
||||
load_dotenv(path)
|
||||
break
|
||||
|
||||
|
||||
def __get_path(
|
||||
key: str,
|
||||
default: str | Path,
|
||||
) -> Path:
|
||||
if key in os.environ:
|
||||
return Path(os.environ[key]).resolve()
|
||||
return Path(default).resolve()
|
||||
|
||||
|
||||
DATA_DIR = __get_path("PAPERLESS_DATA_DIR", BASE_DIR.parent / "data")
|
||||
EXPORT_DIR = __get_path("PAPERLESS_EXPORT_DIR", BASE_DIR.parent / "export")
|
||||
|
||||
|
||||
def _parse_redis_url() -> str:
|
||||
"""Parse Redis URL from environment with sensible defaults."""
|
||||
return os.getenv("PAPERLESS_REDIS_URL", "redis://localhost:6379")
|
||||
|
||||
|
||||
def _parse_db_settings() -> dict[str, dict[str, Any]]:
|
||||
databases: dict[str, dict[str, Any]] = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.sqlite3",
|
||||
"NAME": DATA_DIR / "db.sqlite3",
|
||||
"OPTIONS": {},
|
||||
},
|
||||
}
|
||||
if os.getenv("PAPERLESS_DBHOST"):
|
||||
databases["sqlite"] = databases["default"].copy()
|
||||
databases["default"] = {
|
||||
"HOST": os.getenv("PAPERLESS_DBHOST"),
|
||||
"NAME": os.getenv("PAPERLESS_DBNAME", "paperless"),
|
||||
"USER": os.getenv("PAPERLESS_DBUSER", "paperless"),
|
||||
"PASSWORD": os.getenv("PAPERLESS_DBPASS", "paperless"),
|
||||
"OPTIONS": {},
|
||||
}
|
||||
if os.getenv("PAPERLESS_DBPORT"):
|
||||
databases["default"]["PORT"] = os.getenv("PAPERLESS_DBPORT")
|
||||
|
||||
if os.getenv("PAPERLESS_DBENGINE") == "mariadb":
|
||||
engine = "django.db.backends.mysql"
|
||||
options = {
|
||||
"read_default_file": "/etc/mysql/my.cnf",
|
||||
"charset": "utf8mb4",
|
||||
"ssl_mode": os.getenv("PAPERLESS_DBSSLMODE", "PREFERRED"),
|
||||
"ssl": {
|
||||
"ca": os.getenv("PAPERLESS_DBSSLROOTCERT"),
|
||||
"cert": os.getenv("PAPERLESS_DBSSLCERT"),
|
||||
"key": os.getenv("PAPERLESS_DBSSLKEY"),
|
||||
},
|
||||
}
|
||||
else:
|
||||
engine = "django.db.backends.postgresql"
|
||||
options = {
|
||||
"sslmode": os.getenv("PAPERLESS_DBSSLMODE", "prefer"),
|
||||
"sslrootcert": os.getenv("PAPERLESS_DBSSLROOTCERT"),
|
||||
"sslcert": os.getenv("PAPERLESS_DBSSLCERT"),
|
||||
"sslkey": os.getenv("PAPERLESS_DBSSLKEY"),
|
||||
}
|
||||
|
||||
databases["default"]["ENGINE"] = engine
|
||||
databases["default"]["OPTIONS"].update(options)
|
||||
|
||||
if os.getenv("PAPERLESS_DB_TIMEOUT") is not None:
|
||||
timeout = int(os.getenv("PAPERLESS_DB_TIMEOUT"))
|
||||
if databases["default"]["ENGINE"] == "django.db.backends.sqlite3":
|
||||
databases["default"]["OPTIONS"].update({"timeout": timeout})
|
||||
else:
|
||||
databases["default"]["OPTIONS"].update({"connect_timeout": timeout})
|
||||
databases["sqlite"]["OPTIONS"].update({"timeout": timeout})
|
||||
return databases
|
||||
|
||||
|
||||
DATABASES = _parse_db_settings()
|
||||
|
||||
SECRET_KEY = os.getenv("PAPERLESS_SECRET_KEY")
|
||||
|
||||
AUTH_PASSWORD_VALIDATORS = [
|
||||
{
|
||||
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
|
||||
},
|
||||
{
|
||||
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
|
||||
},
|
||||
{
|
||||
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
|
||||
},
|
||||
{
|
||||
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
|
||||
},
|
||||
]
|
||||
|
||||
LANGUAGE_CODE = "en-us"
|
||||
TIME_ZONE = "UTC"
|
||||
USE_I18N = True
|
||||
USE_TZ = True
|
||||
CSRF_TRUSTED_ORIGINS: list[str] = []
|
||||
|
||||
INSTALLED_APPS = [
|
||||
"django.contrib.auth",
|
||||
"django.contrib.contenttypes",
|
||||
"django.contrib.sessions",
|
||||
"django.contrib.messages",
|
||||
"django.contrib.staticfiles",
|
||||
"channels",
|
||||
"allauth",
|
||||
"allauth.account",
|
||||
"allauth.socialaccount",
|
||||
"allauth.mfa",
|
||||
"paperless_migration",
|
||||
]
|
||||
|
||||
MIDDLEWARE = [
|
||||
"django.middleware.security.SecurityMiddleware",
|
||||
"django.contrib.sessions.middleware.SessionMiddleware",
|
||||
"django.middleware.common.CommonMiddleware",
|
||||
"django.middleware.csrf.CsrfViewMiddleware",
|
||||
"django.contrib.auth.middleware.AuthenticationMiddleware",
|
||||
"django.contrib.messages.middleware.MessageMiddleware",
|
||||
"django.middleware.clickjacking.XFrameOptionsMiddleware",
|
||||
"allauth.account.middleware.AccountMiddleware",
|
||||
]
|
||||
|
||||
ROOT_URLCONF = "paperless_migration.urls"
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
"BACKEND": "django.template.backends.django.DjangoTemplates",
|
||||
"DIRS": [
|
||||
BASE_DIR / "paperless_migration" / "templates",
|
||||
BASE_DIR / "documents" / "templates",
|
||||
],
|
||||
"APP_DIRS": True,
|
||||
"OPTIONS": {
|
||||
"context_processors": [
|
||||
"django.template.context_processors.request",
|
||||
"django.contrib.auth.context_processors.auth",
|
||||
"django.contrib.messages.context_processors.messages",
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
# ASGI application for Channels
|
||||
ASGI_APPLICATION = "paperless_migration.asgi.application"
|
||||
|
||||
# Channel layers configuration using Redis
|
||||
REDIS_URL = _parse_redis_url()
|
||||
|
||||
CHANNEL_LAYERS = {
|
||||
"default": {
|
||||
"BACKEND": "channels_redis.core.RedisChannelLayer",
|
||||
"CONFIG": {
|
||||
"hosts": [REDIS_URL],
|
||||
"capacity": 1500,
|
||||
"expiry": 10,
|
||||
},
|
||||
},
|
||||
}
|
||||
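The channel layer above is what the migration tasks use to stream log and progress events to the browser. A hedged sketch of publishing one such event follows; the group name "migration_progress" and the exact message shape are assumptions inferred from the front-end handler further down, not confirmed by this diff.

from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer


def report_progress(current: int, total: int) -> None:
    # Send a progress event through the Redis-backed channel layer configured above.
    layer = get_channel_layer()
    async_to_sync(layer.group_send)(
        "migration_progress",  # hypothetical group name
        {"type": "progress", "current": current, "total": total},
    )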
|
||||
# Keep WSGI for compatibility
|
||||
WSGI_APPLICATION = "paperless_migration.wsgi.application"
|
||||
|
||||
AUTHENTICATION_BACKENDS = [
|
||||
"django.contrib.auth.backends.ModelBackend",
|
||||
"allauth.account.auth_backends.AuthenticationBackend",
|
||||
]
|
||||
|
||||
STATIC_URL = "/static/"
|
||||
STATICFILES_DIRS = [
|
||||
BASE_DIR / ".." / "static",
|
||||
BASE_DIR / "static",
|
||||
BASE_DIR / "documents" / "static",
|
||||
]
|
||||
|
||||
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
|
||||
|
||||
LOGIN_URL = "/accounts/login/"
|
||||
LOGIN_REDIRECT_URL = "/migration/"
|
||||
LOGOUT_REDIRECT_URL = "/accounts/login/?loggedout=1"
|
||||
|
||||
ACCOUNT_ADAPTER = "allauth.account.adapter.DefaultAccountAdapter"
|
||||
ACCOUNT_AUTHENTICATED_LOGIN_REDIRECTS = False
|
||||
SOCIALACCOUNT_ADAPTER = "allauth.socialaccount.adapter.DefaultSocialAccountAdapter"
|
||||
SOCIALACCOUNT_ENABLED = False
|
||||
|
||||
SESSION_ENGINE = "django.contrib.sessions.backends.db"
|
||||
|
||||
MIGRATION_EXPORT_PATH = __get_path(
|
||||
"PAPERLESS_MIGRATION_EXPORT_PATH",
|
||||
EXPORT_DIR / "manifest.json",
|
||||
)
|
||||
MIGRATION_TRANSFORMED_PATH = __get_path(
|
||||
"PAPERLESS_MIGRATION_TRANSFORMED_PATH",
|
||||
EXPORT_DIR / "manifest.v3.json",
|
||||
)
|
||||
MIGRATION_IMPORTED_PATH = Path(EXPORT_DIR / "import.completed").resolve()
|
||||
|
||||
# Progress update frequency (rows between WebSocket updates)
|
||||
MIGRATION_PROGRESS_FREQUENCY = int(
|
||||
os.getenv("PAPERLESS_MIGRATION_PROGRESS_FREQUENCY", "100"),
|
||||
)
|
||||
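How this frequency is applied is not shown in this excerpt; a small sketch of the assumed pattern, emitting one WebSocket update every MIGRATION_PROGRESS_FREQUENCY rows instead of on every row:

def should_report(row_index: int, frequency: int = 100) -> bool:
    # Report on the first row and then every `frequency` rows thereafter.
    return row_index % frequency == 0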
|
||||
# One-time access code required for migration logins; stable across autoreload
|
||||
_code = os.getenv("PAPERLESS_MIGRATION_ACCESS_CODE")
|
||||
if not _code:
|
||||
import secrets
|
||||
|
||||
_code = secrets.token_urlsafe(12)
|
||||
os.environ["PAPERLESS_MIGRATION_ACCESS_CODE"] = _code
|
||||
MIGRATION_ACCESS_CODE = _code
|
||||
if os.environ.get("PAPERLESS_MIGRATION_CODE_LOGGED") != "1":
|
||||
logging.getLogger(__name__).warning(
|
||||
"Migration one-time access code: %s",
|
||||
MIGRATION_ACCESS_CODE,
|
||||
)
|
||||
os.environ["PAPERLESS_MIGRATION_CODE_LOGGED"] = "1"
|
||||
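The access-code block above stays stable across Django's autoreloader because the generated value is stashed in os.environ, which the re-spawned child process inherits. A condensed sketch of that pattern (illustrative, reusing the same variable name as the settings above):

import os
import secrets


def get_or_create_access_code(var: str = "PAPERLESS_MIGRATION_ACCESS_CODE") -> str:
    # Reuse a code generated by a previous (parent) process if one exists,
    # otherwise generate it once and persist it in the environment so the
    # autoreload child sees the same value instead of minting a new one.
    code = os.getenv(var)
    if not code:
        code = secrets.token_urlsafe(12)
        os.environ[var] = code
    return code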
77
src/paperless_migration/templates/account/login.html
Normal file
@@ -0,0 +1,77 @@
|
||||
{% load i18n static %}
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
|
||||
<meta name="author" content="Paperless-ngx project and contributors">
|
||||
<meta name="robots" content="noindex,nofollow">
|
||||
<meta name="color-scheme" content="light">
|
||||
<title>{% translate "Paperless-ngx sign in" %}</title>
|
||||
<link href="{% static 'bootstrap.min.css' %}" rel="stylesheet">
|
||||
<link href="{% static 'base.css' %}" rel="stylesheet">
|
||||
<style>
|
||||
:root, body, .form-control, .form-floating {
|
||||
color-scheme: light;
|
||||
--bs-body-bg: #f5f5f5;
|
||||
--bs-body-color: #212529;
|
||||
--bs-body-color-rgb: 33, 37, 41;
|
||||
--bs-border-color: #dee2e6;
|
||||
--bs-link-color: #17541f;
|
||||
--bs-link-color-rgb: 23, 84, 31;
|
||||
}
|
||||
@media (prefers-color-scheme: dark) { :root { color-scheme: light; } }
|
||||
body {
|
||||
min-height: 100vh;
|
||||
background:
|
||||
radial-gradient(circle at 20% 20%, #eef5ef, #f7fbf7),
|
||||
linear-gradient(120deg, rgba(23, 84, 31, 0.05) 0%, rgba(0,0,0,0) 30%),
|
||||
linear-gradient(300deg, rgba(15, 54, 20, 0.06) 0%, rgba(0,0,0,0) 40%);
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body class="d-flex align-items-center justify-content-center text-center p-3">
|
||||
<main class="w-100" style="max-width: 360px;">
|
||||
<form class="form-accounts p-4 rounded-4" id="form-account" method="post">
|
||||
{% csrf_token %}
|
||||
{% include "paperless-ngx/snippets/svg_logo.html" with extra_attrs="width='240' class='logo mb-3'" %}
|
||||
<p class="text-uppercase fw-semibold mb-1 text-secondary small" style="letter-spacing: 0.12rem;">{% translate "Migration Mode" %}</p>
|
||||
|
||||
{% for message in messages %}
|
||||
<div class="alert alert-{{ message.level_tag }} mb-2" role="alert">{{ message }}</div>
|
||||
{% endfor %}
|
||||
|
||||
<p class="mb-3">{% translate "Login with a superuser account to proceed." %}</p>
|
||||
|
||||
{% if form.errors %}
|
||||
<div class="alert alert-danger" role="alert">
|
||||
{% for field, errors in form.errors.items %}
|
||||
{% for error in errors %}
|
||||
{{ error }}
|
||||
{% endfor %}
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% translate "Username" as i18n_username %}
|
||||
{% translate "Password" as i18n_password %}
|
||||
<div class="form-floating form-stacked-top">
|
||||
<input type="text" name="login" id="inputUsername" placeholder="{{ i18n_username }}" class="form-control" autocorrect="off" autocapitalize="none" required autofocus>
|
||||
<label for="inputUsername">{{ i18n_username }}</label>
|
||||
</div>
|
||||
<div class="form-floating form-stacked-middle">
|
||||
<input type="password" name="password" id="inputPassword" placeholder="{{ i18n_password }}" class="form-control" required>
|
||||
<label for="inputPassword">{{ i18n_password }}</label>
|
||||
</div>
|
||||
<div class="form-floating form-stacked-bottom">
|
||||
<input type="text" name="code" id="inputCode" placeholder="One-time code" class="form-control" required>
|
||||
<label for="inputCode">One-time code</label>
|
||||
</div>
|
||||
<p class="mt-2 small fst-italic">{% translate "Code can be found in the startup logs." %}</p>
|
||||
<div class="d-grid mt-3">
|
||||
<button class="btn btn-lg btn-primary" type="submit">{% translate "Sign in" %}</button>
|
||||
</div>
|
||||
</form>
|
||||
</main>
|
||||
</body>
|
||||
</html>
|
||||
558
src/paperless_migration/templates/paperless_migration/migration_home.html
Normal file
@@ -0,0 +1,558 @@
|
||||
<!doctype html>
|
||||
{% load static %}
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<title>Paperless-ngx Migration Mode</title>
|
||||
<link rel="stylesheet" href="{% static 'bootstrap.min.css' %}" />
|
||||
<link rel="stylesheet" href="{% static 'base.css' %}" />
|
||||
<style>
|
||||
:root, .form-control {
|
||||
color-scheme: light;
|
||||
--bs-body-bg: #f5f5f5;
|
||||
--bs-body-color: #212529;
|
||||
--bs-body-color-rgb: 33, 37, 41;
|
||||
--bs-border-color: #dee2e6;
|
||||
--bs-link-color: var(--pngx-primary);
|
||||
--bs-link-color-rgb: 23, 84, 31;
|
||||
}
|
||||
@media (prefers-color-scheme: dark) { :root { color-scheme: light; } }
|
||||
|
||||
.btn-primary:disabled {
|
||||
--bs-btn-disabled-bg: #4d7352;
|
||||
--bs-btn-disabled-border-color: #4d7352;
|
||||
}
|
||||
|
||||
body {
|
||||
background:
|
||||
radial-gradient(circle at 20% 20%, #eef5ef, #f7fbf7),
|
||||
linear-gradient(120deg, rgba(23, 84, 31, 0.05) 0%, rgba(0,0,0,0) 30%),
|
||||
linear-gradient(300deg, rgba(15, 54, 20, 0.06) 0%, rgba(0,0,0,0) 40%);
|
||||
min-height: 100vh;
|
||||
}
|
||||
|
||||
svg.logo .text {
|
||||
fill: #161616 !important;
|
||||
}
|
||||
|
||||
.hero-card,
|
||||
.card-step {
|
||||
background: #fff;
|
||||
backdrop-filter: blur(6px);
|
||||
border: 1px solid rgba(23, 84, 31, 0.08);
|
||||
box-shadow: 0 16px 40px rgba(0, 0, 0, 0.06);
|
||||
border-radius: 18px;
|
||||
}
|
||||
|
||||
.status-dot {
|
||||
width: 10px;
|
||||
height: 10px;
|
||||
border-radius: 50%;
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.card-step {
|
||||
border-radius: 16px;
|
||||
transition: transform 0.15s ease, box-shadow 0.15s ease;
|
||||
}
|
||||
|
||||
.card-step.done-step {
|
||||
opacity: 0.4;
|
||||
}
|
||||
|
||||
.path-pill {
|
||||
background: rgba(23, 84, 31, 0.08);
|
||||
color: var(--bs-body-color);
|
||||
border-radius: 12px;
|
||||
padding: 0.4rem 0.75rem;
|
||||
font-size: 0.9rem;
|
||||
}
|
||||
|
||||
.step-rail {
|
||||
position: relative;
|
||||
height: 4px;
|
||||
background: rgba(23, 84, 31, 0.12);
|
||||
border-radius: 999px;
|
||||
}
|
||||
|
||||
.step-rail .fill {
|
||||
position: absolute;
|
||||
left: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
width: calc({{ export_exists|yesno:'33,0' }}% + {{ transformed_exists|yesno:'33,0' }}% + {{ imported_exists|yesno:'34,0' }}%);
|
||||
max-width: 100%;
|
||||
background: linear-gradient(90deg, #17541f, #2c7a3c);
|
||||
border-radius: 999px;
|
||||
transition: width 0.3s ease;
|
||||
}
|
||||
|
||||
.step-chip {
|
||||
width: 38px;
|
||||
height: 38px;
|
||||
border-radius: 50%;
|
||||
display: grid;
|
||||
place-items: center;
|
||||
font-weight: 700;
|
||||
background: #fff;
|
||||
border: 2px solid rgba(23, 84, 31, 0.25);
|
||||
color: #17541f;
|
||||
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.08);
|
||||
}
|
||||
|
||||
.step-chip.done {
|
||||
background: #17541f;
|
||||
color: #fff;
|
||||
border-color: #17541f;
|
||||
}
|
||||
|
||||
.console-log {
|
||||
background: #0f1a12;
|
||||
color: #d1e7d6;
|
||||
border-radius: 12px;
|
||||
min-height: 180px;
|
||||
max-height: 400px;
|
||||
padding: 12px;
|
||||
font-size: 0.85rem;
|
||||
font-family: 'Consolas', 'Monaco', monospace;
|
||||
overflow: auto;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
}
|
||||
|
||||
.console-log .log-error { color: #ff6b6b; }
|
||||
.console-log .log-warning { color: #ffd93d; }
|
||||
.console-log .log-success { color: #6bcb77; }
|
||||
.console-log .log-info { color: #4d96ff; }
|
||||
|
||||
.progress-bar-container {
|
||||
height: 24px;
|
||||
background: rgba(23, 84, 31, 0.1);
|
||||
border-radius: 12px;
|
||||
overflow: hidden;
|
||||
margin-bottom: 0.5rem;
|
||||
}
|
||||
|
||||
.progress-bar-fill {
|
||||
height: 100%;
|
||||
background: linear-gradient(90deg, #17541f, #2c7a3c);
|
||||
border-radius: 12px;
|
||||
transition: width 0.3s ease;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
color: white;
|
||||
font-size: 0.75rem;
|
||||
font-weight: 600;
|
||||
min-width: fit-content;
|
||||
padding: 0 8px;
|
||||
}
|
||||
|
||||
.stats-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fit, minmax(120px, 1fr));
|
||||
gap: 0.5rem;
|
||||
margin-top: 0.5rem;
|
||||
}
|
||||
|
||||
.stat-item {
|
||||
background: rgba(23, 84, 31, 0.05);
|
||||
border-radius: 8px;
|
||||
padding: 0.5rem;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.stat-value {
|
||||
font-size: 1.25rem;
|
||||
font-weight: 700;
|
||||
color: #17541f;
|
||||
}
|
||||
|
||||
.stat-label {
|
||||
font-size: 0.75rem;
|
||||
color: #666;
|
||||
}
|
||||
|
||||
.ws-status {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 0.5rem;
|
||||
padding: 0.25rem 0.75rem;
|
||||
border-radius: 999px;
|
||||
font-size: 0.8rem;
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
.ws-status.connected { background: #d4edda; color: #155724; }
|
||||
.ws-status.disconnected { background: #f8d7da; color: #721c24; }
|
||||
.ws-status.connecting { background: #fff3cd; color: #856404; }
|
||||
</style>
|
||||
</head>
|
||||
<body class="pb-4">
|
||||
<div class="container py-4">
|
||||
<div class="row justify-content-center mb-4">
|
||||
<div class="col-lg-9">
|
||||
<div class="hero-card p-4">
|
||||
<div class="d-flex flex-wrap align-items-center justify-content-between gap-3">
|
||||
<div class="d-flex align-items-center gap-3">
|
||||
{% include "paperless-ngx/snippets/svg_logo.html" with extra_attrs="width='280' class='logo'" %}
|
||||
<div class="ps-2">
|
||||
<p class="text-uppercase fw-semibold mb-1 text-secondary" style="letter-spacing: 0.12rem;">Migration Mode</p>
|
||||
<h1 class="h3 mb-2 text-primary">Paperless-ngx v2 to v3</h1>
|
||||
<p class="text-muted mb-0">Migrate your data from Paperless-ngx version 2 to version 3.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="text-end">
|
||||
<span class="badge bg-success-subtle text-success border border-success-subtle px-3 py-2">Online</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="mt-4">
|
||||
<div class="d-flex justify-content-between align-items-center mb-2">
|
||||
<div class="d-flex align-items-center gap-2">
|
||||
<span class="step-chip {% if export_exists %}done{% endif %}">1</span>
|
||||
<div>
|
||||
<div class="fw-semibold mb-0">Export</div>
|
||||
<small class="text-muted">v2 data</small>
|
||||
</div>
|
||||
</div>
|
||||
<div class="d-flex align-items-center gap-2">
|
||||
<span class="step-chip {% if transformed_exists %}done{% endif %}">2</span>
|
||||
<div>
|
||||
<div class="fw-semibold mb-0">Transform</div>
|
||||
<small class="text-muted">to v3 schema</small>
|
||||
</div>
|
||||
</div>
|
||||
<div class="d-flex align-items-center gap-2">
|
||||
<span class="step-chip {% if imported_exists %}done{% endif %}">3</span>
|
||||
<div>
|
||||
<div class="fw-semibold mb-0">Import</div>
|
||||
<small class="text-muted">into v3</small>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="step-rail">
|
||||
<div class="fill"></div>
|
||||
</div>
|
||||
</div>
|
||||
{% if messages %}
|
||||
<div class="mt-4">
|
||||
{% for message in messages %}
|
||||
<div class="alert alert-{{ message.level_tag }} mb-2" role="alert">{{ message }}</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
<div class="row g-3 mt-2">
|
||||
<div class="col-md-6">
|
||||
<div class="d-flex align-items-center gap-2">
|
||||
<span class="status-dot bg-{{ export_exists|yesno:'success,danger' }}"></span>
|
||||
<div>
|
||||
<div class="fw-semibold">Export file</div>
|
||||
<div class="small text-muted">{{ export_exists|yesno:"Ready,Missing" }}</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="path-pill mt-2 text-truncate" title="{{ export_path }}">{{ export_path }}</div>
|
||||
</div>
|
||||
<div class="col-md-6">
|
||||
<div class="d-flex align-items-center gap-2">
|
||||
<span class="status-dot bg-{{ transformed_exists|yesno:'success,warning' }}"></span>
|
||||
<div>
|
||||
<div class="fw-semibold">Transformed file</div>
|
||||
<div class="small text-muted">{{ transformed_exists|yesno:"Ready,Pending" }}</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="path-pill mt-2 text-truncate" title="{{ transformed_path }}">{{ transformed_path }}</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="row gy-4 justify-content-center">
|
||||
<div class="col-lg-3 col-md-4">
|
||||
<div class="card card-step h-100 {% if export_exists %}done-step{% endif %}">
|
||||
<div class="card-body d-flex flex-column gap-3">
|
||||
<div>
|
||||
<p class="text-uppercase text-muted mb-1 fw-semibold" style="letter-spacing: 0.08rem;">Step 1</p>
|
||||
<h3 class="h5 mb-1">Export (v2)</h3>
|
||||
<p class="small text-muted mb-0">Generate and upload the v2 export file.</p>
|
||||
</div>
|
||||
<div class="mt-auto d-grid gap-2">
|
||||
<form method="post" enctype="multipart/form-data" class="d-flex gap-2 align-items-center">
|
||||
{% csrf_token %}
|
||||
<input class="form-control form-control-sm" type="file" name="export_file" accept=".json" {% if export_exists %}disabled{% endif %} required>
|
||||
<button class="btn btn-outline-secondary btn-sm" type="submit" name="action" value="upload" {% if export_exists %}disabled aria-disabled="true"{% endif %}>Upload</button>
|
||||
</form>
|
||||
<form method="post">
|
||||
{% csrf_token %}
|
||||
<button class="btn btn-primary w-100" type="submit" name="action" value="check" {% if export_exists %}disabled aria-disabled="true"{% endif %}>Re-check export</button>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="col-lg-3 col-md-4">
|
||||
<div class="card card-step h-100 {% if transformed_exists %}done-step{% endif %}">
|
||||
<div class="card-body d-flex flex-column gap-3">
|
||||
<div>
|
||||
<p class="text-uppercase text-muted mb-1 fw-semibold" style="letter-spacing: 0.08rem;">Step 2</p>
|
||||
<h3 class="h5 mb-1">Transform</h3>
|
||||
<p class="small text-muted mb-0">Convert the export into the v3-ready structure.</p>
|
||||
</div>
|
||||
<div class="mt-auto d-grid gap-2">
|
||||
<form method="post">
|
||||
{% csrf_token %}
|
||||
<button
|
||||
class="btn btn-outline-primary w-100"
|
||||
type="submit"
|
||||
name="action"
|
||||
value="transform"
|
||||
id="btn-transform"
|
||||
{% if not export_exists or transformed_exists %}disabled aria-disabled="true"{% endif %}
|
||||
>
|
||||
Transform export
|
||||
</button>
|
||||
</form>
|
||||
{% if transformed_exists %}
|
||||
<form method="post">
|
||||
{% csrf_token %}
|
||||
<button class="btn btn-outline-danger btn-sm w-100" type="submit" name="action" value="reset_transform">
|
||||
Reset transform
|
||||
</button>
|
||||
</form>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="col-lg-3 col-md-4">
|
||||
<div class="card card-step h-100 {% if imported_exists %}done-step{% endif %}">
|
||||
<div class="card-body d-flex flex-column gap-3">
|
||||
<div>
|
||||
<p class="text-uppercase text-muted mb-1 fw-semibold" style="letter-spacing: 0.08rem;">Step 3</p>
|
||||
<h3 class="h5 mb-1">Import (v3)</h3>
|
||||
<p class="small text-muted mb-0">Load the transformed data into your v3 instance.</p>
|
||||
</div>
|
||||
<div class="mt-auto">
|
||||
<form method="post">
|
||||
{% csrf_token %}
|
||||
<button
|
||||
class="btn btn-outline-secondary w-100"
|
||||
type="submit"
|
||||
name="action"
|
||||
value="import"
|
||||
id="btn-import"
|
||||
{% if not transformed_exists or imported_exists %}disabled aria-disabled="true"{% endif %}
|
||||
>
|
||||
Import transformed data
|
||||
</button>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="row justify-content-center mt-4">
|
||||
<div class="col-lg-9">
|
||||
{% if not export_exists %}
|
||||
<div class="alert alert-info mb-3">
|
||||
<div class="fw-semibold mb-1">Export file not found</div>
|
||||
<div class="small">
|
||||
Run the v2 export from your Paperless instance, e.g.:
|
||||
<code>docker run --rm ghcr.io/paperless-ngx/paperless-ngx:2.20.6 document_exporter --data-only</code>
|
||||
(see <a href="https://docs.paperless-ngx.com/administration/#exporter" target="_blank" rel="noopener noreferrer">documentation</a>). Once the <code>manifest.json</code> is in place, upload it or (especially for larger files) place it directly at the expected location and click "Re-check export".
|
||||
<p class="mt-2 mb-0 text-danger fst-italic">Warning: The export must be generated with version Paperless-ngx v2.20.6</p>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
<div class="card card-step">
|
||||
<div class="card-body">
|
||||
<div class="d-flex justify-content-between align-items-center mb-2">
|
||||
<div class="fw-semibold">Migration console</div>
|
||||
<span id="ws-status" class="ws-status disconnected">
|
||||
<span class="status-dot"></span>
|
||||
<span class="status-text">Ready</span>
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<div id="progress-container" class="mb-3" style="display: none;">
|
||||
<div class="progress-bar-container">
|
||||
<div id="progress-bar" class="progress-bar-fill" style="width: 0%;">
|
||||
<span id="progress-text">0 rows</span>
|
||||
</div>
|
||||
</div>
|
||||
<div id="stats-container" class="stats-grid"></div>
|
||||
</div>
|
||||
|
||||
<div id="migration-log" class="console-log">Ready to begin migration...</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
(function() {
|
||||
const logEl = document.getElementById('migration-log');
|
||||
const wsStatusEl = document.getElementById('ws-status');
|
||||
const progressContainer = document.getElementById('progress-container');
|
||||
const progressBar = document.getElementById('progress-bar');
|
||||
const progressText = document.getElementById('progress-text');
|
||||
const statsContainer = document.getElementById('stats-container');
|
||||
|
||||
function setWsStatus(status, text) {
|
||||
wsStatusEl.className = 'ws-status ' + status;
|
||||
wsStatusEl.querySelector('.status-text').textContent = text;
|
||||
}
|
||||
|
||||
function appendLog(message, level) {
|
||||
const line = document.createElement('div');
|
||||
line.className = 'log-' + (level || 'info');
|
||||
line.textContent = message;
|
||||
logEl.appendChild(line);
|
||||
logEl.scrollTop = logEl.scrollHeight;
|
||||
}
|
||||
|
||||
function clearLog() {
|
||||
logEl.innerHTML = '';
|
||||
}
|
||||
|
||||
function updateProgress(current, total, label) {
|
||||
progressContainer.style.display = 'block';
|
||||
const pct = total ? Math.min(100, (current / total) * 100) : 0;
|
||||
progressBar.style.width = (total ? pct : 100) + '%';
|
||||
progressText.textContent = label || (current.toLocaleString() + ' rows');
|
||||
}
|
||||
|
||||
function updateStats(stats) {
|
||||
if (!stats || Object.keys(stats).length === 0) {
|
||||
statsContainer.innerHTML = '';
|
||||
return;
|
||||
}
|
||||
|
||||
let html = '';
|
||||
for (const [key, value] of Object.entries(stats)) {
|
||||
const label = key.replace('documents.', '').replace(/_/g, ' ');
|
||||
html += '<div class="stat-item">' +
|
||||
'<div class="stat-value">' + (typeof value === 'number' ? value.toLocaleString() : value) + '</div>' +
|
||||
'<div class="stat-label">' + label + '</div>' +
|
||||
'</div>';
|
||||
}
|
||||
statsContainer.innerHTML = html;
|
||||
}
|
||||
|
||||
function formatDuration(seconds) {
|
||||
if (seconds < 60) return seconds.toFixed(1) + 's';
|
||||
const mins = Math.floor(seconds / 60);
|
||||
const secs = (seconds % 60).toFixed(0);
|
||||
return mins + 'm ' + secs + 's';
|
||||
}
|
||||
|
||||
function startWebSocket(action) {
|
||||
const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
|
||||
const wsUrl = protocol + '//' + window.location.host + '/ws/migration/' + action + '/';
|
||||
|
||||
clearLog();
|
||||
appendLog('Connecting to ' + action + ' service...', 'info');
|
||||
setWsStatus('connecting', 'Connecting...');
|
||||
progressContainer.style.display = 'none';
|
||||
statsContainer.innerHTML = '';
|
||||
|
||||
const ws = new WebSocket(wsUrl);
|
||||
|
||||
ws.onopen = function() {
|
||||
setWsStatus('connected', 'Connected');
|
||||
appendLog('Connected. Starting ' + action + '...', 'success');
|
||||
ws.send(JSON.stringify({ action: 'start' }));
|
||||
};
|
||||
|
||||
ws.onmessage = function(event) {
|
||||
try {
|
||||
const data = JSON.parse(event.data);
|
||||
|
||||
switch (data.type) {
|
||||
case 'log':
|
||||
appendLog(data.message, data.level || 'info');
|
||||
break;
|
||||
|
||||
case 'progress':
|
||||
updateProgress(data.current, data.total, data.label);
|
||||
break;
|
||||
|
||||
case 'stats':
|
||||
if (data.transformed) {
|
||||
updateStats(data.transformed);
|
||||
} else {
|
||||
updateStats(data);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'complete':
|
||||
const status = data.success ? 'success' : 'error';
|
||||
const msg = data.success
|
||||
? 'Completed successfully in ' + formatDuration(data.duration)
|
||||
: 'Operation failed';
|
||||
appendLog(msg, status);
|
||||
|
||||
if (data.total_processed) {
|
||||
appendLog('Total processed: ' + data.total_processed.toLocaleString() + ' rows', 'info');
|
||||
}
|
||||
if (data.speed) {
|
||||
appendLog('Speed: ' + Math.round(data.speed).toLocaleString() + ' rows/sec', 'info');
|
||||
}
|
||||
if (data.stats) {
|
||||
updateStats(data.stats);
|
||||
}
|
||||
|
||||
setWsStatus('disconnected', 'Complete');
|
||||
ws.close();
|
||||
|
||||
if (data.success) {
|
||||
setTimeout(function() { window.location.reload(); }, 1500);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'error':
|
||||
appendLog('Error: ' + data.message, 'error');
|
||||
setWsStatus('disconnected', 'Error');
|
||||
break;
|
||||
|
||||
default:
|
||||
appendLog(JSON.stringify(data), 'info');
|
||||
}
|
||||
} catch (e) {
|
||||
appendLog('Received: ' + event.data, 'info');
|
||||
}
|
||||
};
|
||||
|
||||
ws.onerror = function(error) {
|
||||
appendLog('WebSocket error occurred', 'error');
|
||||
setWsStatus('disconnected', 'Error');
|
||||
};
|
||||
|
||||
ws.onclose = function(event) {
|
||||
if (event.code !== 1000) {
|
||||
const reason = event.code === 4001 ? 'Not authenticated'
|
||||
: event.code === 4002 ? 'Migration code not verified'
|
||||
: event.code === 4003 ? 'Superuser access required'
|
||||
: 'Connection closed (code: ' + event.code + ')';
|
||||
appendLog(reason, 'error');
|
||||
}
|
||||
setWsStatus('disconnected', 'Disconnected');
|
||||
};
|
||||
}
|
||||
|
||||
// Check if we should auto-start a WebSocket action
|
||||
{% if ws_action %}
|
||||
startWebSocket('{{ ws_action }}');
|
||||
{% endif %}
|
||||
|
||||
// Expose for manual triggering if needed
|
||||
window.startMigrationWs = startWebSocket;
|
||||
})();
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
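The inline script above drives a WebSocket at /ws/migration/<action>/ and expects JSON messages of type log, progress, stats, complete and error, with close codes 4001-4003 signalling auth failures. The server-side consumer is not part of this excerpt; below is a hedged Channels sketch of what the client implies, where the class and module names and the exact wire format are assumptions.

import json

from channels.generic.websocket import AsyncWebsocketConsumer


class MigrationConsumer(AsyncWebsocketConsumer):  # hypothetical name
    async def connect(self):
        user = self.scope.get("user")
        session = self.scope.get("session") or {}
        if user is None or not user.is_authenticated:
            await self.close(code=4001)  # "Not authenticated" in the client
            return
        if not session.get("migration_code_ok"):
            await self.close(code=4002)  # "Migration code not verified"
            return
        if not user.is_superuser:
            await self.close(code=4003)  # "Superuser access required"
            return
        await self.accept()

    async def receive(self, text_data=None, bytes_data=None):
        payload = json.loads(text_data or "{}")
        if payload.get("action") != "start":
            return
        action = self.scope["url_route"]["kwargs"]["action"]  # "transform" or "import"
        await self.send(text_data=json.dumps(
            {"type": "log", "level": "info", "message": f"Starting {action}..."},
        ))
        # ...run the long-running job here, emitting progress/stats/complete events...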
21
src/paperless_migration/urls.py
Normal file
@@ -0,0 +1,21 @@
"""URL configuration for migration mode."""

from __future__ import annotations

from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.urls import include
from django.urls import path

from paperless_migration import views

urlpatterns = [
    path("accounts/login/", views.migration_login, name="account_login"),
    path("accounts/", include("allauth.urls")),
    path("migration/", views.migration_home, name="migration_home"),
    # Redirect root to migration home
    path("", views.migration_home, name="home"),
]

if settings.DEBUG:
    urlpatterns += staticfiles_urlpatterns()
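urls.py only covers HTTP; the WebSocket path the dashboard uses (/ws/migration/<action>/) has to be routed separately through Channels. A hedged sketch of that routing, where the module and consumer names are assumptions matching the consumer sketch earlier:

from django.urls import path

from paperless_migration import consumers  # hypothetical module

websocket_urlpatterns = [
    path("ws/migration/<str:action>/", consumers.MigrationConsumer.as_asgi()),
]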
132
src/paperless_migration/views.py
Normal file
@@ -0,0 +1,132 @@
"""Views for migration mode web interface."""

from __future__ import annotations

from pathlib import Path
from typing import TYPE_CHECKING

from django.conf import settings
from django.contrib import messages
from django.contrib.auth import authenticate
from django.contrib.auth import login
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseForbidden
from django.shortcuts import redirect
from django.shortcuts import render
from django.views.decorators.http import require_http_methods

if TYPE_CHECKING:
    from django.http import HttpRequest
    from django.http import HttpResponse


def _check_migration_access(request: HttpRequest) -> HttpResponse | None:
    """Check if user has migration access. Returns error response or None."""
    if not request.session.get("migration_code_ok"):
        return HttpResponseForbidden("Access code required")
    if not request.user.is_superuser:
        return HttpResponseForbidden("Superuser access required")
    return None


@login_required
@require_http_methods(["GET", "POST"])
def migration_home(request: HttpRequest) -> HttpResponse:
    """Main migration dashboard view."""
    error_response = _check_migration_access(request)
    if error_response:
        return error_response

    export_path = Path(settings.MIGRATION_EXPORT_PATH)
    transformed_path = Path(settings.MIGRATION_TRANSFORMED_PATH)
    imported_marker = Path(settings.MIGRATION_IMPORTED_PATH)

    if request.method == "POST":
        action = request.POST.get("action")

        if action == "check":
            messages.success(request, "Checked export paths.")

        elif action == "upload":
            upload = request.FILES.get("export_file")
            if not upload:
                messages.error(request, "No file selected.")
            else:
                try:
                    export_path.parent.mkdir(parents=True, exist_ok=True)
                    with export_path.open("wb") as dest:
                        for chunk in upload.chunks():
                            dest.write(chunk)
                    messages.success(request, f"Uploaded to {export_path}.")
                except Exception as exc:
                    messages.error(request, f"Failed to save file: {exc}")

        elif action == "transform":
            if imported_marker.exists():
                imported_marker.unlink()
            # Signal to start WebSocket connection for transform
            request.session["start_ws_action"] = "transform"
            messages.info(request, "Starting transform via WebSocket...")

        elif action == "import":
            # Signal to start WebSocket connection for import
            request.session["start_ws_action"] = "import"
            messages.info(request, "Starting import via WebSocket...")

        elif action == "reset_transform":
            if transformed_path.exists():
                try:
                    transformed_path.unlink()
                    messages.success(request, "Transformed file deleted.")
                except Exception as exc:
                    messages.error(request, f"Failed to delete transformed file: {exc}")
            if imported_marker.exists():
                try:
                    imported_marker.unlink()
                except Exception:
                    pass

        else:
            messages.error(request, "Unknown action.")

        return redirect("migration_home")

    ws_action = request.session.pop("start_ws_action", None)

    context = {
        "export_path": export_path,
        "export_exists": export_path.exists(),
        "transformed_path": transformed_path,
        "transformed_exists": transformed_path.exists(),
        "imported_exists": imported_marker.exists(),
        "ws_action": ws_action,
    }
    return render(request, "paperless_migration/migration_home.html", context)


@require_http_methods(["GET", "POST"])
def migration_login(request: HttpRequest) -> HttpResponse:
    """Migration-specific login view requiring access code."""
    if request.method == "POST":
        username = request.POST.get("login", "")
        password = request.POST.get("password", "")
        code = request.POST.get("code", "")

        if not code or code != settings.MIGRATION_ACCESS_CODE:
            messages.error(request, "One-time code is required.")
            return redirect("account_login")

        user = authenticate(request, username=username, password=password)
        if user is None:
            messages.error(request, "Invalid username or password.")
            return redirect("account_login")

        if not user.is_superuser:
            messages.error(request, "Superuser access required.")
            return redirect("account_login")

        login(request, user)
        request.session["migration_code_ok"] = True
        return redirect(settings.LOGIN_REDIRECT_URL)

    return render(request, "account/login.html")
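A hedged sketch (not part of the diff) of exercising migration_login with Django's test client; the form field names come from the login template above, while the credentials are placeholders.

from django.conf import settings
from django.test import Client

client = Client()
response = client.post(
    "/accounts/login/",
    {
        "login": "admin",         # placeholder superuser
        "password": "adminpass",  # placeholder password
        "code": settings.MIGRATION_ACCESS_CODE,
    },
)
# A valid superuser plus the one-time code redirects to LOGIN_REDIRECT_URL ("/migration/").
assert response.status_code == 302
assert response["Location"] == settings.LOGIN_REDIRECT_URL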
7
src/paperless_migration/wsgi.py
Normal file
@@ -0,0 +1,7 @@
import os

from django.core.wsgi import get_wsgi_application

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless_migration.settings")

application = get_wsgi_application()
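WSGI is kept only for compatibility; the migration UI is served over ASGI via ASGI_APPLICATION = "paperless_migration.asgi.application" in the settings above. That ASGI module is not shown in this excerpt; below is a hedged sketch of the conventional Channels layout it presumably follows, importing the hypothetical routing module sketched after urls.py.

import os

from channels.auth import AuthMiddlewareStack
from channels.routing import ProtocolTypeRouter
from channels.routing import URLRouter
from django.core.asgi import get_asgi_application

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless_migration.settings")

# Initialise Django before importing anything that touches the app registry.
django_asgi_app = get_asgi_application()

from paperless_migration.routing import websocket_urlpatterns  # hypothetical module

application = ProtocolTypeRouter(
    {
        "http": django_asgi_app,
        "websocket": AuthMiddlewareStack(URLRouter(websocket_urlpatterns)),
    },
)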