mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2025-11-03 03:16:10 -06:00)

Compare commits: 90561857e8...feature-si (5 commits)

Commits:

- 1af6bf70b9
- fce7b03324
- 79956d6a7b
- 978b072bff
- 9c6f695dbf
.pre-commit-config.yaml

@@ -51,7 +51,7 @@ repos:
          - 'prettier-plugin-organize-imports@4.1.0'
  # Python hooks
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.9.2
    rev: v0.9.3
    hooks:
      - id: ruff
      - id: ruff-format

Ruff configuration (per-file ignores)

@@ -38,7 +38,6 @@ ignore = ["DJ001", "SIM105", "RUF012"]
[lint.per-file-ignores]
".github/scripts/*.py" = ["E501", "INP001", "SIM117"]
"docker/wait-for-redis.py" = ["INP001", "T201"]
"src/documents/consumer.py" = ["PTH"]  # TODO Enable & remove
"src/documents/file_handling.py" = ["PTH"]  # TODO Enable & remove
"src/documents/management/commands/document_consumer.py" = ["PTH"]  # TODO Enable & remove
"src/documents/management/commands/document_exporter.py" = ["PTH"]  # TODO Enable & remove
@@ -51,8 +50,6 @@ ignore = ["DJ001", "SIM105", "RUF012"]
"src/documents/signals/handlers.py" = ["PTH"]  # TODO Enable & remove
"src/documents/tasks.py" = ["PTH"]  # TODO Enable & remove
"src/documents/tests/test_api_app_config.py" = ["PTH"]  # TODO Enable & remove
"src/documents/tests/test_api_bulk_download.py" = ["PTH"]  # TODO Enable & remove
"src/documents/tests/test_api_documents.py" = ["PTH"]  # TODO Enable & remove
"src/documents/tests/test_classifier.py" = ["PTH"]  # TODO Enable & remove
"src/documents/tests/test_consumer.py" = ["PTH"]  # TODO Enable & remove
"src/documents/tests/test_file_handling.py" = ["PTH"]  # TODO Enable & remove
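The PTH entries above come from Ruff's flake8-use-pathlib rule family, which flags os / os.path calls that have pathlib equivalents; once a file is converted to pathlib, its per-file ignore can be dropped. A minimal illustration (the path below is invented for the example, not taken from the changeset):

```python
# What the PTH (flake8-use-pathlib) rules flag, and the pathlib form they ask for.
import os.path
from pathlib import Path

legacy = os.path.join("/data", "consume", "scan.pdf")  # flagged by PTH rules
modern = Path("/data") / "consume" / "scan.pdf"        # pathlib equivalent
print(legacy, str(modern))
```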
							
								
								
									
Pipfile.lock (generated, 38 changed lines)

@@ -3983,28 +3983,28 @@
        },
        "ruff": {
            "hashes": [
                "sha256:1a605fdcf6e8b2d39f9436d343d1f0ff70c365a1e681546de0104bef81ce88df",
                "sha256:3292c5a22ea9a5f9a185e2d131dc7f98f8534a32fb6d2ee7b9944569239c648d",
                "sha256:492a5e44ad9b22a0ea98cf72e40305cbdaf27fac0d927f8bc9e1df316dcc96eb",
                "sha256:71cbe22e178c5da20e1514e1e01029c73dc09288a8028a5d3446e6bba87a5145",
                "sha256:80605a039ba1454d002b32139e4970becf84b5fee3a3c3bf1c2af6f61a784347",
                "sha256:82b35259b0cbf8daa22a498018e300b9bb0174c2bbb7bcba593935158a78054d",
                "sha256:8b6a9701d1e371bf41dca22015c3f89769da7576884d2add7317ec1ec8cb9c3c",
                "sha256:8efd9da7a1ee314b910da155ca7e8953094a7c10d0c0a39bfde3fcfd2a015684",
                "sha256:9cc53e68b3c5ae41e8faf83a3b89f4a5d7b2cb666dff4b366bb86ed2a85b481f",
                "sha256:a1b63fa24149918f8b37cef2ee6fff81f24f0d74b6f0bdc37bc3e1f2143e41c6",
                "sha256:af1e9e9fe7b1f767264d26b1075ac4ad831c7db976911fa362d09b2d0356426a",
                "sha256:b338edc4610142355ccf6b87bd356729b62bf1bc152a2fad5b0c7dc04af77bfe",
                "sha256:b5eceb334d55fae5f316f783437392642ae18e16dcf4f1858d55d3c2a0f8f5d0",
                "sha256:b9aab82bb20afd5f596527045c01e6ae25a718ff1784cb92947bff1f83068b00",
                "sha256:c547f7f256aa366834829a08375c297fa63386cbe5f1459efaf174086b564247",
                "sha256:c5e1d6abc798419cf46eed03f54f2e0c3adb1ad4b801119dedf23fcaf69b55b5",
                "sha256:d18bba3d3353ed916e882521bc3e0af403949dbada344c20c16ea78f47af965e",
                "sha256:fbd337bac1cfa96be615f6efcd4bc4d077edbc127ef30e2b8ba2a27e18c054d4"
                "sha256:006e5de2621304c8810bcd2ee101587712fa93b4f955ed0985907a36c427e0c2",
                "sha256:040ceb7f20791dfa0e78b4230ee9dce23da3b64dd5848e40e3bf3ab76468dcf4",
                "sha256:2dc153c25e715be41bb228bc651c1e9b1a88d5c6e5ed0194fa0dfea02b026439",
                "sha256:33866c3cc2a575cbd546f2cd02bdd466fed65118e4365ee538a3deffd6fcb730",
                "sha256:5a5a46e09355695fbdbb30ed9889d6cf1c61b77b700a9fafc21b41f097bfbba4",
                "sha256:646909a1e25e0dc28fbc529eab8eb7bb583079628e8cbe738192853dbbe43af5",
                "sha256:7f39b879064c7d9670197d91124a75d118d00b0990586549949aae80cdc16624",
                "sha256:800d773f6d4d33b0a3c60e2c6ae8f4c202ea2de056365acfa519aa48acf28e0b",
                "sha256:8293f89985a090ebc3ed1064df31f3b4b56320cdfcec8b60d3295bddb955c22a",
                "sha256:90230a6b8055ad47d3325e9ee8f8a9ae7e273078a66401ac66df68943ced029b",
                "sha256:96a87ec31dc1044d8c2da2ebbed1c456d9b561e7d087734336518181b26b3aa5",
                "sha256:9bb7554aca6f842645022fe2d301c264e6925baa708b392867b7a62645304df4",
                "sha256:a187171e7c09efa4b4cc30ee5d0d55a8d6c5311b3e1b74ac5cb96cc89bafc43c",
                "sha256:ba6eea4459dbd6b1be4e6bfc766079fb9b8dd2e5a35aff6baee4d9b1514ea519",
                "sha256:c4bb09d2bbb394e3730d0918c00276e79b2de70ec2a5231cd4ebb51a57df9ba1",
                "sha256:c59ab92f8e92d6725b7ded9d4a31be3ef42688a115c6d3da9457a5bda140e2b4",
                "sha256:cabc332b7075a914ecea912cd1f3d4370489c8018f2c945a30bcc934e3bc06a6",
                "sha256:eabe5eb2c19a42f4808c03b82bd313fc84d4e395133fb3fc1b1516170a31213c"
            ],
            "index": "pypi",
            "markers": "python_version >= '3.7'",
            "version": "==0.9.2"
            "version": "==0.9.3"
        },
        "scipy": {
            "hashes": [

src-ui/src/app/app.component.ts

@@ -5,6 +5,7 @@ import { first, Subscription } from 'rxjs'
import { ToastsComponent } from './components/common/toasts/toasts.component'
import { FileDropComponent } from './components/file-drop/file-drop.component'
import { SETTINGS_KEYS } from './data/ui-settings'
import { ComponentRouterService } from './services/component-router.service'
import { ConsumerStatusService } from './services/consumer-status.service'
import { HotKeyService } from './services/hot-key.service'
import {
@@ -41,7 +42,8 @@ export class AppComponent implements OnInit, OnDestroy {
    public tourService: TourService,
    private renderer: Renderer2,
    private permissionsService: PermissionsService,
    private hotKeyService: HotKeyService
    private hotKeyService: HotKeyService,
    private componentRouterService: ComponentRouterService
  ) {
    let anyWindow = window as any
    anyWindow.pdfWorkerSrc = 'assets/js/pdf.worker.min.mjs'

src-ui/src/app/components/document-detail/document-detail.component.spec.ts

@@ -45,6 +45,7 @@ import { Tag } from 'src/app/data/tag'
import { PermissionsGuard } from 'src/app/guards/permissions.guard'
import { CustomDatePipe } from 'src/app/pipes/custom-date.pipe'
import { DocumentTitlePipe } from 'src/app/pipes/document-title.pipe'
import { ComponentRouterService } from 'src/app/services/component-router.service'
import { DocumentListViewService } from 'src/app/services/document-list-view.service'
import { OpenDocumentsService } from 'src/app/services/open-documents.service'
import { PermissionsService } from 'src/app/services/permissions.service'
@@ -127,6 +128,7 @@ describe('DocumentDetailComponent', () => {
  let settingsService: SettingsService
  let customFieldsService: CustomFieldsService
  let httpTestingController: HttpTestingController
  let componentRouterService: ComponentRouterService

  let currentUserCan = true
  let currentUserHasObjectPermissions = true
@@ -264,6 +266,7 @@ describe('DocumentDetailComponent', () => {
    customFieldsService = TestBed.inject(CustomFieldsService)
    fixture = TestBed.createComponent(DocumentDetailComponent)
    httpTestingController = TestBed.inject(HttpTestingController)
    componentRouterService = TestBed.inject(ComponentRouterService)
    component = fixture.componentInstance
  })

@@ -568,6 +571,16 @@ describe('DocumentDetailComponent', () => {
    expect(navigateSpy).toHaveBeenCalledWith(['documents'])
  })

  it('should allow close and navigate to the last view if available', () => {
    initNormally()
    jest
      .spyOn(componentRouterService, 'getComponentURLBefore')
      .mockReturnValue('dashboard')
    const navigateSpy = jest.spyOn(router, 'navigate')
    component.close()
    expect(navigateSpy).toHaveBeenCalledWith(['dashboard'])
  })

  it('should allow close and navigate to documents by default', () => {
    initNormally()
    jest

src-ui/src/app/components/document-detail/document-detail.component.ts

@@ -59,6 +59,7 @@ import { CustomDatePipe } from 'src/app/pipes/custom-date.pipe'
import { DocumentTitlePipe } from 'src/app/pipes/document-title.pipe'
import { FileSizePipe } from 'src/app/pipes/file-size.pipe'
import { SafeUrlPipe } from 'src/app/pipes/safeurl.pipe'
import { ComponentRouterService } from 'src/app/services/component-router.service'
import { DocumentListViewService } from 'src/app/services/document-list-view.service'
import { HotKeyService } from 'src/app/services/hot-key.service'
import { OpenDocumentsService } from 'src/app/services/open-documents.service'
@@ -272,7 +273,8 @@ export class DocumentDetailComponent
    private userService: UserService,
    private customFieldsService: CustomFieldsService,
    private http: HttpClient,
    private hotKeyService: HotKeyService
    private hotKeyService: HotKeyService,
    private componentRouterService: ComponentRouterService
  ) {
    super()
  }
@@ -888,6 +890,10 @@ export class DocumentDetailComponent
            'view',
            this.documentListViewService.activeSavedViewId,
          ])
        } else if (this.componentRouterService.getComponentURLBefore()) {
          this.router.navigate([
            this.componentRouterService.getComponentURLBefore(),
          ])
        } else {
          this.router.navigate(['documents'])
        }

src-ui/src/app/services/component-router.service.spec.ts (new file, 102 lines)

@@ -0,0 +1,102 @@
import { TestBed } from '@angular/core/testing'
import { ActivationStart, Router } from '@angular/router'
import { Subject } from 'rxjs'
import { ComponentRouterService } from './component-router.service'

describe('ComponentRouterService', () => {
  let service: ComponentRouterService
  let router: Router
  let eventsSubject: Subject<any>

  beforeEach(() => {
    eventsSubject = new Subject<any>()
    TestBed.configureTestingModule({
      providers: [
        ComponentRouterService,
        {
          provide: Router,
          useValue: {
            events: eventsSubject.asObservable(),
          },
        },
      ],
    })
    service = TestBed.inject(ComponentRouterService)
    router = TestBed.inject(Router)
  })

  it('should add to history and componentHistory on ActivationStart event', () => {
    eventsSubject.next(
      new ActivationStart({
        url: 'test-url',
        component: { name: 'TestComponent' },
      } as any)
    )

    expect((service as any).history).toEqual(['test-url'])
    expect((service as any).componentHistory).toEqual(['TestComponent'])
  })

  it('should not add duplicate component names to componentHistory', () => {
    eventsSubject.next(
      new ActivationStart({
        url: 'test-url-1',
        component: { name: 'TestComponent' },
      } as any)
    )
    eventsSubject.next(
      new ActivationStart({
        url: 'test-url-2',
        component: { name: 'TestComponent' },
      } as any)
    )

    expect((service as any).componentHistory.length).toBe(1)
    expect((service as any).componentHistory).toEqual(['TestComponent'])
  })

  it('should return the URL of the component before the current one', () => {
    eventsSubject.next(
      new ActivationStart({
        url: 'test-url-1',
        component: { name: 'TestComponent1' },
      } as any)
    )
    eventsSubject.next(
      new ActivationStart({
        url: 'test-url-2',
        component: { name: 'TestComponent2' },
      } as any)
    )

    expect(service.getComponentURLBefore()).toBe('test-url-1')
  })

  it('should update the URL of the current component if the same component is loaded via a different URL', () => {
    eventsSubject.next(
      new ActivationStart({
        url: 'test-url-1',
        component: { name: 'TestComponent' },
      } as any)
    )
    eventsSubject.next(
      new ActivationStart({
        url: 'test-url-2',
        component: { name: 'TestComponent' },
      } as any)
    )

    expect((service as any).history).toEqual(['test-url-2'])
  })

  it('should return null if there is no previous component', () => {
    eventsSubject.next(
      new ActivationStart({
        url: 'test-url',
        component: { name: 'TestComponent' },
      } as any)
    )

    expect(service.getComponentURLBefore()).toBeNull()
  })
})

src-ui/src/app/services/component-router.service.ts (new file, 35 lines)

@@ -0,0 +1,35 @@
import { Injectable } from '@angular/core'
import { ActivationStart, Event, Router } from '@angular/router'
import { filter } from 'rxjs'

@Injectable({
  providedIn: 'root',
})
export class ComponentRouterService {
  private history: string[] = []
  private componentHistory: any[] = []

  constructor(private router: Router) {
    this.router.events
      .pipe(filter((event: Event) => event instanceof ActivationStart))
      .subscribe((event: ActivationStart) => {
        if (
          this.componentHistory[this.componentHistory.length - 1] !==
          event.snapshot.component.name
        ) {
          this.history.push(event.snapshot.url.toString())
          this.componentHistory.push(event.snapshot.component.name)
        } else {
          // Update the URL of the current component in case the same component was loaded via a different URL
          this.history[this.history.length - 1] = event.snapshot.url.toString()
        }
      })
  }

  public getComponentURLBefore(): any {
    if (this.componentHistory.length > 1) {
      return this.history[this.history.length - 2]
    }
    return null
  }
}

src/documents/consumer.py

@@ -4,6 +4,7 @@ import os
import tempfile
from enum import Enum
from pathlib import Path
from typing import TYPE_CHECKING

import magic
from django.conf import settings
@@ -154,7 +155,11 @@ class ConsumerPlugin(
        """
        Confirm the input file still exists where it should
        """
        if not os.path.isfile(self.input_doc.original_file):
        if TYPE_CHECKING:
            assert isinstance(self.input_doc.original_file, Path), (
                self.input_doc.original_file
            )
        if not self.input_doc.original_file.is_file():
            self._fail(
                ConsumerStatusShortMessage.FILE_NOT_FOUND,
                f"Cannot consume {self.input_doc.original_file}: File not found.",
@@ -164,7 +169,7 @@ class ConsumerPlugin(
        """
        Using the MD5 of the file, check this exact file doesn't already exist
        """
        with open(self.input_doc.original_file, "rb") as f:
        with Path(self.input_doc.original_file).open("rb") as f:
            checksum = hashlib.md5(f.read()).hexdigest()
        existing_doc = Document.global_objects.filter(
            Q(checksum=checksum) | Q(archive_checksum=checksum),
@@ -178,7 +183,7 @@ class ConsumerPlugin(
                log_msg += " Note: existing document is in the trash."

            if settings.CONSUMER_DELETE_DUPLICATES:
                os.unlink(self.input_doc.original_file)
                Path(self.input_doc.original_file).unlink()
            self._fail(
                msg,
                log_msg,
@@ -237,7 +242,7 @@ class ConsumerPlugin(
        if not settings.PRE_CONSUME_SCRIPT:
            return

        if not os.path.isfile(settings.PRE_CONSUME_SCRIPT):
        if not Path(settings.PRE_CONSUME_SCRIPT).is_file():
            self._fail(
                ConsumerStatusShortMessage.PRE_CONSUME_SCRIPT_NOT_FOUND,
                f"Configured pre-consume script "
@@ -280,7 +285,7 @@ class ConsumerPlugin(
        if not settings.POST_CONSUME_SCRIPT:
            return

        if not os.path.isfile(settings.POST_CONSUME_SCRIPT):
        if not Path(settings.POST_CONSUME_SCRIPT).is_file():
            self._fail(
                ConsumerStatusShortMessage.POST_CONSUME_SCRIPT_NOT_FOUND,
                f"Configured post-consume script "
@@ -582,7 +587,7 @@ class ConsumerPlugin(
                        document.thumbnail_path,
                    )

                    if archive_path and os.path.isfile(archive_path):
                    if archive_path and Path(archive_path).is_file():
                        document.archive_filename = generate_unique_filename(
                            document,
                            archive_filename=True,
@@ -594,7 +599,7 @@ class ConsumerPlugin(
                            document.archive_path,
                        )

                        with open(archive_path, "rb") as f:
                        with Path(archive_path).open("rb") as f:
                            document.archive_checksum = hashlib.md5(
                                f.read(),
                            ).hexdigest()
@@ -612,14 +617,14 @@ class ConsumerPlugin(
                    self.unmodified_original.unlink()

                # https://github.com/jonaswinkler/paperless-ng/discussions/1037
                shadow_file = os.path.join(
                    os.path.dirname(self.input_doc.original_file),
                    "._" + os.path.basename(self.input_doc.original_file),
                shadow_file = (
                    Path(self.input_doc.original_file).parent
                    / f"._{Path(self.input_doc.original_file).name}"
                )

                if os.path.isfile(shadow_file):
                if Path(shadow_file).is_file():
                    self.log.debug(f"Deleting file {shadow_file}")
                    os.unlink(shadow_file)
                    Path(shadow_file).unlink()

        except Exception as e:
            self._fail(
@@ -704,7 +709,7 @@ class ConsumerPlugin(
            create_date = date
            self.log.debug(f"Creation date from parse_date: {create_date}")
        else:
            stats = os.stat(self.input_doc.original_file)
            stats = Path(self.input_doc.original_file).stat()
            create_date = timezone.make_aware(
                datetime.datetime.fromtimestamp(stats.st_mtime),
            )
@@ -800,7 +805,10 @@ class ConsumerPlugin(
                )  # adds to document

    def _write(self, storage_type, source, target):
        with open(source, "rb") as read_file, open(target, "wb") as write_file:
        with (
            Path(source).open("rb") as read_file,
            Path(target).open("wb") as write_file,
        ):
            write_file.write(read_file.read())

        # Attempt to copy file's original stats, but it's ok if we can't

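The consumer.py hunks above swap os / os.path calls for the matching pathlib.Path methods one-for-one. A minimal sketch of those equivalences, using a throwaway temporary file as scaffolding (illustrative only, not the plugin's own code):

```python
# Each pathlib call mirrors the os/os.path call it replaces in the hunks above.
import hashlib
import tempfile
from pathlib import Path

with tempfile.NamedTemporaryFile(suffix=".pdf", delete=False) as tmp:
    tmp.write(b"dummy")
original_file = Path(tmp.name)

assert original_file.is_file()         # was: os.path.isfile(original_file)
with original_file.open("rb") as f:    # was: open(original_file, "rb")
    checksum = hashlib.md5(f.read()).hexdigest()
mtime = original_file.stat().st_mtime  # was: os.stat(original_file).st_mtime
shadow = original_file.parent / f"._{original_file.name}"
# was: os.path.join(os.path.dirname(original_file), "._" + os.path.basename(original_file))
original_file.unlink()                 # was: os.unlink(original_file)
print(checksum, mtime, shadow)
```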
src/documents/management/commands/document_consumer.py

@@ -1,6 +1,5 @@
import logging
import os
from concurrent.futures import ThreadPoolExecutor
from fnmatch import filter
from pathlib import Path
from pathlib import PurePath
@@ -13,8 +12,9 @@ from django import db
from django.conf import settings
from django.core.management.base import BaseCommand
from django.core.management.base import CommandError
from watchdog.events import FileSystemEventHandler
from watchdog.observers.polling import PollingObserver
from watchfiles import Change
from watchfiles import DefaultFilter
from watchfiles import watch

from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
@@ -141,53 +141,6 @@ def _consume(filepath: str) -> None:
        logger.exception("Error while consuming document")


def _consume_wait_unmodified(file: str) -> None:
    """
    Waits for the given file to appear unmodified based on file size
    and modification time.  Will wait a configured number of seconds
    and retry a configured number of times before either consuming or
    giving up
    """
    if _is_ignored(file):
        return

    logger.debug(f"Waiting for file {file} to remain unmodified")
    mtime = -1
    size = -1
    current_try = 0
    while current_try < settings.CONSUMER_POLLING_RETRY_COUNT:
        try:
            stat_data = os.stat(file)
            new_mtime = stat_data.st_mtime
            new_size = stat_data.st_size
        except FileNotFoundError:
            logger.debug(
                f"File {file} moved while waiting for it to remain unmodified.",
            )
            return
        if new_mtime == mtime and new_size == size:
            _consume(file)
            return
        mtime = new_mtime
        size = new_size
        sleep(settings.CONSUMER_POLLING_DELAY)
        current_try += 1

    logger.error(f"Timeout while waiting on file {file} to remain unmodified.")


class Handler(FileSystemEventHandler):
    def __init__(self, pool: ThreadPoolExecutor) -> None:
        super().__init__()
        self._pool = pool

    def on_created(self, event):
        self._pool.submit(_consume_wait_unmodified, event.src_path)

    def on_moved(self, event):
        self._pool.submit(_consume_wait_unmodified, event.dest_path)


class Command(BaseCommand):
    """
    On every iteration of an infinite loop, consume what we can from the
@@ -199,7 +152,7 @@ class Command(BaseCommand):
    # Also only for testing, configures in one place the timeout used before checking
    # the stop flag
    testing_timeout_s: Final[float] = 0.5
    testing_timeout_ms: Final[float] = testing_timeout_s * 1000.0
    testing_timeout_ms: Final[int] = int(testing_timeout_s * 1000)

    def add_arguments(self, parser):
        parser.add_argument(
@@ -221,99 +174,80 @@ class Command(BaseCommand):
        )

    def handle(self, *args, **options):
        directory = options["directory"]
        recursive = settings.CONSUMER_RECURSIVE
        directory: Final[Path] = Path(options["directory"]).resolve()
        is_recursive: Final[bool] = settings.CONSUMER_RECURSIVE
        is_oneshot: Final[bool] = options["oneshot"]
        is_testing: Final[bool] = options["testing"]

        if not directory:
            raise CommandError("CONSUMPTION_DIR does not appear to be set.")

        directory = os.path.abspath(directory)

        if not os.path.isdir(directory):
        if not directory.exists():
            raise CommandError(f"Consumption directory {directory} does not exist")

        if not directory.is_dir():
            raise CommandError(f"Consumption directory {directory} is not a directory")

        # Consumer will need this
        settings.SCRATCH_DIR.mkdir(parents=True, exist_ok=True)

        if recursive:
            for dirpath, _, filenames in os.walk(directory):
                for filename in filenames:
                    filepath = os.path.join(dirpath, filename)
                    _consume(filepath)
        else:
            for entry in os.scandir(directory):
                _consume(entry.path)
        # Check for existing files at startup
        glob_str = "**/*" if is_recursive else "*"

        if options["oneshot"]:
        for filepath in directory.glob(glob_str):
            _consume(filepath)

        if is_oneshot:
            logger.info("One shot consume requested, exiting")
            return

        if settings.CONSUMER_POLLING == 0 and INotify:
            self.handle_inotify(directory, recursive, options["testing"])
        use_polling: Final[bool] = settings.CONSUMER_POLLING != 0
        poll_delay_ms: Final[int] = int(settings.CONSUMER_POLLING * 1000)

        if use_polling:
            logger.info(
                f"Polling {directory} for changes every {settings.CONSUMER_POLLING}s ",
            )
        else:
            if INotify is None and settings.CONSUMER_POLLING == 0:  # pragma: no cover
                logger.warning("Using polling as INotify import failed")
            self.handle_polling(directory, recursive, options["testing"])
            logger.info(f"Using inotify to watch {directory} for changes")

        logger.debug("Consumer exiting.")

    def handle_polling(self, directory, recursive, is_testing: bool):
        logger.info(f"Polling directory for changes: {directory}")

        timeout = None
        if is_testing:
            timeout = self.testing_timeout_s
            logger.debug(f"Configuring timeout to {timeout}s")

        polling_interval = settings.CONSUMER_POLLING
        if polling_interval == 0:  # pragma: no cover
            # Only happens if INotify failed to import
            logger.warning("Using polling of 10s, consider setting this")
            polling_interval = 10

        with ThreadPoolExecutor(max_workers=4) as pool:
            observer = PollingObserver(timeout=polling_interval)
            observer.schedule(Handler(pool), directory, recursive=recursive)
            observer.start()
            try:
                while observer.is_alive():
                    observer.join(timeout)
                    if self.stop_flag.is_set():
                        observer.stop()
            except KeyboardInterrupt:
                observer.stop()
            observer.join()

    def handle_inotify(self, directory, recursive, is_testing: bool):
        logger.info(f"Using inotify to watch directory for changes: {directory}")

        timeout_ms = None
        if is_testing:
            timeout_ms = self.testing_timeout_ms
            logger.debug(f"Configuring timeout to {timeout_ms}ms")

        inotify = INotify()
        inotify_flags = flags.CLOSE_WRITE | flags.MOVED_TO | flags.MODIFY
        if recursive:
            descriptor = inotify.add_watch_recursive(directory, inotify_flags)
        else:
            descriptor = inotify.add_watch(directory, inotify_flags)
        read_timeout_ms = 0
        if options["testing"]:
            read_timeout_ms = self.testing_timeout_ms
            logger.debug(f"Configuring initial timeout to {read_timeout_ms}ms")

        inotify_debounce_secs: Final[float] = settings.CONSUMER_INOTIFY_DELAY
        inotify_debounce_ms: Final[int] = inotify_debounce_secs * 1000
        inotify_debounce_ms: Final[int] = int(inotify_debounce_secs * 1000)

        finished = False
        filter = DefaultFilter(ignore_entity_patterns={r"__paperless_write_test_\d+__"})

        notified_files = {}

        while not finished:
        notified_files: dict[Path, float] = {}
        while not self.stop_flag.is_set():
            try:
                for event in inotify.read(timeout=timeout_ms):
                    path = inotify.get_path(event.wd) if recursive else directory
                    filepath = os.path.join(path, event.name)
                    if flags.MODIFY in flags.from_mask(event.mask):
                        notified_files.pop(filepath, None)
                    else:
                        notified_files[filepath] = monotonic()
                for changes in watch(
                    directory,
                    watch_filter=filter,
                    rust_timeout=read_timeout_ms,
                    yield_on_timeout=True,
                    force_polling=use_polling,
                    poll_delay_ms=poll_delay_ms,
                    recursive=is_recursive,
                    stop_event=self.stop_flag,
                ):
                    for change_type, path in changes:
                        path = Path(path).resolve()
                        logger.info(f"Got {change_type.name} for {path}")

                        match change_type:
                            case Change.added | Change.modified:
                                logger.info(
                                    f"New event time for {path} at {monotonic()}",
                                )
                                notified_files[path] = monotonic()
                            case Change.deleted:
                                notified_files.pop(path, None)

                    logger.info("Checking for files that are ready")

                    # Check the files against the timeout
                    still_waiting = {}
@@ -326,11 +260,14 @@ class Command(BaseCommand):

                        # Also make sure the file exists still, some scanners might write a
                        # temporary file first
                    file_still_exists = os.path.exists(filepath) and os.path.isfile(
                        filepath,
                        file_still_exists = filepath.exists() and filepath.is_file()

                        logger.info(
                            f"{filepath} - {waited_long_enough} - {file_still_exists}",
                        )

                        if waited_long_enough and file_still_exists:
                            logger.info(f"Consuming {filepath}")
                            _consume(filepath)
                        elif file_still_exists:
                            still_waiting[filepath] = last_event_time
@@ -338,22 +275,20 @@ class Command(BaseCommand):
                        # These files are still waiting to hit the timeout
                        notified_files = still_waiting

                # If files are waiting, need to exit read() to check them
                # Otherwise, go back to infinite sleep time, but only if not testing
                    # Always exit the watch loop to reconfigure the timeout
                    break

                if len(notified_files) > 0:
                    timeout_ms = inotify_debounce_ms
                    logger.info("Using inotify_debounce_ms")
                    read_timeout_ms = inotify_debounce_ms
                elif is_testing:
                    timeout_ms = self.testing_timeout_ms
                    logger.info("Using testing_timeout_ms")
                    read_timeout_ms = self.testing_timeout_ms
                else:
                    timeout_ms = None

                if self.stop_flag.is_set():
                    logger.debug("Finishing because event is set")
                    finished = True

                    logger.info("No files in waiting, configuring indefinite timeout")
                    read_timeout_ms = 0
                logger.info(f"Configuring timeout to {read_timeout_ms}ms")
            except KeyboardInterrupt:
                logger.info("Received SIGINT, stopping inotify")
                finished = True
                self.stop_flag.set()

        inotify.rm_watch(descriptor)
        inotify.close()
        logger.debug("Consumer exiting.")

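The rewritten command drops the watchdog/inotify code paths in favour of a single watchfiles.watch() loop with its own debounce bookkeeping. A stripped-down sketch of that pattern, assuming a hypothetical /tmp/consume directory and a flat 500 ms debounce in place of CONSUMER_INOTIFY_DELAY (an illustration, not the management command itself):

```python
from pathlib import Path
from threading import Event
from time import monotonic

from watchfiles import Change, DefaultFilter, watch

stop_flag = Event()
debounce_ms = 500                      # stand-in for CONSUMER_INOTIFY_DELAY * 1000
notified_files: dict[Path, float] = {}
read_timeout_ms = 0                    # 0 = block until the first change arrives

while not stop_flag.is_set():
    # yield_on_timeout=True makes watch() yield an empty set when rust_timeout
    # elapses, so the loop can re-check the files it is already waiting on.
    for changes in watch(
        "/tmp/consume",
        watch_filter=DefaultFilter(),
        rust_timeout=read_timeout_ms,
        yield_on_timeout=True,
        stop_event=stop_flag,
    ):
        for change_type, raw_path in changes:
            path = Path(raw_path).resolve()
            if change_type in (Change.added, Change.modified):
                notified_files[path] = monotonic()
            elif change_type == Change.deleted:
                notified_files.pop(path, None)
        # Files whose last event is older than the debounce window are ready.
        ready = [p for p, t in notified_files.items()
                 if (monotonic() - t) * 1000 >= debounce_ms and p.is_file()]
        for p in ready:
            notified_files.pop(p, None)
            print(f"would consume {p}")  # stand-in for _consume(p)
        break  # leave watch() so the timeout below can be reconfigured
    read_timeout_ms = debounce_ms if notified_files else 0
```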
src/documents/signals/handlers.py

@@ -353,7 +353,7 @@ def cleanup_document_deletion(sender, instance, **kwargs):
                        f"{filename} could not be deleted: {e}",
                    )
            elif filename and not os.path.isfile(filename):
                logger.warn(f"Expected {filename} tp exist, but it did not")
                logger.warning(f"Expected {filename} to exist, but it did not")

        delete_empty_directories(
            os.path.dirname(instance.source_path),

src/documents/tests/test_api_bulk_download.py

@@ -1,7 +1,6 @@
import datetime
import io
import json
import os
import shutil
import zipfile

@@ -15,9 +14,10 @@ from documents.models import Correspondent
from documents.models import Document
from documents.models import DocumentType
from documents.tests.utils import DirectoriesMixin
from documents.tests.utils import SampleDirMixin


class TestBulkDownload(DirectoriesMixin, APITestCase):
class TestBulkDownload(DirectoriesMixin, SampleDirMixin, APITestCase):
    ENDPOINT = "/api/documents/bulk_download/"

    def setUp(self):
@@ -51,22 +51,10 @@ class TestBulkDownload(DirectoriesMixin, APITestCase):
            archive_checksum="D",
        )

        shutil.copy(
            os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"),
            self.doc2.source_path,
        )
        shutil.copy(
            os.path.join(os.path.dirname(__file__), "samples", "simple.png"),
            self.doc2b.source_path,
        )
        shutil.copy(
            os.path.join(os.path.dirname(__file__), "samples", "simple.jpg"),
            self.doc3.source_path,
        )
        shutil.copy(
            os.path.join(os.path.dirname(__file__), "samples", "test_with_bom.pdf"),
            self.doc3.archive_path,
        )
        shutil.copy(self.SAMPLE_DIR / "simple.pdf", self.doc2.source_path)
        shutil.copy(self.SAMPLE_DIR / "simple.png", self.doc2b.source_path)
        shutil.copy(self.SAMPLE_DIR / "simple.jpg", self.doc3.source_path)
        shutil.copy(self.SAMPLE_DIR / "test_with_bom.pdf", self.doc3.archive_path)

    def test_download_originals(self):
        response = self.client.post(

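The simplified setUp() above relies on a SAMPLE_DIR attribute coming from SampleDirMixin (imported from documents.tests.utils). That mixin is not shown in this compare; a hypothetical sketch of the shape these calls assume:

```python
# Hypothetical stand-in for SampleDirMixin; the real one lives in
# documents/tests/utils.py and may differ.
from pathlib import Path

class SampleDirMixin:
    SAMPLE_DIR = Path(__file__).parent / "samples"
```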
src/documents/tests/test_api_documents.py

@@ -1,5 +1,4 @@
import datetime
import os
import shutil
import tempfile
import uuid
@@ -8,6 +7,7 @@ from binascii import hexlify
from datetime import date
from datetime import timedelta
from pathlib import Path
from typing import TYPE_CHECKING
from unittest import mock

import celery
@@ -171,19 +171,18 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        content = b"This is a test"
        content_thumbnail = b"thumbnail content"

        with open(filename, "wb") as f:
        with Path(filename).open("wb") as f:
            f.write(content)

        doc = Document.objects.create(
            title="none",
            filename=os.path.basename(filename),
            filename=Path(filename).name,
            mime_type="application/pdf",
        )

        with open(
            os.path.join(self.dirs.thumbnail_dir, f"{doc.pk:07d}.webp"),
            "wb",
        ) as f:
        if TYPE_CHECKING:
            assert isinstance(self.dirs.thumbnail_dir, Path), self.dirs.thumbnail_dir
        with (self.dirs.thumbnail_dir / f"{doc.pk:07d}.webp").open("wb") as f:
            f.write(content_thumbnail)

        response = self.client.get(f"/api/documents/{doc.pk}/download/")
@@ -217,7 +216,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        content = b"This is a test"
        content_thumbnail = b"thumbnail content"

        with open(filename, "wb") as f:
        with Path(filename).open("wb") as f:
            f.write(content)

        user1 = User.objects.create_user(username="test1")
@@ -229,15 +228,12 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):

        doc = Document.objects.create(
            title="none",
            filename=os.path.basename(filename),
            filename=Path(filename).name,
            mime_type="application/pdf",
            owner=user1,
        )

        with open(
            os.path.join(self.dirs.thumbnail_dir, f"{doc.pk:07d}.webp"),
            "wb",
        ) as f:
        with (Path(self.dirs.thumbnail_dir) / f"{doc.pk:07d}.webp").open("wb") as f:
            f.write(content_thumbnail)

        response = self.client.get(f"/api/documents/{doc.pk}/download/")
@@ -272,10 +268,10 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
            mime_type="application/pdf",
        )

        with open(doc.source_path, "wb") as f:
        with Path(doc.source_path).open("wb") as f:
            f.write(content)

        with open(doc.archive_path, "wb") as f:
        with Path(doc.archive_path).open("wb") as f:
            f.write(content_archive)

        response = self.client.get(f"/api/documents/{doc.pk}/download/")
@@ -305,7 +301,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
    def test_document_actions_not_existing_file(self):
        doc = Document.objects.create(
            title="none",
            filename=os.path.basename("asd"),
            filename=Path("asd").name,
            mime_type="application/pdf",
        )

@@ -1026,10 +1022,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
            id=str(uuid.uuid4()),
        )

        with open(
            os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"),
            "rb",
        ) as f:
        with (Path(__file__).parent / "samples" / "simple.pdf").open("rb") as f:
            response = self.client.post(
                "/api/documents/post_document/",
                {"document": f},
@@ -1061,10 +1054,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
            id=str(uuid.uuid4()),
        )

        with open(
            os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"),
            "rb",
        ) as f:
        with (Path(__file__).parent / "samples" / "simple.pdf").open("rb") as f:
            response = self.client.post(
                "/api/documents/post_document/",
                {
@@ -1095,10 +1085,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
            id=str(uuid.uuid4()),
        )

        with open(
            os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"),
            "rb",
        ) as f:
        with (Path(__file__).parent / "samples" / "simple.pdf").open("rb") as f:
            response = self.client.post(
                "/api/documents/post_document/",
                {"documenst": f},
@@ -1111,10 +1098,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
            id=str(uuid.uuid4()),
        )

        with open(
            os.path.join(os.path.dirname(__file__), "samples", "simple.zip"),
            "rb",
        ) as f:
        with (Path(__file__).parent / "samples" / "simple.zip").open("rb") as f:
            response = self.client.post(
                "/api/documents/post_document/",
                {"document": f},
@@ -1127,10 +1111,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
            id=str(uuid.uuid4()),
        )

        with open(
            os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"),
            "rb",
        ) as f:
        with (Path(__file__).parent / "samples" / "simple.pdf").open("rb") as f:
            response = self.client.post(
                "/api/documents/post_document/",
                {"document": f, "title": "my custom title"},
@@ -1152,10 +1133,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        )

        c = Correspondent.objects.create(name="test-corres")
        with open(
            os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"),
            "rb",
        ) as f:
        with (Path(__file__).parent / "samples" / "simple.pdf").open("rb") as f:
            response = self.client.post(
                "/api/documents/post_document/",
                {"document": f, "correspondent": c.id},
@@ -1176,10 +1154,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
            id=str(uuid.uuid4()),
        )

        with open(
            os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"),
            "rb",
        ) as f:
        with (Path(__file__).parent / "samples" / "simple.pdf").open("rb") as f:
            response = self.client.post(
                "/api/documents/post_document/",
                {"document": f, "correspondent": 3456},
@@ -1194,10 +1169,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        )

        dt = DocumentType.objects.create(name="invoice")
        with open(
            os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"),
            "rb",
        ) as f:
        with (Path(__file__).parent / "samples" / "simple.pdf").open("rb") as f:
            response = self.client.post(
                "/api/documents/post_document/",
                {"document": f, "document_type": dt.id},
@@ -1218,10 +1190,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
            id=str(uuid.uuid4()),
        )

        with open(
            os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"),
            "rb",
        ) as f:
        with (Path(__file__).parent / "samples" / "simple.pdf").open("rb") as f:
            response = self.client.post(
                "/api/documents/post_document/",
                {"document": f, "document_type": 34578},
@@ -1236,10 +1205,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        )

        sp = StoragePath.objects.create(name="invoices")
        with open(
            os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"),
            "rb",
        ) as f:
        with (Path(__file__).parent / "samples" / "simple.pdf").open("rb") as f:
            response = self.client.post(
                "/api/documents/post_document/",
                {"document": f, "storage_path": sp.id},
@@ -1260,10 +1226,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
            id=str(uuid.uuid4()),
        )

        with open(
            os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"),
            "rb",
        ) as f:
        with (Path(__file__).parent / "samples" / "simple.pdf").open("rb") as f:
            response = self.client.post(
                "/api/documents/post_document/",
                {"document": f, "storage_path": 34578},
@@ -1279,10 +1242,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):

        t1 = Tag.objects.create(name="tag1")
        t2 = Tag.objects.create(name="tag2")
        with open(
            os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"),
            "rb",
        ) as f:
        with (Path(__file__).parent / "samples" / "simple.pdf").open("rb") as f:
            response = self.client.post(
                "/api/documents/post_document/",
                {"document": f, "tags": [t2.id, t1.id]},
@@ -1305,10 +1265,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):

        t1 = Tag.objects.create(name="tag1")
        t2 = Tag.objects.create(name="tag2")
        with open(
            os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"),
            "rb",
        ) as f:
        with (Path(__file__).parent / "samples" / "simple.pdf").open("rb") as f:
            response = self.client.post(
                "/api/documents/post_document/",
                {"document": f, "tags": [t2.id, t1.id, 734563]},
@@ -1332,10 +1289,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
            0,
            tzinfo=zoneinfo.ZoneInfo("America/Los_Angeles"),
        )
        with open(
            os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"),
            "rb",
        ) as f:
        with (Path(__file__).parent / "samples" / "simple.pdf").open("rb") as f:
            response = self.client.post(
                "/api/documents/post_document/",
                {"document": f, "created": created},
@@ -1353,10 +1307,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
            id=str(uuid.uuid4()),
        )

        with open(
            os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"),
            "rb",
        ) as f:
        with (Path(__file__).parent / "samples" / "simple.pdf").open("rb") as f:
            response = self.client.post(
                "/api/documents/post_document/",
                {"document": f, "archive_serial_number": 500},
@@ -1385,10 +1336,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
            data_type=CustomField.FieldDataType.STRING,
        )

        with open(
            os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"),
            "rb",
        ) as f:
        with (Path(__file__).parent / "samples" / "simple.pdf").open("rb") as f:
            response = self.client.post(
                "/api/documents/post_document/",
                {
@@ -1417,10 +1365,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
            id=str(uuid.uuid4()),
        )

        with open(
		||||
            os.path.join(os.path.dirname(__file__), "samples", "invalid_pdf.pdf"),
 | 
			
		||||
            "rb",
 | 
			
		||||
        ) as f:
 | 
			
		||||
        with (Path(__file__).parent / "samples" / "invalid_pdf.pdf").open("rb") as f:
 | 
			
		||||
            response = self.client.post(
 | 
			
		||||
                "/api/documents/post_document/",
 | 
			
		||||
                {"document": f},
 | 
			
		||||
@@ -1437,14 +1382,14 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
 | 
			
		||||
            archive_filename="archive.pdf",
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
        source_file = os.path.join(
 | 
			
		||||
            os.path.dirname(__file__),
 | 
			
		||||
            "samples",
 | 
			
		||||
            "documents",
 | 
			
		||||
            "thumbnails",
 | 
			
		||||
            "0000001.webp",
 | 
			
		||||
        source_file: Path = (
 | 
			
		||||
            Path(__file__).parent
 | 
			
		||||
            / "samples"
 | 
			
		||||
            / "documents"
 | 
			
		||||
            / "thumbnails"
 | 
			
		||||
            / "0000001.webp"
 | 
			
		||||
        )
 | 
			
		||||
        archive_file = os.path.join(os.path.dirname(__file__), "samples", "simple.pdf")
 | 
			
		||||
        archive_file: Path = Path(__file__).parent / "samples" / "simple.pdf"
 | 
			
		||||
 | 
			
		||||
        shutil.copy(source_file, doc.source_path)
 | 
			
		||||
        shutil.copy(archive_file, doc.archive_path)
 | 
			
		||||
@@ -1460,8 +1405,8 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
 | 
			
		||||
        self.assertGreater(len(meta["archive_metadata"]), 0)
 | 
			
		||||
        self.assertEqual(meta["media_filename"], "file.pdf")
 | 
			
		||||
        self.assertEqual(meta["archive_media_filename"], "archive.pdf")
 | 
			
		||||
        self.assertEqual(meta["original_size"], os.stat(source_file).st_size)
 | 
			
		||||
        self.assertEqual(meta["archive_size"], os.stat(archive_file).st_size)
 | 
			
		||||
        self.assertEqual(meta["original_size"], Path(source_file).stat().st_size)
 | 
			
		||||
        self.assertEqual(meta["archive_size"], Path(archive_file).stat().st_size)
 | 
			
		||||
 | 
			
		||||
        response = self.client.get(f"/api/documents/{doc.pk}/metadata/")
 | 
			
		||||
        self.assertEqual(response.status_code, status.HTTP_200_OK)
 | 
			
		||||
@@ -1477,10 +1422,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
 | 
			
		||||
            mime_type="application/pdf",
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
        shutil.copy(
 | 
			
		||||
            os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"),
 | 
			
		||||
            doc.source_path,
 | 
			
		||||
        )
 | 
			
		||||
        shutil.copy(Path(__file__).parent / "samples" / "simple.pdf", doc.source_path)
 | 
			
		||||
 | 
			
		||||
        response = self.client.get(f"/api/documents/{doc.pk}/metadata/")
 | 
			
		||||
        self.assertEqual(response.status_code, status.HTTP_200_OK)
 | 
			
		||||
@@ -1939,9 +1881,9 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
 | 
			
		||||
 | 
			
		||||
    def test_get_logs(self):
 | 
			
		||||
        log_data = "test\ntest2\n"
 | 
			
		||||
        with open(os.path.join(settings.LOGGING_DIR, "mail.log"), "w") as f:
 | 
			
		||||
        with (Path(settings.LOGGING_DIR) / "mail.log").open("w") as f:
 | 
			
		||||
            f.write(log_data)
 | 
			
		||||
        with open(os.path.join(settings.LOGGING_DIR, "paperless.log"), "w") as f:
 | 
			
		||||
        with (Path(settings.LOGGING_DIR) / "paperless.log").open("w") as f:
 | 
			
		||||
            f.write(log_data)
 | 
			
		||||
        response = self.client.get("/api/logs/")
 | 
			
		||||
        self.assertEqual(response.status_code, status.HTTP_200_OK)
 | 
			
		||||
@@ -1949,7 +1891,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
 | 
			
		||||
 | 
			
		||||
    def test_get_logs_only_when_exist(self):
 | 
			
		||||
        log_data = "test\ntest2\n"
 | 
			
		||||
        with open(os.path.join(settings.LOGGING_DIR, "paperless.log"), "w") as f:
 | 
			
		||||
        with (Path(settings.LOGGING_DIR) / "paperless.log").open("w") as f:
 | 
			
		||||
            f.write(log_data)
 | 
			
		||||
        response = self.client.get("/api/logs/")
 | 
			
		||||
        self.assertEqual(response.status_code, status.HTTP_200_OK)
 | 
			
		||||
@@ -1966,7 +1908,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
 | 
			
		||||
 | 
			
		||||
    def test_get_log(self):
 | 
			
		||||
        log_data = "test\ntest2\n"
 | 
			
		||||
        with open(os.path.join(settings.LOGGING_DIR, "paperless.log"), "w") as f:
 | 
			
		||||
        with (Path(settings.LOGGING_DIR) / "paperless.log").open("w") as f:
 | 
			
		||||
            f.write(log_data)
 | 
			
		||||
        response = self.client.get("/api/logs/paperless/")
 | 
			
		||||
        self.assertEqual(response.status_code, status.HTTP_200_OK)
 | 
			
		||||
 
 | 
			
		||||
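The hunks above are a mechanical migration of the test helpers from os.path string handling to pathlib. A minimal sketch of the equivalence, outside any test class (the variable names below are placeholders, not part of the change):

from pathlib import Path

# Old style: build the path as a string, then call the open() builtin.
# legacy = os.path.join(os.path.dirname(__file__), "samples", "simple.pdf")

# New style: compose a Path with "/" and use its open() method, as the
# hunks above now do (the idiom ruff's pathlib "PTH" rules check for).
sample = Path(__file__).parent / "samples" / "simple.pdf"
with sample.open("rb") as f:
    payload = f.read()

# Path objects also cover the os.stat() calls replaced above:
size = sample.stat().st_size
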
@@ -3,6 +3,7 @@ import json
from unittest import mock

from allauth.mfa.models import Authenticator
from allauth.mfa.totp.internal import auth as totp_auth
from django.contrib.auth.models import Group
from django.contrib.auth.models import Permission
from django.contrib.auth.models import User
@@ -488,6 +489,71 @@ class TestApiAuth(DirectoriesMixin, APITestCase):
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        self.assertEqual(response.data["detail"], "MFA required")

    @mock.patch("allauth.mfa.totp.internal.auth.TOTP.validate_code")
    def test_get_token_mfa_enabled(self, mock_validate_code):
        """
        GIVEN:
            - User with MFA enabled
        WHEN:
            - API request is made to obtain an auth token
        THEN:
            - MFA code is required
        """
        user1 = User.objects.create_user(username="user1")
        user1.set_password("password")
        user1.save()

        response = self.client.post(
            "/api/token/",
            data={
                "username": "user1",
                "password": "password",
            },
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        secret = totp_auth.generate_totp_secret()
        totp_auth.TOTP.activate(
            user1,
            secret,
        )

        # no code
        response = self.client.post(
            "/api/token/",
            data={
                "username": "user1",
                "password": "password",
            },
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data["non_field_errors"][0], "MFA code is required")

        # invalid code
        mock_validate_code.return_value = False
        response = self.client.post(
            "/api/token/",
            data={
                "username": "user1",
                "password": "password",
                "code": "123456",
            },
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data["non_field_errors"][0], "Invalid MFA code")

        # valid code
        mock_validate_code.return_value = True
        response = self.client.post(
            "/api/token/",
            data={
                "username": "user1",
                "password": "password",
                "code": "123456",
            },
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)


class TestApiUser(DirectoriesMixin, APITestCase):
    ENDPOINT = "/api/users/"

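The new test drives /api/token/ through Django's test client. For comparison, a client-side sketch of the same flow over plain HTTP, assuming a reachable instance at http://localhost:8000 and placeholder credentials (the host, user name, password and code below are illustrative, not part of the change):

import requests  # any HTTP client works; requests is used here for brevity

BASE_URL = "http://localhost:8000"  # placeholder

# When MFA is enabled for the account, "code" must carry the current TOTP
# value; omitting it yields 400 {"non_field_errors": ["MFA code is required"]}
# and a wrong value yields 400 {"non_field_errors": ["Invalid MFA code"]}.
response = requests.post(
    f"{BASE_URL}/api/token/",
    data={"username": "user1", "password": "password", "code": "123456"},
)
if response.status_code == 200:
    print("Token:", response.json()["token"])
else:
    print(response.status_code, response.json())
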
@@ -1,11 +1,14 @@
import logging

from allauth.mfa.adapter import get_adapter as get_mfa_adapter
from allauth.mfa.models import Authenticator
from allauth.mfa.totp.internal.auth import TOTP
from allauth.socialaccount.models import SocialAccount
from django.contrib.auth.models import Group
from django.contrib.auth.models import Permission
from django.contrib.auth.models import User
from rest_framework import serializers
from rest_framework.authtoken.serializers import AuthTokenSerializer

from paperless.models import ApplicationConfiguration

@@ -24,6 +27,36 @@ class ObfuscatedUserPasswordField(serializers.Field):
        return data


class PaperlessAuthTokenSerializer(AuthTokenSerializer):
    code = serializers.CharField(
        label="MFA Code",
        write_only=True,
        required=False,
    )

    def validate(self, attrs):
        attrs = super().validate(attrs)
        user = attrs.get("user")
        code = attrs.get("code")
        mfa_adapter = get_mfa_adapter()
        if mfa_adapter.is_mfa_enabled(user):
            if not code:
                raise serializers.ValidationError(
                    "MFA code is required",
                )
            authenticator = Authenticator.objects.get(
                user=user,
                type=Authenticator.Type.TOTP,
            )
            if not TOTP(instance=authenticator).validate_code(
                code,
            ):
                raise serializers.ValidationError(
                    "Invalid MFA code",
                )
        return attrs


class UserSerializer(serializers.ModelSerializer):
    password = ObfuscatedUserPasswordField(required=False)
    user_permissions = serializers.SlugRelatedField(

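The exact error strings asserted in the test come out of the serializer's validate() hook: in DRF, a ValidationError raised there is collected under the non_field_errors key and, with raise_exception=True, becomes the body of the 400 response. A stripped-down sketch of just that mechanism (generic DRF, not Paperless-specific; run it inside a configured Django context such as manage.py shell):

from rest_framework import serializers


class MfaCodeDemoSerializer(serializers.Serializer):
    # Mirrors only the optional "code" field added above; everything else omitted.
    code = serializers.CharField(write_only=True, required=False)

    def validate(self, attrs):
        if not attrs.get("code"):
            # Serializer-level errors land under "non_field_errors".
            raise serializers.ValidationError("MFA code is required")
        return attrs


demo = MfaCodeDemoSerializer(data={})
demo.is_valid()
print(demo.errors)
# {'non_field_errors': [ErrorDetail(string='MFA code is required', code='invalid')]}
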
@@ -14,7 +14,6 @@ from django.utils.translation import gettext_lazy as _
from django.views.decorators.csrf import ensure_csrf_cookie
from django.views.generic import RedirectView
from django.views.static import serve
from rest_framework.authtoken import views
from rest_framework.routers import DefaultRouter

from documents.views import BulkDownloadView
@@ -50,6 +49,7 @@ from paperless.views import DisconnectSocialAccountView
from paperless.views import FaviconView
from paperless.views import GenerateAuthTokenView
from paperless.views import GroupViewSet
from paperless.views import PaperlessObtainAuthTokenView
from paperless.views import ProfileView
from paperless.views import SocialAccountProvidersView
from paperless.views import TOTPView
@@ -157,7 +157,7 @@ urlpatterns = [
                ),
                path(
                    "token/",
                    views.obtain_auth_token,
                    PaperlessObtainAuthTokenView.as_view(),
                ),
                re_path(
                    "^profile/",

@@ -19,6 +19,7 @@ from django.http import HttpResponseNotFound
from django.views.generic import View
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.authtoken.models import Token
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.decorators import action
from rest_framework.filters import OrderingFilter
from rest_framework.generics import GenericAPIView
@@ -35,10 +36,15 @@ from paperless.filters import UserFilterSet
from paperless.models import ApplicationConfiguration
from paperless.serialisers import ApplicationConfigurationSerializer
from paperless.serialisers import GroupSerializer
from paperless.serialisers import PaperlessAuthTokenSerializer
from paperless.serialisers import ProfileSerializer
from paperless.serialisers import UserSerializer


class PaperlessObtainAuthTokenView(ObtainAuthToken):
    serializer_class = PaperlessAuthTokenSerializer


class StandardPagination(PageNumberPagination):
    page_size = 25
    page_size_query_param = "page_size"
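
Taken together, the last three sections swap DRF's stock token endpoint for an MFA-aware one. A condensed restatement of that wiring, nothing beyond what the hunks above already show (the urlconf below is simplified to the single route):

from django.urls import path
from rest_framework.authtoken.views import ObtainAuthToken

from paperless.serialisers import PaperlessAuthTokenSerializer


class PaperlessObtainAuthTokenView(ObtainAuthToken):
    # ObtainAuthToken still issues the token; only the serializer is swapped,
    # so the MFA "code" is validated before a token is handed out.
    serializer_class = PaperlessAuthTokenSerializer


urlpatterns = [
    # Replaces the previous views.obtain_auth_token binding for "token/".
    path("token/", PaperlessObtainAuthTokenView.as_view()),
]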
 