feat(#240): backend for Mission Control Strip — queue endpoints + expert flag

Adds the server-side foundation for the dashboard transcription widget:

- V36 migration: needs_expert BOOLEAN NOT NULL DEFAULT FALSE on documents
- Document entity: needsExpert field (@Schema required)
- DocumentRepository: 4 native queries — segmentation queue, transcription
  queue, ready-to-read queue (seeded weekly shuffle sort), weekly pulse stats
- TranscriptionQueueService: maps Object[] rows to typed DTOs, handles
  PostgreSQL type variations (UUID/String, Date/LocalDate, Number/BigDecimal)
- TranscriptionQueueController: GET /api/transcription/{segmentation-queue,
  transcription-queue, ready-to-read, weekly-stats} — all guarded by READ_ALL
- DocumentService + DocumentController: PATCH /api/documents/{id}/needs-expert
  toggles the expert flag (WRITE_ALL required)

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Marcel
2026-04-15 22:59:17 +02:00
parent 4b8da0024f
commit 1177de2f95
9 changed files with 277 additions and 0 deletions

View File

@@ -212,6 +212,14 @@ public class DocumentController {
return ResponseEntity.ok(DocumentSearchResult.of(results));
}
// --- EXPERT FLAG ---

/**
 * Flips the needs-expert flag on a single document and returns the updated entity.
 *
 * <p>Idempotent only in pairs: each call inverts the current value (delegates to
 * {@code DocumentService.toggleNeedsExpert}). Guarded by WRITE_ALL like the other
 * mutating document endpoints.
 *
 * @param id the document UUID
 * @return the persisted document with the flag inverted
 */
@PatchMapping("/{id}/needs-expert")
@RequirePermission(Permission.WRITE_ALL)
public Document toggleNeedsExpert(@PathVariable UUID id) {
return documentService.toggleNeedsExpert(id);
}
// --- TRAINING LABELS ---
public record TrainingLabelRequest(String label, boolean enrolled) {}

View File

@@ -0,0 +1,47 @@
package org.raddatz.familienarchiv.controller;
import lombok.RequiredArgsConstructor;
import org.raddatz.familienarchiv.dto.TranscriptionQueueItemDTO;
import org.raddatz.familienarchiv.dto.TranscriptionWeeklyStatsDTO;
import org.raddatz.familienarchiv.security.Permission;
import org.raddatz.familienarchiv.security.RequirePermission;
import org.raddatz.familienarchiv.service.TranscriptionQueueService;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
 * REST surface for the dashboard "Mission Control Strip": one endpoint per
 * queue column plus the weekly activity pulse for the column headers.
 * Every endpoint is read-only and guarded by READ_ALL at the class level,
 * matching the rest of the archive's read endpoints.
 */
@RestController
@RequestMapping("/api/transcription")
@RequiredArgsConstructor
@RequirePermission(Permission.READ_ALL)
public class TranscriptionQueueController {

    private final TranscriptionQueueService transcriptionQueueService;

    /** Segmentierung column: documents still waiting for annotation work. */
    @GetMapping("/segmentation-queue")
    public ResponseEntity<List<TranscriptionQueueItemDTO>> getSegmentationQueue() {
        List<TranscriptionQueueItemDTO> queue = transcriptionQueueService.getSegmentationQueue();
        return ResponseEntity.ok(queue);
    }

    /** Transkription column: annotated documents not yet fully reviewed. */
    @GetMapping("/transcription-queue")
    public ResponseEntity<List<TranscriptionQueueItemDTO>> getTranscriptionQueue() {
        List<TranscriptionQueueItemDTO> queue = transcriptionQueueService.getTranscriptionQueue();
        return ResponseEntity.ok(queue);
    }

    /** Lesefertig column: documents whose transcription is essentially done. */
    @GetMapping("/ready-to-read")
    public ResponseEntity<List<TranscriptionQueueItemDTO>> getReadyToRead() {
        List<TranscriptionQueueItemDTO> queue = transcriptionQueueService.getReadyToReadQueue();
        return ResponseEntity.ok(queue);
    }

    /** Weekly pulse counts shown in the three column headers. */
    @GetMapping("/weekly-stats")
    public ResponseEntity<TranscriptionWeeklyStatsDTO> getWeeklyStats() {
        TranscriptionWeeklyStatsDTO stats = transcriptionQueueService.getWeeklyStats();
        return ResponseEntity.ok(stats);
    }
}

View File

@@ -0,0 +1,19 @@
package org.raddatz.familienarchiv.dto;
import java.time.LocalDate;
import java.util.UUID;
/**
 * A single row in one of the three Mission Control Strip queues.
 *
 * <p>Annotation/block counts drive the per-document mini progress bar in the
 * Transkription column and the percentage label in Lesefertig. Rows for the
 * Segmentierung column carry all three counts as zero (the repository query
 * hard-codes them so every queue shares this row shape).
 *
 * @param id                 document UUID
 * @param title              document title shown in the queue row
 * @param documentDate       the document's own (historical) date; may be null
 * @param needsExpert        whether the document is flagged for expert attention
 * @param annotationCount    number of annotations on the document
 * @param textedBlockCount   transcription blocks that already have non-empty text
 * @param reviewedBlockCount transcription blocks marked as reviewed
 */
public record TranscriptionQueueItemDTO(
UUID id,
String title,
LocalDate documentDate,
boolean needsExpert,
int annotationCount,
int textedBlockCount,
int reviewedBlockCount
) {}

View File

@@ -0,0 +1,12 @@
package org.raddatz.familienarchiv.dto;
/**
 * Weekly activity pulse for the Mission Control Strip column headers.
 *
 * <p>Each count is the number of distinct documents that received new work in
 * the corresponding pipeline stage during the last 7 days (see
 * {@code DocumentRepository.findWeeklyStats} for the exact per-stage criteria).
 *
 * @param segmentationCount  documents that gained new annotations
 * @param transcriptionCount documents that gained newly texted blocks
 * @param readyCount         documents with blocks reviewed in the window
 */
public record TranscriptionWeeklyStatsDTO(
long segmentationCount,
long transcriptionCount,
long readyCount
) {}

View File

@@ -97,6 +97,11 @@ public class Document {
@Builder.Default
private ScriptType scriptType = ScriptType.UNKNOWN;
// Marks a document as needing expert attention (presumably difficult
// handwriting — confirm with product); surfaced in the Mission Control Strip
// queues and flipped via PATCH /api/documents/{id}/needs-expert.
// Backed by the NOT NULL DEFAULT FALSE column from migration V36.
@Column(name = "needs_expert", nullable = false)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
@Builder.Default
private boolean needsExpert = false;
@ManyToMany(fetch = FetchType.EAGER)
@JoinTable(name = "document_receivers", joinColumns = @JoinColumn(name = "document_id"), inverseJoinColumns = @JoinColumn(name = "person_id"))
@Builder.Default

View File

@@ -89,4 +89,82 @@ public interface DocumentRepository extends JpaRepository<Document, UUID>, JpaSp
""")
List<UUID> findRankedIdsByFts(@Param("query") String query);
// --- Mission Control Strip queues ---
/**
 * Segmentierung column: non-placeholder documents that have no annotations yet.
 *
 * <p>The three trailing counts are hard-coded to 0 so the row layout matches
 * the other two queue queries and a single mapper can handle all of them.
 *
 * <p>Ordering: needs_expert ASC puts NON-flagged documents first —
 * NOTE(review): confirm expert-flagged documents are intentionally
 * de-prioritized here. The tiebreaker hashes id + the current ISO week number
 * (hashtext is a PostgreSQL-internal hash), giving a shuffle that is stable
 * within a week but rotates weekly.
 *
 * @param limit maximum number of rows to return
 * @return raw rows (id, title, documentDate, needsExpert, 0, 0, 0)
 */
@Query(nativeQuery = true, value = """
SELECT d.id, d.title, d.meta_date AS documentDate, d.needs_expert AS needsExpert,
0 AS annotationCount, 0 AS textedBlockCount, 0 AS reviewedBlockCount
FROM documents d
WHERE d.status NOT IN ('PLACEHOLDER')
AND NOT EXISTS (SELECT 1 FROM document_annotations da WHERE da.document_id = d.id)
ORDER BY d.needs_expert ASC,
HASHTEXT(d.id::text || EXTRACT(WEEK FROM NOW())::int::text)
LIMIT :limit
""")
List<Object[]> findSegmentationQueue(@Param("limit") int limit);
/**
 * Transkription column: annotated documents whose transcription is not done.
 *
 * <p>A document qualifies when it has at least one annotation AND either no
 * texted blocks at all, or a reviewed/texted ratio below 0.90 (the NULLIF
 * guards the division when the texted count is zero). COUNT(DISTINCT …) is
 * required because the annotation/block joins multiply rows.
 *
 * <p>NOTE(review): unlike {@link #findSegmentationQueue}, this query does not
 * exclude PLACEHOLDER documents — confirm placeholders can never have
 * annotations, otherwise they can surface here.
 *
 * <p>Ordering: non-flagged first (needs_expert ASC — confirm intent), then
 * most-texted first, then the same weekly-seeded hashtext shuffle.
 *
 * @param limit maximum number of rows to return
 * @return raw rows (id, title, documentDate, needsExpert,
 *         annotationCount, textedBlockCount, reviewedBlockCount)
 */
@Query(nativeQuery = true, value = """
SELECT d.id, d.title, d.meta_date AS documentDate, d.needs_expert AS needsExpert,
COUNT(DISTINCT da.id) AS annotationCount,
COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END) AS textedBlockCount,
COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END) AS reviewedBlockCount
FROM documents d
JOIN document_annotations da ON da.document_id = d.id
LEFT JOIN transcription_blocks tb ON tb.document_id = d.id
GROUP BY d.id, d.title, d.meta_date, d.needs_expert
HAVING COUNT(DISTINCT da.id) > 0
AND (
COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END) = 0
OR (
COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END)::float /
NULLIF(COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END), 0)
) < 0.90
)
ORDER BY d.needs_expert ASC,
COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END) DESC,
HASHTEXT(d.id::text || EXTRACT(WEEK FROM NOW())::int::text)
LIMIT :limit
""")
List<Object[]> findTranscriptionQueue(@Param("limit") int limit);
/**
 * Lesefertig column: annotated documents where at least 90 % of the texted
 * blocks are reviewed (and at least one block is texted, so the division is
 * safe without NULLIF).
 *
 * <p>Ordered by reviewed ratio descending — the most finished documents first.
 * NOTE(review): unlike the other two queues, neither needs_expert nor the
 * weekly shuffle participates in the ordering here — confirm that asymmetry
 * is intended. PLACEHOLDER documents are also not excluded (see the note on
 * {@link #findTranscriptionQueue}).
 *
 * @param limit maximum number of rows to return
 * @return raw rows (id, title, documentDate, needsExpert,
 *         annotationCount, textedBlockCount, reviewedBlockCount)
 */
@Query(nativeQuery = true, value = """
SELECT d.id, d.title, d.meta_date AS documentDate, d.needs_expert AS needsExpert,
COUNT(DISTINCT da.id) AS annotationCount,
COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END) AS textedBlockCount,
COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END) AS reviewedBlockCount
FROM documents d
JOIN document_annotations da ON da.document_id = d.id
LEFT JOIN transcription_blocks tb ON tb.document_id = d.id
GROUP BY d.id, d.title, d.meta_date, d.needs_expert
HAVING COUNT(DISTINCT da.id) > 0
AND COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END) > 0
AND (
COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END)::float /
COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END)
) >= 0.90
ORDER BY (
COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END)::float /
COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END)
) DESC
LIMIT :limit
""")
List<Object[]> findReadyToReadQueue(@Param("limit") int limit);
/**
 * Weekly pulse: three scalar subqueries counting distinct documents that
 * received new work in each pipeline stage during the last 7 days —
 * (new annotations, newly texted blocks by created_at, reviewed blocks by
 * updated_at). Note the last count uses updated_at, so any update to an
 * already-reviewed block re-counts the document.
 *
 * <p>NOTE(review): Spring Data is known to wrap a single-row multi-column
 * native result declared as Object[] in an outer one-element Object[] —
 * verify how the caller (TranscriptionQueueService.getWeeklyStats) unpacks
 * this before relying on row[0..2] indexing.
 *
 * @return one row: (segmentationCount, transcriptionCount, readyCount)
 */
@Query(nativeQuery = true, value = """
SELECT
(SELECT COUNT(DISTINCT da.document_id) FROM document_annotations da
WHERE da.created_at >= NOW() - INTERVAL '7 days') AS segmentationCount,
(SELECT COUNT(DISTINCT tb.document_id) FROM transcription_blocks tb
WHERE tb.created_at >= NOW() - INTERVAL '7 days'
AND tb.text IS NOT NULL AND tb.text <> '') AS transcriptionCount,
(SELECT COUNT(DISTINCT tb.document_id) FROM transcription_blocks tb
WHERE tb.updated_at >= NOW() - INTERVAL '7 days'
AND tb.reviewed = true) AS readyCount
""")
Object[] findWeeklyStats();
}

View File

@@ -570,6 +570,13 @@ public class DocumentService {
return parsed != null ? parsed.title() : stripExtension(filename);
}
@Transactional
public Document toggleNeedsExpert(UUID documentId) {
    // Load, invert the flag, persist — one atomic unit of work.
    Document document = getDocumentById(documentId);
    boolean flipped = !document.isNeedsExpert();
    document.setNeedsExpert(flipped);
    return documentRepository.save(document);
}
private static String tryParseDate(String s) {
if (s.matches("\\d{4}-\\d{2}-\\d{2}")) {
int m = Integer.parseInt(s.substring(5, 7));

View File

@@ -0,0 +1,100 @@
package org.raddatz.familienarchiv.service;
import lombok.RequiredArgsConstructor;
import org.raddatz.familienarchiv.dto.TranscriptionQueueItemDTO;
import org.raddatz.familienarchiv.dto.TranscriptionWeeklyStatsDTO;
import org.raddatz.familienarchiv.repository.DocumentRepository;
import org.springframework.stereotype.Service;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.util.List;
import java.util.UUID;
/**
 * Serves the three Mission Control Strip queues (Segmentierung / Transkription / Lesefertig)
 * and the weekly activity pulse used by the column headers.
 *
 * <p>The repository queries are native and return untyped rows; the static
 * mapping helpers below normalize JDBC driver type variations (UUID vs String,
 * java.sql.Date vs LocalDate, arbitrary {@link Number} subclasses such as
 * BigDecimal or BigInteger).
 */
@Service
@RequiredArgsConstructor
public class TranscriptionQueueService {

    /** Rows returned per dashboard column. */
    private static final int DEFAULT_QUEUE_SIZE = 5;

    private final DocumentRepository documentRepository;

    /** Segmentierung column: documents without annotations. */
    public List<TranscriptionQueueItemDTO> getSegmentationQueue() {
        return documentRepository.findSegmentationQueue(DEFAULT_QUEUE_SIZE)
                .stream()
                .map(TranscriptionQueueService::mapRow)
                .toList();
    }

    /** Transkription column: annotated documents not yet fully reviewed. */
    public List<TranscriptionQueueItemDTO> getTranscriptionQueue() {
        return documentRepository.findTranscriptionQueue(DEFAULT_QUEUE_SIZE)
                .stream()
                .map(TranscriptionQueueService::mapRow)
                .toList();
    }

    /** Lesefertig column: documents with >= 90 % of texted blocks reviewed. */
    public List<TranscriptionQueueItemDTO> getReadyToReadQueue() {
        return documentRepository.findReadyToReadQueue(DEFAULT_QUEUE_SIZE)
                .stream()
                .map(TranscriptionQueueService::mapRow)
                .toList();
    }

    /** Weekly pulse counts for the three column headers. */
    public TranscriptionWeeklyStatsDTO getWeeklyStats() {
        Object[] row = documentRepository.findWeeklyStats();
        // Spring Data may hand a single-row multi-column native result back as
        // a one-element Object[] wrapping the actual row — unwrap defensively
        // so the positional indexing below stays stable either way.
        if (row.length == 1 && row[0] instanceof Object[] inner) {
            row = inner;
        }
        return new TranscriptionWeeklyStatsDTO(
                toLong(row[0]),
                toLong(row[1]),
                toLong(row[2])
        );
    }

    // --- mapping helpers (static: they touch no instance state) ---

    /** Maps one raw queue row (7 positional columns) to the typed DTO. */
    private static TranscriptionQueueItemDTO mapRow(Object[] row) {
        return new TranscriptionQueueItemDTO(
                toUUID(row[0]),
                (String) row[1],
                toLocalDate(row[2]),
                toBoolean(row[3]),
                toInt(row[4]),
                toInt(row[5]),
                toInt(row[6]));
    }

    private static UUID toUUID(Object o) {
        if (o instanceof UUID u) return u;
        return UUID.fromString(o.toString());
    }

    private static LocalDate toLocalDate(Object o) {
        if (o == null) return null; // meta_date is nullable
        if (o instanceof LocalDate d) return d;
        if (o instanceof java.sql.Date d) return d.toLocalDate();
        return LocalDate.parse(o.toString());
    }

    private static boolean toBoolean(Object o) {
        if (o instanceof Boolean b) return b;
        return Boolean.parseBoolean(o.toString());
    }

    private static int toInt(Object o) {
        if (o == null) return 0;
        // BigDecimal/BigInteger are Numbers too — this single branch covers
        // them all (the previous explicit BigDecimal check was unreachable).
        if (o instanceof Number n) return n.intValue();
        return Integer.parseInt(o.toString());
    }

    private static long toLong(Object o) {
        if (o == null) return 0L;
        // Same as toInt: every numeric driver type is a Number subclass.
        if (o instanceof Number n) return n.longValue();
        return Long.parseLong(o.toString());
    }
}

View File

@@ -0,0 +1 @@
ALTER TABLE documents ADD COLUMN needs_expert BOOLEAN NOT NULL DEFAULT FALSE;