diff --git a/backend/src/main/java/org/raddatz/familienarchiv/controller/DocumentController.java b/backend/src/main/java/org/raddatz/familienarchiv/controller/DocumentController.java
index 91e3c250..4e6b9c37 100644
--- a/backend/src/main/java/org/raddatz/familienarchiv/controller/DocumentController.java
+++ b/backend/src/main/java/org/raddatz/familienarchiv/controller/DocumentController.java
@@ -211,6 +211,14 @@ public class DocumentController {
         return ResponseEntity.ok(documentService.searchDocuments(q, from, to, senderId, receiverId, tags, tagQ, status, sort, dir));
     }
 
+    // --- EXPERT FLAG ---
+
+    @PatchMapping("/{id}/needs-expert")
+    @RequirePermission(Permission.WRITE_ALL)
+    public Document toggleNeedsExpert(@PathVariable UUID id) {
+        return documentService.toggleNeedsExpert(id);
+    }
+
     // --- TRAINING LABELS ---
 
     public record TrainingLabelRequest(String label, boolean enrolled) {}
diff --git a/backend/src/main/java/org/raddatz/familienarchiv/controller/TranscriptionQueueController.java b/backend/src/main/java/org/raddatz/familienarchiv/controller/TranscriptionQueueController.java
new file mode 100644
index 00000000..59591795
--- /dev/null
+++ b/backend/src/main/java/org/raddatz/familienarchiv/controller/TranscriptionQueueController.java
@@ -0,0 +1,47 @@
+package org.raddatz.familienarchiv.controller;
+
+import lombok.RequiredArgsConstructor;
+import org.raddatz.familienarchiv.dto.TranscriptionQueueItemDTO;
+import org.raddatz.familienarchiv.dto.TranscriptionWeeklyStatsDTO;
+import org.raddatz.familienarchiv.security.Permission;
+import org.raddatz.familienarchiv.security.RequirePermission;
+import org.raddatz.familienarchiv.service.TranscriptionQueueService;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+import java.util.List;
+
+/**
+ * Serves the three Mission Control Strip columns for the dashboard.
+ * All endpoints require READ_ALL — same guard as the rest of the archive.
+ */
+@RestController
+@RequestMapping("/api/transcription")
+@RequiredArgsConstructor
+@RequirePermission(Permission.READ_ALL)
+public class TranscriptionQueueController {
+
+    private final TranscriptionQueueService transcriptionQueueService;
+
+    @GetMapping("/segmentation-queue")
+    public ResponseEntity<List<TranscriptionQueueItemDTO>> getSegmentationQueue() {
+        return ResponseEntity.ok(transcriptionQueueService.getSegmentationQueue());
+    }
+
+    @GetMapping("/transcription-queue")
+    public ResponseEntity<List<TranscriptionQueueItemDTO>> getTranscriptionQueue() {
+        return ResponseEntity.ok(transcriptionQueueService.getTranscriptionQueue());
+    }
+
+    @GetMapping("/ready-to-read")
+    public ResponseEntity<List<TranscriptionQueueItemDTO>> getReadyToRead() {
+        return ResponseEntity.ok(transcriptionQueueService.getReadyToReadQueue());
+    }
+
+    @GetMapping("/weekly-stats")
+    public ResponseEntity<TranscriptionWeeklyStatsDTO> getWeeklyStats() {
+        return ResponseEntity.ok(transcriptionQueueService.getWeeklyStats());
+    }
+}
diff --git a/backend/src/main/java/org/raddatz/familienarchiv/dto/TranscriptionQueueItemDTO.java b/backend/src/main/java/org/raddatz/familienarchiv/dto/TranscriptionQueueItemDTO.java
new file mode 100644
index 00000000..ab441cd9
--- /dev/null
+++ b/backend/src/main/java/org/raddatz/familienarchiv/dto/TranscriptionQueueItemDTO.java
@@ -0,0 +1,19 @@
+package org.raddatz.familienarchiv.dto;
+
+import java.time.LocalDate;
+import java.util.UUID;
+
+/**
+ * A single row in one of the three Mission Control Strip queues.
+ * Annotation/block counts drive the per-document mini progress bar
+ * in the Transkription column and the percentage label in Lesefertig.
+ */
+public record TranscriptionQueueItemDTO(
+        UUID id,
+        String title,
+        LocalDate documentDate,
+        boolean needsExpert,
+        int annotationCount,
+        int textedBlockCount,
+        int reviewedBlockCount
+) {}
diff --git a/backend/src/main/java/org/raddatz/familienarchiv/dto/TranscriptionWeeklyStatsDTO.java b/backend/src/main/java/org/raddatz/familienarchiv/dto/TranscriptionWeeklyStatsDTO.java
new file mode 100644
index 00000000..71e9fc71
--- /dev/null
+++ b/backend/src/main/java/org/raddatz/familienarchiv/dto/TranscriptionWeeklyStatsDTO.java
@@ -0,0 +1,12 @@
+package org.raddatz.familienarchiv.dto;
+
+/**
+ * Weekly activity pulse for the Mission Control Strip column headers.
+ * Counts documents that received new work in each pipeline stage
+ * during the last 7 days.
+ */
+public record TranscriptionWeeklyStatsDTO(
+        long segmentationCount,
+        long transcriptionCount,
+        long readyCount
+) {}
diff --git a/backend/src/main/java/org/raddatz/familienarchiv/model/Document.java b/backend/src/main/java/org/raddatz/familienarchiv/model/Document.java
index 5a5ca54a..0e7e954a 100644
--- a/backend/src/main/java/org/raddatz/familienarchiv/model/Document.java
+++ b/backend/src/main/java/org/raddatz/familienarchiv/model/Document.java
@@ -97,6 +97,11 @@ public class Document {
     @Builder.Default
     private ScriptType scriptType = ScriptType.UNKNOWN;
 
+    @Column(name = "needs_expert", nullable = false)
+    @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
+    @Builder.Default
+    private boolean needsExpert = false;
+
     @ManyToMany(fetch = FetchType.EAGER)
     @JoinTable(name = "document_receivers", joinColumns = @JoinColumn(name = "document_id"), inverseJoinColumns = @JoinColumn(name = "person_id"))
     @Builder.Default
diff --git a/backend/src/main/java/org/raddatz/familienarchiv/repository/DocumentRepository.java b/backend/src/main/java/org/raddatz/familienarchiv/repository/DocumentRepository.java
index 022a2ebb..6e20c064 100644
--- a/backend/src/main/java/org/raddatz/familienarchiv/repository/DocumentRepository.java
+++ b/backend/src/main/java/org/raddatz/familienarchiv/repository/DocumentRepository.java
@@ -167,4 +167,82 @@ public interface DocumentRepository extends JpaRepository<Document, UUID>, JpaSpecificationExecutor<Document> {
             """)
     List<Object[]> findEnrichmentData(@Param("ids") Collection<UUID> ids, @Param("query") String query);
 
+    // --- Mission Control Strip queues ---
+
+    /** Documents with no annotations — Segmentierung column. */
+    @Query(nativeQuery = true, value = """
+            SELECT d.id, d.title, d.meta_date AS documentDate, d.needs_expert AS needsExpert,
+                   0 AS annotationCount, 0 AS textedBlockCount, 0 AS reviewedBlockCount
+            FROM documents d
+            WHERE d.status NOT IN ('PLACEHOLDER')
+              AND NOT EXISTS (SELECT 1 FROM document_annotations da WHERE da.document_id = d.id)
+            ORDER BY d.needs_expert ASC,
+                     HASHTEXT(d.id::text || EXTRACT(WEEK FROM NOW())::int::text)
+            LIMIT :limit
+            """)
+    List<Object[]> findSegmentationQueue(@Param("limit") int limit);
+
+    /** Documents with annotations but not yet fully reviewed — Transkription column. */
+    @Query(nativeQuery = true, value = """
+            SELECT d.id, d.title, d.meta_date AS documentDate, d.needs_expert AS needsExpert,
+                   COUNT(DISTINCT da.id) AS annotationCount,
+                   COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END) AS textedBlockCount,
+                   COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END) AS reviewedBlockCount
+            FROM documents d
+            JOIN document_annotations da ON da.document_id = d.id
+            LEFT JOIN transcription_blocks tb ON tb.document_id = d.id
+            GROUP BY d.id, d.title, d.meta_date, d.needs_expert
+            HAVING COUNT(DISTINCT da.id) > 0
+               AND (
+                   COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END) = 0
+                   OR (
+                       COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END)::float /
+                       NULLIF(COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END), 0)
+                   ) < 0.90
+               )
+            ORDER BY d.needs_expert ASC,
+                     COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END) DESC,
+                     HASHTEXT(d.id::text || EXTRACT(WEEK FROM NOW())::int::text)
+            LIMIT :limit
+            """)
+    List<Object[]> findTranscriptionQueue(@Param("limit") int limit);
+
+    /** Documents with reviewed_pct >= 90 % — Lesefertig column. */
+    @Query(nativeQuery = true, value = """
+            SELECT d.id, d.title, d.meta_date AS documentDate, d.needs_expert AS needsExpert,
+                   COUNT(DISTINCT da.id) AS annotationCount,
+                   COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END) AS textedBlockCount,
+                   COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END) AS reviewedBlockCount
+            FROM documents d
+            JOIN document_annotations da ON da.document_id = d.id
+            LEFT JOIN transcription_blocks tb ON tb.document_id = d.id
+            GROUP BY d.id, d.title, d.meta_date, d.needs_expert
+            HAVING COUNT(DISTINCT da.id) > 0
+               AND COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END) > 0
+               AND (
+                   COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END)::float /
+                   COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END)
+               ) >= 0.90
+            ORDER BY (
+                   COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END)::float /
+                   COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END)
+               ) DESC
+            LIMIT :limit
+            """)
+    List<Object[]> findReadyToReadQueue(@Param("limit") int limit);
+
+    /** Weekly pulse: distinct documents that received new work in each pipeline stage. */
+    @Query(nativeQuery = true, value = """
+            SELECT
+                (SELECT COUNT(DISTINCT da.document_id) FROM document_annotations da
+                 WHERE da.created_at >= NOW() - INTERVAL '7 days') AS segmentationCount,
+                (SELECT COUNT(DISTINCT tb.document_id) FROM transcription_blocks tb
+                 WHERE tb.created_at >= NOW() - INTERVAL '7 days'
+                   AND tb.text IS NOT NULL AND tb.text <> '') AS transcriptionCount,
+                (SELECT COUNT(DISTINCT tb.document_id) FROM transcription_blocks tb
+                 WHERE tb.updated_at >= NOW() - INTERVAL '7 days'
+                   AND tb.reviewed = true) AS readyCount
+            """)
+    List<Object[]> findWeeklyStats();
+}
\ No newline at end of file
diff --git a/backend/src/main/java/org/raddatz/familienarchiv/service/DocumentService.java b/backend/src/main/java/org/raddatz/familienarchiv/service/DocumentService.java
index ab50ab22..272da789 100644
--- a/backend/src/main/java/org/raddatz/familienarchiv/service/DocumentService.java
+++ b/backend/src/main/java/org/raddatz/familienarchiv/service/DocumentService.java
@@ -577,6 +577,13 @@ public class DocumentService {
         return parsed != null ? parsed.title() : stripExtension(filename);
     }
 
+    @Transactional
+    public Document toggleNeedsExpert(UUID documentId) {
+        Document doc = getDocumentById(documentId);
+        doc.setNeedsExpert(!doc.isNeedsExpert());
+        return documentRepository.save(doc);
+    }
+
     private static String tryParseDate(String s) {
         if (s.matches("\\d{4}-\\d{2}-\\d{2}")) {
             int m = Integer.parseInt(s.substring(5, 7));
diff --git a/backend/src/main/java/org/raddatz/familienarchiv/service/TranscriptionQueueService.java b/backend/src/main/java/org/raddatz/familienarchiv/service/TranscriptionQueueService.java
new file mode 100644
index 00000000..1b4a0fde
--- /dev/null
+++ b/backend/src/main/java/org/raddatz/familienarchiv/service/TranscriptionQueueService.java
@@ -0,0 +1,104 @@
+package org.raddatz.familienarchiv.service;
+
+import lombok.RequiredArgsConstructor;
+import org.raddatz.familienarchiv.dto.TranscriptionQueueItemDTO;
+import org.raddatz.familienarchiv.dto.TranscriptionWeeklyStatsDTO;
+import org.raddatz.familienarchiv.repository.DocumentRepository;
+import org.springframework.stereotype.Service;
+
+import java.time.LocalDate;
+import java.util.List;
+import java.util.UUID;
+
+/**
+ * Serves the three Mission Control Strip queues (Segmentierung / Transkription / Lesefertig)
+ * and the weekly activity pulse used by the column headers.
+ */
+@Service
+@RequiredArgsConstructor
+public class TranscriptionQueueService {
+
+    private static final int DEFAULT_QUEUE_SIZE = 5;
+
+    private final DocumentRepository documentRepository;
+
+    public List<TranscriptionQueueItemDTO> getSegmentationQueue() {
+        return documentRepository.findSegmentationQueue(DEFAULT_QUEUE_SIZE)
+                .stream()
+                .map(this::mapRow)
+                .toList();
+    }
+
+    public List<TranscriptionQueueItemDTO> getTranscriptionQueue() {
+        return documentRepository.findTranscriptionQueue(DEFAULT_QUEUE_SIZE)
+                .stream()
+                .map(this::mapRow)
+                .toList();
+    }
+
+    public List<TranscriptionQueueItemDTO> getReadyToReadQueue() {
+        return documentRepository.findReadyToReadQueue(DEFAULT_QUEUE_SIZE)
+                .stream()
+                .map(this::mapRow)
+                .toList();
+    }
+
+    public TranscriptionWeeklyStatsDTO getWeeklyStats() {
+        // Multi-column native scalar query: Spring Data returns a one-row
+        // List<Object[]>; guard against an empty result instead of indexing blindly.
+        List<Object[]> rows = documentRepository.findWeeklyStats();
+        if (rows.isEmpty()) {
+            return new TranscriptionWeeklyStatsDTO(0L, 0L, 0L);
+        }
+        Object[] row = rows.get(0);
+        return new TranscriptionWeeklyStatsDTO(
+                toLong(row[0]),
+                toLong(row[1]),
+                toLong(row[2])
+        );
+    }
+
+    // --- mapping helpers ---
+
+    private TranscriptionQueueItemDTO mapRow(Object[] row) {
+        return new TranscriptionQueueItemDTO(
+                toUUID(row[0]),
+                (String) row[1],
+                toLocalDate(row[2]),
+                toBoolean(row[3]),
+                toInt(row[4]),
+                toInt(row[5]),
+                toInt(row[6]));
+    }
+
+    private UUID toUUID(Object o) {
+        if (o instanceof UUID u) return u;
+        return UUID.fromString(o.toString());
+    }
+
+    private LocalDate toLocalDate(Object o) {
+        if (o == null) return null;
+        if (o instanceof LocalDate d) return d;
+        if (o instanceof java.sql.Date d) return d.toLocalDate();
+        return LocalDate.parse(o.toString());
+    }
+
+    private boolean toBoolean(Object o) {
+        if (o instanceof Boolean b) return b;
+        return Boolean.parseBoolean(o.toString());
+    }
+
+    // Number covers every JDBC numeric type (Long, BigInteger, BigDecimal, ...),
+    // so no separate BigDecimal branch is needed.
+    private int toInt(Object o) {
+        if (o == null) return 0;
+        if (o instanceof Number n) return n.intValue();
+        return Integer.parseInt(o.toString());
+    }
+
+    private long toLong(Object o) {
+        if (o == null) return 0L;
+        if (o instanceof Number n) return n.longValue();
+        return Long.parseLong(o.toString());
+    }
+}
diff --git a/backend/src/main/resources/db/migration/V36__add_needs_expert_to_documents.sql b/backend/src/main/resources/db/migration/V36__add_needs_expert_to_documents.sql
new file mode 100644
index 00000000..03ee225a
--- /dev/null
+++ b/backend/src/main/resources/db/migration/V36__add_needs_expert_to_documents.sql
@@ -0,0 +1 @@
+ALTER TABLE documents ADD COLUMN needs_expert BOOLEAN NOT NULL DEFAULT FALSE;