Compare commits


1 Commit

Author SHA1 Message Date
Marcel
ca4861a90d fix(pdf): merge setElements and render effects so canvas remount triggers re-render
Some checks failed
CI / Unit & Component Tests (push) Failing after 2m28s
CI / Backend Unit Tests (push) Failing after 2m33s
The refactor made pdfDoc a plain variable so renderer.isLoaded was not
reactive. Svelte only tracked currentPage and scale — but when the canvas
reappeared after loading, neither changed, so the PDF stayed blank.

Fix: merge the two effects into one that reads canvasEl synchronously.
Svelte now tracks canvasEl as a dependency; when the canvas remounts
(loading spinner → false), the effect re-fires and renders the
already-loaded PDF document.
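
For illustration, a minimal sketch of the merged-effect pattern described above — prop and helper names (`src`, `pdfDoc`, `canvasEl`, `renderPage`) are assumed here, not taken from the actual viewer component:

```svelte
<script lang="ts">
  // Sketch only — assumes pdfjs-dist; names are illustrative, not the real component code.
  import { getDocument, type PDFDocumentProxy } from 'pdfjs-dist';

  let { src }: { src: string } = $props();

  let canvasEl = $state<HTMLCanvasElement | null>(null); // bound via bind:this below
  let currentPage = $state(1);
  let scale = $state(1.0);
  let loading = $state(true);
  let pdfDoc: PDFDocumentProxy | null = null; // plain variable — intentionally not reactive

  $effect(() => {
    // Load once; flipping `loading` swaps the spinner for the canvas.
    getDocument(src).promise.then((doc) => {
      pdfDoc = doc;
      loading = false;
    });
  });

  // Merged effect: reading canvasEl synchronously registers it as a dependency,
  // so when the canvas remounts (spinner → canvas) the effect re-fires and renders
  // the already-loaded document even though currentPage and scale are unchanged.
  $effect(() => {
    const canvas = canvasEl;
    const page = currentPage;
    const zoom = scale;
    if (!canvas || !pdfDoc) return;
    void renderPage(pdfDoc, canvas, page, zoom);
  });

  async function renderPage(doc: PDFDocumentProxy, canvas: HTMLCanvasElement, pageNum: number, zoom: number) {
    const pdfPage = await doc.getPage(pageNum);
    const viewport = pdfPage.getViewport({ scale: zoom });
    canvas.width = viewport.width;
    canvas.height = viewport.height;
    await pdfPage.render({ canvasContext: canvas.getContext('2d')!, viewport }).promise;
  }
</script>

{#if loading}
  <p>Lade PDF…</p>
{:else}
  <canvas bind:this={canvasEl}></canvas>
{/if}
```

The key point is that the canvas element is read inside the same effect that renders, so the remount alone is enough to trigger a re-render.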

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-15 19:25:36 +02:00
489 changed files with 4379 additions and 202422 deletions

View File

@@ -47,26 +47,6 @@ jobs:
name: unit-test-screenshots
path: frontend/test-results/screenshots/
# ─── OCR Service Unit Tests ───────────────────────────────────────────────────
# Only spell_check.py, test_confidence.py, test_sender_registry.py — no ML stack required.
ocr-tests:
name: OCR Service Tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: '3.11'
- name: Install test dependencies
run: pip install "pyspellchecker==0.9.0" pytest pytest-asyncio
working-directory: ocr-service
- name: Run OCR unit tests (no ML stack required)
run: python -m pytest test_spell_check.py test_confidence.py test_sender_registry.py -v
working-directory: ocr-service
# ─── Backend Unit & Slice Tests ───────────────────────────────────────────────
# Pure Mockito + WebMvcTest — no DB or S3 needed.
backend-unit-tests:

3
.gitignore vendored
View File

@@ -11,5 +11,4 @@ gitea/
scripts/large-data.sql
.vitest-attachments
**/test-results/
.worktrees/

View File

@@ -311,15 +311,13 @@ Save bar pattern — use **sticky full-bleed** for long forms (edit document), *
<div class="mt-4 flex items-center justify-between rounded-sm border border-brand-sand bg-white px-6 py-4 shadow-sm">
```
- Back button pattern — use the shared `<BackButton>` component from `$lib/components/BackButton.svelte`:
+ Back link pattern:
```svelte
- <script lang="ts">
-   import BackButton from '$lib/components/BackButton.svelte';
- </script>
- <BackButton />
+ <a href="/persons" class="inline-flex items-center text-xs font-bold uppercase tracking-widest text-gray-500 hover:text-brand-navy transition-colors group mb-4">
+   <svg class="w-4 h-4 mr-2 transform group-hover:-translate-x-1 transition-transform" .../>
+   Zurück zur Übersicht
+ </a>
```
- The component calls `history.back()` so the user returns to wherever they came from. Label is always "Zurück" (no contextual suffix — destination is unknown). Touch target ≥ 44px and focus ring are built in. Do not use a static `<a href>` for back navigation.
Subtle action link (e.g. "new document/person"):
```svelte

View File

@@ -103,11 +103,6 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-webmvc-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.awaitility</groupId>
<artifactId>awaitility</artifactId>
<scope>test</scope>
</dependency>
<!-- Excel Bearbeitung (Apache POI) -->
<dependency>
@@ -151,12 +146,6 @@
<artifactId>flyway-database-postgresql</artifactId>
</dependency>
<!-- Caffeine cache for in-memory rate limiting -->
<dependency>
<groupId>com.github.ben-manes.caffeine</groupId>
<artifactId>caffeine</artifactId>
</dependency>
<!-- OpenAPI / Swagger UI — enabled only in the dev Spring profile -->
<dependency>
<groupId>org.springdoc</groupId>
@@ -164,19 +153,12 @@
<version>3.0.2</version>
</dependency>
- <!-- PDF rendering for training data export and thumbnail generation -->
+ <!-- PDF rendering for training data export -->
<dependency>
<groupId>org.apache.pdfbox</groupId>
<artifactId>pdfbox</artifactId>
<version>3.0.4</version>
</dependency>
<!-- TIFF decoding plugin for ImageIO (thumbnail generation from scanned TIFFs) -->
<dependency>
<groupId>com.twelvemonkeys.imageio</groupId>
<artifactId>imageio-tiff</artifactId>
<version>3.12.0</version>
</dependency>
</dependencies>

View File

@@ -1,10 +0,0 @@
package org.raddatz.familienarchiv.audit;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.annotation.Nullable;
public record ActivityActorDTO(
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String initials,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String color,
@Nullable String name
) {}

View File

@@ -1,20 +0,0 @@
package org.raddatz.familienarchiv.audit;
import java.time.Instant;
import java.util.UUID;
public interface ActivityFeedRow {
String getKind();
UUID getActorId();
String getActorInitials();
String getActorColor();
String getActorName();
UUID getDocumentId();
Instant getHappenedAt();
boolean isYouMentioned();
boolean isYouParticipated();
int getCount();
Instant getHappenedAtUntil();
/** Present only for COMMENT_ADDED and MENTION_CREATED — null otherwise. */
UUID getCommentId();
}

View File

@@ -1,35 +0,0 @@
package org.raddatz.familienarchiv.audit;
import java.util.Set;
public enum AuditKind {
/** Payload: none */
FILE_UPLOADED,
/** Payload: {@code {"oldStatus": "UPLOADED", "newStatus": "TRANSCRIBED"}} */
STATUS_CHANGED,
/** Payload: none */
METADATA_UPDATED,
/** Payload: {@code {"pageNumber": 3}} */
TEXT_SAVED,
/** Payload: none */
BLOCK_REVIEWED,
/** Payload: {@code {"pageNumber": 3}} */
ANNOTATION_CREATED,
/** Payload: {@code {"commentId": "uuid"}} */
COMMENT_ADDED,
/** Payload: {@code {"commentId": "uuid", "mentionedUserId": "uuid"}} */
MENTION_CREATED;
public static final Set<AuditKind> ROLLUP_ELIGIBLE = Set.of(
TEXT_SAVED, FILE_UPLOADED, ANNOTATION_CREATED,
BLOCK_REVIEWED, COMMENT_ADDED, MENTION_CREATED
);
}

View File

@@ -1,46 +0,0 @@
package org.raddatz.familienarchiv.audit;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.persistence.*;
import lombok.*;
import org.hibernate.annotations.CreationTimestamp;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.type.SqlTypes;
import java.time.OffsetDateTime;
import java.util.Map;
import java.util.UUID;
@Entity
@Table(name = "audit_log")
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class AuditLog {
@Id
@GeneratedValue(strategy = GenerationType.UUID)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private UUID id;
@Column(name = "happened_at", nullable = false, updatable = false)
@CreationTimestamp
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private OffsetDateTime happenedAt;
@Column(name = "actor_id")
private UUID actorId;
@Enumerated(EnumType.STRING)
@Column(name = "kind", nullable = false)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private AuditKind kind;
@Column(name = "document_id")
private UUID documentId;
@JdbcTypeCode(SqlTypes.JSON)
@Column(columnDefinition = "jsonb")
private Map<String, Object> payload;
}

View File

@@ -1,200 +0,0 @@
package org.raddatz.familienarchiv.audit;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.time.OffsetDateTime;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
public interface AuditLogQueryRepository extends JpaRepository<AuditLog, UUID> {
@Query(value = """
SELECT a.document_id
FROM audit_log a
WHERE a.kind IN ('TEXT_SAVED', 'ANNOTATION_CREATED')
AND a.actor_id = :userId
AND a.document_id IS NOT NULL
ORDER BY a.happened_at DESC
LIMIT 1
""", nativeQuery = true)
Optional<UUID> findMostRecentDocumentIdByActor(@Param("userId") UUID userId);
@Query(value = """
WITH events AS (
SELECT
a.kind,
a.actor_id,
a.document_id,
a.happened_at,
a.payload,
LAG(a.happened_at) OVER (
PARTITION BY a.actor_id, a.document_id, a.kind
ORDER BY a.happened_at
) AS prev_happened_at
FROM audit_log a
WHERE a.kind IN (:kinds)
AND a.document_id IS NOT NULL
),
sessions_marked AS (
SELECT
kind, actor_id, document_id, happened_at, payload,
CASE
WHEN kind IN ('COMMENT_ADDED','MENTION_CREATED') THEN 1
WHEN prev_happened_at IS NULL THEN 1
WHEN EXTRACT(EPOCH FROM (happened_at - prev_happened_at)) > 7200 THEN 1
ELSE 0
END AS is_new_session
FROM events
),
sessions AS (
SELECT
kind, actor_id, document_id, happened_at, payload,
SUM(is_new_session) OVER (
PARTITION BY actor_id, document_id, kind
ORDER BY happened_at
ROWS UNBOUNDED PRECEDING
) AS session_id
FROM sessions_marked
),
aggregated AS (
SELECT
s.kind,
s.actor_id,
s.document_id,
s.session_id,
MIN(s.happened_at) AS happened_at,
CASE WHEN COUNT(*) > 1 THEN MAX(s.happened_at) ELSE NULL END AS happened_at_until,
COUNT(*)::int AS count,
BOOL_OR(s.kind = 'MENTION_CREATED'
AND s.payload->>'mentionedUserId' = :currentUserId) AS you_mentioned,
-- COMMENT_ADDED/MENTION_CREATED always have is_new_session=1, so each group has one row and MIN collapses to that row payload
MIN(s.payload::text)::jsonb AS payload
FROM sessions s
GROUP BY s.kind, s.actor_id, s.document_id, s.session_id
)
SELECT
ag.kind AS kind,
ag.actor_id AS actorId,
CASE
WHEN u.first_name IS NOT NULL AND u.last_name IS NOT NULL
THEN UPPER(LEFT(u.first_name, 1)) || UPPER(LEFT(u.last_name, 1))
WHEN u.first_name IS NOT NULL THEN UPPER(LEFT(u.first_name, 1))
WHEN u.last_name IS NOT NULL THEN UPPER(LEFT(u.last_name, 1))
ELSE '?'
END AS actorInitials,
COALESCE(u.color, '') AS actorColor,
CONCAT_WS(' ', u.first_name, u.last_name) AS actorName,
ag.document_id AS documentId,
ag.happened_at AS happened_at,
ag.you_mentioned AS youMentioned,
-- payload->>'commentId' matches notifications.reference_id per AuditKind.COMMENT_ADDED contract
EXISTS(
SELECT 1 FROM notifications n
WHERE n.type = 'REPLY'
AND n.recipient_id = CAST(:currentUserId AS uuid)
AND n.reference_id = (ag.payload->>'commentId')::uuid
) AS youParticipated,
ag.count AS count,
ag.happened_at_until AS happenedAtUntil,
(ag.payload->>'commentId')::uuid AS commentId
FROM aggregated ag
LEFT JOIN users u ON u.id = ag.actor_id
ORDER BY ag.happened_at DESC
LIMIT :limit
""", nativeQuery = true)
List<ActivityFeedRow> findRolledUpActivityFeed(
@Param("currentUserId") String currentUserId,
@Param("limit") int limit,
@Param("kinds") Collection<String> kinds);
@Query(value = """
SELECT
COUNT(DISTINCT (a.document_id::text || '|' || (a.payload->>'pageNumber'))) AS pages,
COUNT(*) FILTER (WHERE a.kind = 'ANNOTATION_CREATED') AS annotated,
COUNT(DISTINCT a.payload->>'blockId') FILTER (WHERE a.kind = 'TEXT_SAVED') AS transcribed,
COUNT(DISTINCT a.document_id) FILTER (WHERE a.kind = 'FILE_UPLOADED') AS uploaded,
COUNT(DISTINCT (a.document_id::text || '|' || (a.payload->>'pageNumber')))
FILTER (WHERE (a.kind = 'ANNOTATION_CREATED' OR a.kind = 'TEXT_SAVED')
AND a.actor_id::text = :userId) AS yourPages
FROM audit_log a
WHERE a.happened_at >= :weekStart
AND a.kind IN ('ANNOTATION_CREATED','TEXT_SAVED','FILE_UPLOADED')
""", nativeQuery = true)
PulseStatsRow getPulseStats(
@Param("weekStart") OffsetDateTime weekStart,
@Param("userId") String userId);
@Query(value = """
SELECT DISTINCT ON (a.document_id)
a.document_id AS documentId,
a.actor_id AS actorId
FROM audit_log a
WHERE a.kind = :kind
AND a.document_id IN :documentIds
AND a.actor_id IS NOT NULL
ORDER BY a.document_id, a.happened_at DESC
""", nativeQuery = true)
List<Object[]> findMostRecentActorPerDocument(
@Param("documentIds") List<UUID> documentIds,
@Param("kind") String kind);
@Query(value = """
SELECT
a.document_id AS documentId,
CASE
WHEN u.first_name IS NOT NULL AND u.last_name IS NOT NULL
THEN UPPER(LEFT(u.first_name, 1)) || UPPER(LEFT(u.last_name, 1))
WHEN u.first_name IS NOT NULL THEN UPPER(LEFT(u.first_name, 1))
WHEN u.last_name IS NOT NULL THEN UPPER(LEFT(u.last_name, 1))
ELSE '?'
END AS actorInitials,
COALESCE(u.color, '') AS actorColor,
CONCAT_WS(' ', u.first_name, u.last_name) AS actorName
FROM audit_log a
LEFT JOIN users u ON u.id = a.actor_id
WHERE a.kind IN ('ANNOTATION_CREATED', 'TEXT_SAVED', 'BLOCK_REVIEWED')
AND a.document_id IN :documentIds
AND a.actor_id IS NOT NULL
GROUP BY a.document_id, a.actor_id, u.first_name, u.last_name, u.color
ORDER BY a.document_id, MIN(a.happened_at)
""", nativeQuery = true)
List<ContributorRow> findContributorsPerDocument(@Param("documentIds") List<UUID> documentIds);
@Query(value = """
SELECT
ranked.document_id AS documentId,
ranked.actorInitials AS actorInitials,
ranked.actorColor AS actorColor,
ranked.actorName AS actorName
FROM (
SELECT
a.document_id,
CASE
WHEN u.first_name IS NOT NULL AND u.last_name IS NOT NULL
THEN UPPER(LEFT(u.first_name, 1)) || UPPER(LEFT(u.last_name, 1))
WHEN u.first_name IS NOT NULL THEN UPPER(LEFT(u.first_name, 1))
WHEN u.last_name IS NOT NULL THEN UPPER(LEFT(u.last_name, 1))
ELSE '?'
END AS actorInitials,
COALESCE(u.color, '') AS actorColor,
NULLIF(CONCAT_WS(' ', u.first_name, u.last_name), '') AS actorName,
ROW_NUMBER() OVER (
PARTITION BY a.document_id
ORDER BY MAX(a.happened_at) DESC
) AS rn
FROM audit_log a
LEFT JOIN users u ON u.id = a.actor_id
WHERE a.kind IN ('ANNOTATION_CREATED', 'TEXT_SAVED', 'BLOCK_REVIEWED')
AND a.document_id IN :documentIds
AND a.actor_id IS NOT NULL
GROUP BY a.document_id, a.actor_id, u.first_name, u.last_name, u.color
) ranked
WHERE ranked.rn <= 4
ORDER BY ranked.document_id, ranked.rn
""", nativeQuery = true)
List<ContributorRow> findRecentContributorsForDocuments(@Param("documentIds") List<UUID> documentIds);
}

View File

@@ -1,62 +0,0 @@
package org.raddatz.familienarchiv.audit;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
import java.time.OffsetDateTime;
import java.util.*;
@Service
@RequiredArgsConstructor
public class AuditLogQueryService {
private final AuditLogQueryRepository queryRepository;
public Optional<UUID> findMostRecentDocumentForUser(UUID userId) {
return queryRepository.findMostRecentDocumentIdByActor(userId);
}
public List<ActivityFeedRow> findActivityFeed(UUID currentUserId, int limit) {
return findActivityFeed(currentUserId, limit, AuditKind.ROLLUP_ELIGIBLE);
}
public List<ActivityFeedRow> findActivityFeed(UUID currentUserId, int limit, Set<AuditKind> kinds) {
List<String> kindNames = kinds.stream().map(Enum::name).toList();
return queryRepository.findRolledUpActivityFeed(currentUserId.toString(), limit, kindNames);
}
public PulseStatsRow getPulseStats(OffsetDateTime weekStart, UUID userId) {
return queryRepository.getPulseStats(weekStart, userId.toString());
}
public Map<UUID, UUID> findMostRecentActorPerDocument(List<UUID> documentIds, String kind) {
if (documentIds.isEmpty()) return Map.of();
List<Object[]> rows = queryRepository.findMostRecentActorPerDocument(documentIds, kind);
Map<UUID, UUID> result = new LinkedHashMap<>();
for (Object[] row : rows) {
UUID docId = (UUID) row[0];
UUID actorId = (UUID) row[1];
result.put(docId, actorId);
}
return result;
}
public Map<UUID, List<ActivityActorDTO>> findContributorsPerDocument(List<UUID> documentIds) {
if (documentIds.isEmpty()) return Map.of();
return toContributorMap(queryRepository.findContributorsPerDocument(documentIds));
}
public Map<UUID, List<ActivityActorDTO>> findRecentContributorsPerDocument(List<UUID> documentIds) {
if (documentIds.isEmpty()) return Map.of();
return toContributorMap(queryRepository.findRecentContributorsForDocuments(documentIds));
}
private Map<UUID, List<ActivityActorDTO>> toContributorMap(List<ContributorRow> rows) {
Map<UUID, List<ActivityActorDTO>> result = new LinkedHashMap<>();
for (ContributorRow row : rows) {
result.computeIfAbsent(row.getDocumentId(), k -> new ArrayList<>())
.add(new ActivityActorDTO(row.getActorInitials(), row.getActorColor(), row.getActorName()));
}
return result;
}
}

View File

@@ -1,8 +0,0 @@
package org.raddatz.familienarchiv.audit;
import org.springframework.data.jpa.repository.JpaRepository;
import java.util.UUID;
public interface AuditLogRepository extends JpaRepository<AuditLog, UUID> {
}

View File

@@ -1,57 +0,0 @@
package org.raddatz.familienarchiv.audit;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.core.task.TaskExecutor;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.transaction.support.TransactionSynchronization;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import java.util.Map;
import java.util.UUID;
@Service
@RequiredArgsConstructor
@Slf4j
public class AuditService {
private final AuditLogRepository auditLogRepository;
@Qualifier("auditExecutor")
private final TaskExecutor auditExecutor;
@Async("auditExecutor")
public void log(AuditKind kind, UUID actorId, UUID documentId, Map<String, Object> payload) {
writeLog(kind, actorId, documentId, payload);
}
public void logAfterCommit(AuditKind kind, UUID actorId, UUID documentId, Map<String, Object> payload) {
if (TransactionSynchronizationManager.isActualTransactionActive()) {
TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronization() {
@Override
public void afterCommit() {
// Run on a separate thread: the afterCommit() callback fires while Spring's
// transaction synchronizations are still active on the current thread, which
// prevents SimpleJpaRepository.save() from starting a new transaction inline.
auditExecutor.execute(() -> writeLog(kind, actorId, documentId, payload));
}
});
} else {
writeLog(kind, actorId, documentId, payload);
}
}
private void writeLog(AuditKind kind, UUID actorId, UUID documentId, Map<String, Object> payload) {
try {
auditLogRepository.save(AuditLog.builder()
.kind(kind)
.actorId(actorId)
.documentId(documentId)
.payload(payload)
.build());
} catch (Exception e) {
log.error("Audit log write failed: kind={}, document={}", kind, documentId, e);
}
}
}

View File

@@ -1,10 +0,0 @@
package org.raddatz.familienarchiv.audit;
import java.util.UUID;
public interface ContributorRow {
UUID getDocumentId();
String getActorInitials();
String getActorColor();
String getActorName();
}

View File

@@ -1,9 +0,0 @@
package org.raddatz.familienarchiv.audit;
public interface PulseStatsRow {
long getPages();
long getAnnotated();
long getTranscribed();
long getUploaded();
long getYourPages();
}

View File

@@ -23,33 +23,4 @@ public class AsyncConfig {
executor.setRejectedExecutionHandler(new ThreadPoolExecutor.AbortPolicy());
return executor;
}
@Bean("auditExecutor")
public Executor auditExecutor() {
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
executor.setCorePoolSize(1);
executor.setMaxPoolSize(2);
executor.setQueueCapacity(50);
executor.setThreadNamePrefix("Audit-");
// AbortPolicy instead of CallerRunsPolicy: if CallerRunsPolicy ran the task on the
// afterCommit() callback thread, Spring's transaction synchronizations would still be
// active on that thread and SimpleJpaRepository.save() would throw IllegalStateException.
executor.setRejectedExecutionHandler(new ThreadPoolExecutor.AbortPolicy());
return executor;
}
@Bean("thumbnailExecutor")
public Executor thumbnailExecutor() {
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
executor.setCorePoolSize(1);
executor.setMaxPoolSize(2);
executor.setQueueCapacity(200);
executor.setThreadNamePrefix("Thumbnail-");
// CallerRunsPolicy applies back-pressure to quick-upload batches and admin backfill
// instead of dropping work (shared taskExecutor uses AbortPolicy). Safe because the
// task is dispatched via TransactionSynchronization.afterCommit, which runs on a
// post-commit callback thread without active transaction synchronization.
executor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
return executor;
}
}

View File

@@ -31,8 +31,8 @@ import java.util.Set;
@DependsOn("flyway") @DependsOn("flyway")
public class DataInitializer { public class DataInitializer {
@Value("${app.admin.email:admin@familyarchive.local}") @Value("${app.admin.username:admin}")
private String adminEmail; private String adminUsername;
@Value("${app.admin.password:admin123}") @Value("${app.admin.password:admin123}")
private String adminPassword; private String adminPassword;
@@ -43,23 +43,26 @@ public class DataInitializer {
@Bean
public CommandLineRunner initAdminUser(PasswordEncoder passwordEncoder) {
return args -> {
- if (userRepository.findByEmail(adminEmail).isEmpty()) {
- log.info("Kein Admin-User '{}' gefunden. Erstelle Default-Admin...", adminEmail);
+ if (userRepository.findByUsername(adminUsername).isEmpty()) {
+ log.info("Kein Admin-User '{}' gefunden. Erstelle Default-Admin...", adminUsername);
+ // 1. Admin Gruppe erstellen
UserGroup adminGroup = UserGroup.builder()
.name("Administrators")
.permissions(Set.of("ADMIN", "READ_ALL", "WRITE_ALL", "ANNOTATE_ALL", "ADMIN_USER", "ADMIN_TAG", "ADMIN_PERMISSION"))
.build();
groupRepository.save(adminGroup);
+ // 2. Admin User erstellen
AppUser admin = AppUser.builder()
- .email(adminEmail)
- .password(passwordEncoder.encode(adminPassword))
+ .username(adminUsername)
+ .password(passwordEncoder.encode(adminPassword)) // Passwort verschlüsseln!
+ .email("admin@familyarchiv.local")
.groups(Set.of(adminGroup))
.build();
userRepository.save(admin);
- log.info("Default Admin erstellt: Email='{}'", adminEmail);
+ log.info("Default Admin erstellt: User='{}'", adminUsername);
} }
}; };
} }
@@ -81,13 +84,16 @@ public class DataInitializer {
TagRepository tagRepo,
PasswordEncoder passwordEncoder) {
return args -> {
- userRepository.findByEmail(adminEmail).ifPresent(admin -> {
+ // Always reset the admin password to the configured value so a failed password-reset
+ // test from a previous run can never leave the account locked out.
+ userRepository.findByUsername(adminUsername).ifPresent(admin -> {
admin.setPassword(passwordEncoder.encode(adminPassword));
userRepository.save(admin);
log.info("E2E seed: Admin-Passwort auf konfigurierten Wert zurückgesetzt.");
});
- if (userRepository.findByEmail("reader@familyarchiv.local").isEmpty()) {
+ // Always ensure the read-only test user exists, even when seed data was already loaded.
+ if (userRepository.findByUsername("reader").isEmpty()) {
log.info("E2E seed: Erstelle 'reader'-Testbenutzer...");
UserGroup leserGroup = groupRepository.findByName("Leser").orElseGet(() ->
groupRepository.save(UserGroup.builder()
@@ -95,7 +101,7 @@ public class DataInitializer {
.permissions(Set.of("READ_ALL")) .permissions(Set.of("READ_ALL"))
.build())); .build()));
userRepository.save(AppUser.builder() userRepository.save(AppUser.builder()
.email("reader@familyarchive.local") .username("reader")
.password(passwordEncoder.encode("reader123")) .password(passwordEncoder.encode("reader123"))
.groups(Set.of(leserGroup)) .groups(Set.of(leserGroup))
.build()); .build());
@@ -125,6 +131,7 @@ public class DataInitializer {
Tag tagUrlaub = tagRepo.save(Tag.builder().name("Urlaub").build());
// ── Documents ────────────────────────────────────────────────────
+ // 1. Fully transcribed letter — used by search + detail E2E tests
docRepo.save(Document.builder()
.title("Geburtsurkunde Hans Müller")
.originalFilename("geburtsurkunde_hans.pdf")
@@ -137,6 +144,7 @@ public class DataInitializer {
.transcription("Hiermit wird beurkundet, dass Hans Müller am 12. April 1923 in Berlin geboren wurde.") .transcription("Hiermit wird beurkundet, dass Hans Müller am 12. April 1923 in Berlin geboren wurde.")
.build()); .build());
// 2. Letter with multiple receivers and tags — tests multi-receiver display
docRepo.save(Document.builder() docRepo.save(Document.builder()
.title("Brief aus dem Krieg") .title("Brief aus dem Krieg")
.originalFilename("brief_krieg_1944.pdf") .originalFilename("brief_krieg_1944.pdf")
@@ -149,6 +157,7 @@ public class DataInitializer {
.transcription("Liebe Anna, ich schreibe dir aus der Front. Es geht mir den Umständen entsprechend gut.") .transcription("Liebe Anna, ich schreibe dir aus der Front. Es geht mir den Umständen entsprechend gut.")
.build()); .build());
// 3. Postcard — no transcription, tests PLACEHOLDER status
docRepo.save(Document.builder() docRepo.save(Document.builder()
.title("Urlaubspostkarte Ostsee") .title("Urlaubspostkarte Ostsee")
.originalFilename("postkarte_1965.jpg") .originalFilename("postkarte_1965.jpg")
@@ -160,6 +169,7 @@ public class DataInitializer {
.tags(Set.of(tagUrlaub))
.build());
+ // 4. Document with no sender — tests null-sender display ("Unbekannt")
docRepo.save(Document.builder()
.title("Unbekanntes Dokument")
.originalFilename("unbekannt.pdf")
@@ -169,6 +179,7 @@ public class DataInitializer {
.receivers(Set.of(maria))
.build());
+ // 5. Document with minimal metadata — tests sparse display
docRepo.save(Document.builder()
.title("Scan ohne Titel")
.originalFilename("scan_ohne_titel.pdf")

View File

@@ -1,69 +0,0 @@
package org.raddatz.familienarchiv.config;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import org.springframework.http.HttpStatus;
import org.springframework.web.servlet.HandlerInterceptor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
public class RateLimitInterceptor implements HandlerInterceptor {
private static final int MAX_REQUESTS_PER_MINUTE = 10;
// Caffeine cache: per-IP counter that expires 1 minute after first access.
// Bounded to 10_000 entries to prevent OOM from IP exhaustion.
private final Cache<String, AtomicInteger> requestCounts = Caffeine.newBuilder()
.expireAfterAccess(1, TimeUnit.MINUTES)
.maximumSize(10_000)
.build();
@Override
public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler)
throws Exception {
String ip = resolveClientIp(request);
AtomicInteger count = requestCounts.get(ip, k -> new AtomicInteger(0));
if (count.incrementAndGet() > MAX_REQUESTS_PER_MINUTE) {
response.setStatus(HttpStatus.TOO_MANY_REQUESTS.value());
response.getWriter().write("{\"code\":\"RATE_LIMIT_EXCEEDED\",\"message\":\"Too many requests\"}");
return false;
}
return true;
}
private String resolveClientIp(HttpServletRequest request) {
// Only trust X-Forwarded-For when the direct connection comes from a known
// reverse proxy (loopback or Docker private network). Trusting it unconditionally
// allows any client to spoof a different IP and bypass per-IP rate limiting.
String remoteAddr = request.getRemoteAddr();
if (isTrustedProxy(remoteAddr)) {
String forwarded = request.getHeader("X-Forwarded-For");
if (forwarded != null && !forwarded.isBlank()) {
return forwarded.split(",")[0].trim();
}
}
return remoteAddr;
}
private boolean isTrustedProxy(String ip) {
if (ip.equals("127.0.0.1") || ip.equals("::1") || ip.startsWith("10.") || ip.startsWith("192.168.")) {
return true;
}
// Only RFC 1918 172.16.0.0/12 (172.16–172.31), not all of 172.x
if (ip.startsWith("172.")) {
String[] parts = ip.split("\\.");
if (parts.length >= 2) {
try {
int second = Integer.parseInt(parts[1]);
return second >= 16 && second <= 31;
} catch (NumberFormatException ignored) {
return false;
}
}
}
return false;
}
}

View File

@@ -50,8 +50,6 @@ public class SecurityConfig {
auth.requestMatchers("/actuator/health").permitAll(); auth.requestMatchers("/actuator/health").permitAll();
// Password reset endpoints are unauthenticated by nature // Password reset endpoints are unauthenticated by nature
auth.requestMatchers("/api/auth/forgot-password", "/api/auth/reset-password").permitAll(); auth.requestMatchers("/api/auth/forgot-password", "/api/auth/reset-password").permitAll();
// Invite-based registration endpoints are public
auth.requestMatchers("/api/auth/invite/**", "/api/auth/register").permitAll();
// E2E test helper (only active under "e2e" profile) // E2E test helper (only active under "e2e" profile)
auth.requestMatchers("/api/auth/reset-token-for-test").permitAll(); auth.requestMatchers("/api/auth/reset-token-for-test").permitAll();
// In dev, allow unauthenticated access to the OpenAPI spec and Swagger UI // In dev, allow unauthenticated access to the OpenAPI spec and Swagger UI
@@ -69,7 +67,7 @@ public class SecurityConfig {
.frameOptions(frameOptions -> frameOptions.sameOrigin()))
// Erlaubt Login via Browser-Popup oder REST-Header (Authorization: Basic ...)
.httpBasic(Customizer.withDefaults())
- .formLogin(form -> form.usernameParameter("email"));
+ .formLogin(Customizer.withDefaults());
return http.build();
}

View File

@@ -1,15 +0,0 @@
package org.raddatz.familienarchiv.config;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
@Configuration
public class WebConfig implements WebMvcConfigurer {
@Override
public void addInterceptors(InterceptorRegistry registry) {
registry.addInterceptor(new RateLimitInterceptor())
.addPathPatterns("/api/auth/invite/**", "/api/auth/register");
}
}

View File

@@ -6,7 +6,6 @@ import org.raddatz.familienarchiv.security.RequirePermission;
import org.raddatz.familienarchiv.service.DocumentService;
import org.raddatz.familienarchiv.service.DocumentVersionService;
import org.raddatz.familienarchiv.service.MassImportService;
- import org.raddatz.familienarchiv.service.ThumbnailBackfillService;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
@@ -24,7 +23,6 @@ public class AdminController {
private final MassImportService massImportService;
private final DocumentService documentService;
private final DocumentVersionService documentVersionService;
- private final ThumbnailBackfillService thumbnailBackfillService;
@PostMapping("/trigger-import")
public ResponseEntity<MassImportService.ImportStatus> triggerMassImport() {
@@ -49,15 +47,4 @@ public class AdminController {
int count = documentService.backfillFileHashes();
return ResponseEntity.ok(new BackfillResult(count));
}
@PostMapping("/generate-thumbnails")
public ResponseEntity<ThumbnailBackfillService.BackfillStatus> generateThumbnails() {
thumbnailBackfillService.runBackfillAsync();
return ResponseEntity.accepted().body(thumbnailBackfillService.getStatus());
}
@GetMapping("/thumbnail-status")
public ResponseEntity<ThumbnailBackfillService.BackfillStatus> thumbnailStatus() {
return ResponseEntity.ok(thumbnailBackfillService.getStatus());
}
}

View File

@@ -72,7 +72,7 @@ public class AnnotationController {
private UUID resolveUserId(Authentication authentication) {
if (authentication == null || !authentication.isAuthenticated()) return null;
try {
- AppUser user = userService.findByEmail(authentication.getName());
+ AppUser user = userService.findByUsername(authentication.getName());
return user != null ? user.getId() : null;
} catch (Exception e) {
log.warn("Could not resolve user for annotation: {}", e.getMessage());

View File

@@ -1,18 +1,14 @@
package org.raddatz.familienarchiv.controller;
- import jakarta.validation.Valid;
import org.raddatz.familienarchiv.dto.ForgotPasswordRequest;
- import org.raddatz.familienarchiv.dto.InvitePrefillDTO;
- import org.raddatz.familienarchiv.dto.RegisterRequest;
import org.raddatz.familienarchiv.dto.ResetPasswordRequest;
- import org.raddatz.familienarchiv.model.AppUser;
- import org.raddatz.familienarchiv.model.InviteToken;
- import org.raddatz.familienarchiv.service.InviteService;
import org.raddatz.familienarchiv.service.PasswordResetService;
import org.springframework.beans.factory.annotation.Value;
- import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
- import org.springframework.web.bind.annotation.*;
+ import org.springframework.web.bind.annotation.PostMapping;
+ import org.springframework.web.bind.annotation.RequestBody;
+ import org.springframework.web.bind.annotation.RequestMapping;
+ import org.springframework.web.bind.annotation.RestController;
import lombok.RequiredArgsConstructor;
@@ -22,7 +18,6 @@ import lombok.RequiredArgsConstructor;
public class AuthController {
private final PasswordResetService passwordResetService;
- private final InviteService inviteService;
@Value("${app.base-url:http://localhost:3000}")
private String appBaseUrl;
@@ -39,20 +34,4 @@ public class AuthController {
passwordResetService.resetPassword(request);
return ResponseEntity.noContent().build();
}
@GetMapping("/invite/{code}")
public InvitePrefillDTO getInvitePrefill(@PathVariable String code) {
InviteToken token = inviteService.validateCode(code);
return new InvitePrefillDTO(
token.getPrefillFirstName(),
token.getPrefillLastName(),
token.getPrefillEmail()
);
}
@PostMapping("/register")
public ResponseEntity<AppUser> register(@Valid @RequestBody RegisterRequest request) {
AppUser user = inviteService.redeemInvite(request);
return ResponseEntity.status(HttpStatus.CREATED).body(user);
}
}

View File

@@ -24,6 +24,67 @@ public class CommentController {
private final CommentService commentService;
private final UserService userService;
// ─── General document comments ────────────────────────────────────────────
@GetMapping("/api/documents/{documentId}/comments")
public List<DocumentComment> getDocumentComments(@PathVariable UUID documentId) {
return commentService.getCommentsForDocument(documentId);
}
@PostMapping("/api/documents/{documentId}/comments")
@ResponseStatus(HttpStatus.CREATED)
@RequirePermission({Permission.ANNOTATE_ALL, Permission.WRITE_ALL})
public DocumentComment postDocumentComment(
@PathVariable UUID documentId,
@RequestBody CreateCommentDTO dto,
Authentication authentication) {
AppUser author = resolveUser(authentication);
return commentService.postComment(documentId, null, dto.getContent(), dto.getMentionedUserIds(), author);
}
@PostMapping("/api/documents/{documentId}/comments/{commentId}/replies")
@ResponseStatus(HttpStatus.CREATED)
@RequirePermission({Permission.ANNOTATE_ALL, Permission.WRITE_ALL})
public DocumentComment replyToDocumentComment(
@PathVariable UUID documentId,
@PathVariable UUID commentId,
@RequestBody CreateCommentDTO dto,
Authentication authentication) {
AppUser author = resolveUser(authentication);
return commentService.replyToComment(documentId, commentId, dto.getContent(), dto.getMentionedUserIds(), author);
}
// ─── Annotation comments ──────────────────────────────────────────────────
@GetMapping("/api/documents/{documentId}/annotations/{annotationId}/comments")
public List<DocumentComment> getAnnotationComments(@PathVariable UUID annotationId) {
return commentService.getCommentsForAnnotation(annotationId);
}
@PostMapping("/api/documents/{documentId}/annotations/{annotationId}/comments")
@ResponseStatus(HttpStatus.CREATED)
@RequirePermission({Permission.ANNOTATE_ALL, Permission.WRITE_ALL})
public DocumentComment postAnnotationComment(
@PathVariable UUID documentId,
@PathVariable UUID annotationId,
@RequestBody CreateCommentDTO dto,
Authentication authentication) {
AppUser author = resolveUser(authentication);
return commentService.postComment(documentId, annotationId, dto.getContent(), dto.getMentionedUserIds(), author);
}
@PostMapping("/api/documents/{documentId}/annotations/{annotationId}/comments/{commentId}/replies")
@ResponseStatus(HttpStatus.CREATED)
@RequirePermission({Permission.ANNOTATE_ALL, Permission.WRITE_ALL})
public DocumentComment replyToAnnotationComment(
@PathVariable UUID documentId,
@PathVariable UUID commentId,
@RequestBody CreateCommentDTO dto,
Authentication authentication) {
AppUser author = resolveUser(authentication);
return commentService.replyToComment(documentId, commentId, dto.getContent(), dto.getMentionedUserIds(), author);
}
// ─── Block (transcription) comments ────────────────────────────────────────
@GetMapping("/api/documents/{documentId}/transcription-blocks/{blockId}/comments")
@@ -83,7 +144,7 @@ public class CommentController {
private AppUser resolveUser(Authentication authentication) {
if (authentication == null || !authentication.isAuthenticated()) return null;
try {
- return userService.findByEmail(authentication.getName());
+ return userService.findByUsername(authentication.getName());
} catch (Exception e) {
log.warn("Could not resolve user for comment: {}", e.getMessage());
return null;

View File

@@ -13,15 +13,8 @@ import java.util.UUID;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
- import jakarta.validation.constraints.Max;
- import jakarta.validation.constraints.Min;
- import org.springframework.data.domain.PageRequest;
- import org.springframework.data.domain.Pageable;
- import org.springframework.validation.annotation.Validated;
- import org.raddatz.familienarchiv.dto.DocumentBatchMetadataDTO;
import org.raddatz.familienarchiv.dto.DocumentSearchResult;
import org.raddatz.familienarchiv.dto.DocumentUpdateDTO;
- import org.raddatz.familienarchiv.dto.TagOperator;
import org.raddatz.familienarchiv.dto.DocumentVersionSummary;
import org.raddatz.familienarchiv.dto.IncompleteDocumentDTO;
import org.raddatz.familienarchiv.exception.DomainException;
@@ -31,16 +24,12 @@ import org.raddatz.familienarchiv.dto.DocumentSort;
import org.raddatz.familienarchiv.model.DocumentStatus;
import org.raddatz.familienarchiv.model.TrainingLabel;
import org.raddatz.familienarchiv.model.DocumentVersion;
- import org.raddatz.familienarchiv.model.AppUser;
import org.raddatz.familienarchiv.security.Permission;
import org.raddatz.familienarchiv.security.RequirePermission;
- import org.raddatz.familienarchiv.security.SecurityUtils;
import org.raddatz.familienarchiv.service.DocumentService;
import org.raddatz.familienarchiv.service.DocumentVersionService;
import org.raddatz.familienarchiv.service.FileService;
- import org.raddatz.familienarchiv.service.UserService;
import org.springframework.data.domain.Sort;
- import org.springframework.security.core.Authentication;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
@@ -68,13 +57,11 @@ import lombok.extern.slf4j.Slf4j;
@RequestMapping("/api/documents") @RequestMapping("/api/documents")
@RequiredArgsConstructor @RequiredArgsConstructor
@Slf4j @Slf4j
@Validated
public class DocumentController { public class DocumentController {
private final DocumentService documentService; private final DocumentService documentService;
private final DocumentVersionService documentVersionService; private final DocumentVersionService documentVersionService;
private final FileService fileService; private final FileService fileService;
private final UserService userService;
// --- DOWNLOAD --- // --- DOWNLOAD ---
@GetMapping("/{id}/file") @GetMapping("/{id}/file")
@@ -101,31 +88,6 @@ public class DocumentController {
}
}
// --- THUMBNAIL ---
@GetMapping("/{id}/thumbnail")
public ResponseEntity<InputStreamResource> getDocumentThumbnail(@PathVariable UUID id) {
Document doc = documentService.getDocumentById(id);
if (doc.getThumbnailKey() == null) {
throw DomainException.notFound(ErrorCode.FILE_NOT_FOUND, "No thumbnail for document: " + id);
}
try {
FileService.S3FileDownload download = fileService.downloadFile(doc.getThumbnailKey());
return ResponseEntity.ok()
.contentType(MediaType.IMAGE_JPEG)
// `private` (not `public`) prevents shared caches from serving one user's
// thumbnail to another (CWE-525). `immutable` is safe because the URL
// carries a ?v=<thumbnailGeneratedAt> cache-buster that changes whenever
// the underlying file is replaced.
.header(HttpHeaders.CACHE_CONTROL, "private, max-age=31536000, immutable")
.body(download.resource());
} catch (FileService.StorageFileNotFoundException e) {
throw DomainException.notFound(ErrorCode.FILE_NOT_FOUND,
"Thumbnail missing in storage: " + doc.getThumbnailKey());
}
}
// --- METADATA ---
@GetMapping("/{id}")
public Document getDocument(@PathVariable UUID id) {
@@ -149,10 +111,9 @@ public class DocumentController {
public Document updateDocument(
@PathVariable UUID id,
@ModelAttribute DocumentUpdateDTO dto,
- @RequestPart(value = "file", required = false) MultipartFile file,
- Authentication authentication) {
+ @RequestPart(value = "file", required = false) MultipartFile file) {
try {
- return documentService.updateDocument(id, dto, file, requireUserId(authentication));
+ return documentService.updateDocument(id, dto, file);
} catch (IOException e) {
throw DomainException.internal(ErrorCode.FILE_UPLOAD_FAILED, "Failed to upload file: " + e.getMessage());
}
@@ -167,35 +128,18 @@ public class DocumentController {
return ResponseEntity.noContent().build();
}
- // --- ATTACH FILE ---
+ // --- QUICK UPLOAD ---
private static final Set<String> ALLOWED_CONTENT_TYPES = Set.of(
"application/pdf", "image/jpeg", "image/png", "image/tiff");
@PostMapping(value = "/{id}/file", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
@RequirePermission(Permission.WRITE_ALL)
public Document attachFile(
@PathVariable UUID id,
@RequestPart("file") MultipartFile file,
Authentication authentication) {
String contentType = file.getContentType();
if (contentType == null || !ALLOWED_CONTENT_TYPES.contains(contentType)) {
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "Unsupported file type: " + contentType);
}
return documentService.attachFile(id, file, requireUserId(authentication));
}
// --- QUICK UPLOAD ---
public record UploadError(String filename, String code) {}
public record QuickUploadResult(List<Document> created, List<Document> updated, List<UploadError> errors) {}
@PostMapping(value = "/quick-upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
@RequirePermission(Permission.WRITE_ALL)
public QuickUploadResult quickUpload(
- @RequestPart(value = "files", required = false) List<MultipartFile> files,
- @RequestPart(value = "metadata", required = false) DocumentBatchMetadataDTO metadata,
- Authentication authentication) {
+ @RequestPart(value = "files", required = false) List<MultipartFile> files) {
List<Document> created = new ArrayList<>();
List<Document> updated = new ArrayList<>();
List<UploadError> errors = new ArrayList<>();
@@ -204,21 +148,13 @@ public class DocumentController {
return new QuickUploadResult(created, updated, errors);
}
- documentService.validateBatch(files.size(), metadata);
- UUID actorId = requireUserId(authentication);
- long totalBytes = files.stream().mapToLong(MultipartFile::getSize).sum();
- for (int i = 0; i < files.size(); i++) {
- MultipartFile file = files.get(i);
+ for (MultipartFile file : files) {
if (!ALLOWED_CONTENT_TYPES.contains(file.getContentType())) {
errors.add(new UploadError(file.getOriginalFilename(), "UNSUPPORTED_FILE_TYPE"));
continue;
}
try {
- DocumentService.StoreResult result = metadata != null
- ? documentService.storeDocumentWithBatchMetadata(file, metadata, i, actorId)
- : documentService.storeDocument(file, actorId);
+ DocumentService.StoreResult result = documentService.storeDocument(file);
if (result.isNew()) {
created.add(result.document());
} else {
@@ -230,35 +166,33 @@ public class DocumentController {
}
}
- log.info("quickUpload actor={} files={} totalBytes={} withMetadata={} created={} updated={} errors={}",
- actorId, files.size(), totalBytes, metadata != null,
- created.size(), updated.size(), errors.size());
return new QuickUploadResult(created, updated, errors);
}
@GetMapping("/incomplete-count")
- @RequirePermission(Permission.WRITE_ALL)
public Map<String, Long> getIncompleteCount() {
return Map.of("count", documentService.getIncompleteCount());
}
@GetMapping("/incomplete")
- @RequirePermission(Permission.WRITE_ALL)
public List<IncompleteDocumentDTO> getIncomplete(
- @Parameter(description = "Maximum number of results (server caps at 200)")
- @RequestParam(defaultValue = "50") int size) {
- return documentService.findIncompleteDocuments(Math.min(size, 200));
+ @Parameter(description = "Maximum number of results") @RequestParam(defaultValue = "10") int size) {
+ return documentService.findIncompleteDocuments(size);
}
@GetMapping("/incomplete/next")
- @RequirePermission(Permission.WRITE_ALL)
public ResponseEntity<Document> getNextIncomplete(@RequestParam UUID excludeId) {
return documentService.findNextIncompleteDocument(excludeId)
.map(ResponseEntity::ok)
.orElse(ResponseEntity.noContent().build());
}
@GetMapping("/recent-activity")
public ResponseEntity<List<Document>> getRecentActivity(
@RequestParam(defaultValue = "5") int size) {
return ResponseEntity.ok(documentService.getRecentActivity(size));
}
@GetMapping("/search") @GetMapping("/search")
public ResponseEntity<DocumentSearchResult> search( public ResponseEntity<DocumentSearchResult> search(
@RequestParam(required = false) String q, @RequestParam(required = false) String q,
@@ -270,21 +204,12 @@ public class DocumentController {
@RequestParam(required = false) String tagQ,
@Parameter(description = "Filter by document status") @RequestParam(required = false) DocumentStatus status,
@Parameter(description = "Sort field") @RequestParam(required = false) DocumentSort sort,
- @Parameter(description = "Sort direction: ASC or DESC") @RequestParam(required = false, defaultValue = "DESC") String dir,
- @Parameter(description = "Tag operator: AND (default) or OR") @RequestParam(required = false) String tagOp,
- // @Max on page guards against overflow when pageable.getOffset() is computed
- // as page * size — Integer.MAX_VALUE * 50 would wrap to a negative long, which
- // Hibernate cheerfully turns into an invalid SQL OFFSET.
- @Parameter(description = "Page number (0-indexed)") @RequestParam(defaultValue = "0") @Min(0) @Max(100_000) int page,
- @Parameter(description = "Page size (max 100)") @RequestParam(defaultValue = "50") @Min(1) @Max(100) int size) {
+ @Parameter(description = "Sort direction: ASC or DESC") @RequestParam(required = false, defaultValue = "DESC") String dir) {
if (!"ASC".equalsIgnoreCase(dir) && !"DESC".equalsIgnoreCase(dir)) {
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "dir must be ASC or DESC");
}
- // tagOp is a raw String at the HTTP boundary; any value other than "OR" (case-insensitive)
- // defaults to AND, which matches the frontend default and keeps old clients working.
- TagOperator operator = "OR".equalsIgnoreCase(tagOp) ? TagOperator.OR : TagOperator.AND;
- Pageable pageable = PageRequest.of(page, size);
- return ResponseEntity.ok(documentService.searchDocuments(q, from, to, senderId, receiverId, tags, tagQ, status, sort, dir, operator, pageable));
+ List<Document> results = documentService.searchDocuments(q, from, to, senderId, receiverId, tags, tagQ, status, sort, dir);
+ return ResponseEntity.ok(DocumentSearchResult.of(results));
}
// --- TRAINING LABELS ---
@@ -333,8 +258,4 @@ public class DocumentController {
Sort sort = Sort.by(Sort.Direction.fromString(dir.toUpperCase()), "documentDate");
return documentService.getConversationFiltered(senderId, receiverId, from, to, sort);
}
private UUID requireUserId(Authentication authentication) {
return SecurityUtils.requireUserId(authentication, userService);
}
}

View File

@@ -1,57 +0,0 @@
package org.raddatz.familienarchiv.controller;
import lombok.RequiredArgsConstructor;
import org.raddatz.familienarchiv.dto.CreateInviteRequest;
import org.raddatz.familienarchiv.dto.InviteListItemDTO;
import org.raddatz.familienarchiv.model.AppUser;
import org.raddatz.familienarchiv.security.Permission;
import org.raddatz.familienarchiv.security.RequirePermission;
import org.raddatz.familienarchiv.service.InviteService;
import org.raddatz.familienarchiv.service.UserService;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.core.annotation.AuthenticationPrincipal;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.UUID;
@RestController
@RequestMapping("/api/invites")
@RequiredArgsConstructor
public class InviteController {
private final InviteService inviteService;
private final UserService userService;
@Value("${app.base-url:http://localhost:3000}")
private String appBaseUrl;
@GetMapping
@RequirePermission(Permission.ADMIN_USER)
public List<InviteListItemDTO> listInvites(
@RequestParam(value = "status", defaultValue = "active") String status) {
boolean activeOnly = !"all".equalsIgnoreCase(status);
return inviteService.listInvites(activeOnly, appBaseUrl);
}
@PostMapping
@RequirePermission(Permission.ADMIN_USER)
public ResponseEntity<InviteListItemDTO> createInvite(
@RequestBody CreateInviteRequest request,
@AuthenticationPrincipal UserDetails principal) {
AppUser creator = userService.findByEmail(principal.getUsername());
InviteListItemDTO created = inviteService.toListItemDTO(
inviteService.createInvite(request, creator), appBaseUrl);
return ResponseEntity.status(HttpStatus.CREATED).body(created);
}
@DeleteMapping("/{id}")
@RequirePermission(Permission.ADMIN_USER)
public ResponseEntity<Void> revokeInvite(@PathVariable UUID id) {
inviteService.revokeInvite(id);
return ResponseEntity.noContent().build();
}
}

View File

@@ -100,6 +100,6 @@ public class NotificationController {
// ─── private helpers ────────────────────────────────────────────────────── // ─── private helpers ──────────────────────────────────────────────────────
private AppUser resolveUser(Authentication authentication) { private AppUser resolveUser(Authentication authentication) {
return userService.findByEmail(authentication.getName()); return userService.findByUsername(authentication.getName());
} }
} }

View File

@@ -4,10 +4,7 @@ import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.raddatz.familienarchiv.dto.BatchOcrDTO; import org.raddatz.familienarchiv.dto.BatchOcrDTO;
import org.raddatz.familienarchiv.dto.OcrStatusDTO; import org.raddatz.familienarchiv.dto.OcrStatusDTO;
import org.raddatz.familienarchiv.dto.TrainingHistoryResponse;
import org.raddatz.familienarchiv.dto.TrainingInfoResponse;
import org.raddatz.familienarchiv.dto.TriggerOcrDTO; import org.raddatz.familienarchiv.dto.TriggerOcrDTO;
import org.raddatz.familienarchiv.dto.TriggerSenderTrainingDTO;
import org.raddatz.familienarchiv.model.AppUser; import org.raddatz.familienarchiv.model.AppUser;
import org.raddatz.familienarchiv.model.OcrJob; import org.raddatz.familienarchiv.model.OcrJob;
import org.raddatz.familienarchiv.model.OcrTrainingRun; import org.raddatz.familienarchiv.model.OcrTrainingRun;
@@ -18,7 +15,6 @@ import org.raddatz.familienarchiv.service.OcrProgressService;
import org.raddatz.familienarchiv.service.OcrService; import org.raddatz.familienarchiv.service.OcrService;
import org.raddatz.familienarchiv.service.OcrTrainingService; import org.raddatz.familienarchiv.service.OcrTrainingService;
import org.raddatz.familienarchiv.service.SegmentationTrainingExportService; import org.raddatz.familienarchiv.service.SegmentationTrainingExportService;
import org.raddatz.familienarchiv.service.SenderModelService;
import org.raddatz.familienarchiv.service.TrainingDataExportService; import org.raddatz.familienarchiv.service.TrainingDataExportService;
import org.raddatz.familienarchiv.service.UserService; import org.raddatz.familienarchiv.service.UserService;
import org.springframework.http.HttpHeaders; import org.springframework.http.HttpHeaders;
@@ -46,7 +42,6 @@ public class OcrController {
private final TrainingDataExportService trainingDataExportService; private final TrainingDataExportService trainingDataExportService;
private final SegmentationTrainingExportService segmentationTrainingExportService; private final SegmentationTrainingExportService segmentationTrainingExportService;
private final OcrTrainingService ocrTrainingService; private final OcrTrainingService ocrTrainingService;
private final SenderModelService senderModelService;
@PostMapping("/api/documents/{documentId}/ocr") @PostMapping("/api/documents/{documentId}/ocr")
@ResponseStatus(HttpStatus.ACCEPTED) @ResponseStatus(HttpStatus.ACCEPTED)
@@ -135,33 +130,14 @@ public class OcrController {
@GetMapping("/api/ocr/training-info") @GetMapping("/api/ocr/training-info")
@RequirePermission(Permission.ADMIN) @RequirePermission(Permission.ADMIN)
public TrainingInfoResponse getTrainingInfo() { public OcrTrainingService.TrainingInfoResponse getTrainingInfo() {
return ocrTrainingService.getTrainingInfo(); return ocrTrainingService.getTrainingInfo();
} }
@GetMapping("/api/ocr/training-info/global")
@RequirePermission(Permission.ADMIN)
public TrainingHistoryResponse getGlobalTrainingHistory() {
return ocrTrainingService.getGlobalTrainingHistory();
}
@GetMapping("/api/ocr/training-info/{personId}")
@RequirePermission(Permission.ADMIN)
public TrainingHistoryResponse getSenderTrainingHistory(@PathVariable UUID personId) {
return ocrTrainingService.getSenderTrainingHistory(personId);
}
@PostMapping("/api/ocr/train-sender")
@ResponseStatus(HttpStatus.ACCEPTED)
@RequirePermission(Permission.ADMIN)
public OcrTrainingRun triggerSenderTraining(@Valid @RequestBody TriggerSenderTrainingDTO dto) {
return senderModelService.triggerManualSenderTraining(dto.personId());
}
private UUID resolveUserId(Authentication authentication) { private UUID resolveUserId(Authentication authentication) {
if (authentication == null || !authentication.isAuthenticated()) return null; if (authentication == null || !authentication.isAuthenticated()) return null;
try { try {
AppUser user = userService.findByEmail(authentication.getName()); AppUser user = userService.findByUsername(authentication.getName());
return user != null ? user.getId() : null; return user != null ? user.getId() : null;
} catch (Exception e) { } catch (Exception e) {
log.warn("Failed to resolve user ID for authentication: {}", authentication.getName(), e); log.warn("Failed to resolve user ID for authentication: {}", authentication.getName(), e);

View File

@@ -1,29 +1,23 @@
package org.raddatz.familienarchiv.controller; package org.raddatz.familienarchiv.controller;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.UUID; import java.util.UUID;
import org.raddatz.familienarchiv.dto.MergeTagDTO;
import org.raddatz.familienarchiv.dto.TagTreeNodeDTO;
import org.raddatz.familienarchiv.dto.TagUpdateDTO;
import org.raddatz.familienarchiv.model.Tag; import org.raddatz.familienarchiv.model.Tag;
import org.raddatz.familienarchiv.security.Permission; import org.raddatz.familienarchiv.security.Permission;
import org.raddatz.familienarchiv.security.RequirePermission; import org.raddatz.familienarchiv.security.RequirePermission;
import org.raddatz.familienarchiv.service.DocumentService; import org.raddatz.familienarchiv.service.DocumentService;
import org.raddatz.familienarchiv.service.TagService; import org.raddatz.familienarchiv.service.TagService;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity; import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping; import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController; import org.springframework.web.bind.annotation.RestController;
import jakarta.validation.Valid;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
@@ -37,8 +31,8 @@ public class TagController {
@PutMapping("/{id}") @PutMapping("/{id}")
@RequirePermission(Permission.ADMIN_TAG) @RequirePermission(Permission.ADMIN_TAG)
public ResponseEntity<Tag> updateTag(@PathVariable UUID id, @RequestBody TagUpdateDTO dto) { public ResponseEntity<Tag> updateTag(@PathVariable UUID id, @RequestBody Map<String, String> payload) {
return ResponseEntity.ok(tagService.update(id, dto)); return ResponseEntity.ok(tagService.update(id, payload.get("name")));
} }
@DeleteMapping("/{id}") @DeleteMapping("/{id}")
@@ -52,22 +46,4 @@ public class TagController {
public List<Tag> searchTags(@RequestParam(defaultValue = "") String query) { public List<Tag> searchTags(@RequestParam(defaultValue = "") String query) {
return tagService.search(query); return tagService.search(query);
} }
@GetMapping("/tree")
public List<TagTreeNodeDTO> getTagTree() {
return tagService.getTagTree();
}
@PostMapping("/{id}/merge")
@RequirePermission(Permission.ADMIN_TAG)
public ResponseEntity<Tag> mergeTag(@PathVariable UUID id, @Valid @RequestBody MergeTagDTO dto) {
return ResponseEntity.ok(tagService.mergeTags(id, dto.targetId()));
}
@DeleteMapping("/{id}/subtree")
@ResponseStatus(HttpStatus.NO_CONTENT)
@RequirePermission(Permission.ADMIN_TAG)
public void deleteSubtree(@PathVariable UUID id) {
tagService.deleteWithDescendants(id);
}
} }

View File

@@ -5,11 +5,12 @@ import lombok.extern.slf4j.Slf4j;
import org.raddatz.familienarchiv.dto.CreateTranscriptionBlockDTO; import org.raddatz.familienarchiv.dto.CreateTranscriptionBlockDTO;
import org.raddatz.familienarchiv.dto.ReorderTranscriptionBlocksDTO; import org.raddatz.familienarchiv.dto.ReorderTranscriptionBlocksDTO;
import org.raddatz.familienarchiv.dto.UpdateTranscriptionBlockDTO; import org.raddatz.familienarchiv.dto.UpdateTranscriptionBlockDTO;
import org.raddatz.familienarchiv.exception.DomainException;
import org.raddatz.familienarchiv.model.AppUser;
import org.raddatz.familienarchiv.model.TranscriptionBlock; import org.raddatz.familienarchiv.model.TranscriptionBlock;
import org.raddatz.familienarchiv.model.TranscriptionBlockVersion; import org.raddatz.familienarchiv.model.TranscriptionBlockVersion;
import org.raddatz.familienarchiv.security.Permission; import org.raddatz.familienarchiv.security.Permission;
import org.raddatz.familienarchiv.security.RequirePermission; import org.raddatz.familienarchiv.security.RequirePermission;
import org.raddatz.familienarchiv.security.SecurityUtils;
import org.raddatz.familienarchiv.service.TranscriptionService; import org.raddatz.familienarchiv.service.TranscriptionService;
import org.raddatz.familienarchiv.service.UserService; import org.raddatz.familienarchiv.service.UserService;
import org.springframework.http.HttpStatus; import org.springframework.http.HttpStatus;
@@ -84,10 +85,8 @@ public class TranscriptionBlockController {
@RequirePermission(Permission.WRITE_ALL) @RequirePermission(Permission.WRITE_ALL)
public TranscriptionBlock reviewBlock( public TranscriptionBlock reviewBlock(
@PathVariable UUID documentId, @PathVariable UUID documentId,
@PathVariable UUID blockId, @PathVariable UUID blockId) {
Authentication authentication) { return transcriptionService.reviewBlock(documentId, blockId);
UUID userId = requireUserId(authentication);
return transcriptionService.reviewBlock(documentId, blockId, userId);
} }
@GetMapping("/{blockId}/history") @GetMapping("/{blockId}/history")
@@ -99,6 +98,13 @@ public class TranscriptionBlockController {
} }
private UUID requireUserId(Authentication authentication) { private UUID requireUserId(Authentication authentication) {
return SecurityUtils.requireUserId(authentication, userService); if (authentication == null || !authentication.isAuthenticated()) {
throw DomainException.unauthorized("Authentication required");
}
AppUser user = userService.findByUsername(authentication.getName());
if (user == null) {
throw DomainException.unauthorized("User not found");
}
return user.getId();
} }
} }
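The inlined helper removed above now delegates to SecurityUtils.requireUserId. That class is not part of this diff; a hedged sketch mirroring the removed logic (with the findByUsername -> findByEmail switch applied elsewhere in this change) could look like this:

package org.raddatz.familienarchiv.security;

import java.util.UUID;
import org.raddatz.familienarchiv.exception.DomainException;
import org.raddatz.familienarchiv.model.AppUser;
import org.raddatz.familienarchiv.service.UserService;
import org.springframework.security.core.Authentication;

public final class SecurityUtils {
    private SecurityUtils() {}

    // Resolves the authenticated user's ID or throws an unauthorized DomainException.
    public static UUID requireUserId(Authentication authentication, UserService userService) {
        if (authentication == null || !authentication.isAuthenticated()) {
            throw DomainException.unauthorized("Authentication required");
        }
        AppUser user = userService.findByEmail(authentication.getName());
        if (user == null) {
            throw DomainException.unauthorized("User not found");
        }
        return user.getId();
    }
}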

View File

@@ -1,47 +0,0 @@
package org.raddatz.familienarchiv.controller;
import lombok.RequiredArgsConstructor;
import org.raddatz.familienarchiv.dto.TranscriptionQueueItemDTO;
import org.raddatz.familienarchiv.dto.TranscriptionWeeklyStatsDTO;
import org.raddatz.familienarchiv.security.Permission;
import org.raddatz.familienarchiv.security.RequirePermission;
import org.raddatz.familienarchiv.service.TranscriptionQueueService;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
* Serves the three Mission Control Strip columns for the dashboard.
* All endpoints require READ_ALL — same guard as the rest of the archive.
*/
@RestController
@RequestMapping("/api/transcription")
@RequiredArgsConstructor
@RequirePermission(Permission.READ_ALL)
public class TranscriptionQueueController {
private final TranscriptionQueueService transcriptionQueueService;
@GetMapping("/segmentation-queue")
public ResponseEntity<List<TranscriptionQueueItemDTO>> getSegmentationQueue() {
return ResponseEntity.ok(transcriptionQueueService.getSegmentationQueue());
}
@GetMapping("/transcription-queue")
public ResponseEntity<List<TranscriptionQueueItemDTO>> getTranscriptionQueue() {
return ResponseEntity.ok(transcriptionQueueService.getTranscriptionQueue());
}
@GetMapping("/ready-to-read")
public ResponseEntity<List<TranscriptionQueueItemDTO>> getReadyToRead() {
return ResponseEntity.ok(transcriptionQueueService.getReadyToReadQueue());
}
@GetMapping("/weekly-stats")
public ResponseEntity<TranscriptionWeeklyStatsDTO> getWeeklyStats() {
return ResponseEntity.ok(transcriptionQueueService.getWeeklyStats());
}
}

View File

@@ -4,7 +4,6 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.UUID; import java.util.UUID;
import jakarta.validation.Valid;
import org.raddatz.familienarchiv.dto.AdminUpdateUserRequest; import org.raddatz.familienarchiv.dto.AdminUpdateUserRequest;
import org.raddatz.familienarchiv.dto.ChangePasswordDTO; import org.raddatz.familienarchiv.dto.ChangePasswordDTO;
import org.raddatz.familienarchiv.dto.CreateUserRequest; import org.raddatz.familienarchiv.dto.CreateUserRequest;
@@ -39,7 +38,7 @@ public class UserController {
if (authentication == null || !authentication.isAuthenticated()) { if (authentication == null || !authentication.isAuthenticated()) {
return ResponseEntity.status(HttpStatus.UNAUTHORIZED).build(); return ResponseEntity.status(HttpStatus.UNAUTHORIZED).build();
} }
AppUser user = userService.findByEmail(authentication.getName()); AppUser user = userService.findByUsername(authentication.getName());
user.setPassword(null); user.setPassword(null);
return ResponseEntity.ok(user); return ResponseEntity.ok(user);
} }
@@ -47,7 +46,7 @@ public class UserController {
@PutMapping("users/me") @PutMapping("users/me")
public ResponseEntity<AppUser> updateProfile(Authentication authentication, public ResponseEntity<AppUser> updateProfile(Authentication authentication,
@RequestBody UpdateProfileDTO dto) { @RequestBody UpdateProfileDTO dto) {
AppUser current = userService.findByEmail(authentication.getName()); AppUser current = userService.findByUsername(authentication.getName());
AppUser updated = userService.updateProfile(current.getId(), dto); AppUser updated = userService.updateProfile(current.getId(), dto);
updated.setPassword(null); updated.setPassword(null);
return ResponseEntity.ok(updated); return ResponseEntity.ok(updated);
@@ -57,7 +56,7 @@ public class UserController {
@ResponseStatus(HttpStatus.NO_CONTENT) @ResponseStatus(HttpStatus.NO_CONTENT)
public void changePassword(Authentication authentication, public void changePassword(Authentication authentication,
@RequestBody ChangePasswordDTO dto) { @RequestBody ChangePasswordDTO dto) {
AppUser current = userService.findByEmail(authentication.getName()); AppUser current = userService.findByUsername(authentication.getName());
userService.changePassword(current.getId(), dto); userService.changePassword(current.getId(), dto);
} }
@@ -78,7 +77,7 @@ public class UserController {
@PostMapping("/users") @PostMapping("/users")
@RequirePermission(Permission.ADMIN_USER) @RequirePermission(Permission.ADMIN_USER)
public ResponseEntity<AppUser> createUser(@Valid @RequestBody CreateUserRequest request) { public ResponseEntity<AppUser> createUser(@RequestBody CreateUserRequest request) {
return ResponseEntity.ok(userService.createUserOrUpdate(request)); return ResponseEntity.ok(userService.createUserOrUpdate(request));
} }

View File

@@ -1,33 +0,0 @@
package org.raddatz.familienarchiv.dashboard;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.annotation.Nullable;
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
import org.raddatz.familienarchiv.audit.AuditKind;
import java.time.OffsetDateTime;
import java.util.UUID;
public record ActivityFeedItemDTO(
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) AuditKind kind,
@Nullable ActivityActorDTO actor,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID documentId,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String documentTitle,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) OffsetDateTime happenedAt,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) boolean youMentioned,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) boolean youParticipated,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int count,
@Nullable OffsetDateTime happenedAtUntil,
@Nullable
@Schema(
requiredMode = Schema.RequiredMode.NOT_REQUIRED,
description = "Deep-link target comment; populated only for COMMENT_ADDED and MENTION_CREATED kinds."
)
UUID commentId,
@Nullable
@Schema(
requiredMode = Schema.RequiredMode.NOT_REQUIRED,
description = "Annotation associated with the comment; populated only for COMMENT_ADDED and MENTION_CREATED kinds."
)
UUID annotationId
) {}

View File

@@ -1,51 +0,0 @@
package org.raddatz.familienarchiv.dashboard;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.ArraySchema;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.RequiredArgsConstructor;
import org.raddatz.familienarchiv.audit.AuditKind;
import org.raddatz.familienarchiv.security.Permission;
import org.raddatz.familienarchiv.security.RequirePermission;
import org.raddatz.familienarchiv.security.SecurityUtils;
import org.raddatz.familienarchiv.service.UserService;
import org.springframework.security.core.Authentication;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.Set;
import java.util.UUID;
@RestController
@RequestMapping("/api/dashboard")
@RequirePermission(Permission.READ_ALL)
@RequiredArgsConstructor
public class DashboardController {
private final DashboardService dashboardService;
private final UserService userService;
@GetMapping("/resume")
public DashboardResumeDTO getResume(Authentication authentication) {
UUID userId = SecurityUtils.requireUserId(authentication, userService);
return dashboardService.getResume(userId);
}
@GetMapping("/pulse")
public DashboardPulseDTO getPulse(Authentication authentication) {
UUID userId = SecurityUtils.requireUserId(authentication, userService);
return dashboardService.getPulse(userId);
}
@GetMapping("/activity")
public List<ActivityFeedItemDTO> getActivity(
Authentication authentication,
@RequestParam(defaultValue = "7") int limit,
@Parameter(description = "Filter by audit kinds; omit for all rollup-eligible kinds",
array = @ArraySchema(schema = @Schema(implementation = AuditKind.class)))
@RequestParam(required = false) Set<AuditKind> kinds) {
UUID userId = SecurityUtils.requireUserId(authentication, userService);
Set<AuditKind> effectiveKinds = (kinds == null || kinds.isEmpty()) ? AuditKind.ROLLUP_ELIGIBLE : kinds;
return dashboardService.getActivity(userId, Math.min(limit, 40), effectiveKinds);
}
}

View File

@@ -1,15 +0,0 @@
package org.raddatz.familienarchiv.dashboard;
import io.swagger.v3.oas.annotations.media.Schema;
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
import java.util.List;
public record DashboardPulseDTO(
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int pages,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int annotated,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int transcribed,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int uploaded,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int yourPages,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) List<ActivityActorDTO> contributors
) {}

View File

@@ -1,19 +0,0 @@
package org.raddatz.familienarchiv.dashboard;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.annotation.Nullable;
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
import java.util.List;
import java.util.UUID;
public record DashboardResumeDTO(
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID documentId,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String title,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String caption,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String excerpt,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int totalBlocks,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int pct,
@Nullable String thumbnailUrl,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) List<ActivityActorDTO> collaborators
) {}

View File

@@ -1,203 +0,0 @@
package org.raddatz.familienarchiv.dashboard;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
import org.raddatz.familienarchiv.audit.ActivityFeedRow;
import org.raddatz.familienarchiv.audit.AuditKind;
import org.raddatz.familienarchiv.audit.AuditLogQueryService;
import org.raddatz.familienarchiv.audit.PulseStatsRow;
import org.raddatz.familienarchiv.model.AppUser;
import org.raddatz.familienarchiv.model.Document;
import org.raddatz.familienarchiv.model.Person;
import org.raddatz.familienarchiv.model.TranscriptionBlock;
import org.raddatz.familienarchiv.service.CommentService;
import org.raddatz.familienarchiv.service.DocumentService;
import org.raddatz.familienarchiv.service.TranscriptionService;
import org.raddatz.familienarchiv.service.UserService;
import org.springframework.stereotype.Service;
import java.time.DayOfWeek;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.temporal.TemporalAdjusters;
import java.util.*;
import java.util.stream.Stream;
import java.util.stream.Collectors;
@Service
@RequiredArgsConstructor
@Slf4j
public class DashboardService {
private final AuditLogQueryService auditLogQueryService;
private final DocumentService documentService;
private final TranscriptionService transcriptionService;
private final UserService userService;
private final CommentService commentService;
public DashboardResumeDTO getResume(UUID userId) {
Optional<UUID> docIdOpt = auditLogQueryService.findMostRecentDocumentForUser(userId);
if (docIdOpt.isEmpty()) return null;
UUID docId = docIdOpt.get();
Document doc;
try {
doc = documentService.getDocumentById(docId);
} catch (Exception e) {
log.warn("Resume: document {} not found for user {}", docId, userId);
return null;
}
List<TranscriptionBlock> blocks = transcriptionService.listBlocks(docId);
String excerpt = blocks.stream()
.filter(b -> b.getText() != null && !b.getText().isBlank())
.min(Comparator.comparingInt(TranscriptionBlock::getSortOrder))
.map(b -> b.getText().length() > 200 ? b.getText().substring(0, 200) + "" : b.getText())
.orElse("");
int totalBlocks = blocks.size();
long reviewedBlocks = blocks.stream().filter(TranscriptionBlock::isReviewed).count();
int pct = totalBlocks > 0 ? (int) (reviewedBlocks * 100L / totalBlocks) : 0;
String caption = buildCaption(doc);
List<UUID> collaboratorIds = blocks.stream()
.map(TranscriptionBlock::getUpdatedBy)
.filter(Objects::nonNull)
.distinct()
.limit(5)
.toList();
List<ActivityActorDTO> collaborators = collaboratorIds.stream()
.map(uid -> {
try {
AppUser u = userService.getById(uid);
return toActorDTO(u);
} catch (Exception e) {
return null;
}
})
.filter(Objects::nonNull)
.toList();
return new DashboardResumeDTO(docId, doc.getTitle(), caption, excerpt,
totalBlocks, pct, doc.getThumbnailUrl(), collaborators);
}
public DashboardPulseDTO getPulse(UUID userId) {
OffsetDateTime weekStart = OffsetDateTime.now(ZoneOffset.UTC)
.with(TemporalAdjusters.previousOrSame(DayOfWeek.MONDAY))
.withHour(0).withMinute(0).withSecond(0).withNano(0);
PulseStatsRow stats = auditLogQueryService.getPulseStats(weekStart, userId);
List<ActivityFeedRow> feed = auditLogQueryService.findActivityFeed(userId, 50);
List<ActivityActorDTO> contributors = feed.stream()
.filter(r -> r.getActorId() != null)
.map(r -> new ActivityActorDTO(r.getActorInitials(), r.getActorColor(), r.getActorName()))
.filter(a -> !a.initials().isBlank())
.distinct()
.limit(6)
.toList();
return new DashboardPulseDTO(
(int) stats.getPages(),
(int) stats.getAnnotated(),
(int) stats.getTranscribed(),
(int) stats.getUploaded(),
(int) stats.getYourPages(),
contributors
);
}
public List<ActivityFeedItemDTO> getActivity(UUID currentUserId, int limit, Set<AuditKind> kinds) {
List<ActivityFeedRow> rows = auditLogQueryService.findActivityFeed(currentUserId, limit, kinds);
List<UUID> docIds = rows.stream()
.map(ActivityFeedRow::getDocumentId)
.filter(Objects::nonNull)
.distinct()
.toList();
Map<UUID, String> titleCache = new HashMap<>();
try {
documentService.getDocumentsByIds(docIds)
.forEach(d -> titleCache.put(d.getId(), d.getTitle()));
} catch (Exception e) {
log.warn("Activity: failed to bulk-load document titles", e);
}
List<UUID> commentIds = rows.stream()
.map(ActivityFeedRow::getCommentId)
.filter(Objects::nonNull)
.distinct()
.toList();
Map<UUID, UUID> annotationByComment = commentIds.isEmpty()
? Map.of()
: commentService.findAnnotationIdsByIds(commentIds);
return rows.stream().map(row -> {
ActivityActorDTO actor = row.getActorId() != null
? new ActivityActorDTO(row.getActorInitials(), row.getActorColor(), row.getActorName())
: null;
String docTitle = titleCache.getOrDefault(row.getDocumentId(), "");
OffsetDateTime happenedAtUntil = row.getHappenedAtUntil() != null
? row.getHappenedAtUntil().atOffset(ZoneOffset.UTC)
: null;
UUID commentId = row.getCommentId();
UUID annotationId = commentId != null ? annotationByComment.get(commentId) : null;
return new ActivityFeedItemDTO(
org.raddatz.familienarchiv.audit.AuditKind.valueOf(row.getKind()),
actor,
row.getDocumentId(),
docTitle,
row.getHappenedAt().atOffset(ZoneOffset.UTC),
row.isYouMentioned(),
row.isYouParticipated(),
row.getCount(),
happenedAtUntil,
commentId,
annotationId
);
}).toList();
}
private String buildCaption(Document doc) {
StringBuilder sb = new StringBuilder();
if (doc.getSender() != null) sb.append(personName(doc.getSender()));
if (!doc.getReceivers().isEmpty()) {
String receivers = doc.getReceivers().stream()
.map(this::personName).collect(Collectors.joining(", "));
if (!sb.isEmpty()) sb.append(" an ");
sb.append(receivers);
}
if (doc.getDocumentDate() != null) {
if (!sb.isEmpty()) sb.append(" · ");
sb.append(doc.getDocumentDate());
}
return sb.toString();
}
private String personName(Person p) {
if (p == null) return "";
if (p.getFirstName() != null && p.getLastName() != null) return p.getFirstName() + " " + p.getLastName();
if (p.getFirstName() != null) return p.getFirstName();
if (p.getLastName() != null) return p.getLastName();
return "";
}
private ActivityActorDTO toActorDTO(AppUser u) {
String initials = "";
if (u.getFirstName() != null && !u.getFirstName().isBlank())
initials += u.getFirstName().charAt(0);
if (u.getLastName() != null && !u.getLastName().isBlank())
initials += u.getLastName().charAt(0);
if (initials.isBlank() && u.getEmail() != null)
initials = u.getEmail().substring(0, 1).toUpperCase();
String fullName = Stream.of(u.getFirstName(), u.getLastName())
.filter(Objects::nonNull)
.collect(Collectors.joining(" "));
return new ActivityActorDTO(initials.toUpperCase(), u.getColor(), fullName);
}
}

View File

@@ -1,18 +0,0 @@
package org.raddatz.familienarchiv.dto;
import lombok.Data;
import java.time.LocalDateTime;
import java.util.List;
import java.util.UUID;
@Data
public class CreateInviteRequest {
private String label;
private Integer maxUses;
private String prefillFirstName;
private String prefillLastName;
private String prefillEmail;
private List<UUID> groupIds;
private LocalDateTime expiresAt;
}

View File

@@ -1,8 +1,6 @@
package org.raddatz.familienarchiv.dto; package org.raddatz.familienarchiv.dto;
import jakarta.validation.constraints.Email;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.Pattern;
import lombok.Data; import lombok.Data;
import java.time.LocalDate; import java.time.LocalDate;
@@ -11,9 +9,7 @@ import java.util.UUID;
@Data @Data
public class CreateUserRequest { public class CreateUserRequest {
@NotBlank private String username;
@Email
@Pattern(regexp = "^[^:]+$", message = "Email must not contain a colon")
private String email; private String email;
private String initialPassword; private String initialPassword;
private List<UUID> groupIds; private List<UUID> groupIds;

View File

@@ -1,18 +0,0 @@
package org.raddatz.familienarchiv.dto;
import lombok.Data;
import java.time.LocalDate;
import java.util.List;
import java.util.UUID;
@Data
public class DocumentBatchMetadataDTO {
private List<String> titles;
private UUID senderId;
private List<UUID> receiverIds;
private LocalDate documentDate;
private String location;
private List<String> tagNames;
private Boolean metadataComplete;
}

View File

@@ -1,18 +0,0 @@
package org.raddatz.familienarchiv.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
import org.raddatz.familienarchiv.model.Document;
import java.util.List;
public record DocumentSearchItem(
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
Document document,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
SearchMatchData matchData,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
int completionPercentage,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
List<ActivityActorDTO> contributors
) {}

View File

@@ -1,38 +1,16 @@
package org.raddatz.familienarchiv.dto; package org.raddatz.familienarchiv.dto;
import io.swagger.v3.oas.annotations.media.Schema; import org.raddatz.familienarchiv.model.Document;
import org.springframework.data.domain.Pageable;
import java.util.List; import java.util.List;
public record DocumentSearchResult( public record DocumentSearchResult(List<Document> documents, long total) {
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
List<DocumentSearchItem> items,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
long totalElements,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
int pageNumber,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
int pageSize,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
int totalPages
) {
/** /**
* Single-page convenience factory used by empty-result shortcuts and by tests that * Creates a result where total equals the list size.
* don't care about paging. Treats the whole list as page 0 of itself. * No pagination yet — the full matched set is always returned.
* When pagination is added, total must come from a DB COUNT query, not list.size().
*/ */
public static DocumentSearchResult of(List<DocumentSearchItem> items) { public static DocumentSearchResult of(List<Document> documents) {
int size = items.size(); return new DocumentSearchResult(documents, documents.size());
return new DocumentSearchResult(items, size, 0, size, size == 0 ? 0 : 1);
}
/**
* Paged factory used by the service when it has a real Pageable + full match count
* (e.g. from Spring's Page<T> or from an in-memory sort-then-slice).
*/
public static DocumentSearchResult paged(List<DocumentSearchItem> slice, Pageable pageable, long totalElements) {
int pageSize = pageable.getPageSize();
int totalPages = pageSize == 0 ? 0 : (int) ((totalElements + pageSize - 1) / pageSize);
return new DocumentSearchResult(slice, totalElements, pageable.getPageNumber(), pageSize, totalPages);
} }
} }
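A hedged sketch of how the paged(...) factory might be fed from a Spring Data Page on the service side (the repository call and the item mapping are assumptions, not part of this diff):

import java.util.List;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;

class SearchResultMapping {
    DocumentSearchResult toResult(Page<Document> documentPage, Pageable pageable) {
        List<DocumentSearchItem> items = documentPage.getContent().stream()
                .map(doc -> new DocumentSearchItem(doc, SearchMatchData.empty(), 0, List.of()))
                .toList();
        // totalElements comes from the COUNT query behind Page, not items.size().
        return DocumentSearchResult.paged(items, pageable, documentPage.getTotalElements());
    }
}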

View File

@@ -2,11 +2,9 @@ package org.raddatz.familienarchiv.dto;
import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.media.Schema;
import java.time.LocalDateTime;
import java.util.UUID; import java.util.UUID;
public record IncompleteDocumentDTO( public record IncompleteDocumentDTO(
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID id, @Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID id,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String title, @Schema(requiredMode = Schema.RequiredMode.REQUIRED) String title
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) LocalDateTime uploadedAt
) {} ) {}

View File

@@ -1,35 +0,0 @@
package org.raddatz.familienarchiv.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.time.LocalDateTime;
import java.util.UUID;
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class InviteListItemDTO {
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private UUID id;
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private String code;
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private String displayCode;
private String label;
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private int useCount;
private Integer maxUses;
private LocalDateTime expiresAt;
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private boolean revoked;
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private String status;
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private LocalDateTime createdAt;
private String shareableUrl;
}

View File

@@ -1,18 +0,0 @@
package org.raddatz.familienarchiv.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@NoArgsConstructor
@AllArgsConstructor
public class InvitePrefillDTO {
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private String firstName;
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private String lastName;
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private String email;
}

View File

@@ -1,14 +0,0 @@
package org.raddatz.familienarchiv.dto;
import io.swagger.v3.oas.annotations.media.Schema;
/**
* Character-level offset of a highlighted term within a text field.
* Offsets are Java {@code String} character positions (UTF-16 code units),
* which are identical to JavaScript string positions — consistent end-to-end
* for all German BMP characters (ä, ö, ü, ß, etc.).
*/
public record MatchOffset(
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int start,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int length
) {}
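A small illustration with a hypothetical title — the offsets below are the same values a JavaScript front end would compute for the identical string:

String title = "Brief für Oma";
int start = title.indexOf("für");                            // 6
MatchOffset offset = new MatchOffset(start, "für".length()); // MatchOffset[start=6, length=3]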

View File

@@ -1,6 +0,0 @@
package org.raddatz.familienarchiv.dto;
import jakarta.validation.constraints.NotNull;
import java.util.UUID;
public record MergeTagDTO(@NotNull UUID targetId) {}

View File

@@ -1,19 +0,0 @@
package org.raddatz.familienarchiv.dto;
import jakarta.validation.constraints.Email;
import jakarta.validation.constraints.NotBlank;
import lombok.Data;
@Data
public class RegisterRequest {
@NotBlank
private String code;
@NotBlank
@Email
private String email;
@NotBlank
private String password;
private String firstName;
private String lastName;
private boolean notifyOnMention = true;
}

View File

@@ -1,67 +0,0 @@
package org.raddatz.familienarchiv.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import java.util.List;
import java.util.UUID;
/**
* Match signals for a single document in a full-text search result.
* All fields are non-null except {@code transcriptionSnippet} and {@code summarySnippet},
* which are null when the respective field did not match the query.
*/
public record SearchMatchData(
/**
* Best-ranked matching transcription line, or null if no block matched.
*/
String transcriptionSnippet,
/**
* Character offsets of highlighted terms within the document title.
* Empty when the title did not contribute to the match.
*/
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
List<MatchOffset> titleOffsets,
/**
* True when the sender's name matched the query.
*/
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
boolean senderMatched,
/**
* IDs of receiver persons whose names matched the query.
*/
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
List<UUID> matchedReceiverIds,
/**
* IDs of tags whose names matched the query.
*/
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
List<UUID> matchedTagIds,
/**
* Character offsets of highlighted terms within the transcription snippet.
* Empty when no transcription block matched or the snippet has no highlights.
*/
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
List<MatchOffset> snippetOffsets,
/**
* Highlighted summary excerpt, or null if the summary did not match the query.
*/
String summarySnippet,
/**
* Character offsets of highlighted terms within the summary snippet.
* Empty when the summary did not match or has no highlights.
*/
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
List<MatchOffset> summaryOffsets
) {
/** Canonical "no match data" value for a single document. */
public static SearchMatchData empty() {
return new SearchMatchData(null, List.of(), false, List.of(), List.of(), List.of(), null, List.of());
}
}

View File

@@ -1,9 +0,0 @@
package org.raddatz.familienarchiv.dto;
/** Determines how multiple selected tag filters are combined in a document search. */
public enum TagOperator {
/** Every tag set must match (default). */
AND,
/** At least one tag set must match. */
OR
}
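A simplified in-memory illustration of the two operators (values are hypothetical; the real combination happens inside the document search query):

// Each selected tag filter yields "does this document match that tag set?".
List<Boolean> perSetMatches = List.of(true, false, true);
boolean documentMatches = (operator == TagOperator.AND)
        ? perSetMatches.stream().allMatch(Boolean::booleanValue)   // every set must match
        : perSetMatches.stream().anyMatch(Boolean::booleanValue);  // at least one must match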

View File

@@ -1,14 +0,0 @@
package org.raddatz.familienarchiv.dto;
import java.util.List;
import java.util.UUID;
import io.swagger.v3.oas.annotations.media.Schema;
public record TagTreeNodeDTO(
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID id,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String name,
String color,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int documentCount,
List<TagTreeNodeDTO> children,
@Schema(description = "Parent tag ID, null for root tags") UUID parentId) {}

View File

@@ -1,5 +0,0 @@
package org.raddatz.familienarchiv.dto;
import java.util.UUID;
public record TagUpdateDTO(String name, UUID parentId, String color) {}

View File

@@ -1,11 +0,0 @@
package org.raddatz.familienarchiv.dto;
import org.raddatz.familienarchiv.model.OcrTrainingRun;
import java.util.List;
import java.util.Map;
public record TrainingHistoryResponse(
List<OcrTrainingRun> runs,
Map<String, String> personNames
) {}

View File

@@ -1,19 +0,0 @@
package org.raddatz.familienarchiv.dto;
import org.raddatz.familienarchiv.model.OcrTrainingRun;
import org.raddatz.familienarchiv.model.SenderModel;
import java.util.List;
import java.util.Map;
public record TrainingInfoResponse(
int availableBlocks,
int totalOcrBlocks,
int availableDocuments,
int availableSegBlocks,
boolean ocrServiceAvailable,
OcrTrainingRun lastRun,
List<OcrTrainingRun> runs,
Map<String, String> personNames,
List<SenderModel> senderModels
) {}

View File

@@ -1,19 +0,0 @@
package org.raddatz.familienarchiv.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
import java.time.LocalDate;
import java.util.List;
import java.util.UUID;
public record TranscriptionQueueItemDTO(
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID id,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String title,
LocalDate documentDate,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int annotationCount,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int textedBlockCount,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int reviewedBlockCount,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) List<ActivityActorDTO> contributors,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) boolean hasMoreContributors
) {}

View File

@@ -1,13 +0,0 @@
package org.raddatz.familienarchiv.dto;
import io.swagger.v3.oas.annotations.media.Schema;
/**
* Weekly activity pulse for the Mission Control Strip column headers.
* Counts documents that received new work in each pipeline stage
* during the last 7 days.
*/
public record TranscriptionWeeklyStatsDTO(
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) long segmentationCount,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) long transcriptionCount
) {}

View File

@@ -1,12 +0,0 @@
package org.raddatz.familienarchiv.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotNull;
import java.util.UUID;
public record TriggerSenderTrainingDTO(
@NotNull
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
UUID personId
) {}

View File

@@ -38,20 +38,6 @@ public enum ErrorCode {
/** A mass import is already in progress; only one can run at a time. 409 */ /** A mass import is already in progress; only one can run at a time. 409 */
IMPORT_ALREADY_RUNNING, IMPORT_ALREADY_RUNNING,
// --- Thumbnails ---
/** A thumbnail backfill is already in progress; only one can run at a time. 409 */
THUMBNAIL_BACKFILL_ALREADY_RUNNING,
// --- Invites ---
/** The invite code does not exist. 404 */
INVITE_NOT_FOUND,
/** The invite has already reached its use limit. 409 */
INVITE_EXHAUSTED,
/** The invite has been revoked by an admin. 409 */
INVITE_REVOKED,
/** The invite has passed its expiry date. 410 */
INVITE_EXPIRED,
// --- Auth --- // --- Auth ---
/** The request is not authenticated. 401 */ /** The request is not authenticated. 401 */
UNAUTHORIZED, UNAUTHORIZED,
@@ -91,26 +77,10 @@ public enum ErrorCode {
OCR_PROCESSING_FAILED, OCR_PROCESSING_FAILED,
/** A training run is already in progress. 409 */ /** A training run is already in progress. 409 */
TRAINING_ALREADY_RUNNING, TRAINING_ALREADY_RUNNING,
/** Internal inconsistency: expected training run row was not found after creation. 500 */
OCR_TRAINING_CONFLICT,
// --- Tags ---
/** A tag with the given ID does not exist. 404 */
TAG_NOT_FOUND,
/** The supplied color token is not in the allowed palette. 400 */
INVALID_TAG_COLOR,
/** Setting this parent would create a cycle in the tag hierarchy. 400 */
TAG_CYCLE_DETECTED,
/** Merge source and target are the same tag. 400 */
TAG_MERGE_SELF,
/** The merge target is a descendant of the source tag. 400 */
TAG_MERGE_INVALID_TARGET,
// --- Generic --- // --- Generic ---
/** Request validation failed (missing or malformed fields). 400 */ /** Request validation failed (missing or malformed fields). 400 */
VALIDATION_ERROR, VALIDATION_ERROR,
/** Batch upload exceeds the maximum allowed file count per request. 400 */
BATCH_TOO_LARGE,
/** An unexpected server-side error occurred. 500 */ /** An unexpected server-side error occurred. 500 */
INTERNAL_ERROR, INTERNAL_ERROR,
} }

View File

@@ -1,9 +1,6 @@
package org.raddatz.familienarchiv.model; package org.raddatz.familienarchiv.model;
import jakarta.persistence.*; import jakarta.persistence.*;
import jakarta.validation.constraints.Email;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.Pattern;
import lombok.*; import lombok.*;
import org.hibernate.annotations.CreationTimestamp; import org.hibernate.annotations.CreationTimestamp;
@@ -19,12 +16,8 @@ import java.util.HashSet;
import java.util.Set; import java.util.Set;
import java.util.UUID; import java.util.UUID;
import jakarta.persistence.PostLoad;
import jakarta.persistence.PrePersist;
import jakarta.persistence.PreUpdate;
@Entity @Entity
@Table(name = "users") @Table(name = "users") // Table name in Postgres
@Data @Data
@NoArgsConstructor @NoArgsConstructor
@AllArgsConstructor @AllArgsConstructor
@@ -37,26 +30,26 @@ public class AppUser {
private UUID id; private UUID id;
@Column(unique = true, nullable = false) @Column(unique = true, nullable = false)
@NotBlank
@Email
@Pattern(regexp = "^[^:]+$", message = "Email must not contain a colon")
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private String email; private String username;
@Column(nullable = false) @Column(nullable = false)
@JsonProperty(access = JsonProperty.Access.WRITE_ONLY) @JsonProperty(access = JsonProperty.Access.WRITE_ONLY)
private String password; private String password; // Stored encrypted (BCrypt)
private String firstName; private String firstName;
private String lastName; private String lastName;
private LocalDate birthDate; private LocalDate birthDate;
@Column(unique = true)
private String email;
@Column(columnDefinition = "TEXT") @Column(columnDefinition = "TEXT")
private String contact; private String contact;
@Builder.Default @Builder.Default
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private boolean enabled = true; private boolean enabled = true; // To lock users without deleting them
@Column(nullable = false) @Column(nullable = false)
@Builder.Default @Builder.Default
@@ -68,6 +61,7 @@ public class AppUser {
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private boolean notifyOnMention = false; private boolean notifyOnMention = false;
// A user can belong to multiple groups
@ManyToMany(fetch = FetchType.EAGER) @ManyToMany(fetch = FetchType.EAGER)
@JoinTable(name = "users_groups", joinColumns = @JoinColumn(name = "user_id"), inverseJoinColumns = @JoinColumn(name = "group_id")) @JoinTable(name = "users_groups", joinColumns = @JoinColumn(name = "user_id"), inverseJoinColumns = @JoinColumn(name = "group_id"))
@Builder.Default @Builder.Default
@@ -78,48 +72,31 @@ public class AppUser {
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private LocalDateTime createdAt; private LocalDateTime createdAt;
@Column(nullable = false)
@Builder.Default
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private String color = "";
private static final String[] PALETTE = {
"#7a4f9a", "#5a8a6a", "#3060b0", "#a0522d", "#c0446e", "#c17a00", "#0e7490", "#1d4ed8"
};
public static String computeColor(UUID id) {
return PALETTE[Math.abs(id.hashCode()) % PALETTE.length];
}
@PrePersist
@PreUpdate
@PostLoad
void deriveColor() {
if (id != null && (color == null || color.isEmpty())) {
this.color = computeColor(id);
}
}
public boolean hasPermission(String permission) { public boolean hasPermission(String permission) {
if (groups == null || groups.isEmpty()) { if (groups == null || groups.isEmpty()) {
return false; return false;
} }
return this.groups.stream().anyMatch(group -> group.getPermissions().contains(permission)); return this.groups.stream().anyMatch(group -> group.getPermissions().contains(permission));
} }
public AppUser updateFromRequest(CreateUserRequest request, PasswordEncoder passwordEncoder, Set<UserGroup> groups) { public AppUser updateFromRequest(CreateUserRequest request, PasswordEncoder passwordEncoder, Set<UserGroup> groups) {
if (request.getEmail() != null && !request.getEmail().isBlank()) { if (request.getUsername() != null && !request.getUsername().isBlank()) {
this.email = request.getEmail(); this.username = request.getUsername();
}
if (request.getInitialPassword() != null && !request.getInitialPassword().isBlank()) {
this.password = passwordEncoder.encode(request.getInitialPassword());
}
if (groups != null && !groups.isEmpty()) {
this.groups = groups;
}
return this;
} }
if (request.getEmail() != null && !request.getEmail().isBlank()) {
this.email = request.getEmail();
}
if (request.getInitialPassword() != null && !request.getInitialPassword().isBlank()) {
this.password = passwordEncoder.encode(request.getInitialPassword());
}
if (groups != null && !groups.isEmpty()) {
this.groups = groups;
}
return this;
}
} }

View File

@@ -6,11 +6,8 @@ import org.hibernate.annotations.CreationTimestamp;
import org.hibernate.annotations.UpdateTimestamp; import org.hibernate.annotations.UpdateTimestamp;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.media.Schema;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.time.LocalDate; import java.time.LocalDate;
import java.time.LocalDateTime; import java.time.LocalDateTime;
import java.util.HashSet; import java.util.HashSet;
@@ -46,20 +43,6 @@ public class Document {
@Column(name = "file_hash", length = 64) @Column(name = "file_hash", length = 64)
private String fileHash; private String fileHash;
// S3 key of the generated thumbnail (e.g. "thumbnails/{docId}.jpg"); null until generated
@Column(name = "thumbnail_key")
private String thumbnailKey;
@Column(name = "thumbnail_generated_at")
private LocalDateTime thumbnailGeneratedAt;
@Enumerated(EnumType.STRING)
@Column(name = "thumbnail_aspect", length = 16)
private ThumbnailAspect thumbnailAspect;
@Column(name = "page_count")
private Integer pageCount;
// Original filename at upload (e.g. "Brief_Oma_1940.pdf") // Original filename at upload (e.g. "Brief_Oma_1940.pdf")
@Column(name = "original_filename", nullable = false) @Column(name = "original_filename", nullable = false)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
@@ -134,19 +117,4 @@ public class Document {
@Enumerated(EnumType.STRING) @Enumerated(EnumType.STRING)
@Builder.Default @Builder.Default
private Set<TrainingLabel> trainingLabels = new HashSet<>(); private Set<TrainingLabel> trainingLabels = new HashSet<>();
// The `?v={thumbnailGeneratedAt}` cache-buster is load-bearing: the thumbnail
// endpoint sends `Cache-Control: private, max-age=31536000, immutable`
// (DocumentController.getDocumentThumbnail). `immutable` is only safe because
// this URL changes whenever the underlying file does. Dropping the query param
// would let browsers serve a stale thumbnail for a year after the file is
// replaced, and shared caches could leak one user's thumbnail to another
// (CWE-525).
@JsonProperty("thumbnailUrl")
public String getThumbnailUrl() {
if (thumbnailKey == null) return null;
String base = "/api/documents/" + id + "/thumbnail";
if (thumbnailGeneratedAt == null) return base;
return base + "?v=" + URLEncoder.encode(thumbnailGeneratedAt.toString(), StandardCharsets.UTF_8);
}
} }
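A hedged sketch of the response-header side this comment refers to — DocumentController.getDocumentThumbnail is named in the comment, but its body below is an assumption:

// Serves the thumbnail with the long-lived, immutable cache policy the URL design relies on.
@GetMapping("/api/documents/{id}/thumbnail")
public ResponseEntity<byte[]> getDocumentThumbnail(@PathVariable UUID id) {
    byte[] jpeg = thumbnailStorage.load(id); // hypothetical storage collaborator
    return ResponseEntity.ok()
            // Safe only because the URL carries ?v={thumbnailGeneratedAt} and changes on replacement.
            .header(HttpHeaders.CACHE_CONTROL, "private, max-age=31536000, immutable")
            .contentType(MediaType.IMAGE_JPEG)
            .body(jpeg);
}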

View File

@@ -1,76 +0,0 @@
package org.raddatz.familienarchiv.model;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.persistence.*;
import lombok.*;
import org.hibernate.annotations.CreationTimestamp;
import java.time.LocalDateTime;
import java.util.HashSet;
import java.util.Set;
import java.util.UUID;
@Entity
@Table(name = "invite_tokens")
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class InviteToken {
@Id
@GeneratedValue(strategy = GenerationType.UUID)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private UUID id;
@Column(nullable = false, unique = true, length = 10)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private String code;
private String label;
private Integer maxUses;
@Column(nullable = false)
@Builder.Default
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private int useCount = 0;
private String prefillFirstName;
private String prefillLastName;
private String prefillEmail;
@ElementCollection(fetch = FetchType.EAGER)
@CollectionTable(name = "invite_token_group_ids", joinColumns = @JoinColumn(name = "invite_token_id"))
@Column(name = "group_id")
@Builder.Default
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private Set<UUID> groupIds = new HashSet<>();
private LocalDateTime expiresAt;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "created_by", nullable = false)
private AppUser createdBy;
@CreationTimestamp
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private LocalDateTime createdAt;
@Column(nullable = false)
@Builder.Default
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private boolean revoked = false;
public boolean isExhausted() {
return maxUses != null && useCount >= maxUses;
}
public boolean isExpired() {
return expiresAt != null && expiresAt.isBefore(LocalDateTime.now());
}
public boolean isActive() {
return !revoked && !isExhausted() && !isExpired();
}
}

View File

@@ -59,9 +59,6 @@ public class OcrTrainingRun {
@Column(name = "triggered_by") @Column(name = "triggered_by")
private UUID triggeredBy; private UUID triggeredBy;
@Column(name = "person_id")
private UUID personId;
@CreationTimestamp @CreationTimestamp
@Column(name = "created_at", nullable = false, updatable = false) @Column(name = "created_at", nullable = false, updatable = false)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) @Schema(requiredMode = Schema.RequiredMode.REQUIRED)

View File

@@ -1,56 +0,0 @@
package org.raddatz.familienarchiv.model;
import com.fasterxml.jackson.annotation.JsonIgnore;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.persistence.*;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.hibernate.annotations.CreationTimestamp;
import org.hibernate.annotations.UpdateTimestamp;
import java.time.Instant;
import java.util.UUID;
@Entity
@Table(name = "sender_models")
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class SenderModel {
@Id
@GeneratedValue(strategy = GenerationType.UUID)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private UUID id;
@Column(name = "person_id", nullable = false, unique = true)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private UUID personId;
@JsonIgnore
@Column(name = "model_path", nullable = false)
private String modelPath;
@Column
private Double accuracy;
@Column
private Double cer;
@Column(name = "corrected_lines_at_training", nullable = false)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private int correctedLinesAtTraining;
@CreationTimestamp
@Column(name = "created_at", nullable = false, updatable = false)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private Instant createdAt;
@UpdateTimestamp
@Column(name = "updated_at", nullable = false)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private Instant updatedAt;
}

View File

@@ -20,11 +20,4 @@ public class Tag {
@Column(unique = true, nullable = false) @Column(unique = true, nullable = false)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private String name; private String name;
/** UUID of the parent tag, or null for root-level tags. */
@Column(name = "parent_id")
private UUID parentId;
/** Color token name (e.g. "sage"), only set on root-level tags. Null means no color. */
private String color;
} }

View File

@@ -1,6 +0,0 @@
package org.raddatz.familienarchiv.model;
public enum ThumbnailAspect {
PORTRAIT,
LANDSCAPE
}

View File

@@ -1,7 +1,6 @@
package org.raddatz.familienarchiv.model; package org.raddatz.familienarchiv.model;
public enum TrainingStatus { public enum TrainingStatus {
QUEUED,
RUNNING, RUNNING,
DONE, DONE,
FAILED FAILED

View File

@@ -13,10 +13,11 @@ import java.util.UUID;
@Repository @Repository
public interface AppUserRepository extends JpaRepository<AppUser, UUID> { public interface AppUserRepository extends JpaRepository<AppUser, UUID> {
Optional<AppUser> findByUsername(String username);
Optional<AppUser> findByEmail(String email); Optional<AppUser> findByEmail(String email);
@Query("SELECT u FROM AppUser u WHERE " + @Query("SELECT u FROM AppUser u WHERE " +
"LOWER(u.email) LIKE LOWER(CONCAT('%', :q, '%')) " + "LOWER(COALESCE(u.firstName, '') || ' ' || COALESCE(u.lastName, '')) LIKE LOWER(CONCAT('%', :q, '%')) " +
"OR LOWER(COALESCE(u.firstName, '') || ' ' || COALESCE(u.lastName, '')) LIKE LOWER(CONCAT('%', :q, '%'))") "OR LOWER(u.username) LIKE LOWER(CONCAT('%', :q, '%'))")
List<AppUser> searchByEmailOrName(@Param("q") String q, Pageable pageable); List<AppUser> searchByNameOrUsername(@Param("q") String q, Pageable pageable);
} }
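Hypothetical usage of the search method — the Pageable argument caps the number of hits returned:

List<AppUser> hits = appUserRepository.searchByEmailOrName("raddatz", PageRequest.of(0, 10));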

View File

@@ -8,6 +8,10 @@ import java.util.UUID;
public interface CommentRepository extends JpaRepository<DocumentComment, UUID> { public interface CommentRepository extends JpaRepository<DocumentComment, UUID> {
List<DocumentComment> findByDocumentIdAndAnnotationIdIsNullAndParentIdIsNull(UUID documentId);
List<DocumentComment> findByAnnotationIdAndParentIdIsNull(UUID annotationId);
List<DocumentComment> findByParentId(UUID parentId); List<DocumentComment> findByParentId(UUID parentId);
List<DocumentComment> findByBlockIdAndParentIdIsNull(UUID blockId); List<DocumentComment> findByBlockIdAndParentIdIsNull(UUID blockId);

View File

@@ -1,8 +0,0 @@
package org.raddatz.familienarchiv.repository;
import java.util.UUID;
public interface CompletionStatsRow {
UUID getDocumentId();
int getCompletionPercentage();
}

View File

@@ -46,8 +46,6 @@ public interface DocumentRepository extends JpaRepository<Document, UUID>, JpaSp
List<Document> findByFileHashIsNullAndFilePathIsNotNull(); List<Document> findByFileHashIsNullAndFilePathIsNotNull();
List<Document> findByFilePathIsNotNullAndThumbnailKeyIsNull();
@Query("SELECT d.id, d.title FROM Document d WHERE d.id IN :ids") @Query("SELECT d.id, d.title FROM Document d WHERE d.id IN :ids")
List<Object[]> findIdAndTitleByIdIn(@Param("ids") Collection<UUID> ids); List<Object[]> findIdAndTitleByIdIn(@Param("ids") Collection<UUID> ids);
@@ -85,157 +83,10 @@ public interface DocumentRepository extends JpaRepository<Document, UUID>, JpaSp
@Query(nativeQuery = true, value = """ @Query(nativeQuery = true, value = """
SELECT d.id FROM documents d SELECT d.id FROM documents d
CROSS JOIN LATERAL ( WHERE d.search_vector @@ websearch_to_tsquery('german', :query)
SELECT CASE WHEN websearch_to_tsquery('german', :query)::text <> '' ORDER BY ts_rank(d.search_vector, websearch_to_tsquery('german', :query)) DESC,
THEN to_tsquery('german', regexp_replace(
websearch_to_tsquery('german', :query)::text,
'''([^'']+)''',
'''\\1'':*',
'g'))
END AS pq
) q
WHERE d.search_vector @@ q.pq
ORDER BY ts_rank(d.search_vector, q.pq) DESC,
d.meta_date DESC NULLS LAST d.meta_date DESC NULLS LAST
""") """)
List<UUID> findRankedIdsByFts(@Param("query") String query); List<UUID> findRankedIdsByFts(@Param("query") String query);
/**
* Returns match-enrichment data for a set of documents identified by their IDs.
* Each row contains (in column order):
* <ol>
* <li>UUID — document id</li>
* <li>String — title headline with \x01/\x02 delimiters around matched terms</li>
* <li>String — best-ranked transcription snippet with \x01/\x02 delimiters, or null</li>
* <li>Boolean — whether the sender's name matched the query</li>
* <li>String — comma-separated matched receiver UUIDs, or null</li>
* <li>String — comma-separated matched tag UUIDs, or null</li>
* <li>String — summary snippet with \x01/\x02 delimiters, or null if summary didn't match</li>
* </ol>
* Short-circuit before calling this method when {@code ids} is empty or {@code query} is blank.
*/
@Query(nativeQuery = true, value = """
SELECT
d.id,
ts_headline('german', d.title, q.pq,
'StartSel=' || chr(1) || ',StopSel=' || chr(2) || ',HighlightAll=true')
AS title_headline,
CASE WHEN best_block.text IS NOT NULL THEN
ts_headline('german', best_block.text, q.pq,
'StartSel=' || chr(1) || ',StopSel=' || chr(2) || ',MaxWords=50,MinWords=20')
END AS transcription_snippet,
(s.id IS NOT NULL AND
to_tsvector('german', COALESCE(s.first_name, '') || ' ' || COALESCE(s.last_name, ''))
@@ q.pq)
AS sender_matched,
(SELECT string_agg(r.id::text, ',')
FROM document_receivers dr
JOIN persons r ON r.id = dr.person_id
WHERE dr.document_id = d.id
AND to_tsvector('german', COALESCE(r.first_name, '') || ' ' || r.last_name)
@@ q.pq
) AS matched_receiver_ids,
(SELECT string_agg(t.id::text, ',')
FROM document_tags dt
JOIN tag t ON t.id = dt.tag_id
WHERE dt.document_id = d.id
AND to_tsvector('german', t.name) @@ q.pq
) AS matched_tag_ids,
CASE WHEN d.summary IS NOT NULL AND d.summary <> ''
AND to_tsvector('german', d.summary) @@ q.pq
THEN ts_headline('german', d.summary, q.pq,
'StartSel=' || chr(1) || ',StopSel=' || chr(2) || ',MaxWords=50,MinWords=20')
END AS summary_snippet
FROM documents d
CROSS JOIN LATERAL (
SELECT CASE WHEN websearch_to_tsquery('german', :query)::text <> ''
THEN to_tsquery('german', regexp_replace(
websearch_to_tsquery('german', :query)::text,
'''([^'']+)''',
'''\\1'':*',
'g'))
END AS pq
) q
LEFT JOIN persons s ON s.id = d.sender_id
LEFT JOIN LATERAL (
SELECT tb.text
FROM transcription_blocks tb
WHERE tb.document_id = d.id
AND to_tsvector('german', tb.text) @@ q.pq
ORDER BY ts_rank(to_tsvector('german', tb.text), q.pq) DESC
LIMIT 1
) best_block ON true
WHERE d.id IN :ids
""")
List<Object[]> findEnrichmentData(@Param("ids") Collection<UUID> ids, @Param("query") String query);
// --- Mission Control Strip queues ---
/** Documents with no annotations — Segmentierung column. */
@Query(nativeQuery = true, value = """
SELECT d.id, d.title, d.meta_date AS documentDate,
0 AS annotationCount, 0 AS textedBlockCount, 0 AS reviewedBlockCount
FROM documents d
WHERE d.status NOT IN ('PLACEHOLDER')
AND NOT EXISTS (SELECT 1 FROM document_annotations da WHERE da.document_id = d.id)
ORDER BY HASHTEXT(d.id::text || EXTRACT(WEEK FROM NOW())::int::text)
LIMIT :limit
""")
List<TranscriptionQueueProjection> findSegmentationQueue(@Param("limit") int limit);
/** Documents with annotations but not yet fully reviewed — Transkription column. */
@Query(nativeQuery = true, value = """
SELECT d.id, d.title, d.meta_date AS documentDate,
COUNT(DISTINCT da.id) AS annotationCount,
COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END) AS textedBlockCount,
COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END) AS reviewedBlockCount
FROM documents d
JOIN document_annotations da ON da.document_id = d.id
LEFT JOIN transcription_blocks tb ON tb.document_id = d.id
GROUP BY d.id, d.title, d.meta_date
HAVING COUNT(DISTINCT da.id) > 0
AND (
COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END)::float /
COUNT(DISTINCT da.id)
) < 0.90
ORDER BY COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END) DESC,
HASHTEXT(d.id::text || EXTRACT(WEEK FROM NOW())::int::text)
LIMIT :limit
""")
List<TranscriptionQueueProjection> findTranscriptionQueue(@Param("limit") int limit);
/** Documents with reviewed_pct >= 90 % — Lesefertig column. */
@Query(nativeQuery = true, value = """
SELECT d.id, d.title, d.meta_date AS documentDate,
COUNT(DISTINCT da.id) AS annotationCount,
COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END) AS textedBlockCount,
COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END) AS reviewedBlockCount
FROM documents d
JOIN document_annotations da ON da.document_id = d.id
LEFT JOIN transcription_blocks tb ON tb.document_id = d.id
GROUP BY d.id, d.title, d.meta_date
HAVING COUNT(DISTINCT da.id) > 0
AND (
COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END)::float /
COUNT(DISTINCT da.id)
) >= 0.90
ORDER BY (
COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END)::float /
COUNT(DISTINCT da.id)
) DESC
LIMIT :limit
""")
List<TranscriptionQueueProjection> findReadyToReadQueue(@Param("limit") int limit);
/** Weekly pulse: distinct documents that received new work in each pipeline stage. */
@Query(nativeQuery = true, value = """
SELECT
(SELECT COUNT(DISTINCT da.document_id) FROM document_annotations da
WHERE da.created_at >= NOW() - INTERVAL '7 days') AS segmentationCount,
(SELECT COUNT(DISTINCT tb.document_id) FROM transcription_blocks tb
WHERE tb.created_at >= NOW() - INTERVAL '7 days'
AND tb.text IS NOT NULL AND tb.text <> '') AS transcriptionCount
""")
TranscriptionWeeklyStatsProjection findWeeklyStats();
} }
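The `CROSS JOIN LATERAL ... regexp_replace(websearch_to_tsquery(...)::text, ...)` construction used in `findRankedIdsByFts` and `findEnrichmentData` rewrites the text form of the tsquery so every lexeme matches as a prefix (the `CASE WHEN` guards against an empty query). A minimal Java sketch of the same rewrite, for illustration only; the real transformation runs inside PostgreSQL:

```java
public class TsQueryPrefixSketch {
    public static void main(String[] args) {
        // Assume websearch_to_tsquery(...)::text produced this lexeme form.
        String tsqueryText = "'brief' & 'foto'";
        // Same pattern as the SQL regexp_replace: '([^']+)'  ->  '$1':*
        String prefixQuery = tsqueryText.replaceAll("'([^']+)'", "'$1':*");
        System.out.println(prefixQuery); // prints: 'brief':* & 'foto':*
    }
}
```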

View File

@@ -4,7 +4,6 @@ import jakarta.persistence.criteria.*;
import java.time.LocalDate; import java.time.LocalDate;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Set;
import java.util.UUID; import java.util.UUID;
import org.raddatz.familienarchiv.model.Document; import org.raddatz.familienarchiv.model.Document;
@@ -55,64 +54,34 @@ public class DocumentSpecifications {
return (root, query, cb) -> status == null ? null : cb.equal(root.get("status"), status); return (root, query, cb) -> status == null ? null : cb.equal(root.get("status"), status);
} }
/** // Filters by tags (AND logic, exact match)
* Filters by pre-expanded tag-ID sets with AND or OR logic. public static Specification<Document> hasTags(List<String> tags) {
*
* <p>AND (useOr=false): The document must have at least one tag from <em>every</em> set.
* <p>OR (useOr=true): The document must have at least one tag from the union of all sets.
*
* <p>Each set represents one selected tag including all of its descendants
* (pre-expanded via {@code TagRepository.findDescendantIdsByName}).
*/
public static Specification<Document> hasTags(List<Set<UUID>> tagIdSets, boolean useOr) {
return (root, query, cb) -> { return (root, query, cb) -> {
if (tagIdSets == null || tagIdSets.isEmpty()) if (tags == null || tags.isEmpty())
return null; return null;
if (!useOr) {
// AND mode: an empty set means the tag resolved to no IDs (doesn't exist) —
// no document can satisfy the condition, so return no results immediately.
boolean hasEmptySet = tagIdSets.stream().anyMatch(s -> s == null || s.isEmpty());
if (hasEmptySet) return cb.disjunction();
}
List<Set<UUID>> nonEmpty = tagIdSets.stream()
.filter(s -> s != null && !s.isEmpty())
.toList();
if (nonEmpty.isEmpty()) return null;
if (useOr) {
Set<UUID> union = new java.util.HashSet<>();
nonEmpty.forEach(union::addAll);
return documentHasTagIn(root, query, cb, union);
}
// AND: one EXISTS subquery per set
List<Predicate> predicates = new ArrayList<>(); List<Predicate> predicates = new ArrayList<>();
for (Set<UUID> ids : nonEmpty) {
predicates.add(documentHasTagIn(root, query, cb, ids)); for (String tagName : tags) {
if (!StringUtils.hasText(tagName)) continue;
Subquery<Long> subquery = query.subquery(Long.class);
Root<Document> subRoot = subquery.from(Document.class);
Join<Document, Tag> subTags = subRoot.join("tags");
subquery.select(subRoot.get("id"))
.where(
cb.equal(subRoot.get("id"), root.get("id")),
cb.equal(cb.lower(subTags.get("name")), tagName.trim().toLowerCase())
);
predicates.add(cb.exists(subquery));
} }
return cb.and(predicates.toArray(new Predicate[0])); return cb.and(predicates.toArray(new Predicate[0]));
}; };
} }
private static Predicate documentHasTagIn(
Root<Document> root,
jakarta.persistence.criteria.CriteriaQuery<?> query,
jakarta.persistence.criteria.CriteriaBuilder cb,
Set<UUID> tagIds) {
Subquery<UUID> subquery = query.subquery(UUID.class);
Root<Document> subRoot = subquery.from(Document.class);
Join<Document, Tag> subTags = subRoot.join("tags");
subquery.select(subRoot.get("id"))
.where(
cb.equal(subRoot.get("id"), root.get("id")),
subTags.get("id").in(tagIds)
);
return cb.exists(subquery);
}
// Filters by partial tag name (ILIKE) — for live tag search // Filters by partial tag name (ILIKE) — for live tag search
public static Specification<Document> hasTagPartial(String tagQ) { public static Specification<Document> hasTagPartial(String tagQ) {
return (root, query, cb) -> { return (root, query, cb) -> {

View File

@@ -1,27 +0,0 @@
package org.raddatz.familienarchiv.repository;
import jakarta.persistence.LockModeType;
import org.raddatz.familienarchiv.model.InviteToken;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Lock;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
public interface InviteTokenRepository extends JpaRepository<InviteToken, UUID> {
Optional<InviteToken> findByCode(String code);
@Lock(LockModeType.PESSIMISTIC_WRITE)
@Query("SELECT t FROM InviteToken t WHERE t.code = :code")
Optional<InviteToken> findByCodeForUpdate(@Param("code") String code);
@Query("SELECT t FROM InviteToken t WHERE t.revoked = false AND (t.expiresAt IS NULL OR t.expiresAt > CURRENT_TIMESTAMP) AND (t.maxUses IS NULL OR t.useCount < t.maxUses) ORDER BY t.createdAt DESC")
List<InviteToken> findActive();
@Query("SELECT t FROM InviteToken t ORDER BY t.createdAt DESC")
List<InviteToken> findAllOrderedByCreatedAt();
}

View File

@@ -12,15 +12,5 @@ public interface OcrTrainingRunRepository extends JpaRepository<OcrTrainingRun,
Optional<OcrTrainingRun> findFirstByStatus(TrainingStatus status); Optional<OcrTrainingRun> findFirstByStatus(TrainingStatus status);
Optional<OcrTrainingRun> findFirstByStatusOrderByCreatedAtAsc(TrainingStatus status); List<OcrTrainingRun> findTop10ByOrderByCreatedAtDesc();
Optional<OcrTrainingRun> findFirstByPersonIdAndStatus(UUID personId, TrainingStatus status);
boolean existsByPersonIdAndStatus(UUID personId, TrainingStatus status);
List<OcrTrainingRun> findTop20ByOrderByCreatedAtDesc();
List<OcrTrainingRun> findByPersonIdIsNullOrderByCreatedAtDesc();
List<OcrTrainingRun> findByPersonIdOrderByCreatedAtDesc(UUID personId);
} }

View File

@@ -1,12 +0,0 @@
package org.raddatz.familienarchiv.repository;
import org.raddatz.familienarchiv.model.SenderModel;
import org.springframework.data.jpa.repository.JpaRepository;
import java.util.Optional;
import java.util.UUID;
public interface SenderModelRepository extends JpaRepository<SenderModel, UUID> {
Optional<SenderModel> findByPersonId(UUID personId);
}

View File

@@ -1,126 +1,13 @@
package org.raddatz.familienarchiv.repository; package org.raddatz.familienarchiv.repository;
import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import java.util.UUID; import java.util.UUID;
import org.raddatz.familienarchiv.model.Tag; import org.raddatz.familienarchiv.model.Tag;
import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
public interface TagRepository extends JpaRepository<Tag, UUID> { public interface TagRepository extends JpaRepository<Tag, UUID> {
/** Typed projection for document-count aggregation results. */
interface TagCount {
UUID getTagId();
Long getCount();
}
Optional<Tag> findByNameIgnoreCase(String name); Optional<Tag> findByNameIgnoreCase(String name);
List<Tag> findByNameContainingIgnoreCase(String name); List<Tag> findByNameContainingIgnoreCase(String name);
}
/**
* Returns the IDs of all ancestors of the given tag (parent, grandparent, …)
* via a recursive CTE. Used for cycle detection before assigning a new parent.
* Includes a depth guard of 50 levels to prevent runaway queries.
*/
@Query(value = """
WITH RECURSIVE ancestors AS (
SELECT parent_id, 0 AS depth
FROM tag
WHERE id = :tagId AND parent_id IS NOT NULL
UNION ALL
SELECT t.parent_id, a.depth + 1
FROM tag t
JOIN ancestors a ON t.id = a.parent_id
WHERE t.parent_id IS NOT NULL AND a.depth < 50
)
SELECT parent_id FROM ancestors
""", nativeQuery = true)
List<UUID> findAncestorIds(@Param("tagId") UUID tagId);
/**
* Returns the IDs of the tag with the given name AND all of its descendants
* via a recursive CTE. Used to expand a selected tag to inclusive hierarchy results.
* Includes a depth guard of 50 levels to prevent runaway queries.
*/
@Query(value = """
WITH RECURSIVE descendants AS (
SELECT id, 0 AS depth FROM tag WHERE LOWER(name) = LOWER(:name)
UNION ALL
SELECT t.id, d.depth + 1 FROM tag t
JOIN descendants d ON t.parent_id = d.id
WHERE d.depth < 50
)
SELECT id FROM descendants
""", nativeQuery = true)
List<UUID> findDescendantIdsByName(@Param("name") String name);
/**
* Returns the IDs of the tag with the given ID AND all of its descendants
* via a recursive CTE. Used for merge validation and subtree delete.
* Includes a depth guard of 50 levels to prevent runaway queries.
*/
@Query(value = """
WITH RECURSIVE descendants AS (
SELECT id, 0 AS depth FROM tag WHERE id = :tagId
UNION ALL
SELECT t.id, d.depth + 1 FROM tag t
JOIN descendants d ON t.parent_id = d.id
WHERE d.depth < 50
)
SELECT id FROM descendants
""", nativeQuery = true)
List<UUID> findDescendantIds(@Param("tagId") UUID tagId);
/**
* Reassigns document_tags rows from source to target, skipping rows where
* the target tag is already present (to avoid PK conflicts).
*/
@Modifying(clearAutomatically = true)
@Query(value = """
UPDATE document_tags
SET tag_id = :targetId
WHERE tag_id = :sourceId
AND NOT EXISTS (
SELECT 1 FROM document_tags d2
WHERE d2.document_id = document_tags.document_id
AND d2.tag_id = :targetId
)
""", nativeQuery = true)
void reassignDocumentTags(@Param("sourceId") UUID sourceId, @Param("targetId") UUID targetId);
/**
* Removes all document_tags rows for the given tag.
*/
@Modifying(clearAutomatically = true)
@Query(value = "DELETE FROM document_tags WHERE tag_id = :tagId", nativeQuery = true)
void deleteDocumentTagsByTagId(@Param("tagId") UUID tagId);
/**
* Removes all document_tags rows for the given collection of tag IDs.
* Caller must guard against an empty collection — PostgreSQL rejects IN ().
*/
@Modifying(clearAutomatically = true)
@Query(value = "DELETE FROM document_tags WHERE tag_id IN :ids", nativeQuery = true)
void deleteDocumentTagsByTagIds(@Param("ids") Collection<UUID> ids);
/**
* Re-parents all direct children of sourceId to targetId.
*/
@Modifying(clearAutomatically = true)
@Query(value = "UPDATE tag SET parent_id = :targetId WHERE parent_id = :sourceId", nativeQuery = true)
void reparentChildren(@Param("sourceId") UUID sourceId, @Param("targetId") UUID targetId);
/**
* Returns (tagId, count) pairs for all tags that appear in document_tags.
* Used to populate documentCount in the tag tree without N+1 queries.
*/
@Query(value = "SELECT tag_id AS tagId, COUNT(*) AS count FROM document_tags GROUP BY tag_id", nativeQuery = true)
List<TagCount> findDocumentCountsPerTag();
}
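The two recursive CTE queries are the building blocks for the cycle check mentioned in their Javadoc. A hedged sketch of the guard a tag service might run before re-parenting; the method name and the error message are assumptions, not the project's actual code, and the `parentId` setter is assumed to be Lombok-generated:

```java
@Transactional
public void moveTag(UUID tagId, UUID newParentId) {
    if (newParentId != null) {
        // The new parent must not be the tag itself or anything below it,
        // otherwise the parent_id chain would form a cycle.
        boolean createsCycle = tagId.equals(newParentId)
                || tagRepository.findAncestorIds(newParentId).contains(tagId);
        if (createsCycle) {
            throw DomainException.badRequest(ErrorCode.VALIDATION_ERROR,
                    "Tag cannot become a child of its own subtree");
        }
    }
    Tag tag = tagRepository.findById(tagId).orElseThrow();
    tag.setParentId(newParentId); // setter assumed (Lombok)
    tagRepository.save(tag);
}
```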

View File

@@ -3,26 +3,13 @@ package org.raddatz.familienarchiv.repository;
import org.raddatz.familienarchiv.model.TranscriptionBlock; import org.raddatz.familienarchiv.model.TranscriptionBlock;
import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query; import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import java.util.UUID; import java.util.UUID;
public interface TranscriptionBlockRepository extends JpaRepository<TranscriptionBlock, UUID> { public interface TranscriptionBlockRepository extends JpaRepository<TranscriptionBlock, UUID> {
@Query(value = """
SELECT
b.document_id AS documentId,
ROUND(COUNT(*) FILTER (WHERE b.reviewed = true) * 100.0 / COUNT(*))::int AS completionPercentage
FROM transcription_blocks b
WHERE b.document_id IN :documentIds
GROUP BY b.document_id
""", nativeQuery = true)
List<CompletionStatsRow> findCompletionStatsForDocuments(
@Param("documentIds") Collection<UUID> documentIds);
List<TranscriptionBlock> findByDocumentIdOrderBySortOrderAsc(UUID documentId); List<TranscriptionBlock> findByDocumentIdOrderBySortOrderAsc(UUID documentId);
Optional<TranscriptionBlock> findByIdAndDocumentId(UUID id, UUID documentId); Optional<TranscriptionBlock> findByIdAndDocumentId(UUID id, UUID documentId);
@@ -50,22 +37,4 @@ public interface TranscriptionBlockRepository extends JpaRepository<Transcriptio
AND 'KURRENT_SEGMENTATION' MEMBER OF d.trainingLabels AND 'KURRENT_SEGMENTATION' MEMBER OF d.trainingLabels
""") """)
List<TranscriptionBlock> findSegmentationBlocks(); List<TranscriptionBlock> findSegmentationBlocks();
@Query("""
SELECT COUNT(b) FROM TranscriptionBlock b
JOIN Document d ON d.id = b.documentId
WHERE b.source = 'MANUAL'
AND d.sender.id = :personId
AND d.scriptType = 'HANDWRITING_KURRENT'
""")
long countManualKurrentBlocksByPerson(@Param("personId") UUID personId);
@Query("""
SELECT b FROM TranscriptionBlock b
JOIN Document d ON d.id = b.documentId
WHERE b.source = 'MANUAL'
AND d.sender.id = :personId
AND d.scriptType = 'HANDWRITING_KURRENT'
""")
List<TranscriptionBlock> findManualKurrentBlocksByPerson(@Param("personId") UUID personId);
} }
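For context, a hedged sketch of how a query service (DocumentService elsewhere in this diff calls `transcriptionBlockQueryService.getCompletionStats(docIds)`) might map the `CompletionStatsRow` projection into the `Map<UUID, Integer>` consumed by search-result enrichment. The empty-collection guard mirrors the `IN ()` caveat noted elsewhere in this diff:

```java
public Map<UUID, Integer> getCompletionStats(Collection<UUID> documentIds) {
    // Native IN (:documentIds) queries reject an empty collection, so short-circuit first.
    if (documentIds == null || documentIds.isEmpty()) {
        return Map.of();
    }
    return blockRepository.findCompletionStatsForDocuments(documentIds).stream()
            .collect(Collectors.toMap(
                    CompletionStatsRow::getDocumentId,
                    CompletionStatsRow::getCompletionPercentage));
}
```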

View File

@@ -1,17 +0,0 @@
package org.raddatz.familienarchiv.repository;
import java.time.LocalDate;
import java.util.UUID;
/**
* Spring Data projection for a single row in one of the three Mission Control Strip queues.
* Column aliases in the native SQL queries must match these getter names exactly.
*/
public interface TranscriptionQueueProjection {
UUID getId();
String getTitle();
LocalDate getDocumentDate();
int getAnnotationCount();
int getTextedBlockCount();
int getReviewedBlockCount();
}

View File

@@ -1,10 +0,0 @@
package org.raddatz.familienarchiv.repository;
/**
* Spring Data projection for the weekly activity pulse stats.
* Column aliases in the native SQL query must match these getter names exactly.
*/
public interface TranscriptionWeeklyStatsProjection {
long getSegmentationCount();
long getTranscriptionCount();
}

View File

@@ -1,24 +0,0 @@
package org.raddatz.familienarchiv.security;
import org.raddatz.familienarchiv.exception.DomainException;
import org.raddatz.familienarchiv.model.AppUser;
import org.raddatz.familienarchiv.service.UserService;
import org.springframework.security.core.Authentication;
import java.util.UUID;
public final class SecurityUtils {
private SecurityUtils() {}
public static UUID requireUserId(Authentication authentication, UserService userService) {
if (authentication == null || !authentication.isAuthenticated()) {
throw DomainException.unauthorized("Authentication required");
}
AppUser user = userService.findByEmail(authentication.getName());
if (user == null) {
throw DomainException.unauthorized("User not found");
}
return user.getId();
}
}

View File

@@ -2,8 +2,6 @@ package org.raddatz.familienarchiv.service;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.raddatz.familienarchiv.audit.AuditKind;
import org.raddatz.familienarchiv.audit.AuditService;
import org.raddatz.familienarchiv.dto.CreateAnnotationDTO; import org.raddatz.familienarchiv.dto.CreateAnnotationDTO;
import org.raddatz.familienarchiv.dto.UpdateAnnotationDTO; import org.raddatz.familienarchiv.dto.UpdateAnnotationDTO;
import org.raddatz.familienarchiv.exception.DomainException; import org.raddatz.familienarchiv.exception.DomainException;
@@ -16,7 +14,6 @@ import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.UUID; import java.util.UUID;
@Slf4j @Slf4j
@@ -26,7 +23,6 @@ public class AnnotationService {
private final AnnotationRepository annotationRepository; private final AnnotationRepository annotationRepository;
private final TranscriptionBlockRepository blockRepository; private final TranscriptionBlockRepository blockRepository;
private final AuditService auditService;
public List<DocumentAnnotation> listAnnotations(UUID documentId) { public List<DocumentAnnotation> listAnnotations(UUID documentId) {
return annotationRepository.findByDocumentId(documentId); return annotationRepository.findByDocumentId(documentId);
@@ -46,10 +42,7 @@ public class AnnotationService {
.createdBy(userId) .createdBy(userId)
.build(); .build();
DocumentAnnotation saved = annotationRepository.save(annotation); return annotationRepository.save(annotation);
auditService.logAfterCommit(AuditKind.ANNOTATION_CREATED, userId, saved.getDocumentId(),
Map.of("pageNumber", saved.getPageNumber()));
return saved;
} }
@Transactional @Transactional

View File

@@ -1,23 +1,17 @@
package org.raddatz.familienarchiv.service; package org.raddatz.familienarchiv.service;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import org.raddatz.familienarchiv.audit.AuditKind;
import org.raddatz.familienarchiv.audit.AuditService;
import org.raddatz.familienarchiv.dto.MentionDTO; import org.raddatz.familienarchiv.dto.MentionDTO;
import org.raddatz.familienarchiv.exception.DomainException; import org.raddatz.familienarchiv.exception.DomainException;
import org.raddatz.familienarchiv.exception.ErrorCode; import org.raddatz.familienarchiv.exception.ErrorCode;
import org.raddatz.familienarchiv.model.AppUser; import org.raddatz.familienarchiv.model.AppUser;
import org.raddatz.familienarchiv.model.DocumentComment; import org.raddatz.familienarchiv.model.DocumentComment;
import org.raddatz.familienarchiv.model.TranscriptionBlock;
import org.raddatz.familienarchiv.repository.CommentRepository; import org.raddatz.familienarchiv.repository.CommentRepository;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashSet; import java.util.LinkedHashSet;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.UUID; import java.util.UUID;
@@ -28,16 +22,16 @@ public class CommentService {
private final CommentRepository commentRepository; private final CommentRepository commentRepository;
private final UserService userService; private final UserService userService;
private final NotificationService notificationService; private final NotificationService notificationService;
private final AuditService auditService;
private final TranscriptionService transcriptionService;
public Map<UUID, UUID> findAnnotationIdsByIds(Collection<UUID> commentIds) { public List<DocumentComment> getCommentsForDocument(UUID documentId) {
if (commentIds == null || commentIds.isEmpty()) return Map.of(); List<DocumentComment> roots =
Map<UUID, UUID> result = new HashMap<>(); commentRepository.findByDocumentIdAndAnnotationIdIsNullAndParentIdIsNull(documentId);
for (DocumentComment c : commentRepository.findAllById(commentIds)) { return withRepliesAndMentions(roots);
if (c.getAnnotationId() != null) result.put(c.getId(), c.getAnnotationId()); }
}
return result; public List<DocumentComment> getCommentsForAnnotation(UUID annotationId) {
List<DocumentComment> roots = commentRepository.findByAnnotationIdAndParentIdIsNull(annotationId);
return withRepliesAndMentions(roots);
} }
public List<DocumentComment> getCommentsForBlock(UUID blockId) { public List<DocumentComment> getCommentsForBlock(UUID blockId) {
@@ -48,11 +42,9 @@ public class CommentService {
@Transactional @Transactional
public DocumentComment postBlockComment(UUID documentId, UUID blockId, String content, public DocumentComment postBlockComment(UUID documentId, UUID blockId, String content,
List<UUID> mentionedUserIds, AppUser author) { List<UUID> mentionedUserIds, AppUser author) {
TranscriptionBlock block = transcriptionService.getBlock(documentId, blockId);
DocumentComment comment = DocumentComment.builder() DocumentComment comment = DocumentComment.builder()
.documentId(documentId) .documentId(documentId)
.blockId(blockId) .blockId(blockId)
.annotationId(block.getAnnotationId())
.content(content) .content(content)
.authorId(author.getId()) .authorId(author.getId())
.authorName(resolveAuthorName(author)) .authorName(resolveAuthorName(author))
@@ -61,7 +53,23 @@ public class CommentService {
DocumentComment saved = commentRepository.save(comment); DocumentComment saved = commentRepository.save(comment);
withMentionDTOs(saved); withMentionDTOs(saved);
notificationService.notifyMentions(mentionedUserIds, saved); notificationService.notifyMentions(mentionedUserIds, saved);
logCommentPosted(author, documentId, saved, mentionedUserIds); return saved;
}
@Transactional
public DocumentComment postComment(UUID documentId, UUID annotationId, String content,
List<UUID> mentionedUserIds, AppUser author) {
DocumentComment comment = DocumentComment.builder()
.documentId(documentId)
.annotationId(annotationId)
.content(content)
.authorId(author.getId())
.authorName(resolveAuthorName(author))
.build();
saveMentions(comment, mentionedUserIds);
DocumentComment saved = commentRepository.save(comment);
withMentionDTOs(saved);
notificationService.notifyMentions(mentionedUserIds, saved);
return saved; return saved;
} }
@@ -93,7 +101,6 @@ public class CommentService {
participantIds.remove(author.getId()); participantIds.remove(author.getId());
notificationService.notifyReply(saved, participantIds); notificationService.notifyReply(saved, participantIds);
notificationService.notifyMentions(mentionedUserIds, saved); notificationService.notifyMentions(mentionedUserIds, saved);
logCommentPosted(author, documentId, saved, mentionedUserIds);
return saved; return saved;
} }
@@ -164,22 +171,11 @@ public class CommentService {
ErrorCode.COMMENT_NOT_FOUND, "Comment not found: " + commentId)); ErrorCode.COMMENT_NOT_FOUND, "Comment not found: " + commentId));
} }
private void logCommentPosted(AppUser author, UUID documentId, DocumentComment saved, List<UUID> mentionedUserIds) {
UUID actorId = author != null ? author.getId() : null;
String commentId = saved.getId().toString();
auditService.logAfterCommit(AuditKind.COMMENT_ADDED, actorId, documentId, Map.of("commentId", commentId));
if (mentionedUserIds != null) {
mentionedUserIds.forEach(mentionedUserId ->
auditService.logAfterCommit(AuditKind.MENTION_CREATED, actorId, documentId,
Map.of("commentId", commentId, "mentionedUserId", mentionedUserId.toString())));
}
}
private String resolveAuthorName(AppUser author) { private String resolveAuthorName(AppUser author) {
String first = author.getFirstName(); String first = author.getFirstName();
String last = author.getLastName(); String last = author.getLastName();
if ((first == null || first.isBlank()) && (last == null || last.isBlank())) { if ((first == null || first.isBlank()) && (last == null || last.isBlank())) {
return author.getEmail(); return author.getUsername();
} }
return ((first != null ? first : "") + " " + (last != null ? last : "")).strip(); return ((first != null ? first : "") + " " + (last != null ? last : "")).strip();
} }

View File

@@ -29,22 +29,24 @@ public class CustomUserDetailsService implements UserDetailsService {
private final AppUserRepository userRepository; private final AppUserRepository userRepository;
@Override @Override
public UserDetails loadUserByUsername(String email) throws UsernameNotFoundException { public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
AppUser appUser = userRepository.findByEmail(email) AppUser appUser = userRepository.findByUsername(username)
.orElseThrow(() -> new UsernameNotFoundException("User nicht gefunden: " + email)); .orElseThrow(() -> new UsernameNotFoundException("User nicht gefunden: " + username));
// Collect all permissions from all groups; warn about any that don't match a known Permission enum value
var authorities = appUser.getGroups().stream() var authorities = appUser.getGroups().stream()
.flatMap(group -> group.getPermissions().stream()) .flatMap(group -> group.getPermissions().stream())
.peek(p -> { .peek(p -> {
if (!KNOWN_PERMISSIONS.contains(p)) { if (!KNOWN_PERMISSIONS.contains(p)) {
log.warn("Unknown permission '{}' found in database for user '{}' — it will be granted but never matched by @RequirePermission", p, appUser.getEmail()); log.warn("Unknown permission '{}' found in database for user '{}' — it will be granted but never matched by @RequirePermission", p, appUser.getUsername());
} }
}) })
.map(SimpleGrantedAuthority::new) .map(SimpleGrantedAuthority::new)
.collect(Collectors.toSet()); .collect(Collectors.toSet());
// Return the standard Spring Security User object
return new User( return new User(
appUser.getEmail(), appUser.getUsername(),
appUser.getPassword(), appUser.getPassword(),
appUser.isEnabled(), appUser.isEnabled(),
true, true, true, true, true, true,

View File

@@ -3,29 +3,17 @@ package org.raddatz.familienarchiv.service;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
import org.raddatz.familienarchiv.audit.AuditKind;
import org.raddatz.familienarchiv.audit.AuditLogQueryService;
import org.raddatz.familienarchiv.audit.AuditService;
import org.raddatz.familienarchiv.dto.DocumentBatchMetadataDTO;
import org.raddatz.familienarchiv.dto.DocumentSearchItem;
import org.raddatz.familienarchiv.dto.DocumentSearchResult;
import org.raddatz.familienarchiv.dto.DocumentSort;
import org.raddatz.familienarchiv.dto.DocumentUpdateDTO; import org.raddatz.familienarchiv.dto.DocumentUpdateDTO;
import org.raddatz.familienarchiv.dto.IncompleteDocumentDTO; import org.raddatz.familienarchiv.dto.IncompleteDocumentDTO;
import org.raddatz.familienarchiv.dto.MatchOffset;
import org.raddatz.familienarchiv.dto.SearchMatchData;
import org.raddatz.familienarchiv.dto.TagOperator;
import org.raddatz.familienarchiv.model.Document; import org.raddatz.familienarchiv.model.Document;
import org.raddatz.familienarchiv.dto.DocumentSort;
import org.raddatz.familienarchiv.model.DocumentStatus; import org.raddatz.familienarchiv.model.DocumentStatus;
import org.raddatz.familienarchiv.model.ScriptType; import org.raddatz.familienarchiv.model.ScriptType;
import org.raddatz.familienarchiv.model.TrainingLabel; import org.raddatz.familienarchiv.model.TrainingLabel;
import org.raddatz.familienarchiv.model.Person; import org.raddatz.familienarchiv.model.Person;
import org.raddatz.familienarchiv.model.Tag; import org.raddatz.familienarchiv.model.Tag;
import org.raddatz.familienarchiv.repository.DocumentRepository; import org.raddatz.familienarchiv.repository.DocumentRepository;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort;
import org.springframework.data.jpa.domain.Specification; import org.springframework.data.jpa.domain.Specification;
import org.raddatz.familienarchiv.exception.DomainException; import org.raddatz.familienarchiv.exception.DomainException;
@@ -64,10 +52,6 @@ public class DocumentService {
private final TagService tagService; private final TagService tagService;
private final DocumentVersionService documentVersionService; private final DocumentVersionService documentVersionService;
private final AnnotationService annotationService; private final AnnotationService annotationService;
private final AuditService auditService;
private final TranscriptionBlockQueryService transcriptionBlockQueryService;
private final AuditLogQueryService auditLogQueryService;
private final ThumbnailAsyncRunner thumbnailAsyncRunner;
public record StoreResult(Document document, boolean isNew) {} public record StoreResult(Document document, boolean isNew) {}
@@ -87,7 +71,7 @@ public class DocumentService {
* - Wenn NEIN: Erstellt neuen Eintrag — isNew = true. * - Wenn NEIN: Erstellt neuen Eintrag — isNew = true.
*/ */
@Transactional @Transactional
public StoreResult storeDocument(MultipartFile file, UUID actorId) throws IOException { public StoreResult storeDocument(MultipartFile file) throws IOException {
String originalFilename = file.getOriginalFilename(); String originalFilename = file.getOriginalFilename();
// 1. Check for existing record (findFirst to survive duplicate filenames in the DB) // 1. Check for existing record (findFirst to survive duplicate filenames in the DB)
@@ -120,63 +104,11 @@ public class DocumentService {
document.setFilePath(upload.s3Key()); document.setFilePath(upload.s3Key());
document.setFileHash(upload.fileHash()); document.setFileHash(upload.fileHash());
document.setContentType(file.getContentType()); document.setContentType(file.getContentType());
boolean wasPlaceholder = document.getStatus() == DocumentStatus.PLACEHOLDER; if (document.getStatus() == DocumentStatus.PLACEHOLDER) {
if (wasPlaceholder) {
document.setStatus(DocumentStatus.UPLOADED); document.setStatus(DocumentStatus.UPLOADED);
} }
Document saved = documentRepository.save(document); return new StoreResult(documentRepository.save(document), isNew);
if (wasPlaceholder) {
auditService.logAfterCommit(AuditKind.FILE_UPLOADED, actorId, saved.getId(), null);
}
thumbnailAsyncRunner.dispatchAfterCommit(saved.getId());
return new StoreResult(saved, isNew);
}
public void validateBatch(int fileCount, DocumentBatchMetadataDTO metadata) {
// 50-file hard cap keeps FormData requests at a manageable size and protects against runaway bulk uploads.
if (fileCount > 50) {
throw DomainException.badRequest(ErrorCode.BATCH_TOO_LARGE, "Batch exceeds maximum of 50 files per request");
}
if (metadata != null && metadata.getTitles() != null && metadata.getTitles().size() > fileCount) {
throw DomainException.badRequest(ErrorCode.VALIDATION_ERROR, "titles count must not exceed files count");
}
}
@Transactional
public StoreResult storeDocumentWithBatchMetadata(
MultipartFile file, DocumentBatchMetadataDTO metadata, int fileIndex, UUID actorId) throws IOException {
StoreResult base = storeDocument(file, actorId);
Document doc = applyBatchMetadata(base.document(), metadata, fileIndex);
return new StoreResult(documentRepository.save(doc), base.isNew());
}
private Document applyBatchMetadata(Document doc, DocumentBatchMetadataDTO metadata, int fileIndex) {
if (metadata.getTitles() != null && fileIndex < metadata.getTitles().size()) {
doc.setTitle(metadata.getTitles().get(fileIndex));
}
if (metadata.getSenderId() != null) {
doc.setSender(personService.getById(metadata.getSenderId()));
}
if (metadata.getReceiverIds() != null && !metadata.getReceiverIds().isEmpty()) {
doc.setReceivers(new HashSet<>(personService.getAllById(metadata.getReceiverIds())));
}
if (metadata.getDocumentDate() != null) {
doc.setDocumentDate(metadata.getDocumentDate());
}
if (metadata.getLocation() != null) {
doc.setLocation(metadata.getLocation());
}
if (metadata.getMetadataComplete() != null) {
doc.setMetadataComplete(metadata.getMetadataComplete());
}
if (metadata.getTagNames() != null && !metadata.getTagNames().isEmpty()) {
UUID docId = doc.getId();
updateDocumentTags(docId, metadata.getTagNames());
doc = documentRepository.findById(docId)
.orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Not found after batch metadata: " + docId));
}
return doc;
} }
@Transactional @Transactional
@@ -238,8 +170,7 @@ public class DocumentService {
} }
// File // File
boolean fileUploaded = file != null && !file.isEmpty(); if (file != null && !file.isEmpty()) {
if (fileUploaded) {
FileService.UploadResult upload = fileService.uploadFile(file, file.getOriginalFilename()); FileService.UploadResult upload = fileService.uploadFile(file, file.getOriginalFilename());
doc.setFilePath(upload.s3Key()); doc.setFilePath(upload.s3Key());
doc.setFileHash(upload.fileHash()); doc.setFileHash(upload.fileHash());
@@ -249,19 +180,14 @@ public class DocumentService {
Document finalDoc = documentRepository.save(doc); Document finalDoc = documentRepository.save(doc);
documentVersionService.recordVersion(finalDoc); documentVersionService.recordVersion(finalDoc);
if (fileUploaded) {
thumbnailAsyncRunner.dispatchAfterCommit(finalDoc.getId());
}
return finalDoc; return finalDoc;
} }
@Transactional @Transactional
public Document updateDocument(UUID id, DocumentUpdateDTO dto, MultipartFile newFile, UUID actorId) throws IOException { public Document updateDocument(UUID id, DocumentUpdateDTO dto, MultipartFile newFile) throws IOException {
Document doc = documentRepository.findById(id) Document doc = documentRepository.findById(id)
.orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Document not found: " + id)); .orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Document not found: " + id));
DocumentStatus statusBefore = doc.getStatus();
// 1. Update simple fields // 1. Update simple fields
doc.setTitle(dto.getTitle()); doc.setTitle(dto.getTitle());
doc.setDocumentDate(dto.getDocumentDate()); doc.setDocumentDate(dto.getDocumentDate());
@@ -304,8 +230,7 @@ public class DocumentService {
} }
// 4. Replace the file (only if a new one was selected) // 4. Replace the file (only if a new one was selected)
boolean fileReplaced = newFile != null && !newFile.isEmpty(); if (newFile != null && !newFile.isEmpty()) {
if (fileReplaced) {
FileService.UploadResult upload = fileService.uploadFile(newFile, newFile.getOriginalFilename()); FileService.UploadResult upload = fileService.uploadFile(newFile, newFile.getOriginalFilename());
doc.setFilePath(upload.s3Key()); doc.setFilePath(upload.s3Key());
doc.setFileHash(upload.fileHash()); doc.setFileHash(upload.fileHash());
@@ -316,18 +241,6 @@ public class DocumentService {
Document saved = documentRepository.save(doc); Document saved = documentRepository.save(doc);
documentVersionService.recordVersion(saved); documentVersionService.recordVersion(saved);
if (saved.getStatus() != statusBefore) {
auditService.logAfterCommit(AuditKind.STATUS_CHANGED, actorId, saved.getId(),
Map.of("oldStatus", statusBefore.name(), "newStatus", saved.getStatus().name()));
} else {
auditService.logAfterCommit(AuditKind.METADATA_UPDATED, actorId, saved.getId(), null);
}
if (fileReplaced) {
thumbnailAsyncRunner.dispatchAfterCommit(saved.getId());
}
return saved; return saved;
} }
@@ -369,33 +282,6 @@ public class DocumentService {
return documentRepository.save(doc); return documentRepository.save(doc);
} }
@Transactional
public Document attachFile(UUID id, MultipartFile file, UUID actorId) {
Document doc = documentRepository.findById(id)
.orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Document not found: " + id));
FileService.UploadResult upload;
try {
upload = fileService.uploadFile(file, file.getOriginalFilename());
} catch (IOException e) {
throw DomainException.internal(ErrorCode.FILE_UPLOAD_FAILED, "Failed to upload file: " + e.getMessage());
}
doc.setFilePath(upload.s3Key());
doc.setFileHash(upload.fileHash());
doc.setOriginalFilename(file.getOriginalFilename());
doc.setContentType(file.getContentType());
boolean wasPlaceholder = doc.getStatus() == DocumentStatus.PLACEHOLDER;
if (wasPlaceholder) {
doc.setStatus(DocumentStatus.UPLOADED);
}
Document saved = documentRepository.save(doc);
documentVersionService.recordVersion(saved);
thumbnailAsyncRunner.dispatchAfterCommit(saved.getId());
if (wasPlaceholder) {
auditService.logAfterCommit(AuditKind.FILE_UPLOADED, actorId, saved.getId(), null);
}
return saved;
}
// 0. Recently active documents (sorted by updatedAt DESC) // 0. Recently active documents (sorted by updatedAt DESC)
public List<Document> getRecentActivity(int size) { public List<Document> getRecentActivity(int size) {
return documentRepository.findAll( return documentRepository.findAll(
@@ -404,39 +290,33 @@ public class DocumentService {
} }
// 1. General search (for the search box in the frontend) // 1. General search (for the search box in the frontend)
public DocumentSearchResult searchDocuments(String text, LocalDate from, LocalDate to, UUID sender, UUID receiver, List<String> tags, String tagQ, DocumentStatus status, DocumentSort sort, String dir, TagOperator tagOperator, Pageable pageable) { public List<Document> searchDocuments(String text, LocalDate from, LocalDate to, UUID sender, UUID receiver, List<String> tags, String tagQ, DocumentStatus status, DocumentSort sort, String dir) {
boolean hasText = StringUtils.hasText(text); boolean hasText = StringUtils.hasText(text);
List<UUID> rankedIds = null; List<UUID> rankedIds = null;
if (hasText) { if (hasText) {
rankedIds = documentRepository.findRankedIdsByFts(text); rankedIds = documentRepository.findRankedIdsByFts(text);
if (rankedIds.isEmpty()) return DocumentSearchResult.of(List.of()); if (rankedIds.isEmpty()) return List.of();
} }
boolean useOrLogic = tagOperator == TagOperator.OR;
List<Set<UUID>> expandedTagSets = tagService.expandTagNamesToDescendantIdSets(tags);
Specification<Document> textSpec = hasText ? hasIds(rankedIds) : (root, query, cb) -> null; Specification<Document> textSpec = hasText ? hasIds(rankedIds) : (root, query, cb) -> null;
Specification<Document> spec = Specification.where(textSpec) Specification<Document> spec = Specification.where(textSpec)
.and(isBetween(from, to)) .and(isBetween(from, to))
.and(hasSender(sender)) .and(hasSender(sender))
.and(hasReceiver(receiver)) .and(hasReceiver(receiver))
.and(hasTags(expandedTagSets, useOrLogic)) .and(hasTags(tags))
.and(hasTagPartial(tagQ)) .and(hasTagPartial(tagQ))
.and(hasStatus(status)); .and(hasStatus(status));
// SENDER, RECEIVER and RELEVANCE sorts load the full match set and slice in memory. // SENDER and RECEIVER are sorted in-memory because JPA's Sort.by("sender.lastName")
// JPA's Sort.by("sender.lastName") generates an INNER JOIN that silently drops // generates an INNER JOIN that silently drops documents with null sender/receivers.
// documents with null sender/receivers; RELEVANCE maps a DB order to an external
// rank list. Cost scales linearly with match count — acceptable while documents
// stays under ~10k rows. Past that, replace with SQL-level LEFT JOIN sort.
if (sort == DocumentSort.RECEIVER) { if (sort == DocumentSort.RECEIVER) {
List<Document> sorted = sortByFirstReceiver(documentRepository.findAll(spec), dir); List<Document> results = documentRepository.findAll(spec);
return buildResultPaged(pageSlice(sorted, pageable), text, pageable, sorted.size()); return sortByFirstReceiver(results, dir);
} }
if (sort == DocumentSort.SENDER) { if (sort == DocumentSort.SENDER) {
List<Document> sorted = sortBySender(documentRepository.findAll(spec), dir); List<Document> results = documentRepository.findAll(spec);
return buildResultPaged(pageSlice(sorted, pageable), text, pageable, sorted.size()); return sortBySender(results, dir);
} }
// RELEVANCE: default when text present and no explicit sort given // RELEVANCE: default when text present and no explicit sort given
@@ -445,47 +325,14 @@ public class DocumentService {
List<Document> results = documentRepository.findAll(spec); List<Document> results = documentRepository.findAll(spec);
Map<UUID, Integer> rankMap = new HashMap<>(); Map<UUID, Integer> rankMap = new HashMap<>();
for (int i = 0; i < rankedIds.size(); i++) rankMap.put(rankedIds.get(i), i); for (int i = 0; i < rankedIds.size(); i++) rankMap.put(rankedIds.get(i), i);
List<Document> sorted = results.stream() return results.stream()
.sorted(Comparator.comparingInt( .sorted(Comparator.comparingInt(
doc -> rankMap.getOrDefault(doc.getId(), Integer.MAX_VALUE))) doc -> rankMap.getOrDefault(doc.getId(), Integer.MAX_VALUE)))
.toList(); .toList();
return buildResultPaged(pageSlice(sorted, pageable), text, pageable, sorted.size());
} }
// Fast path — push sort + paging into the DB and enrich only the returned slice. Sort springSort = resolveSort(sort, dir);
PageRequest pageRequest = PageRequest.of(pageable.getPageNumber(), pageable.getPageSize(), resolveSort(sort, dir)); return documentRepository.findAll(spec, springSort);
Page<Document> page = documentRepository.findAll(spec, pageRequest);
return buildResultPaged(page.getContent(), text, pageable, page.getTotalElements());
}
private static <T> List<T> pageSlice(List<T> sorted, Pageable pageable) {
int from = Math.min((int) pageable.getOffset(), sorted.size());
int to = Math.min(from + pageable.getPageSize(), sorted.size());
return sorted.subList(from, to);
}
private DocumentSearchResult buildResultPaged(List<Document> slice, String text, Pageable pageable, long totalElements) {
return DocumentSearchResult.paged(enrichItems(slice, text), pageable, totalElements);
}
private List<DocumentSearchItem> enrichItems(List<Document> documents, String text) {
List<Document> colorResolved = resolveDocumentTagColors(documents);
Map<UUID, SearchMatchData> matchData = enrichWithMatchData(colorResolved, text);
List<UUID> docIds = colorResolved.stream().map(Document::getId).toList();
Map<UUID, Integer> completionByDoc = fetchCompletionPercentages(docIds);
Map<UUID, List<ActivityActorDTO>> contributorsByDoc = auditLogQueryService.findRecentContributorsPerDocument(docIds);
return colorResolved.stream().map(doc -> new DocumentSearchItem(
doc,
matchData.getOrDefault(doc.getId(), SearchMatchData.empty()),
completionByDoc.getOrDefault(doc.getId(), 0),
contributorsByDoc.getOrDefault(doc.getId(), List.of())
)).toList();
}
private Map<UUID, Integer> fetchCompletionPercentages(List<UUID> docIds) {
return transcriptionBlockQueryService.getCompletionStats(docIds);
} }
private Sort resolveSort(DocumentSort sort, String dir) { private Sort resolveSort(DocumentSort sort, String dir) {
@@ -576,14 +423,8 @@ public class DocumentService {
} }
public Document getDocumentById(UUID id) { public Document getDocumentById(UUID id) {
Document doc = documentRepository.findById(id) return documentRepository.findById(id)
.orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Document not found: " + id)); .orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Document not found: " + id));
tagService.resolveEffectiveColors(doc.getTags());
return doc;
}
public List<Document> getDocumentsByIds(List<UUID> ids) {
return documentRepository.findAllById(ids);
} }
public List<Document> getDocumentsWithoutVersions() { public List<Document> getDocumentsWithoutVersions() {
@@ -615,7 +456,7 @@ public class DocumentService {
PageRequest pageable = PageRequest.of(0, size, Sort.by(Sort.Direction.DESC, "createdAt")); PageRequest pageable = PageRequest.of(0, size, Sort.by(Sort.Direction.DESC, "createdAt"));
return documentRepository.findByMetadataCompleteFalse(pageable) return documentRepository.findByMetadataCompleteFalse(pageable)
.stream() .stream()
.map(doc -> new IncompleteDocumentDTO(doc.getId(), doc.getTitle(), doc.getCreatedAt())) .map(doc -> new IncompleteDocumentDTO(doc.getId(), doc.getTitle()))
.toList(); .toList();
} }
@@ -662,12 +503,6 @@ public class DocumentService {
// ─── private helpers ────────────────────────────────────────────────────── // ─── private helpers ──────────────────────────────────────────────────────
private List<Document> resolveDocumentTagColors(List<Document> docs) {
List<Tag> allTags = docs.stream().flatMap(d -> d.getTags().stream()).toList();
tagService.resolveEffectiveColors(allTags);
return docs;
}
private static String stripExtension(String filename) { private static String stripExtension(String filename) {
if (filename == null) return null; if (filename == null) return null;
int dot = filename.lastIndexOf('.'); int dot = filename.lastIndexOf('.');
@@ -749,93 +584,6 @@ public class DocumentService {
return null; return null;
} }
/**
* Calls {@code findEnrichmentData} and converts the raw Object[] rows into a
* {@link SearchMatchData} per document. Short-circuits when the list is empty or
* the query is blank (no text search active).
*/
private Map<UUID, SearchMatchData> enrichWithMatchData(List<Document> docs, String query) {
if (docs.isEmpty() || !StringUtils.hasText(query)) return Map.of();
List<UUID> ids = docs.stream().map(Document::getId).toList();
Map<UUID, SearchMatchData> result = new HashMap<>();
for (Object[] row : documentRepository.findEnrichmentData(ids, query)) {
UUID docId = (UUID) row[0];
String titleHeadline = (String) row[1];
String snippetHeadline = (String) row[2];
Boolean senderMatched = (Boolean) row[3];
String receiverIdsStr = (String) row[4];
String tagIdsStr = (String) row[5];
String summaryHeadline = (String) row[6];
ParsedHighlight snippet = parseHighlight(snippetHeadline);
ParsedHighlight summary = parseHighlight(summaryHeadline);
result.put(docId, new SearchMatchData(
snippet != null ? snippet.cleanText() : null,
parseTitleOffsets(titleHeadline),
senderMatched != null && senderMatched,
parseUUIDs(receiverIdsStr),
parseUUIDs(tagIdsStr),
snippet != null ? snippet.offsets() : List.of(),
summary != null ? summary.cleanText() : null,
summary != null ? summary.offsets() : List.of()
));
}
return result;
}
/** Clean text + highlight offsets parsed from a {@code ts_headline} sentinel-delimited string. */
public record ParsedHighlight(String cleanText, List<MatchOffset> offsets) {}
/**
* Parses a {@code ts_headline} result that uses {@code chr(1)}/{@code chr(2)} as
* start/stop delimiters. Returns the clean text (delimiters stripped) together with
* the character offsets of each highlighted span. Returns {@code null} when
* {@code headline} is {@code null}.
*/
public static ParsedHighlight parseHighlight(String headline) {
if (headline == null) return null;
StringBuilder clean = new StringBuilder(headline.length());
List<MatchOffset> offsets = new ArrayList<>();
int i = 0;
int pos = 0; // position in the clean string (no delimiters)
while (i < headline.length()) {
char c = headline.charAt(i);
if (c == '\u0001') {
int start = pos;
i++;
while (i < headline.length() && headline.charAt(i) != '\u0002') {
clean.append(headline.charAt(i));
i++;
pos++;
}
offsets.add(new MatchOffset(start, pos - start));
i++; // skip \u0002
} else {
clean.append(c);
i++;
pos++;
}
}
return new ParsedHighlight(clean.toString(), offsets);
}
/**
* Extracts only the {@link MatchOffset} list from a title headline.
* The clean title text comes from the {@link Document} entity itself.
*/
private static List<MatchOffset> parseTitleOffsets(String headline) {
ParsedHighlight parsed = parseHighlight(headline);
return parsed != null ? parsed.offsets() : List.of();
}
private static List<UUID> parseUUIDs(String csv) {
if (csv == null || csv.isBlank()) return List.of();
return Arrays.stream(csv.split(","))
.map(String::trim)
.filter(s -> !s.isEmpty())
.map(UUID::fromString)
.toList();
}
private static String sha256Hex(byte[] bytes) { private static String sha256Hex(byte[] bytes) {
try { try {
MessageDigest digest = MessageDigest.getInstance("SHA-256"); MessageDigest digest = MessageDigest.getInstance("SHA-256");
@@ -849,5 +597,4 @@ public class DocumentService {
throw new IllegalStateException("SHA-256 not available", e); throw new IllegalStateException("SHA-256 not available", e);
} }
} }
} }
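To make the sentinel format concrete, a small hedged example of the removed `parseHighlight` helper; `\u0001`/`\u0002` are the StartSel/StopSel characters configured in the `ts_headline` calls shown earlier in this diff:

```java
String headline = "Ein \u0001Brief\u0002 aus \u0001Paris\u0002";
DocumentService.ParsedHighlight parsed = DocumentService.parseHighlight(headline);

// parsed.cleanText()  ->  "Ein Brief aus Paris"
// parsed.offsets()    ->  two MatchOffsets covering "Brief" (start 4, length 5)
//                         and "Paris" (start 14, length 5)
```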

View File

@@ -100,7 +100,7 @@ public class DocumentVersionService {
return null; return null;
} }
try { try {
return userService.findByEmail(auth.getName()); return userService.findByUsername(auth.getName());
} catch (Exception e) { } catch (Exception e) {
log.warn("Could not resolve editor for version snapshot: {}", e.getMessage()); log.warn("Could not resolve editor for version snapshot: {}", e.getMessage());
return null; return null;
@@ -114,7 +114,7 @@ public class DocumentVersionService {
if (first != null && !first.isBlank() && last != null && !last.isBlank()) { if (first != null && !first.isBlank() && last != null && !last.isBlank()) {
return first + " " + last; return first + " " + last;
} }
return user.getEmail(); return user.getUsername();
} }
private String serializeSnapshot(Document doc) { private String serializeSnapshot(Document doc) {

View File

@@ -112,27 +112,6 @@ public class FileService {
} }
} }
/**
* Opens a streaming download from S3/MinIO. The caller is responsible for
* closing the returned stream — typically via try-with-resources. Preferred
* over {@link #downloadFileBytes(String)} for large files (multi-MB PDFs
* during thumbnail generation) because it avoids loading the entire file
* into heap memory.
*/
public InputStream downloadFileStream(String s3Key) throws IOException {
try {
GetObjectRequest getObjectRequest = GetObjectRequest.builder()
.bucket(bucketName)
.key(s3Key)
.build();
return s3Client.getObject(getObjectRequest);
} catch (NoSuchKeyException e) {
throw new StorageFileNotFoundException("File not found in storage: " + s3Key);
} catch (S3Exception e) {
throw new IOException("Failed to open stream from storage: " + e.getMessage(), e);
}
}
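A hedged usage sketch for the removed streaming download, matching the try-with-resources pattern its Javadoc calls for; the thumbnail-rendering call is a hypothetical stand-in:

```java
try (InputStream in = fileService.downloadFileStream(document.getFilePath())) {
    byte[] thumbnailPng = renderFirstPageAsPng(in); // hypothetical helper
    // ... upload thumbnailPng and store the resulting thumbnail key on the document ...
} catch (IOException e) {
    log.warn("Thumbnail generation failed for document {}: {}", document.getId(), e.getMessage());
}
```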
/** /**
* Generates a presigned URL for downloading an object from S3/MinIO. * Generates a presigned URL for downloading an object from S3/MinIO.
* Valid for 1 hour — covers multi-page documents on CPU-only OCR hardware * Valid for 1 hour — covers multi-page documents on CPU-only OCR hardware

View File

@@ -1,165 +0,0 @@
package org.raddatz.familienarchiv.service;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.raddatz.familienarchiv.dto.CreateInviteRequest;
import org.raddatz.familienarchiv.dto.InviteListItemDTO;
import org.raddatz.familienarchiv.dto.RegisterRequest;
import org.raddatz.familienarchiv.exception.DomainException;
import org.raddatz.familienarchiv.exception.ErrorCode;
import org.raddatz.familienarchiv.model.AppUser;
import org.raddatz.familienarchiv.model.InviteToken;
import org.raddatz.familienarchiv.model.UserGroup;
import org.raddatz.familienarchiv.repository.InviteTokenRepository;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.security.SecureRandom;
import java.util.*;
@Service
@RequiredArgsConstructor
@Slf4j
public class InviteService {
static final int MIN_PASSWORD_LENGTH = 8;
private static final String CODE_ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
private static final int CODE_LENGTH = 10;
private static final int MAX_CODE_ATTEMPTS = 10;
private static final SecureRandom SECURE_RANDOM = new SecureRandom();
private final InviteTokenRepository inviteTokenRepository;
private final UserService userService;
public String generateCode() {
for (int attempt = 0; attempt < MAX_CODE_ATTEMPTS; attempt++) {
String code = buildRandomCode();
if (inviteTokenRepository.findByCode(code).isEmpty()) {
return code;
}
}
throw DomainException.internal(ErrorCode.INTERNAL_ERROR, "Failed to generate unique invite code after " + MAX_CODE_ATTEMPTS + " attempts");
}
public InviteToken validateCode(String code) {
InviteToken token = inviteTokenRepository.findByCode(code)
.orElseThrow(() -> DomainException.notFound(ErrorCode.INVITE_NOT_FOUND, "Invite not found: " + code));
checkTokenState(token);
return token;
}
@Transactional
public InviteToken createInvite(CreateInviteRequest dto, AppUser creator) {
Set<UUID> groupIds = new HashSet<>();
if (dto.getGroupIds() != null && !dto.getGroupIds().isEmpty()) {
List<UserGroup> groups = userService.findGroupsByIds(dto.getGroupIds());
groups.forEach(g -> groupIds.add(g.getId()));
}
InviteToken token = InviteToken.builder()
.code(generateCode())
.label(dto.getLabel())
.maxUses(dto.getMaxUses())
.prefillFirstName(dto.getPrefillFirstName())
.prefillLastName(dto.getPrefillLastName())
.prefillEmail(dto.getPrefillEmail())
.groupIds(groupIds)
.expiresAt(dto.getExpiresAt())
.createdBy(creator)
.build();
return inviteTokenRepository.save(token);
}
@Transactional
public AppUser redeemInvite(RegisterRequest dto) {
InviteToken token = inviteTokenRepository.findByCodeForUpdate(dto.getCode())
.orElseThrow(() -> DomainException.notFound(ErrorCode.INVITE_NOT_FOUND, "Invite not found: " + dto.getCode()));
checkTokenState(token);
if (dto.getPassword() == null || dto.getPassword().length() < MIN_PASSWORD_LENGTH) {
throw DomainException.badRequest(ErrorCode.VALIDATION_ERROR,
"Password must be at least " + MIN_PASSWORD_LENGTH + " characters");
}
AppUser user = userService.createUser(
dto.getEmail(),
dto.getPassword(),
dto.getFirstName(),
dto.getLastName(),
token.getGroupIds()
);
userService.updateNotificationPreferences(user.getId(), dto.isNotifyOnMention(), dto.isNotifyOnMention());
token.setUseCount(token.getUseCount() + 1);
inviteTokenRepository.save(token);
log.info("User {} registered via invite code {}", dto.getEmail(), dto.getCode());
return user;
}
@Transactional
public void revokeInvite(UUID id) {
InviteToken token = inviteTokenRepository.findById(id)
.orElseThrow(() -> DomainException.notFound(ErrorCode.INVITE_NOT_FOUND, "Invite not found: " + id));
token.setRevoked(true);
inviteTokenRepository.save(token);
}
public List<InviteListItemDTO> listInvites(boolean activeOnly, String appBaseUrl) {
List<InviteToken> tokens = activeOnly
? inviteTokenRepository.findActive()
: inviteTokenRepository.findAllOrderedByCreatedAt();
return tokens.stream().map(t -> toListItemDTO(t, appBaseUrl)).toList();
}
public InviteListItemDTO toListItemDTO(InviteToken token, String appBaseUrl) {
String status;
if (token.isRevoked()) status = "revoked";
else if (token.isExpired()) status = "expired";
else if (token.isExhausted()) status = "exhausted";
else status = "active";
return InviteListItemDTO.builder()
.id(token.getId())
.code(token.getCode())
.displayCode(formatDisplayCode(token.getCode()))
.label(token.getLabel())
.useCount(token.getUseCount())
.maxUses(token.getMaxUses())
.expiresAt(token.getExpiresAt())
.revoked(token.isRevoked())
.status(status)
.createdAt(token.getCreatedAt())
.shareableUrl(appBaseUrl + "/register?code=" + token.getCode())
.build();
}
private void checkTokenState(InviteToken token) {
if (token.isRevoked()) {
throw DomainException.conflict(ErrorCode.INVITE_REVOKED, "Invite has been revoked");
}
if (token.isExpired()) {
throw new DomainException(ErrorCode.INVITE_EXPIRED, org.springframework.http.HttpStatus.GONE,
"Invite has expired");
}
if (token.isExhausted()) {
throw DomainException.conflict(ErrorCode.INVITE_EXHAUSTED, "Invite use limit reached");
}
}
private String buildRandomCode() {
StringBuilder sb = new StringBuilder(CODE_LENGTH);
for (int i = 0; i < CODE_LENGTH; i++) {
sb.append(CODE_ALPHABET.charAt(SECURE_RANDOM.nextInt(CODE_ALPHABET.length())));
}
return sb.toString();
}
public static String formatDisplayCode(String code) {
if (code == null || code.length() != CODE_LENGTH) return code;
return code.substring(0, 5) + "-" + code.substring(5);
}
}
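A quick worked example of the display formatting above, with a made-up code value: for a stored code of "7K2MQ9XWT4", formatDisplayCode splits it into "7K2MQ" and "9XWT4" and returns "7K2MQ-9XWT4"; any value that is null or whose length is not exactly CODE_LENGTH is returned unchanged, so malformed codes never throw here.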

View File

@@ -59,7 +59,6 @@ public class MassImportService {
private final PersonService personService; private final PersonService personService;
private final TagService tagService; private final TagService tagService;
private final S3Client s3Client; private final S3Client s3Client;
private final ThumbnailAsyncRunner thumbnailAsyncRunner;
@Value("${app.s3.bucket}") @Value("${app.s3.bucket}")
private String bucketName; private String bucketName;
@@ -333,10 +332,7 @@ public class MassImportService {
if (tag != null) doc.getTags().add(tag); if (tag != null) doc.getTags().add(tag);
doc.setMetadataComplete(metadataComplete); doc.setMetadataComplete(metadataComplete);
Document saved = documentRepository.save(doc); documentRepository.save(doc);
if (file.isPresent()) {
thumbnailAsyncRunner.dispatchAfterCommit(saved.getId());
}
log.info("Importiert{}: {}", file.isEmpty() ? " (nur Metadaten)" : "", originalFilename); log.info("Importiert{}: {}", file.isEmpty() ? " (nur Metadaten)" : "", originalFilename);
} }

View File

@@ -9,12 +9,10 @@ import org.raddatz.familienarchiv.repository.OcrJobRepository;
import org.springframework.scheduling.annotation.Async; import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.UUID; import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
@Component @Component
@RequiredArgsConstructor @RequiredArgsConstructor
@@ -31,7 +29,6 @@ public class OcrAsyncRunner {
private final OcrJobRepository ocrJobRepository; private final OcrJobRepository ocrJobRepository;
private final OcrJobDocumentRepository ocrJobDocumentRepository; private final OcrJobDocumentRepository ocrJobDocumentRepository;
private final OcrProgressService ocrProgressService; private final OcrProgressService ocrProgressService;
private final SenderModelService senderModelService;
@Async @Async
public void runSingleDocument(UUID jobId, UUID documentId, UUID userId) { public void runSingleDocument(UUID jobId, UUID documentId, UUID userId) {
@@ -71,18 +68,12 @@ public class OcrAsyncRunner {
String pdfUrl = fileService.generatePresignedUrl(doc.getFilePath()); String pdfUrl = fileService.generatePresignedUrl(doc.getFilePath());
String senderModelPath = null;
if (doc.getSender() != null && doc.getScriptType() == ScriptType.HANDWRITING_KURRENT) {
senderModelPath = senderModelService.maybeGetModelPath(doc.getSender().getId()).orElse(null);
}
AtomicInteger blockCounter = new AtomicInteger(0); AtomicInteger blockCounter = new AtomicInteger(0);
AtomicInteger currentPage = new AtomicInteger(0); AtomicInteger currentPage = new AtomicInteger(0);
AtomicInteger skippedPages = new AtomicInteger(0); AtomicInteger skippedPages = new AtomicInteger(0);
AtomicInteger totalPages = new AtomicInteger(0); AtomicInteger totalPages = new AtomicInteger(0);
final String finalSenderModelPath = senderModelPath; ocrClient.streamBlocks(pdfUrl, doc.getScriptType(), regions, event -> {
ocrClient.streamBlocks(pdfUrl, doc.getScriptType(), regions, finalSenderModelPath, event -> {
switch (event) { switch (event) {
case OcrStreamEvent.Start start -> { case OcrStreamEvent.Start start -> {
totalPages.set(start.totalPages()); totalPages.set(start.totalPages());
@@ -91,10 +82,6 @@ public class OcrAsyncRunner {
ocrJobDocumentRepository.save(jobDoc); ocrJobDocumentRepository.save(jobDoc);
} }
} }
case OcrStreamEvent.Preprocessing preprocessing -> {
updateProgress(job, "PREPROCESSING_PAGE:" + preprocessing.pageNumber()
+ ":" + totalPages.get());
}
case OcrStreamEvent.Page page -> { case OcrStreamEvent.Page page -> {
for (OcrBlockResult block : page.blocks()) { for (OcrBlockResult block : page.blocks()) {
createSingleBlock(documentId, block, userId, createSingleBlock(documentId, block, userId,
@@ -216,25 +203,7 @@ public class OcrAsyncRunner {
clearExistingBlocks(documentId); clearExistingBlocks(documentId);
String pdfUrl = fileService.generatePresignedUrl(doc.getFilePath()); String pdfUrl = fileService.generatePresignedUrl(doc.getFilePath());
List<OcrBlockResult> blocks = ocrClient.extractBlocks(pdfUrl, doc.getScriptType());
String senderModelPath = null;
if (doc.getSender() != null && doc.getScriptType() == ScriptType.HANDWRITING_KURRENT) {
senderModelPath = senderModelService.maybeGetModelPath(doc.getSender().getId()).orElse(null);
}
final AtomicReference<List<OcrBlockResult>> blocksRef = new AtomicReference<>();
final String finalSenderModelPath = senderModelPath;
ocrClient.streamBlocks(pdfUrl, doc.getScriptType(), null, finalSenderModelPath, event -> {
switch (event) {
case OcrStreamEvent.Page page -> {
blocksRef.compareAndSet(null, new ArrayList<>());
blocksRef.get().addAll(page.blocks());
}
default -> {}
}
});
List<OcrBlockResult> blocks = blocksRef.get() != null ? blocksRef.get() : List.of();
createTranscriptionBlocks(documentId, blocks, userId, doc.getFileHash()); createTranscriptionBlocks(documentId, blocks, userId, doc.getFileHash());
} }

View File

@@ -1,7 +1,6 @@
package org.raddatz.familienarchiv.service; package org.raddatz.familienarchiv.service;
import org.raddatz.familienarchiv.model.ScriptType; import org.raddatz.familienarchiv.model.ScriptType;
import org.springframework.lang.Nullable;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
@@ -38,27 +37,15 @@ public interface OcrClient {
TrainingResult segtrainModel(byte[] trainingDataZip); TrainingResult segtrainModel(byte[] trainingDataZip);
/** /**
* Fine-tune the Kurrent model for a specific sender. * Stream OCR results page-by-page via NDJSON. Implementations should override
* this method. The default exists only for backward compatibility during migration
* — it calls extractBlocks() and synthesizes events from the collected result.
* *
* @param trainingDataZip raw ZIP bytes produced by TrainingDataExportService.exportForSender() * @param regions optional list of pre-drawn annotation regions; when non-null,
* @param outputModelPath where to save the trained model (e.g. /app/models/sender_{uuid}.mlmodel) * the OCR service runs in guided mode (crop + recognize per region)
* @return training result metrics
*/
TrainingResult trainSenderModel(byte[] trainingDataZip, String outputModelPath);
/**
* Stream OCR results page-by-page via NDJSON, optionally using a sender-specific model.
* The default implementation synthesizes events from extractBlocks() for backward compatibility.
* Implementations that support real streaming (e.g. RestClientOcrClient) override this.
*
* @param regions optional list of pre-drawn annotation regions; when non-null,
* the OCR service runs in guided mode (crop + recognize per region)
* @param senderModelPath optional path to a per-sender model file; null means use base model
*/ */
default void streamBlocks(String pdfUrl, ScriptType scriptType, default void streamBlocks(String pdfUrl, ScriptType scriptType,
List<OcrRegion> regions, List<OcrRegion> regions, Consumer<OcrStreamEvent> handler) {
@Nullable String senderModelPath,
Consumer<OcrStreamEvent> handler) {
List<OcrBlockResult> allBlocks = extractBlocks(pdfUrl, scriptType); List<OcrBlockResult> allBlocks = extractBlocks(pdfUrl, scriptType);
LinkedHashMap<Integer, List<OcrBlockResult>> byPage = new LinkedHashMap<>(); LinkedHashMap<Integer, List<OcrBlockResult>> byPage = new LinkedHashMap<>();
@@ -75,9 +62,4 @@ public interface OcrClient {
handler.accept(new OcrStreamEvent.Done(allBlocks.size(), 0)); handler.accept(new OcrStreamEvent.Done(allBlocks.size(), 0));
} }
default void streamBlocks(String pdfUrl, ScriptType scriptType,
List<OcrRegion> regions, Consumer<OcrStreamEvent> handler) {
streamBlocks(pdfUrl, scriptType, regions, null, handler);
}
} }
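For orientation, a minimal consumer sketch against the streaming contract above; only the event types and the four-argument signature come from the interface shown here, while ocrClient, pdfUrl, scriptType and log are assumed to exist in the caller's scope:
// Illustrative handler only. Each NDJSON event arrives as one OcrStreamEvent;
// the default branch keeps the switch valid even if further event types exist.
ocrClient.streamBlocks(pdfUrl, scriptType, null, event -> {
    switch (event) {
        case OcrStreamEvent.Start start -> log.info("OCR started, {} pages", start.totalPages());
        case OcrStreamEvent.Page page -> log.info("page {}: {} blocks", page.pageNumber(), page.blocks().size());
        case OcrStreamEvent.Error error -> log.warn("page {} failed: {}", error.pageNumber(), error.message());
        default -> { /* Done and any other terminal events need no handling here */ }
    }
});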

View File

@@ -6,8 +6,6 @@ public sealed interface OcrStreamEvent {
record Start(int totalPages) implements OcrStreamEvent {} record Start(int totalPages) implements OcrStreamEvent {}
record Preprocessing(int pageNumber) implements OcrStreamEvent {}
record Page(int pageNumber, List<OcrBlockResult> blocks) implements OcrStreamEvent {} record Page(int pageNumber, List<OcrBlockResult> blocks) implements OcrStreamEvent {}
record Error(int pageNumber, String message) implements OcrStreamEvent {} record Error(int pageNumber, String message) implements OcrStreamEvent {}

View File

@@ -2,12 +2,9 @@ package org.raddatz.familienarchiv.service;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.raddatz.familienarchiv.dto.TrainingHistoryResponse;
import org.raddatz.familienarchiv.dto.TrainingInfoResponse;
import org.raddatz.familienarchiv.exception.DomainException; import org.raddatz.familienarchiv.exception.DomainException;
import org.raddatz.familienarchiv.exception.ErrorCode; import org.raddatz.familienarchiv.exception.ErrorCode;
import org.raddatz.familienarchiv.model.OcrTrainingRun; import org.raddatz.familienarchiv.model.OcrTrainingRun;
import org.raddatz.familienarchiv.model.SenderModel;
import org.raddatz.familienarchiv.model.TrainingStatus; import org.raddatz.familienarchiv.model.TrainingStatus;
import org.raddatz.familienarchiv.repository.OcrTrainingRunRepository; import org.raddatz.familienarchiv.repository.OcrTrainingRunRepository;
import org.raddatz.familienarchiv.repository.TranscriptionBlockRepository; import org.raddatz.familienarchiv.repository.TranscriptionBlockRepository;
@@ -20,11 +17,9 @@ import org.springframework.transaction.support.TransactionTemplate;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
import java.time.Instant; import java.time.Instant;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.Optional;
import java.util.UUID; import java.util.UUID;
@Service @Service
@@ -39,8 +34,16 @@ public class OcrTrainingService {
private final OcrHealthClient ocrHealthClient; private final OcrHealthClient ocrHealthClient;
private final TranscriptionBlockRepository blockRepository; private final TranscriptionBlockRepository blockRepository;
private final TransactionTemplate txTemplate; private final TransactionTemplate txTemplate;
private final PersonService personService;
private final SenderModelService senderModelService; public record TrainingInfoResponse(
int availableBlocks,
int totalOcrBlocks,
int availableDocuments,
int availableSegBlocks,
boolean ocrServiceAvailable,
OcrTrainingRun lastRun,
List<OcrTrainingRun> runs
) {}
private void assertNoRunningTraining() { private void assertNoRunningTraining() {
if (trainingRunRepository.findFirstByStatus(TrainingStatus.RUNNING).isPresent()) { if (trainingRunRepository.findFirstByStatus(TrainingStatus.RUNNING).isPresent()) {
@@ -192,21 +195,9 @@ public class OcrTrainingService {
int totalOcrBlocks = (int) blockRepository.count(); int totalOcrBlocks = (int) blockRepository.count();
int availableSegBlocks = segmentationTrainingExportService.querySegmentationBlocks().size(); int availableSegBlocks = segmentationTrainingExportService.querySegmentationBlocks().size();
List<OcrTrainingRun> recentRuns = trainingRunRepository.findTop20ByOrderByCreatedAtDesc(); List<OcrTrainingRun> recentRuns = trainingRunRepository.findTop10ByOrderByCreatedAtDesc();
OcrTrainingRun lastRun = recentRuns.isEmpty() ? null : recentRuns.get(0); OcrTrainingRun lastRun = recentRuns.isEmpty() ? null : recentRuns.get(0);
List<SenderModel> senderModels = senderModelService.getAllSenderModels();
List<UUID> allPersonIds = senderModels.stream()
.map(SenderModel::getPersonId)
.distinct()
.toList();
Map<String, String> personNames = new HashMap<>();
if (!allPersonIds.isEmpty()) {
personService.getAllById(allPersonIds)
.forEach(p -> personNames.put(p.getId().toString(), p.getDisplayName()));
}
return new TrainingInfoResponse( return new TrainingInfoResponse(
eligibleBlocks.size(), eligibleBlocks.size(),
totalOcrBlocks, totalOcrBlocks,
@@ -214,23 +205,10 @@ public class OcrTrainingService {
availableSegBlocks, availableSegBlocks,
ocrHealthClient.isHealthy(), ocrHealthClient.isHealthy(),
lastRun, lastRun,
recentRuns, recentRuns
personNames,
senderModels
); );
} }
public TrainingHistoryResponse getGlobalTrainingHistory() {
List<OcrTrainingRun> runs = trainingRunRepository.findByPersonIdIsNullOrderByCreatedAtDesc();
return new TrainingHistoryResponse(runs, Map.of());
}
public TrainingHistoryResponse getSenderTrainingHistory(UUID personId) {
String personName = personService.getById(personId).getDisplayName();
List<OcrTrainingRun> runs = trainingRunRepository.findByPersonIdOrderByCreatedAtDesc(personId);
return new TrainingHistoryResponse(runs, Map.of(personId.toString(), personName));
}
@EventListener(ApplicationReadyEvent.class) @EventListener(ApplicationReadyEvent.class)
@Transactional @Transactional
public void recoverOrphanedRuns() { public void recoverOrphanedRuns() {
@@ -246,4 +224,15 @@ public class OcrTrainingService {
}); });
} }
public Map<String, Object> buildTrainingInfoMap(TrainingInfoResponse info) {
return Map.of(
"availableBlocks", info.availableBlocks(),
"totalOcrBlocks", info.totalOcrBlocks(),
"availableDocuments", info.availableDocuments(),
"availableSegBlocks", info.availableSegBlocks(),
"ocrServiceAvailable", info.ocrServiceAvailable(),
"lastRun", info.lastRun() != null ? info.lastRun() : Map.of(),
"runs", info.runs()
);
}
} }
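One detail worth noting in buildTrainingInfoMap above: Map.of rejects null values with a NullPointerException at runtime, which is why lastRun falls back to an empty Map.of() rather than null when no training run exists yet; a map that genuinely needs nullable values would have to be built with HashMap instead.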

View File

@@ -14,7 +14,6 @@ import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders; import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType; import org.springframework.http.MediaType;
import org.springframework.http.client.JdkClientHttpRequestFactory; import org.springframework.http.client.JdkClientHttpRequestFactory;
import org.springframework.lang.Nullable;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap; import org.springframework.util.MultiValueMap;
@@ -103,13 +102,6 @@ public class RestClientOcrClient implements OcrClient, OcrHealthClient {
.toList(); .toList();
} }
private RestClient.RequestBodySpec addTrainingAuth(RestClient.RequestBodySpec spec) {
if (trainingToken != null && !trainingToken.isBlank()) {
return spec.header("X-Training-Token", trainingToken);
}
return spec;
}
@Override @Override
public OcrClient.TrainingResult trainModel(byte[] trainingDataZip) { public OcrClient.TrainingResult trainModel(byte[] trainingDataZip) {
ByteArrayResource zipResource = new ByteArrayResource(trainingDataZip) { ByteArrayResource zipResource = new ByteArrayResource(trainingDataZip) {
@@ -122,10 +114,15 @@ public class RestClientOcrClient implements OcrClient, OcrHealthClient {
partHeaders.setContentType(MediaType.parseMediaType("application/zip")); partHeaders.setContentType(MediaType.parseMediaType("application/zip"));
body.add("file", new HttpEntity<>(zipResource, partHeaders)); body.add("file", new HttpEntity<>(zipResource, partHeaders));
TrainingResultJson result = addTrainingAuth( var spec = trainingRestClient.post()
trainingRestClient.post() .uri("/train")
.uri("/train") .contentType(MediaType.MULTIPART_FORM_DATA);
.contentType(MediaType.MULTIPART_FORM_DATA))
if (trainingToken != null && !trainingToken.isBlank()) {
spec = spec.header("X-Training-Token", trainingToken);
}
TrainingResultJson result = spec
.body(body) .body(body)
.retrieve() .retrieve()
.body(TrainingResultJson.class); .body(TrainingResultJson.class);
@@ -146,35 +143,15 @@ public class RestClientOcrClient implements OcrClient, OcrHealthClient {
partHeaders.setContentType(MediaType.parseMediaType("application/zip")); partHeaders.setContentType(MediaType.parseMediaType("application/zip"));
body.add("file", new HttpEntity<>(zipResource, partHeaders)); body.add("file", new HttpEntity<>(zipResource, partHeaders));
TrainingResultJson result = addTrainingAuth( var spec = trainingRestClient.post()
trainingRestClient.post() .uri("/segtrain")
.uri("/segtrain") .contentType(MediaType.MULTIPART_FORM_DATA);
.contentType(MediaType.MULTIPART_FORM_DATA))
.body(body)
.retrieve()
.body(TrainingResultJson.class);
if (result == null) return new OcrClient.TrainingResult(null, null, null, null); if (trainingToken != null && !trainingToken.isBlank()) {
return new OcrClient.TrainingResult(result.loss(), result.accuracy(), result.cer(), result.epochs()); spec = spec.header("X-Training-Token", trainingToken);
} }
@Override TrainingResultJson result = spec
public OcrClient.TrainingResult trainSenderModel(byte[] trainingDataZip, String outputModelPath) {
ByteArrayResource zipResource = new ByteArrayResource(trainingDataZip) {
@Override
public String getFilename() { return "sender-training-data.zip"; }
};
MultiValueMap<String, Object> body = new LinkedMultiValueMap<>();
HttpHeaders partHeaders = new HttpHeaders();
partHeaders.setContentType(MediaType.parseMediaType("application/zip"));
body.add("file", new HttpEntity<>(zipResource, partHeaders));
body.add("output_model_path", outputModelPath);
TrainingResultJson result = addTrainingAuth(
trainingRestClient.post()
.uri("/train-sender")
.contentType(MediaType.MULTIPART_FORM_DATA))
.body(body) .body(body)
.retrieve() .retrieve()
.body(TrainingResultJson.class); .body(TrainingResultJson.class);
@@ -199,8 +176,7 @@ public class RestClientOcrClient implements OcrClient, OcrHealthClient {
@Override @Override
public void streamBlocks(String pdfUrl, ScriptType scriptType, public void streamBlocks(String pdfUrl, ScriptType scriptType,
List<OcrRegion> regions, @Nullable String senderModelPath, List<OcrRegion> regions, Consumer<OcrStreamEvent> handler) {
Consumer<OcrStreamEvent> handler) {
String body; String body;
try { try {
var requestMap = new java.util.LinkedHashMap<String, Object>(); var requestMap = new java.util.LinkedHashMap<String, Object>();
@@ -210,9 +186,6 @@ public class RestClientOcrClient implements OcrClient, OcrHealthClient {
if (regions != null && !regions.isEmpty()) { if (regions != null && !regions.isEmpty()) {
requestMap.put("regions", regions); requestMap.put("regions", regions);
} }
if (senderModelPath != null) {
requestMap.put("senderModelPath", senderModelPath);
}
body = NDJSON_MAPPER.writeValueAsString(requestMap); body = NDJSON_MAPPER.writeValueAsString(requestMap);
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException("Failed to serialize OCR request", e); throw new RuntimeException("Failed to serialize OCR request", e);
@@ -231,12 +204,7 @@ public class RestClientOcrClient implements OcrClient, OcrHealthClient {
if (response.statusCode() == 404) { if (response.statusCode() == 404) {
log.info("OCR service does not support /ocr/stream (404), falling back to /ocr"); log.info("OCR service does not support /ocr/stream (404), falling back to /ocr");
List<OcrBlockResult> allBlocks = extractBlocks(pdfUrl, scriptType); OcrClient.super.streamBlocks(pdfUrl, scriptType, regions, handler);
handler.accept(new OcrStreamEvent.Start(0));
for (OcrBlockResult block : allBlocks) {
handler.accept(new OcrStreamEvent.Page(block.pageNumber(), List.of(block)));
}
handler.accept(new OcrStreamEvent.Done(allBlocks.size(), 0));
return; return;
} }
@@ -264,8 +232,6 @@ public class RestClientOcrClient implements OcrClient, OcrHealthClient {
switch (type) { switch (type) {
case "start" -> handler.accept( case "start" -> handler.accept(
new OcrStreamEvent.Start(node.path("totalPages").asInt())); new OcrStreamEvent.Start(node.path("totalPages").asInt()));
case "preprocessing" -> handler.accept(
new OcrStreamEvent.Preprocessing(node.path("pageNumber").asInt()));
case "page" -> { case "page" -> {
int pageNumber = node.path("pageNumber").asInt(); int pageNumber = node.path("pageNumber").asInt();
List<OcrBlockResult> blocks = NDJSON_MAPPER.convertValue( List<OcrBlockResult> blocks = NDJSON_MAPPER.convertValue(

View File

@@ -1,234 +0,0 @@
package org.raddatz.familienarchiv.service;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.raddatz.familienarchiv.exception.DomainException;
import org.raddatz.familienarchiv.exception.ErrorCode;
import org.raddatz.familienarchiv.model.OcrTrainingRun;
import org.raddatz.familienarchiv.model.SenderModel;
import org.raddatz.familienarchiv.model.TrainingStatus;
import org.raddatz.familienarchiv.repository.OcrTrainingRunRepository;
import org.raddatz.familienarchiv.repository.SenderModelRepository;
import org.raddatz.familienarchiv.repository.TranscriptionBlockRepository;
import org.slf4j.MDC;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Lazy;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionTemplate;
import java.io.ByteArrayOutputStream;
import java.time.Instant;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.UUID;
@Service
@RequiredArgsConstructor
@Slf4j
public class SenderModelService {
private final SenderModelRepository senderModelRepository;
private final TranscriptionBlockRepository blockRepository;
private final OcrTrainingRunRepository trainingRunRepository;
private final OcrClient ocrClient;
private final TransactionTemplate txTemplate;
private final TrainingDataExportService trainingDataExportService;
private final PersonService personService;
// Self-reference through the Spring proxy so @Async is honoured on self-calls.
@Lazy
@Autowired
private SenderModelService self;
@Value("${ocr.sender-model.activation-threshold:100}")
private int activationThreshold;
@Value("${ocr.sender-model.retrain-delta:50}")
private int retrainDelta;
/** Returns the model path if a trained sender model exists for this person. */
public Optional<String> maybeGetModelPath(UUID personId) {
return senderModelRepository.findByPersonId(personId)
.map(SenderModel::getModelPath);
}
public List<SenderModel> getAllSenderModels() {
return senderModelRepository.findAll();
}
public OcrTrainingRun triggerManualSenderTraining(UUID personId) {
personService.getById(personId);
long correctedLines = blockRepository.countManualKurrentBlocksByPerson(personId);
boolean runNow = runOrQueueSenderTraining(personId, (int) correctedLines);
TrainingStatus targetStatus = runNow ? TrainingStatus.RUNNING : TrainingStatus.QUEUED;
OcrTrainingRun run = trainingRunRepository.findFirstByPersonIdAndStatus(personId, targetStatus)
.orElseThrow(() -> DomainException.internal(
ErrorCode.OCR_TRAINING_CONFLICT,
"Expected " + targetStatus + " run for person " + personId));
if (runNow) {
self.runSenderTraining(personId);
}
return run;
}
@Async
public void runSenderTraining(UUID personId) {
long correctedLines = blockRepository.countManualKurrentBlocksByPerson(personId);
triggerSenderTraining(personId, (int) correctedLines);
}
/**
* Called after every MANUAL block save for HANDWRITING_KURRENT documents.
* Checks activation and retrain thresholds; enqueues or starts sender training when met.
*/
@Async
public void checkAndTriggerTraining(UUID personId) {
long correctedLines = blockRepository.countManualKurrentBlocksByPerson(personId);
Optional<SenderModel> existing = senderModelRepository.findByPersonId(personId);
boolean shouldActivate = existing.isEmpty() && correctedLines >= activationThreshold;
boolean shouldRetrain = existing.isPresent()
&& (correctedLines - existing.get().getCorrectedLinesAtTraining()) >= retrainDelta;
if (!shouldActivate && !shouldRetrain) {
return;
}
log.info("Sender training threshold met for person {} (correctedLines={}, activate={}, retrain={})",
personId, correctedLines, shouldActivate, shouldRetrain);
boolean runNow = runOrQueueSenderTraining(personId, (int) correctedLines);
if (runNow) {
triggerSenderTraining(personId, (int) correctedLines);
}
}
/**
* Atomically checks the queue state and either creates a RUNNING row (returns true) or a
* QUEUED row (returns false). All three operations — idle check, duplicate-queue guard, and
* RUNNING row creation — happen in one transaction, eliminating the race window that would
* otherwise exist between the check and a separate RUNNING row creation.
*/
@Transactional
public boolean runOrQueueSenderTraining(UUID personId, int correctedLines) {
if (trainingRunRepository.existsByPersonIdAndStatus(personId, TrainingStatus.QUEUED)) {
log.info("Sender training already queued for person {} — skipping duplicate trigger", personId);
return false;
}
if (trainingRunRepository.findFirstByStatus(TrainingStatus.RUNNING).isPresent()) {
int blockCount = (int) blockRepository.countManualKurrentBlocksByPerson(personId);
trainingRunRepository.save(OcrTrainingRun.builder()
.status(TrainingStatus.QUEUED)
.personId(personId)
.blockCount(blockCount)
.documentCount(0)
.modelName("sender_" + personId)
.build());
log.info("Queued sender training for person {} — training already running", personId);
return false;
}
long blockCount = blockRepository.countManualKurrentBlocksByPerson(personId);
trainingRunRepository.save(OcrTrainingRun.builder()
.status(TrainingStatus.RUNNING)
.personId(personId)
.blockCount((int) blockCount)
.documentCount(0)
.modelName("sender_" + personId)
.build());
return true;
}
/**
* Executes sender training synchronously. Caller must run this on a background thread.
* The RUNNING row is expected to already exist — created atomically by
* runOrQueueSenderTraining (for new runs) or by promoteNextQueuedRun (for promoted runs).
*/
public void triggerSenderTraining(UUID personId, int correctedLines) {
String outputModelPath = "/app/models/sender_" + personId + ".mlmodel";
OcrTrainingRun run = Objects.requireNonNull(txTemplate.execute(status ->
trainingRunRepository.findFirstByPersonIdAndStatus(personId, TrainingStatus.RUNNING)
.orElseThrow(() -> DomainException.internal(
ErrorCode.OCR_TRAINING_CONFLICT,
"Expected RUNNING row for person " + personId + " but none found"))));
String runId = run.getId().toString();
MDC.put("trainingRunId", runId);
log.info("Started sender training run {} for person {}", runId, personId);
try {
byte[] zipBytes = exportSenderData(personId);
log.info("[trainingRun={}] Sending {} bytes to OCR service for sender training", runId, zipBytes.length);
OcrClient.TrainingResult result = ocrClient.trainSenderModel(zipBytes, outputModelPath);
txTemplate.execute(status -> {
SenderModel model = senderModelRepository.findByPersonId(personId)
.orElseGet(() -> SenderModel.builder().personId(personId).build());
model.setModelPath(outputModelPath);
model.setCer(result.cer());
model.setAccuracy(result.accuracy());
model.setCorrectedLinesAtTraining(correctedLines);
senderModelRepository.save(model);
run.setStatus(TrainingStatus.DONE);
run.setCompletedAt(Instant.now());
run.setCer(result.cer());
run.setAccuracy(result.accuracy());
run.setEpochs(result.epochs());
trainingRunRepository.save(run);
log.info("[trainingRun={}] Sender training completed — cer={}", runId, result.cer());
return null;
});
} catch (Exception e) {
txTemplate.execute(status -> {
run.setStatus(TrainingStatus.FAILED);
run.setErrorMessage(e.getMessage());
run.setCompletedAt(Instant.now());
trainingRunRepository.save(run);
log.error("[trainingRun={}] Sender training failed: {}", runId, e.getMessage(), e);
return null;
});
} finally {
MDC.remove("trainingRunId");
promoteNextQueuedRun();
}
}
private byte[] exportSenderData(UUID personId) throws java.io.IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
trainingDataExportService.exportForSender(personId).writeTo(baos);
return baos.toByteArray();
}
/**
* Promotes the oldest QUEUED sender run to RUNNING and triggers its training.
* Called in the finally block of triggerSenderTraining, creating a sequential chain:
* each run promotes the next only after it fully completes (success or failure).
*
* This is intentionally tail-recursive via the @Async thread: the same thread holds the
* full queue drain, serialising all sender training runs naturally without an external
* scheduler. With N queued runs the thread stays occupied for N sequential trainings —
* acceptable because the @Async executor is dedicated to long-running background work.
*/
private void promoteNextQueuedRun() {
Optional<OcrTrainingRun> queuedOpt = txTemplate.execute(status ->
trainingRunRepository.findFirstByStatusOrderByCreatedAtAsc(TrainingStatus.QUEUED)
.map(queued -> {
queued.setStatus(TrainingStatus.RUNNING);
return trainingRunRepository.save(queued);
}));
if (queuedOpt != null && queuedOpt.isPresent()) {
OcrTrainingRun promoted = queuedOpt.get();
log.info("Promoting queued sender training run {} for person {}", promoted.getId(), promoted.getPersonId());
long freshCount = blockRepository.countManualKurrentBlocksByPerson(promoted.getPersonId());
triggerSenderTraining(promoted.getPersonId(), (int) freshCount);
}
}
}
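The @Lazy self-injection near the top of this removed class is the standard workaround for Spring's proxy-based AOP: an @Async method invoked through this bypasses the proxy and runs synchronously on the caller's thread, while the same call made through the injected self reference goes through the proxy and is handed to the async executor. A two-line contrast, purely illustrative:
runSenderTraining(personId);       // plain self-invocation: proxy bypassed, runs synchronously
self.runSenderTraining(personId);  // proxied call as used above: @Async applies, runs on the executor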

View File

@@ -1,52 +1,30 @@
package org.raddatz.familienarchiv.service; package org.raddatz.familienarchiv.service;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID; import java.util.UUID;
import java.util.stream.Collectors;
import org.raddatz.familienarchiv.dto.TagTreeNodeDTO;
import org.raddatz.familienarchiv.dto.TagUpdateDTO;
import org.raddatz.familienarchiv.exception.DomainException;
import org.raddatz.familienarchiv.exception.ErrorCode;
import org.raddatz.familienarchiv.model.Tag; import org.raddatz.familienarchiv.model.Tag;
import org.raddatz.familienarchiv.repository.TagRepository; import org.raddatz.familienarchiv.repository.TagRepository;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StringUtils; import org.springframework.web.server.ResponseStatusException;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Service @Service
@RequiredArgsConstructor @RequiredArgsConstructor
@Slf4j
public class TagService { public class TagService {
// These 10 color tokens are the fixed palette.
// Keep in sync with the --c-tag-* tokens defined in frontend/src/routes/layout.css.
static final Set<String> ALLOWED_TAG_COLORS = Set.of(
"sage", "sienna", "amber", "slate", "violet",
"rose", "cobalt", "moss", "sand", "coral"
);
private final TagRepository tagRepository; private final TagRepository tagRepository;
public List<Tag> search(String query) { public List<Tag> search(String query) {
List<Tag> matched = tagRepository.findByNameContainingIgnoreCase(query); return tagRepository.findByNameContainingIgnoreCase(query);
if (matched.isEmpty()) return matched;
return enrichWithRelatives(matched);
} }
public Tag getById(UUID id) { public Tag getById(UUID id) {
return tagRepository.findById(id) return tagRepository.findById(id)
.orElseThrow(() -> DomainException.notFound(ErrorCode.TAG_NOT_FOUND, "Tag not found: " + id)); .orElseThrow(() -> new ResponseStatusException(HttpStatus.NOT_FOUND, "Tag nicht gefunden"));
} }
public Tag findOrCreate(String name) { public Tag findOrCreate(String name) {
@@ -56,22 +34,9 @@ public class TagService {
} }
@Transactional @Transactional
public Tag update(UUID id, TagUpdateDTO dto) { public Tag update(UUID id, String newName) {
Tag tag = getById(id); Tag tag = getById(id);
tag.setName(newName);
if (dto.parentId() != null) {
validateNoSelfReference(id, dto.parentId());
validateNoAncestorCycle(id, dto.parentId());
getById(dto.parentId()); // ensure parent exists
}
if (dto.color() != null) {
validateColor(dto.color());
}
tag.setName(dto.name());
tag.setParentId(dto.parentId());
tag.setColor(dto.color());
return tagRepository.save(tag); return tagRepository.save(tag);
} }
@@ -79,175 +44,4 @@ public class TagService {
public void delete(UUID id) { public void delete(UUID id) {
tagRepository.delete(getById(id)); tagRepository.delete(getById(id));
} }
@Transactional
public Tag mergeTags(UUID sourceId, UUID targetId) {
validateNotSelf(sourceId, targetId);
Tag source = getById(sourceId);
Tag target = getById(targetId);
log.info("Merging tag '{}' ({}) into '{}' ({})", source.getName(), sourceId, target.getName(), targetId);
validateNotDescendant(sourceId, targetId);
transferDocuments(sourceId, targetId);
tagRepository.reparentChildren(sourceId, targetId);
tagRepository.deleteById(sourceId);
return target;
}
@Transactional
public void deleteWithDescendants(UUID id) {
log.info("Deleting subtree rooted at {}", id);
getById(id);
List<UUID> ids = tagRepository.findDescendantIds(id);
if (!ids.isEmpty()) tagRepository.deleteDocumentTagsByTagIds(ids);
tagRepository.deleteAllById(ids);
log.info("Deleted subtree rooted at {}, {} nodes", id, ids.size());
}
/**
* Sets the effective (inherited) color on child tags that have no color of their own.
* Colors are stored only on root-level tags; children inherit the parent's color.
* Parent tags are batch-loaded in a single query. Safe to call on detached entities.
*/
public void resolveEffectiveColors(Collection<Tag> tags) {
if (tags == null || tags.isEmpty()) return;
Set<UUID> parentIdsNeeded = tags.stream()
.filter(t -> t.getColor() == null && t.getParentId() != null)
.map(Tag::getParentId)
.collect(Collectors.toSet());
if (parentIdsNeeded.isEmpty()) return;
Map<UUID, String> parentColors = tagRepository.findAllById(parentIdsNeeded)
.stream()
.filter(p -> p.getColor() != null)
.collect(Collectors.toMap(Tag::getId, Tag::getColor));
tags.forEach(tag -> {
if (tag.getColor() == null && tag.getParentId() != null) {
String resolved = parentColors.get(tag.getParentId());
if (resolved != null) {
tag.setColor(resolved);
}
}
});
}
/**
* For each tag name, returns the set of that tag's ID plus all descendant IDs.
* Used by DocumentService to expand selected filter tags before applying AND/OR logic.
*/
public List<Set<UUID>> expandTagNamesToDescendantIdSets(List<String> tagNames) {
if (tagNames == null || tagNames.isEmpty()) return List.of();
return tagNames.stream()
.filter(StringUtils::hasText)
.map(name -> (Set<UUID>) new HashSet<>(tagRepository.findDescendantIdsByName(name.trim())))
.toList();
}
/**
* Returns all tags assembled into a tree with document counts per node.
* Uses a single aggregate query to avoid N+1 behaviour.
* NOTE: document counts are global per tag, not scoped to any search filter.
* The tree endpoint is only used for the admin sidebar, so this is intentional.
*/
public List<TagTreeNodeDTO> getTagTree() {
List<Tag> all = tagRepository.findAll();
Map<UUID, Long> counts = tagRepository.findDocumentCountsPerTag().stream()
.collect(Collectors.toMap(
TagRepository.TagCount::getTagId,
TagRepository.TagCount::getCount
));
return buildTree(all, counts);
}
// ─── private helpers ─────────────────────────────────────────────────────
// Each matched tag issues 1 CTE query (findDescendantIds or findAncestorIds) + 1 batch
// fetch for extras. Typical queries match 1-3 tags at depth ≤ 4, so 3-5 queries total.
private List<Tag> enrichWithRelatives(List<Tag> matched) {
Set<UUID> matchedIds = matched.stream().map(Tag::getId).collect(Collectors.toSet());
Set<UUID> extraIds = new HashSet<>();
for (Tag tag : matched) {
if (tag.getParentId() == null) {
extraIds.addAll(tagRepository.findDescendantIds(tag.getId()));
} else {
extraIds.addAll(tagRepository.findAncestorIds(tag.getId()));
}
}
extraIds.removeAll(matchedIds);
List<Tag> result = new ArrayList<>(matched);
if (!extraIds.isEmpty()) {
result.addAll(tagRepository.findAllById(extraIds));
}
resolveEffectiveColors(result);
return result;
}
private void validateNotSelf(UUID sourceId, UUID targetId) {
if (sourceId.equals(targetId)) {
throw DomainException.badRequest(ErrorCode.TAG_MERGE_SELF,
"Source and target must not be the same tag: " + sourceId);
}
}
private void validateNotDescendant(UUID sourceId, UUID targetId) {
List<UUID> descendants = tagRepository.findDescendantIds(sourceId);
if (descendants.contains(targetId)) {
throw DomainException.badRequest(ErrorCode.TAG_MERGE_INVALID_TARGET,
"Target " + targetId + " is a descendant of source " + sourceId);
}
}
private void transferDocuments(UUID sourceId, UUID targetId) {
tagRepository.reassignDocumentTags(sourceId, targetId);
tagRepository.deleteDocumentTagsByTagId(sourceId);
}
private void validateNoSelfReference(UUID tagId, UUID proposedParentId) {
if (tagId.equals(proposedParentId)) {
throw DomainException.badRequest(ErrorCode.TAG_CYCLE_DETECTED,
"A tag cannot be its own parent: " + tagId);
}
}
private void validateNoAncestorCycle(UUID tagId, UUID proposedParentId) {
// TOCTOU note: concurrent admin writes could both pass this check and create a
// multi-node cycle. This is intentionally not locked because: (a) the endpoint
// requires ADMIN_TAG permission so concurrency is rare, (b) the DB-level
// CHECK (parent_id != id) prevents infinite self-loops as a hard backstop,
// and (c) the window is microseconds. Do NOT add a pessimistic lock here.
List<UUID> ancestors = tagRepository.findAncestorIds(proposedParentId);
if (ancestors.contains(tagId)) {
throw DomainException.badRequest(ErrorCode.TAG_CYCLE_DETECTED,
"Assigning parent " + proposedParentId + " to tag " + tagId + " would create a cycle");
}
}
private void validateColor(String color) {
if (!ALLOWED_TAG_COLORS.contains(color)) {
throw DomainException.badRequest(ErrorCode.INVALID_TAG_COLOR,
"Color '" + color + "' is not in the allowed palette");
}
}
private List<TagTreeNodeDTO> buildTree(List<Tag> tags, Map<UUID, Long> counts) {
Map<UUID, TagTreeNodeDTO> nodeById = new LinkedHashMap<>();
for (Tag tag : tags) {
int documentCount = counts.getOrDefault(tag.getId(), 0L).intValue();
nodeById.put(tag.getId(), new TagTreeNodeDTO(
tag.getId(), tag.getName(), tag.getColor(), documentCount,
new ArrayList<>(), tag.getParentId()
));
}
for (TagTreeNodeDTO node : nodeById.values()) {
if (node.parentId() != null) {
TagTreeNodeDTO parent = nodeById.get(node.parentId());
if (parent != null) parent.children().add(node);
}
}
return nodeById.values().stream().filter(n -> n.parentId() == null).toList();
}
} }
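To make the removed expandTagNamesToDescendantIdSets concrete with made-up names: given a hierarchy Familie > Briefe > Feldpost, filtering by ["Briefe", "Familie"] yields two ID sets, {Briefe, Feldpost} and {Familie, Briefe, Feldpost}. Under AND semantics a document would need at least one tag from each set, so a document tagged only with Feldpost still satisfies both filters; the exact combination logic lives in DocumentService and is not part of this diff.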

View File

@@ -1,92 +0,0 @@
package org.raddatz.familienarchiv.service;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.raddatz.familienarchiv.model.Document;
import org.raddatz.familienarchiv.repository.DocumentRepository;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.transaction.support.TransactionSynchronization;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
/**
* Bridges document upload paths to asynchronous thumbnail generation. Use
* {@link #dispatchAfterCommit(UUID)} from inside {@code @Transactional} service methods —
* it registers a post-commit hook so the async task only fires when the surrounding
* transaction actually commits, and is silently skipped on rollback. Mirrors
* {@link org.raddatz.familienarchiv.audit.AuditService#logAfterCommit}.
*/
@Service
@RequiredArgsConstructor
@Slf4j
public class ThumbnailAsyncRunner {
private final DocumentRepository documentRepository;
private final ThumbnailService thumbnailService;
/** Per-document timeout for the whole generate() call — defense against corrupt PDFs. */
private long generateTimeoutSeconds = 30L;
/**
* Registers a post-commit hook that triggers asynchronous thumbnail generation for the
* given document. When no transaction is active the task is dispatched immediately.
* Safe to call from inside {@code @Transactional} service methods.
*/
public void dispatchAfterCommit(UUID documentId) {
if (TransactionSynchronizationManager.isSynchronizationActive()) {
TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronization() {
@Override
public void afterCommit() {
generateAsync(documentId);
}
});
} else {
generateAsync(documentId);
}
}
/**
* Runs thumbnail generation on the {@code thumbnailExecutor} pool, wrapped in a watchdog
* timeout so a hung PDFBox render cannot occupy a pool thread indefinitely. Never throws:
* all errors and timeouts are logged and swallowed so upload paths are not affected.
*/
@Async("thumbnailExecutor")
public void generateAsync(UUID documentId) {
Optional<Document> docOpt = documentRepository.findById(documentId);
if (docOpt.isEmpty()) {
log.warn("Thumbnail generation skipped: document not found id={}", documentId);
return;
}
Document doc = docOpt.get();
ExecutorService timeoutWorker = Executors.newSingleThreadExecutor(r -> {
Thread t = new Thread(r, "Thumbnail-Render-" + documentId);
t.setDaemon(true);
return t;
});
try {
Future<ThumbnailService.Outcome> future = timeoutWorker.submit(
() -> thumbnailService.generate(doc));
try {
future.get(generateTimeoutSeconds, TimeUnit.SECONDS);
} catch (TimeoutException e) {
future.cancel(true);
log.warn("Thumbnail generation timed out after {}s for doc={}",
generateTimeoutSeconds, documentId);
} catch (Exception e) {
log.warn("Thumbnail generation errored for doc={} reason={}",
documentId, e.getMessage());
}
} finally {
timeoutWorker.shutdownNow();
}
}
}

View File

@@ -1,104 +0,0 @@
package org.raddatz.familienarchiv.service;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.raddatz.familienarchiv.exception.DomainException;
import org.raddatz.familienarchiv.exception.ErrorCode;
import org.raddatz.familienarchiv.model.Document;
import org.raddatz.familienarchiv.repository.DocumentRepository;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import java.time.Duration;
import java.time.LocalDateTime;
import java.util.List;
/**
* Sequentially regenerates thumbnails for documents that have a file attached but no
* thumbnail yet. Runs on the {@code thumbnailExecutor} pool — single-threaded iteration
* is intentional: PDFBox + ImageIO are memory-heavy and we cap peak usage by processing
* documents one at a time. Only one backfill can run at a time; concurrent starts are
* rejected with {@link ErrorCode#THUMBNAIL_BACKFILL_ALREADY_RUNNING}.
*/
@Service
@RequiredArgsConstructor
@Slf4j
public class ThumbnailBackfillService {
public enum State { IDLE, RUNNING, DONE, FAILED }
public record BackfillStatus(
State state,
String message,
int total,
int processed,
int skipped,
int failed,
LocalDateTime startedAt
) {}
private final DocumentRepository documentRepository;
private final ThumbnailService thumbnailService;
private volatile BackfillStatus currentStatus = new BackfillStatus(
State.IDLE, "Kein Backfill gestartet.", 0, 0, 0, 0, null);
public BackfillStatus getStatus() {
return currentStatus;
}
@Async("thumbnailExecutor")
public void runBackfillAsync() {
if (currentStatus.state() == State.RUNNING) {
throw DomainException.conflict(ErrorCode.THUMBNAIL_BACKFILL_ALREADY_RUNNING,
"Thumbnail-Backfill läuft bereits");
}
LocalDateTime startedAt = LocalDateTime.now();
List<Document> docs;
try {
docs = documentRepository.findByFilePathIsNotNullAndThumbnailKeyIsNull();
} catch (Exception e) {
currentStatus = new BackfillStatus(State.FAILED,
"Backfill fehlgeschlagen: " + e.getMessage(),
0, 0, 0, 0, startedAt);
log.warn("Thumbnail backfill aborted before starting: {}", e.getMessage());
return;
}
int total = docs.size();
currentStatus = new BackfillStatus(State.RUNNING,
"Backfill läuft…", total, 0, 0, 0, startedAt);
log.info("Thumbnail backfill started: total={}", total);
int processed = 0;
int skipped = 0;
int failed = 0;
for (Document doc : docs) {
ThumbnailService.Outcome outcome;
try {
outcome = thumbnailService.generate(doc);
} catch (Exception e) {
log.warn("Thumbnail generation failed for doc={} reason={}",
doc.getId(), e.getMessage());
outcome = ThumbnailService.Outcome.FAILED;
}
switch (outcome) {
case SUCCESS -> processed++;
case SKIPPED -> skipped++;
case FAILED -> failed++;
}
currentStatus = new BackfillStatus(State.RUNNING,
"Backfill läuft…", total, processed, skipped, failed, startedAt);
}
long durationMs = Duration.between(startedAt, LocalDateTime.now()).toMillis();
log.info("Thumbnail backfill complete: total={} processed={} skipped={} failed={} durationMs={}",
total, processed, skipped, failed, durationMs);
currentStatus = new BackfillStatus(State.DONE,
String.format("Fertig: %d erzeugt, %d übersprungen, %d fehlgeschlagen.",
processed, skipped, failed),
total, processed, skipped, failed, startedAt);
}
}
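A small design note on this removed class: progress is published by replacing the whole immutable BackfillStatus record in one volatile write, so a reader polling getStatus() from another thread always sees a consistent snapshot of state and counters rather than a torn, half-updated object. An illustrative poller, not part of the repository:
ThumbnailBackfillService.BackfillStatus status = backfillService.getStatus();
if (status.state() == ThumbnailBackfillService.State.RUNNING) {
    log.info("Backfill {}/{} processed, {} failed", status.processed(), status.total(), status.failed());
}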

View File

@@ -1,233 +0,0 @@
package org.raddatz.familienarchiv.service;
import lombok.extern.slf4j.Slf4j;
import org.apache.pdfbox.Loader;
import org.apache.pdfbox.io.RandomAccessReadBuffer;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.rendering.ImageType;
import org.apache.pdfbox.rendering.PDFRenderer;
import org.raddatz.familienarchiv.model.Document;
import org.raddatz.familienarchiv.model.ThumbnailAspect;
import org.raddatz.familienarchiv.repository.DocumentRepository;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import software.amazon.awssdk.core.sync.RequestBody;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.PutObjectRequest;
import javax.imageio.IIOImage;
import javax.imageio.ImageIO;
import javax.imageio.ImageWriteParam;
import javax.imageio.ImageWriter;
import javax.imageio.stream.ImageOutputStream;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.time.LocalDateTime;
import java.util.Set;
import java.util.UUID;
/**
* Generates JPEG thumbnail previews for documents (PDF first-page or scaled-down image)
* and uploads them to the S3 thumbnails/ prefix. Fire-and-forget from upload paths via
* {@link ThumbnailAsyncRunner}; also invoked by {@link ThumbnailBackfillService} for
* historical documents. Explicitly does not throw — failures are returned as
* {@link Outcome#FAILED} so the backfill can account for them without aborting the run.
*/
@Service
@Slf4j
public class ThumbnailService {
public enum Outcome { SUCCESS, SKIPPED, FAILED }
private static final int THUMBNAIL_WIDTH = 240;
private static final float JPEG_QUALITY = 0.85f;
private static final int PDF_RENDER_DPI = 100;
// Anything below this w/h ratio stays PORTRAIT — near-square A4 scans should
// render in the portrait tile rather than flipping to landscape at 1.01.
private static final float LANDSCAPE_THRESHOLD = 1.1f;
private static final String PDF_CONTENT_TYPE = "application/pdf";
private static final Set<String> IMAGE_CONTENT_TYPES =
Set.of("image/jpeg", "image/png", "image/tiff");
// Deterministic S3 key — `thumbnails/{docId}.jpg`. When a document's file is replaced
// the regenerated thumbnail overwrites this same key via PutObject, so we never
// orphan old thumbnails. The URL-level cache buster is the `thumbnail_generated_at`
// timestamp (see /api/documents/{id}/thumbnail ?v= param).
private static final String THUMBNAIL_KEY_PREFIX = "thumbnails/";
private static final String THUMBNAIL_KEY_SUFFIX = ".jpg";
private final FileService fileService;
private final S3Client s3Client;
private final DocumentRepository documentRepository;
@Value("${app.s3.bucket}")
private String bucketName;
public ThumbnailService(FileService fileService, S3Client s3Client,
DocumentRepository documentRepository) {
this.fileService = fileService;
this.s3Client = s3Client;
this.documentRepository = documentRepository;
}
public Outcome generate(Document doc) {
if (doc.getFilePath() == null) {
log.debug("Document {} has no filePath, skipping thumbnail", doc.getId());
return Outcome.SKIPPED;
}
String contentType = doc.getContentType();
if (contentType == null || !isSupported(contentType)) {
log.warn("Document {} has unsupported contentType {}, skipping thumbnail",
doc.getId(), contentType);
return Outcome.SKIPPED;
}
SourcePreview preview = readSourcePreview(doc, contentType);
if (preview == null
|| preview.image().getWidth() <= 0 || preview.image().getHeight() <= 0) {
log.warn("Thumbnail source has invalid dimensions for doc={}", doc.getId());
return Outcome.FAILED;
}
byte[] jpeg = encodeThumbnail(preview.image(), doc.getId());
if (jpeg == null) return Outcome.FAILED;
String thumbnailKey = thumbnailKeyFor(doc.getId());
if (!uploadToStorage(thumbnailKey, jpeg, doc.getId())) return Outcome.FAILED;
ThumbnailResult result = new ThumbnailResult(
thumbnailKey, aspectOf(preview.image()), preview.pageCount());
return persistThumbnailMetadata(doc, result);
}
private static ThumbnailAspect aspectOf(BufferedImage source) {
float ratio = (float) source.getWidth() / source.getHeight();
return ratio > LANDSCAPE_THRESHOLD ? ThumbnailAspect.LANDSCAPE : ThumbnailAspect.PORTRAIT;
}
// First-page image + total page count for the source file. Page count is always
// 1 for image uploads; for PDFs it comes straight from PDDocument.
private record SourcePreview(BufferedImage image, int pageCount) {}
// Everything the generate pipeline has already committed to storage and
// now wants stamped onto the Document entity in a single save call.
private record ThumbnailResult(String key, ThumbnailAspect aspect, int pageCount) {}
private static String thumbnailKeyFor(UUID documentId) {
return THUMBNAIL_KEY_PREFIX + documentId + THUMBNAIL_KEY_SUFFIX;
}
private SourcePreview readSourcePreview(Document doc, String contentType) {
try {
return PDF_CONTENT_TYPE.equals(contentType)
? renderPdfFirstPage(doc.getFilePath())
: new SourcePreview(readImage(doc.getFilePath()), 1);
} catch (Exception e) {
log.warn("Thumbnail source read failed for doc={} reason={}",
doc.getId(), e.getMessage());
return null;
}
}
private byte[] encodeThumbnail(BufferedImage source, UUID documentId) {
try {
BufferedImage scaled = scaleToWidth(source, THUMBNAIL_WIDTH);
return encodeJpeg(scaled, JPEG_QUALITY);
} catch (Exception e) {
log.warn("Thumbnail JPEG encoding failed for doc={} reason={}",
documentId, e.getMessage());
return null;
}
}
private boolean uploadToStorage(String thumbnailKey, byte[] jpeg, UUID documentId) {
try {
s3Client.putObject(
PutObjectRequest.builder()
.bucket(bucketName)
.key(thumbnailKey)
.contentType("image/jpeg")
.build(),
RequestBody.fromBytes(jpeg));
return true;
} catch (Exception e) {
log.warn("Thumbnail upload failed for doc={} key={} reason={}",
documentId, thumbnailKey, e.getMessage());
return false;
}
}
private Outcome persistThumbnailMetadata(Document doc, ThumbnailResult result) {
try {
doc.setThumbnailKey(result.key());
doc.setThumbnailGeneratedAt(LocalDateTime.now());
doc.setThumbnailAspect(result.aspect());
doc.setPageCount(result.pageCount());
documentRepository.save(doc);
return Outcome.SUCCESS;
} catch (Exception e) {
// Thumbnail is already in S3 but the entity update failed. Because the S3
// key is deterministic (thumbnails/{docId}.jpg), the next successful run
// — either a re-upload of this document or the admin backfill — will
// overwrite it cleanly. Logging distinctly so an operator tracking
// backfill totals can spot the database-side issue.
log.warn("Thumbnail persist failed for doc={} (orphaned in storage as {}): {}",
doc.getId(), result.key(), e.getMessage());
return Outcome.FAILED;
}
}
private boolean isSupported(String contentType) {
return PDF_CONTENT_TYPE.equals(contentType) || IMAGE_CONTENT_TYPES.contains(contentType);
}
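// Streams the PDF from storage and renders page 0 at PDF_RENDER_DPI; the page count is read from the same open document.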
private SourcePreview renderPdfFirstPage(String s3Key) throws IOException {
try (InputStream in = fileService.downloadFileStream(s3Key);
PDDocument pdf = Loader.loadPDF(new RandomAccessReadBuffer(in))) {
PDFRenderer renderer = new PDFRenderer(pdf);
BufferedImage image = renderer.renderImageWithDPI(0, PDF_RENDER_DPI, ImageType.RGB);
return new SourcePreview(image, pdf.getNumberOfPages());
}
}
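// Decodes the raw upload with ImageIO; a null result means no registered reader for the format and is converted into an IOException.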
private BufferedImage readImage(String s3Key) throws IOException {
try (InputStream in = fileService.downloadFileStream(s3Key)) {
BufferedImage img = ImageIO.read(in);
if (img == null) {
throw new IOException("No ImageIO reader available for " + s3Key);
}
return img;
}
}
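// Downscales to the target width while preserving aspect ratio (bilinear interpolation, minimum height of 1 px).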
private BufferedImage scaleToWidth(BufferedImage source, int targetWidth) {
int sourceWidth = source.getWidth();
int sourceHeight = source.getHeight();
int targetHeight = Math.max(1, Math.round((float) targetWidth * sourceHeight / sourceWidth));
BufferedImage scaled = new BufferedImage(targetWidth, targetHeight, BufferedImage.TYPE_INT_RGB);
Graphics2D g = scaled.createGraphics();
g.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BILINEAR);
g.drawImage(source, 0, 0, targetWidth, targetHeight, null);
g.dispose();
return scaled;
}
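// JPEG-encodes with an explicit compression quality instead of the ImageIO default, always disposing the writer.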
private byte[] encodeJpeg(BufferedImage image, float quality) throws IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
ImageWriter writer = ImageIO.getImageWritersByFormatName("jpg").next();
ImageWriteParam param = writer.getDefaultWriteParam();
param.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
param.setCompressionQuality(quality);
try (ImageOutputStream out = ImageIO.createImageOutputStream(bos)) {
writer.setOutput(out);
writer.write(null, new IIOImage(image, null, null), param);
} finally {
writer.dispose();
}
return bos.toByteArray();
}
}

View File

@@ -38,20 +38,10 @@ public class TrainingDataExportService {
     }
 
     public StreamingResponseBody exportToZip() {
-        return exportBlocksToZip(queryEligibleBlocks());
-    }
-
-    public List<TranscriptionBlock> queryBlocksForSender(UUID personId) {
-        return blockRepository.findManualKurrentBlocksByPerson(personId);
-    }
-
-    public StreamingResponseBody exportForSender(UUID personId) {
-        return exportBlocksToZip(queryBlocksForSender(personId));
-    }
-
-    private StreamingResponseBody exportBlocksToZip(List<TranscriptionBlock> blocks) {
+        // Collect all data before entering the lambda — no open DB txn during streaming
+        List<TranscriptionBlock> blocks = queryEligibleBlocks();
         if (blocks.isEmpty()) {
-            return out -> {};
+            return out -> {}; // caller checks isEmpty() for 204 response
         }
 
         // Group blocks by documentId so we only download each PDF once

View File

@@ -1,27 +0,0 @@
package org.raddatz.familienarchiv.service;
import lombok.RequiredArgsConstructor;
import org.raddatz.familienarchiv.repository.CompletionStatsRow;
import org.raddatz.familienarchiv.repository.TranscriptionBlockRepository;
import org.springframework.stereotype.Service;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
@Service
@RequiredArgsConstructor
public class TranscriptionBlockQueryService {
private final TranscriptionBlockRepository blockRepository;
public Map<UUID, Integer> getCompletionStats(List<UUID> documentIds) {
if (documentIds.isEmpty()) return Map.of();
Map<UUID, Integer> result = new HashMap<>();
for (CompletionStatsRow row : blockRepository.findCompletionStatsForDocuments(documentIds)) {
result.put(row.getDocumentId(), row.getCompletionPercentage());
}
return result;
}
}

View File

@@ -1,69 +0,0 @@
package org.raddatz.familienarchiv.service;
import lombok.RequiredArgsConstructor;
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
import org.raddatz.familienarchiv.audit.AuditLogQueryService;
import org.raddatz.familienarchiv.dto.TranscriptionQueueItemDTO;
import org.raddatz.familienarchiv.dto.TranscriptionWeeklyStatsDTO;
import org.raddatz.familienarchiv.repository.DocumentRepository;
import org.raddatz.familienarchiv.repository.TranscriptionQueueProjection;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.Map;
import java.util.UUID;
@Service
@RequiredArgsConstructor
public class TranscriptionQueueService {
private static final int DEFAULT_QUEUE_SIZE = 5;
private static final int MAX_CONTRIBUTORS = 5;
private final DocumentRepository documentRepository;
private final AuditLogQueryService auditLogQueryService;
public List<TranscriptionQueueItemDTO> getSegmentationQueue() {
return enrichWithContributors(documentRepository.findSegmentationQueue(DEFAULT_QUEUE_SIZE));
}
public List<TranscriptionQueueItemDTO> getTranscriptionQueue() {
return enrichWithContributors(documentRepository.findTranscriptionQueue(DEFAULT_QUEUE_SIZE));
}
public List<TranscriptionQueueItemDTO> getReadyToReadQueue() {
return enrichWithContributors(documentRepository.findReadyToReadQueue(DEFAULT_QUEUE_SIZE));
}
public TranscriptionWeeklyStatsDTO getWeeklyStats() {
var stats = documentRepository.findWeeklyStats();
return new TranscriptionWeeklyStatsDTO(
stats.getSegmentationCount(),
stats.getTranscriptionCount()
);
}
private List<TranscriptionQueueItemDTO> enrichWithContributors(List<TranscriptionQueueProjection> projections) {
if (projections.isEmpty()) return List.of();
List<UUID> ids = projections.stream().map(TranscriptionQueueProjection::getId).toList();
Map<UUID, List<ActivityActorDTO>> contributorMap = auditLogQueryService.findContributorsPerDocument(ids);
return projections.stream()
.map(p -> toDTO(p, contributorMap.getOrDefault(p.getId(), List.of())))
.toList();
}
private TranscriptionQueueItemDTO toDTO(TranscriptionQueueProjection p, List<ActivityActorDTO> allContributors) {
boolean hasMore = allContributors.size() > MAX_CONTRIBUTORS;
List<ActivityActorDTO> capped = hasMore ? allContributors.subList(0, MAX_CONTRIBUTORS) : allContributors;
return new TranscriptionQueueItemDTO(
p.getId(),
p.getTitle(),
p.getDocumentDate(),
p.getAnnotationCount(),
p.getTextedBlockCount(),
p.getReviewedBlockCount(),
capped,
hasMore
);
}
}

View File

@@ -2,8 +2,6 @@ package org.raddatz.familienarchiv.service;
 
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
-import org.raddatz.familienarchiv.audit.AuditKind;
-import org.raddatz.familienarchiv.audit.AuditService;
 import org.raddatz.familienarchiv.dto.CreateAnnotationDTO;
 import org.raddatz.familienarchiv.dto.CreateTranscriptionBlockDTO;
 import org.raddatz.familienarchiv.dto.ReorderTranscriptionBlocksDTO;
@@ -13,7 +11,6 @@ import org.raddatz.familienarchiv.exception.ErrorCode;
 import org.raddatz.familienarchiv.model.BlockSource;
 import org.raddatz.familienarchiv.model.Document;
 import org.raddatz.familienarchiv.model.DocumentAnnotation;
-import org.raddatz.familienarchiv.model.ScriptType;
 import org.raddatz.familienarchiv.model.TranscriptionBlock;
 import org.raddatz.familienarchiv.model.TranscriptionBlockVersion;
 import org.raddatz.familienarchiv.repository.AnnotationRepository;
@@ -23,8 +20,6 @@ import org.springframework.stereotype.Service;
 import org.springframework.transaction.annotation.Transactional;
 
 import java.util.List;
-import java.util.Map;
-import java.util.Optional;
 import java.util.UUID;
 
 @Service
@@ -40,8 +35,6 @@ public class TranscriptionService {
     private final AnnotationRepository annotationRepository;
     private final AnnotationService annotationService;
    private final DocumentService documentService;
-    private final SenderModelService senderModelService;
-    private final AuditService auditService;
 
     public List<TranscriptionBlock> listBlocks(UUID documentId) {
         return blockRepository.findByDocumentIdOrderBySortOrderAsc(documentId);
@@ -127,10 +120,8 @@ public class TranscriptionService {
                                                UpdateTranscriptionBlockDTO dto, UUID userId) {
         TranscriptionBlock block = getBlock(documentId, blockId);
-        String previousText = block.getText();
         String text = sanitizeText(dto.getText());
         block.setText(text);
-        block.setSource(BlockSource.MANUAL);
         if (dto.getLabel() != null) {
             block.setLabel(dto.getLabel());
         }
@@ -138,19 +129,6 @@ public class TranscriptionService {
         TranscriptionBlock saved = blockRepository.save(block);
         saveVersion(saved, userId);
 
-        if (!text.equals(previousText)) {
-            Optional<DocumentAnnotation> annotation = annotationRepository.findById(block.getAnnotationId());
-            int pageNumber = annotation.map(DocumentAnnotation::getPageNumber).orElse(0);
-            auditService.logAfterCommit(AuditKind.TEXT_SAVED, userId, documentId,
-                    Map.of("pageNumber", pageNumber, "blockId", saved.getId().toString()));
-        }
-
-        Document doc = documentService.getDocumentById(documentId);
-        if (doc.getSender() != null && doc.getScriptType() == ScriptType.HANDWRITING_KURRENT) {
-            senderModelService.checkAndTriggerTraining(doc.getSender().getId());
-        }
-
         return saved;
     }
@@ -194,15 +172,10 @@ public class TranscriptionService {
     }
 
     @Transactional
-    public TranscriptionBlock reviewBlock(UUID documentId, UUID blockId, UUID userId) {
+    public TranscriptionBlock reviewBlock(UUID documentId, UUID blockId) {
         TranscriptionBlock block = getBlock(documentId, blockId);
-        boolean wasReviewed = block.isReviewed();
-        block.setReviewed(!wasReviewed);
-        TranscriptionBlock saved = blockRepository.save(block);
-        if (!wasReviewed && saved.isReviewed()) {
-            auditService.logAfterCommit(AuditKind.BLOCK_REVIEWED, userId, documentId, null);
-        }
-        return saved;
+        block.setReviewed(!block.isReviewed());
+        return blockRepository.save(block);
     }
 
     public List<TranscriptionBlockVersion> getBlockHistory(UUID documentId, UUID blockId) {
@@ -226,5 +199,4 @@ public class TranscriptionService {
         }
         return text;
     }
 }

Some files were not shown because too many files have changed in this diff.