Compare commits

..

1 Commits

Author SHA1 Message Date
Marcel
5bd7f0d486 docs(#240): add Mission Control Strip spec and pattern alternatives
Some checks failed
CI / Unit & Component Tests (push) Failing after 2m25s
CI / Backend Unit Tests (push) Failing after 2m38s
CI / Unit & Component Tests (pull_request) Failing after 2m11s
CI / Backend Unit Tests (pull_request) Failing after 8h41m14s
Adds the design decision record for how to expand the dashboard without
pushing content below the fold: a full-width 3-column strip (Segmentierung /
Transkription / Lesefertig) below the existing grid.

- dashboard-expansion-patterns.html — four pattern alternatives evaluated
  (Tabs, Accordion, Mission Control, Priority Queue) with annotated mockups,
  engagement feature proposal, and final recommendation.
- mission-control-strip-final.html — clean implementation blueprint with
  pipeline diagram, column definitions, seeded-weekly-shuffle sorting,
  expert-flag escape hatch, all Tailwind impl-ref values, and backend
  contracts.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-15 22:48:27 +02:00
538 changed files with 4732 additions and 205701 deletions

View File

@@ -47,26 +47,6 @@ jobs:
name: unit-test-screenshots name: unit-test-screenshots
path: frontend/test-results/screenshots/ path: frontend/test-results/screenshots/
# ─── OCR Service Unit Tests ───────────────────────────────────────────────────
# Only spell_check.py, test_confidence.py, test_sender_registry.py — no ML stack required.
ocr-tests:
name: OCR Service Tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: '3.11'
- name: Install test dependencies
run: pip install "pyspellchecker==0.9.0" pytest pytest-asyncio
working-directory: ocr-service
- name: Run OCR unit tests (no ML stack required)
run: python -m pytest test_spell_check.py test_confidence.py test_sender_registry.py -v
working-directory: ocr-service
# ─── Backend Unit & Slice Tests ─────────────────────────────────────────────── # ─── Backend Unit & Slice Tests ───────────────────────────────────────────────
# Pure Mockito + WebMvcTest — no DB or S3 needed. # Pure Mockito + WebMvcTest — no DB or S3 needed.
backend-unit-tests: backend-unit-tests:

3
.gitignore vendored
View File

@@ -11,5 +11,4 @@ gitea/
scripts/large-data.sql scripts/large-data.sql
.vitest-attachments .vitest-attachments
**/test-results/ **/test-results/
.worktrees/

View File

@@ -311,15 +311,13 @@ Save bar pattern — use **sticky full-bleed** for long forms (edit document), *
<div class="mt-4 flex items-center justify-between rounded-sm border border-brand-sand bg-white px-6 py-4 shadow-sm"> <div class="mt-4 flex items-center justify-between rounded-sm border border-brand-sand bg-white px-6 py-4 shadow-sm">
``` ```
Back button pattern — use the shared `<BackButton>` component from `$lib/components/BackButton.svelte`: Back link pattern:
```svelte ```svelte
<script lang="ts"> <a href="/persons" class="inline-flex items-center text-xs font-bold uppercase tracking-widest text-gray-500 hover:text-brand-navy transition-colors group mb-4">
import BackButton from '$lib/components/BackButton.svelte'; <svg class="w-4 h-4 mr-2 transform group-hover:-translate-x-1 transition-transform" .../>
</script> Zurück zur Übersicht
</a>
<BackButton />
``` ```
The component calls `history.back()` so the user returns to wherever they came from. Label is always "Zurück" (no contextual suffix — destination is unknown). Touch target ≥ 44px and focus ring are built in. Do not use a static `<a href>` for back navigation.
Subtle action link (e.g. "new document/person"): Subtle action link (e.g. "new document/person"):
```svelte ```svelte

View File

@@ -1,3 +0,0 @@
### Mark all blocks as reviewed
PUT http://localhost:8080/api/documents/{{documentId}}/transcription-blocks/review-all
Authorization: Basic admin admin123

View File

@@ -103,11 +103,6 @@
<groupId>org.springframework.boot</groupId> <groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-webmvc-test</artifactId> <artifactId>spring-boot-starter-webmvc-test</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency>
<dependency>
<groupId>org.awaitility</groupId>
<artifactId>awaitility</artifactId>
<scope>test</scope>
</dependency> </dependency>
<!-- Excel Bearbeitung (Apache POI) --> <!-- Excel Bearbeitung (Apache POI) -->
<dependency> <dependency>
@@ -151,12 +146,6 @@
<artifactId>flyway-database-postgresql</artifactId> <artifactId>flyway-database-postgresql</artifactId>
</dependency> </dependency>
<!-- Caffeine cache for in-memory rate limiting -->
<dependency>
<groupId>com.github.ben-manes.caffeine</groupId>
<artifactId>caffeine</artifactId>
</dependency>
<!-- OpenAPI / Swagger UI — enabled only in the dev Spring profile --> <!-- OpenAPI / Swagger UI — enabled only in the dev Spring profile -->
<dependency> <dependency>
<groupId>org.springdoc</groupId> <groupId>org.springdoc</groupId>
@@ -164,19 +153,12 @@
<version>3.0.2</version> <version>3.0.2</version>
</dependency> </dependency>
<!-- PDF rendering for training data export and thumbnail generation --> <!-- PDF rendering for training data export -->
<dependency> <dependency>
<groupId>org.apache.pdfbox</groupId> <groupId>org.apache.pdfbox</groupId>
<artifactId>pdfbox</artifactId> <artifactId>pdfbox</artifactId>
<version>3.0.4</version> <version>3.0.4</version>
</dependency> </dependency>
<!-- TIFF decoding plugin for ImageIO (thumbnail generation from scanned TIFFs) -->
<dependency>
<groupId>com.twelvemonkeys.imageio</groupId>
<artifactId>imageio-tiff</artifactId>
<version>3.12.0</version>
</dependency>
</dependencies> </dependencies>

View File

@@ -1,10 +0,0 @@
package org.raddatz.familienarchiv.audit;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.annotation.Nullable;
/**
 * Compact actor representation used by activity-feed avatars.
 * Initials and display color are always present; the full name may be
 * {@code null} (the feed query emits {@code null} when no user row matches).
 */
public record ActivityActorDTO(
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) String initials,
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) String color,
        @Nullable String name
) {}

View File

@@ -1,20 +0,0 @@
package org.raddatz.familienarchiv.audit;
import java.time.Instant;
import java.util.UUID;
/**
 * Spring Data interface projection for one (possibly rolled-up) activity-feed
 * entry. Getter names correspond to the column aliases of the native query in
 * {@code AuditLogQueryRepository#findRolledUpActivityFeed}.
 */
public interface ActivityFeedRow {
    /** Name of the {@link AuditKind} the entry was rolled up from. */
    String getKind();
    UUID getActorId();
    String getActorInitials();
    String getActorColor();
    String getActorName();
    UUID getDocumentId();
    /** Timestamp of the first event in the rolled-up session. */
    Instant getHappenedAt();
    /** True when a MENTION_CREATED payload in the session targets the current user. */
    boolean isYouMentioned();
    /** True when the current user has a REPLY notification referencing the comment. */
    boolean isYouParticipated();
    /** Number of raw events collapsed into this entry (1 when nothing rolled up). */
    int getCount();
    /** Timestamp of the last event in the session; null when the session holds a single event. */
    Instant getHappenedAtUntil();
    /** Present only for COMMENT_ADDED and MENTION_CREATED — null otherwise. */
    UUID getCommentId();
}

View File

@@ -1,44 +0,0 @@
package org.raddatz.familienarchiv.audit;
import java.util.Set;
/**
 * Discriminator for rows in the {@code audit_log} table. Each constant's
 * Javadoc documents the JSON payload shape the event is written with.
 */
public enum AuditKind {
    /** Payload: none */
    FILE_UPLOADED,
    /** Payload: {@code {"oldStatus": "UPLOADED", "newStatus": "TRANSCRIBED"}} */
    STATUS_CHANGED,
    /** Payload: none */
    METADATA_UPDATED,
    /** Payload: {@code {"pageNumber": 3}} */
    TEXT_SAVED,
    /** Payload: none */
    BLOCK_REVIEWED,
    /** Payload: {@code {"pageNumber": 3}} */
    ANNOTATION_CREATED,
    /** Payload: {@code {"commentId": "uuid"}} */
    COMMENT_ADDED,
    /** Payload: {@code {"commentId": "uuid", "mentionedUserId": "uuid"}} */
    MENTION_CREATED,
    /** Payload: {@code {"userId": "uuid", "email": "addr"}} */
    USER_CREATED,
    /** Payload: {@code {"userId": "uuid", "email": "addr"}} */
    USER_DELETED,
    /** Payload: {@code {"userId": "uuid", "email": "addr", "addedGroups": ["Admin"], "removedGroups": []}} */
    GROUP_MEMBERSHIP_CHANGED;
    /**
     * Kinds the activity feed may collapse into a single per-session entry
     * (see {@code AuditLogQueryRepository#findRolledUpActivityFeed}).
     */
    public static final Set<AuditKind> ROLLUP_ELIGIBLE = Set.of(
            TEXT_SAVED, FILE_UPLOADED, ANNOTATION_CREATED,
            BLOCK_REVIEWED, COMMENT_ADDED, MENTION_CREATED
    );
}

View File

@@ -1,46 +0,0 @@
package org.raddatz.familienarchiv.audit;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.persistence.*;
import lombok.*;
import org.hibernate.annotations.CreationTimestamp;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.type.SqlTypes;
import java.time.OffsetDateTime;
import java.util.Map;
import java.util.UUID;
/**
 * JPA entity for one row of the {@code audit_log} table.
 * Rows are written by {@code AuditService} and queried (read-only) through
 * {@code AuditLogQueryRepository}.
 */
@Entity
@Table(name = "audit_log")
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class AuditLog {
    /** Surrogate primary key, UUID-generated by the persistence provider. */
    @Id
    @GeneratedValue(strategy = GenerationType.UUID)
    @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
    private UUID id;
    // Set once on insert by Hibernate (@CreationTimestamp); never updated afterwards.
    @Column(name = "happened_at", nullable = false, updatable = false)
    @CreationTimestamp
    @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
    private OffsetDateTime happenedAt;
    // Nullable: some events (e.g. system actions) have no acting user.
    @Column(name = "actor_id")
    private UUID actorId;
    /** Event discriminator; payload shape per kind is documented on {@link AuditKind}. */
    @Enumerated(EnumType.STRING)
    @Column(name = "kind", nullable = false)
    @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
    private AuditKind kind;
    // Nullable: user-management events are not tied to a document.
    @Column(name = "document_id")
    private UUID documentId;
    // Stored as a jsonb column; shape varies by kind (see AuditKind Javadoc).
    @JdbcTypeCode(SqlTypes.JSON)
    @Column(columnDefinition = "jsonb")
    private Map<String, Object> payload;
}

View File

@@ -1,204 +0,0 @@
package org.raddatz.familienarchiv.audit;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.time.OffsetDateTime;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
/**
 * Read-side native-SQL queries over the {@code audit_log} table: activity feed
 * with session rollup, weekly pulse statistics, and per-document contributor
 * lookups. All queries are native PostgreSQL (jsonb operators, window
 * functions, FILTER clauses).
 */
public interface AuditLogQueryRepository extends JpaRepository<AuditLog, UUID> {
    /**
     * Most recent document the given user saved text on or annotated, if any.
     */
    @Query(value = """
        SELECT a.document_id
        FROM audit_log a
        WHERE a.kind IN ('TEXT_SAVED', 'ANNOTATION_CREATED')
          AND a.actor_id = :userId
          AND a.document_id IS NOT NULL
        ORDER BY a.happened_at DESC
        LIMIT 1
        """, nativeQuery = true)
    Optional<UUID> findMostRecentDocumentIdByActor(@Param("userId") UUID userId);
    /**
     * Rolled-up activity feed, newest first.
     * <p>
     * Events of the same (actor, document, kind) are grouped into "sessions":
     * a new session starts when the gap to the previous event exceeds 7200
     * seconds (2 h). COMMENT_ADDED and MENTION_CREATED always start their own
     * session, i.e. they are never rolled up. Each session becomes one row
     * carrying the first/last timestamps and the event count.
     *
     * @param currentUserId current user's UUID as text — used for the
     *                      you-mentioned / you-participated flags
     * @param limit         maximum number of feed rows
     * @param kinds         AuditKind names to include
     */
    @Query(value = """
        WITH events AS (
            SELECT
                a.kind,
                a.actor_id,
                a.document_id,
                a.happened_at,
                a.payload,
                LAG(a.happened_at) OVER (
                    PARTITION BY a.actor_id, a.document_id, a.kind
                    ORDER BY a.happened_at
                ) AS prev_happened_at
            FROM audit_log a
            WHERE a.kind IN (:kinds)
              AND a.document_id IS NOT NULL
        ),
        sessions_marked AS (
            SELECT
                kind, actor_id, document_id, happened_at, payload,
                CASE
                    WHEN kind IN ('COMMENT_ADDED','MENTION_CREATED') THEN 1
                    WHEN prev_happened_at IS NULL THEN 1
                    WHEN EXTRACT(EPOCH FROM (happened_at - prev_happened_at)) > 7200 THEN 1
                    ELSE 0
                END AS is_new_session
            FROM events
        ),
        sessions AS (
            SELECT
                kind, actor_id, document_id, happened_at, payload,
                SUM(is_new_session) OVER (
                    PARTITION BY actor_id, document_id, kind
                    ORDER BY happened_at
                    ROWS UNBOUNDED PRECEDING
                ) AS session_id
            FROM sessions_marked
        ),
        aggregated AS (
            SELECT
                s.kind,
                s.actor_id,
                s.document_id,
                s.session_id,
                MIN(s.happened_at) AS happened_at,
                CASE WHEN COUNT(*) > 1 THEN MAX(s.happened_at) ELSE NULL END AS happened_at_until,
                COUNT(*)::int AS count,
                BOOL_OR(s.kind = 'MENTION_CREATED'
                        AND s.payload->>'mentionedUserId' = :currentUserId) AS you_mentioned,
                -- COMMENT_ADDED/MENTION_CREATED always have is_new_session=1, so each group has one row and MIN collapses to that row payload
                MIN(s.payload::text)::jsonb AS payload
            FROM sessions s
            GROUP BY s.kind, s.actor_id, s.document_id, s.session_id
        )
        SELECT
            ag.kind AS kind,
            ag.actor_id AS actorId,
            CASE
                WHEN u.first_name IS NOT NULL AND u.last_name IS NOT NULL
                    THEN UPPER(LEFT(u.first_name, 1)) || UPPER(LEFT(u.last_name, 1))
                WHEN u.first_name IS NOT NULL THEN UPPER(LEFT(u.first_name, 1))
                WHEN u.last_name IS NOT NULL THEN UPPER(LEFT(u.last_name, 1))
                ELSE '?'
            END AS actorInitials,
            COALESCE(u.color, '') AS actorColor,
            CONCAT_WS(' ', u.first_name, u.last_name) AS actorName,
            ag.document_id AS documentId,
            ag.happened_at AS happened_at,
            ag.you_mentioned AS youMentioned,
            -- payload->>'commentId' matches notifications.reference_id per AuditKind.COMMENT_ADDED contract
            EXISTS(
                SELECT 1 FROM notifications n
                WHERE n.type = 'REPLY'
                  AND n.recipient_id = CAST(:currentUserId AS uuid)
                  AND n.reference_id = (ag.payload->>'commentId')::uuid
            ) AS youParticipated,
            ag.count AS count,
            ag.happened_at_until AS happenedAtUntil,
            (ag.payload->>'commentId')::uuid AS commentId
        FROM aggregated ag
        LEFT JOIN users u ON u.id = ag.actor_id
        ORDER BY ag.happened_at DESC
        LIMIT :limit
        """, nativeQuery = true)
    List<ActivityFeedRow> findRolledUpActivityFeed(
            @Param("currentUserId") String currentUserId,
            @Param("limit") int limit,
            @Param("kinds") Collection<String> kinds);
    /**
     * Weekly "pulse" counters since {@code weekStart}: distinct touched
     * (document, page) pairs, annotation count, distinct transcribed block ids,
     * distinct uploaded documents, and the pages the given user worked on.
     */
    @Query(value = """
        SELECT
            COUNT(DISTINCT (a.document_id::text || '|' || (a.payload->>'pageNumber'))) AS pages,
            COUNT(*) FILTER (WHERE a.kind = 'ANNOTATION_CREATED') AS annotated,
            COUNT(DISTINCT a.payload->>'blockId') FILTER (WHERE a.kind = 'TEXT_SAVED') AS transcribed,
            COUNT(DISTINCT a.document_id) FILTER (WHERE a.kind = 'FILE_UPLOADED') AS uploaded,
            COUNT(DISTINCT (a.document_id::text || '|' || (a.payload->>'pageNumber')))
                FILTER (WHERE (a.kind = 'ANNOTATION_CREATED' OR a.kind = 'TEXT_SAVED')
                        AND a.actor_id::text = :userId) AS yourPages
        FROM audit_log a
        WHERE a.happened_at >= :weekStart
          AND a.kind IN ('ANNOTATION_CREATED','TEXT_SAVED','FILE_UPLOADED')
        """, nativeQuery = true)
    PulseStatsRow getPulseStats(
            @Param("weekStart") OffsetDateTime weekStart,
            @Param("userId") String userId);
    /**
     * For each document, the actor of the newest event of the given kind.
     * Rows are {@code [document_id, actor_id]} (DISTINCT ON keeps the latest
     * per document).
     */
    @Query(value = """
        SELECT DISTINCT ON (a.document_id)
            a.document_id AS documentId,
            a.actor_id AS actorId
        FROM audit_log a
        WHERE a.kind = :kind
          AND a.document_id IN :documentIds
          AND a.actor_id IS NOT NULL
        ORDER BY a.document_id, a.happened_at DESC
        """, nativeQuery = true)
    List<Object[]> findMostRecentActorPerDocument(
            @Param("documentIds") List<UUID> documentIds,
            @Param("kind") String kind);
    /**
     * All contributors (annotation / text-save / block-review events) per
     * document, ordered by each contributor's first contribution.
     */
    @Query(value = """
        SELECT
            a.document_id AS documentId,
            CASE
                WHEN u.first_name IS NOT NULL AND u.last_name IS NOT NULL
                    THEN UPPER(LEFT(u.first_name, 1)) || UPPER(LEFT(u.last_name, 1))
                WHEN u.first_name IS NOT NULL THEN UPPER(LEFT(u.first_name, 1))
                WHEN u.last_name IS NOT NULL THEN UPPER(LEFT(u.last_name, 1))
                ELSE '?'
            END AS actorInitials,
            COALESCE(u.color, '') AS actorColor,
            CONCAT_WS(' ', u.first_name, u.last_name) AS actorName
        FROM audit_log a
        LEFT JOIN users u ON u.id = a.actor_id
        WHERE a.kind IN ('ANNOTATION_CREATED', 'TEXT_SAVED', 'BLOCK_REVIEWED')
          AND a.document_id IN :documentIds
          AND a.actor_id IS NOT NULL
        GROUP BY a.document_id, a.actor_id, u.first_name, u.last_name, u.color
        ORDER BY a.document_id, MIN(a.happened_at)
        """, nativeQuery = true)
    List<ContributorRow> findContributorsPerDocument(@Param("documentIds") List<UUID> documentIds);
    /**
     * Up to four most-recently-active contributors per document
     * (ROW_NUMBER over each contributor's latest event, newest first).
     */
    @Query(value = """
        SELECT
            ranked.document_id AS documentId,
            ranked.actorInitials AS actorInitials,
            ranked.actorColor AS actorColor,
            ranked.actorName AS actorName
        FROM (
            SELECT
                a.document_id,
                CASE
                    WHEN u.first_name IS NOT NULL AND u.last_name IS NOT NULL
                        THEN UPPER(LEFT(u.first_name, 1)) || UPPER(LEFT(u.last_name, 1))
                    WHEN u.first_name IS NOT NULL THEN UPPER(LEFT(u.first_name, 1))
                    WHEN u.last_name IS NOT NULL THEN UPPER(LEFT(u.last_name, 1))
                    ELSE '?'
                END AS actorInitials,
                COALESCE(u.color, '') AS actorColor,
                NULLIF(CONCAT_WS(' ', u.first_name, u.last_name), '') AS actorName,
                ROW_NUMBER() OVER (
                    PARTITION BY a.document_id
                    ORDER BY MAX(a.happened_at) DESC
                ) AS rn
            FROM audit_log a
            LEFT JOIN users u ON u.id = a.actor_id
            WHERE a.kind IN ('ANNOTATION_CREATED', 'TEXT_SAVED', 'BLOCK_REVIEWED')
              AND a.document_id IN :documentIds
              AND a.actor_id IS NOT NULL
            GROUP BY a.document_id, a.actor_id, u.first_name, u.last_name, u.color
        ) ranked
        WHERE ranked.rn <= 4
        ORDER BY ranked.document_id, ranked.rn
        """, nativeQuery = true)
    List<ContributorRow> findRecentContributorsForDocuments(@Param("documentIds") List<UUID> documentIds);
    /** Derived query: paged audit rows of the given kinds (used for user-management events). */
    Page<AuditLog> findByKindIn(Collection<AuditKind> kinds, Pageable pageable);
}

View File

@@ -1,73 +0,0 @@
package org.raddatz.familienarchiv.audit;
import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Service;
import java.time.OffsetDateTime;
import java.util.*;
import static org.raddatz.familienarchiv.audit.AuditKind.GROUP_MEMBERSHIP_CHANGED;
import static org.raddatz.familienarchiv.audit.AuditKind.USER_CREATED;
import static org.raddatz.familienarchiv.audit.AuditKind.USER_DELETED;
/**
 * Read-side facade over {@link AuditLogQueryRepository}: activity feed with
 * rollup, weekly pulse statistics, and contributor lookups keyed by document.
 */
@Service
@RequiredArgsConstructor
public class AuditLogQueryService {

    private final AuditLogQueryRepository queryRepository;

    /** Most recent document the given user saved text on or annotated, if any. */
    public Optional<UUID> findMostRecentDocumentForUser(UUID userId) {
        return queryRepository.findMostRecentDocumentIdByActor(userId);
    }

    /** Activity feed restricted to the default rollup-eligible kinds. */
    public List<ActivityFeedRow> findActivityFeed(UUID currentUserId, int limit) {
        return findActivityFeed(currentUserId, limit, AuditKind.ROLLUP_ELIGIBLE);
    }

    /** Activity feed restricted to an explicit set of kinds. */
    public List<ActivityFeedRow> findActivityFeed(UUID currentUserId, int limit, Set<AuditKind> kinds) {
        List<String> kindNames = new ArrayList<>(kinds.size());
        for (AuditKind kind : kinds) {
            kindNames.add(kind.name());
        }
        return queryRepository.findRolledUpActivityFeed(currentUserId.toString(), limit, kindNames);
    }

    /** Weekly pulse counters since {@code weekStart}, personalized for {@code userId}. */
    public PulseStatsRow getPulseStats(OffsetDateTime weekStart, UUID userId) {
        return queryRepository.getPulseStats(weekStart, userId.toString());
    }

    /**
     * Maps each document id to the actor of its newest event of the given kind.
     * Insertion order of the repository result is preserved.
     */
    public Map<UUID, UUID> findMostRecentActorPerDocument(List<UUID> documentIds, String kind) {
        if (documentIds.isEmpty()) {
            return Map.of();
        }
        Map<UUID, UUID> actorByDocument = new LinkedHashMap<>();
        for (Object[] columns : queryRepository.findMostRecentActorPerDocument(documentIds, kind)) {
            actorByDocument.put((UUID) columns[0], (UUID) columns[1]);
        }
        return actorByDocument;
    }

    /** All contributors per document, ordered by first contribution. */
    public Map<UUID, List<ActivityActorDTO>> findContributorsPerDocument(List<UUID> documentIds) {
        if (documentIds.isEmpty()) {
            return Map.of();
        }
        return toContributorMap(queryRepository.findContributorsPerDocument(documentIds));
    }

    /** Up to four most-recently-active contributors per document. */
    public Map<UUID, List<ActivityActorDTO>> findRecentContributorsPerDocument(List<UUID> documentIds) {
        if (documentIds.isEmpty()) {
            return Map.of();
        }
        return toContributorMap(queryRepository.findRecentContributorsForDocuments(documentIds));
    }

    /** Newest user-management events (create/delete/group change), newest first. */
    public List<AuditLog> findRecentUserManagementEvents(int limit) {
        Sort newestFirst = Sort.by("happenedAt").descending();
        PageRequest firstPage = PageRequest.of(0, limit, newestFirst);
        return queryRepository
                .findByKindIn(Set.of(USER_CREATED, USER_DELETED, GROUP_MEMBERSHIP_CHANGED), firstPage)
                .getContent();
    }

    /** Groups projection rows into document-id → contributor list, preserving row order. */
    private Map<UUID, List<ActivityActorDTO>> toContributorMap(List<ContributorRow> rows) {
        Map<UUID, List<ActivityActorDTO>> byDocument = new LinkedHashMap<>();
        for (ContributorRow row : rows) {
            ActivityActorDTO actor =
                    new ActivityActorDTO(row.getActorInitials(), row.getActorColor(), row.getActorName());
            byDocument.computeIfAbsent(row.getDocumentId(), unused -> new ArrayList<>()).add(actor);
        }
        return byDocument;
    }
}

View File

@@ -1,9 +0,0 @@
package org.raddatz.familienarchiv.audit;
import org.springframework.data.jpa.repository.JpaRepository;
import java.util.UUID;
/**
 * Write-side repository for {@link AuditLog} rows; the read-side native
 * queries live in {@code AuditLogQueryRepository}.
 */
public interface AuditLogRepository extends JpaRepository<AuditLog, UUID> {
    /** True when at least one event of the given kind has ever been logged. */
    boolean existsByKind(AuditKind kind);
}

View File

@@ -1,57 +0,0 @@
package org.raddatz.familienarchiv.audit;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.core.task.TaskExecutor;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.transaction.support.TransactionSynchronization;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import java.util.Map;
import java.util.UUID;
/**
 * Fire-and-forget writer for audit events. Writes run off the caller's thread
 * (on the "auditExecutor" pool) and failures are logged, never propagated —
 * auditing must not break the business operation that triggered it.
 */
@Service
@RequiredArgsConstructor
@Slf4j
public class AuditService {
    private final AuditLogRepository auditLogRepository;
    // NOTE(review): with @RequiredArgsConstructor, a field-level @Qualifier is
    // only copied onto the generated constructor parameter if lombok.config
    // declares it copyable (lombok.copyableAnnotations) — confirm that is set,
    // otherwise an arbitrary TaskExecutor bean may be injected here.
    @Qualifier("auditExecutor")
    private final TaskExecutor auditExecutor;
    /**
     * Persists one audit event asynchronously on the audit executor.
     *
     * @param kind       event discriminator (payload contract documented on {@link AuditKind})
     * @param actorId    acting user, or null for system events
     * @param documentId affected document, or null
     * @param payload    kind-specific JSON payload, or null
     */
    @Async("auditExecutor")
    public void log(AuditKind kind, UUID actorId, UUID documentId, Map<String, Object> payload) {
        writeLog(kind, actorId, documentId, payload);
    }
    /**
     * Persists the event only after the surrounding transaction commits, so an
     * audit row is never written for a rolled-back operation. Falls back to an
     * immediate write when no transaction is active.
     */
    public void logAfterCommit(AuditKind kind, UUID actorId, UUID documentId, Map<String, Object> payload) {
        if (TransactionSynchronizationManager.isActualTransactionActive()) {
            TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronization() {
                @Override
                public void afterCommit() {
                    // Run on a separate thread: the afterCommit() callback fires while Spring's
                    // transaction synchronizations are still active on the current thread, which
                    // prevents SimpleJpaRepository.save() from starting a new transaction inline.
                    auditExecutor.execute(() -> writeLog(kind, actorId, documentId, payload));
                }
            });
        } else {
            writeLog(kind, actorId, documentId, payload);
        }
    }
    // Best-effort insert: any persistence failure is logged and swallowed by design.
    private void writeLog(AuditKind kind, UUID actorId, UUID documentId, Map<String, Object> payload) {
        try {
            auditLogRepository.save(AuditLog.builder()
                    .kind(kind)
                    .actorId(actorId)
                    .documentId(documentId)
                    .payload(payload)
                    .build());
        } catch (Exception e) {
            log.error("Audit log write failed: kind={}, document={}", kind, documentId, e);
        }
    }
}

View File

@@ -1,10 +0,0 @@
package org.raddatz.familienarchiv.audit;
import java.util.UUID;
/**
 * Spring Data projection for one contributor of a document; getter names map
 * to the column aliases of the contributor queries in
 * {@code AuditLogQueryRepository}.
 */
public interface ContributorRow {
    UUID getDocumentId();
    String getActorInitials();
    String getActorColor();
    /** Full display name; may be null/empty when no user row matched. */
    String getActorName();
}

View File

@@ -1,9 +0,0 @@
package org.raddatz.familienarchiv.audit;
/**
 * Spring Data projection for the weekly pulse counters produced by
 * {@code AuditLogQueryRepository#getPulseStats}.
 */
public interface PulseStatsRow {
    /** Distinct (document, page) pairs touched since week start. */
    long getPages();
    /** Number of ANNOTATION_CREATED events. */
    long getAnnotated();
    /** Distinct block ids with a TEXT_SAVED event. */
    long getTranscribed();
    /** Distinct documents with a FILE_UPLOADED event. */
    long getUploaded();
    /** Distinct (document, page) pairs the current user annotated or saved. */
    long getYourPages();
}

View File

@@ -23,33 +23,4 @@ public class AsyncConfig {
executor.setRejectedExecutionHandler(new ThreadPoolExecutor.AbortPolicy()); executor.setRejectedExecutionHandler(new ThreadPoolExecutor.AbortPolicy());
return executor; return executor;
} }
@Bean("auditExecutor")
public Executor auditExecutor() {
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
executor.setCorePoolSize(1);
executor.setMaxPoolSize(2);
executor.setQueueCapacity(50);
executor.setThreadNamePrefix("Audit-");
// AbortPolicy instead of CallerRunsPolicy: if CallerRunsPolicy ran the task on the
// afterCommit() callback thread, Spring's transaction synchronizations would still be
// active on that thread and SimpleJpaRepository.save() would throw IllegalStateException.
executor.setRejectedExecutionHandler(new ThreadPoolExecutor.AbortPolicy());
return executor;
}
@Bean("thumbnailExecutor")
public Executor thumbnailExecutor() {
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
executor.setCorePoolSize(1);
executor.setMaxPoolSize(2);
executor.setQueueCapacity(200);
executor.setThreadNamePrefix("Thumbnail-");
// CallerRunsPolicy applies back-pressure to quick-upload batches and admin backfill
// instead of dropping work (shared taskExecutor uses AbortPolicy). Safe because the
// task is dispatched via TransactionSynchronization.afterCommit, which runs on a
// post-commit callback thread without active transaction synchronization.
executor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
return executor;
}
} }

View File

@@ -31,8 +31,8 @@ import java.util.Set;
@DependsOn("flyway") @DependsOn("flyway")
public class DataInitializer { public class DataInitializer {
@Value("${app.admin.email:admin@familyarchive.local}") @Value("${app.admin.username:admin}")
private String adminEmail; private String adminUsername;
@Value("${app.admin.password:admin123}") @Value("${app.admin.password:admin123}")
private String adminPassword; private String adminPassword;
@@ -43,23 +43,26 @@ public class DataInitializer {
@Bean @Bean
public CommandLineRunner initAdminUser(PasswordEncoder passwordEncoder) { public CommandLineRunner initAdminUser(PasswordEncoder passwordEncoder) {
return args -> { return args -> {
if (userRepository.findByEmail(adminEmail).isEmpty()) { if (userRepository.findByUsername(adminUsername).isEmpty()) {
log.info("Kein Admin-User '{}' gefunden. Erstelle Default-Admin...", adminEmail); log.info("Kein Admin-User '{}' gefunden. Erstelle Default-Admin...", adminUsername);
// 1. Admin Gruppe erstellen
UserGroup adminGroup = UserGroup.builder() UserGroup adminGroup = UserGroup.builder()
.name("Administrators") .name("Administrators")
.permissions(Set.of("ADMIN", "READ_ALL", "WRITE_ALL", "ANNOTATE_ALL", "ADMIN_USER", "ADMIN_TAG", "ADMIN_PERMISSION")) .permissions(Set.of("ADMIN", "READ_ALL", "WRITE_ALL", "ANNOTATE_ALL", "ADMIN_USER", "ADMIN_TAG", "ADMIN_PERMISSION"))
.build(); .build();
groupRepository.save(adminGroup); groupRepository.save(adminGroup);
// 2. Admin User erstellen
AppUser admin = AppUser.builder() AppUser admin = AppUser.builder()
.email(adminEmail) .username(adminUsername)
.password(passwordEncoder.encode(adminPassword)) .password(passwordEncoder.encode(adminPassword)) // Passwort verschlüsseln!
.email("admin@familyarchive.local")
.groups(Set.of(adminGroup)) .groups(Set.of(adminGroup))
.build(); .build();
userRepository.save(admin); userRepository.save(admin);
log.info("Default Admin erstellt: Email='{}'", adminEmail); log.info("Default Admin erstellt: User='{}'", adminUsername);
} }
}; };
} }
@@ -81,13 +84,16 @@ public class DataInitializer {
TagRepository tagRepo, TagRepository tagRepo,
PasswordEncoder passwordEncoder) { PasswordEncoder passwordEncoder) {
return args -> { return args -> {
userRepository.findByEmail(adminEmail).ifPresent(admin -> { // Always reset the admin password to the configured value so a failed password-reset
// test from a previous run can never leave the account locked out.
userRepository.findByUsername(adminUsername).ifPresent(admin -> {
admin.setPassword(passwordEncoder.encode(adminPassword)); admin.setPassword(passwordEncoder.encode(adminPassword));
userRepository.save(admin); userRepository.save(admin);
log.info("E2E seed: Admin-Passwort auf konfigurierten Wert zurückgesetzt."); log.info("E2E seed: Admin-Passwort auf konfigurierten Wert zurückgesetzt.");
}); });
if (userRepository.findByEmail("reader@familyarchive.local").isEmpty()) { // Always ensure the read-only test user exists, even when seed data was already loaded.
if (userRepository.findByUsername("reader").isEmpty()) {
log.info("E2E seed: Erstelle 'reader'-Testbenutzer..."); log.info("E2E seed: Erstelle 'reader'-Testbenutzer...");
UserGroup leserGroup = groupRepository.findByName("Leser").orElseGet(() -> UserGroup leserGroup = groupRepository.findByName("Leser").orElseGet(() ->
groupRepository.save(UserGroup.builder() groupRepository.save(UserGroup.builder()
@@ -95,7 +101,7 @@ public class DataInitializer {
.permissions(Set.of("READ_ALL")) .permissions(Set.of("READ_ALL"))
.build())); .build()));
userRepository.save(AppUser.builder() userRepository.save(AppUser.builder()
.email("reader@familyarchive.local") .username("reader")
.password(passwordEncoder.encode("reader123")) .password(passwordEncoder.encode("reader123"))
.groups(Set.of(leserGroup)) .groups(Set.of(leserGroup))
.build()); .build());
@@ -125,6 +131,7 @@ public class DataInitializer {
Tag tagUrlaub = tagRepo.save(Tag.builder().name("Urlaub").build()); Tag tagUrlaub = tagRepo.save(Tag.builder().name("Urlaub").build());
// ── Documents ──────────────────────────────────────────────────── // ── Documents ────────────────────────────────────────────────────
// 1. Fully transcribed letter — used by search + detail E2E tests
docRepo.save(Document.builder() docRepo.save(Document.builder()
.title("Geburtsurkunde Hans Müller") .title("Geburtsurkunde Hans Müller")
.originalFilename("geburtsurkunde_hans.pdf") .originalFilename("geburtsurkunde_hans.pdf")
@@ -137,6 +144,7 @@ public class DataInitializer {
.transcription("Hiermit wird beurkundet, dass Hans Müller am 12. April 1923 in Berlin geboren wurde.") .transcription("Hiermit wird beurkundet, dass Hans Müller am 12. April 1923 in Berlin geboren wurde.")
.build()); .build());
// 2. Letter with multiple receivers and tags — tests multi-receiver display
docRepo.save(Document.builder() docRepo.save(Document.builder()
.title("Brief aus dem Krieg") .title("Brief aus dem Krieg")
.originalFilename("brief_krieg_1944.pdf") .originalFilename("brief_krieg_1944.pdf")
@@ -149,6 +157,7 @@ public class DataInitializer {
.transcription("Liebe Anna, ich schreibe dir aus der Front. Es geht mir den Umständen entsprechend gut.") .transcription("Liebe Anna, ich schreibe dir aus der Front. Es geht mir den Umständen entsprechend gut.")
.build()); .build());
// 3. Postcard — no transcription, tests PLACEHOLDER status
docRepo.save(Document.builder() docRepo.save(Document.builder()
.title("Urlaubspostkarte Ostsee") .title("Urlaubspostkarte Ostsee")
.originalFilename("postkarte_1965.jpg") .originalFilename("postkarte_1965.jpg")
@@ -160,6 +169,7 @@ public class DataInitializer {
.tags(Set.of(tagUrlaub)) .tags(Set.of(tagUrlaub))
.build()); .build());
// 4. Document with no sender — tests null-sender display ("Unbekannt")
docRepo.save(Document.builder() docRepo.save(Document.builder()
.title("Unbekanntes Dokument") .title("Unbekanntes Dokument")
.originalFilename("unbekannt.pdf") .originalFilename("unbekannt.pdf")
@@ -169,6 +179,7 @@ public class DataInitializer {
.receivers(Set.of(maria)) .receivers(Set.of(maria))
.build()); .build());
// 5. Document with minimal metadata — tests sparse display
docRepo.save(Document.builder() docRepo.save(Document.builder()
.title("Scan ohne Titel") .title("Scan ohne Titel")
.originalFilename("scan_ohne_titel.pdf") .originalFilename("scan_ohne_titel.pdf")

View File

@@ -1,69 +0,0 @@
package org.raddatz.familienarchiv.config;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.web.servlet.HandlerInterceptor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Fixed-window, per-client-IP rate limiter for unauthenticated endpoints.
 * Each IP gets a counter that expires one minute after its last access;
 * exceeding {@link #MAX_REQUESTS_PER_MINUTE} yields an HTTP 429 JSON error.
 */
public class RateLimitInterceptor implements HandlerInterceptor {
    private static final int MAX_REQUESTS_PER_MINUTE = 10;
    // Caffeine cache: per-IP counter that expires 1 minute after first access.
    // Bounded to 10_000 entries to prevent OOM from IP exhaustion.
    private final Cache<String, AtomicInteger> requestCounts = Caffeine.newBuilder()
            .expireAfterAccess(1, TimeUnit.MINUTES)
            .maximumSize(10_000)
            .build();
    /**
     * Increments the caller's counter and rejects the request with 429 once
     * the per-minute budget is exhausted.
     *
     * @return true to continue the handler chain; false when rate-limited
     */
    @Override
    public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler)
            throws Exception {
        String ip = resolveClientIp(request);
        AtomicInteger count = requestCounts.get(ip, k -> new AtomicInteger(0));
        if (count.incrementAndGet() > MAX_REQUESTS_PER_MINUTE) {
            response.setStatus(HttpStatus.TOO_MANY_REQUESTS.value());
            // Fix: declare the JSON content type explicitly — previously the body
            // was written without one, leaving clients with the container default.
            response.setContentType(MediaType.APPLICATION_JSON_VALUE);
            response.getWriter().write("{\"code\":\"RATE_LIMIT_EXCEEDED\",\"message\":\"Too many requests\"}");
            return false;
        }
        return true;
    }
    private String resolveClientIp(HttpServletRequest request) {
        // Only trust X-Forwarded-For when the direct connection comes from a known
        // reverse proxy (loopback or Docker private network). Trusting it unconditionally
        // allows any client to spoof a different IP and bypass per-IP rate limiting.
        String remoteAddr = request.getRemoteAddr();
        if (isTrustedProxy(remoteAddr)) {
            String forwarded = request.getHeader("X-Forwarded-For");
            if (forwarded != null && !forwarded.isBlank()) {
                // First entry is the original client; later entries are intermediate proxies.
                return forwarded.split(",")[0].trim();
            }
        }
        return remoteAddr;
    }
    // True for loopback and RFC 1918 private ranges (our reverse-proxy networks).
    private boolean isTrustedProxy(String ip) {
        if (ip.equals("127.0.0.1") || ip.equals("::1") || ip.startsWith("10.") || ip.startsWith("192.168.")) {
            return true;
        }
        // Only RFC 1918 172.16.0.0/12 (172.16-172.31), not all of 172.x
        if (ip.startsWith("172.")) {
            String[] parts = ip.split("\\.");
            if (parts.length >= 2) {
                try {
                    int second = Integer.parseInt(parts[1]);
                    return second >= 16 && second <= 31;
                } catch (NumberFormatException ignored) {
                    return false;
                }
            }
        }
        return false;
    }
}

View File

@@ -50,8 +50,6 @@ public class SecurityConfig {
auth.requestMatchers("/actuator/health").permitAll(); auth.requestMatchers("/actuator/health").permitAll();
// Password reset endpoints are unauthenticated by nature // Password reset endpoints are unauthenticated by nature
auth.requestMatchers("/api/auth/forgot-password", "/api/auth/reset-password").permitAll(); auth.requestMatchers("/api/auth/forgot-password", "/api/auth/reset-password").permitAll();
// Invite-based registration endpoints are public
auth.requestMatchers("/api/auth/invite/**", "/api/auth/register").permitAll();
// E2E test helper (only active under "e2e" profile) // E2E test helper (only active under "e2e" profile)
auth.requestMatchers("/api/auth/reset-token-for-test").permitAll(); auth.requestMatchers("/api/auth/reset-token-for-test").permitAll();
// In dev, allow unauthenticated access to the OpenAPI spec and Swagger UI // In dev, allow unauthenticated access to the OpenAPI spec and Swagger UI
@@ -69,7 +67,7 @@ public class SecurityConfig {
.frameOptions(frameOptions -> frameOptions.sameOrigin())) .frameOptions(frameOptions -> frameOptions.sameOrigin()))
// Erlaubt Login via Browser-Popup oder REST-Header (Authorization: Basic ...) // Erlaubt Login via Browser-Popup oder REST-Header (Authorization: Basic ...)
.httpBasic(Customizer.withDefaults()) .httpBasic(Customizer.withDefaults())
.formLogin(form -> form.usernameParameter("email")); .formLogin(Customizer.withDefaults());
return http.build(); return http.build();
} }

View File

@@ -1,15 +0,0 @@
package org.raddatz.familienarchiv.config;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
/**
 * MVC configuration that attaches the {@link RateLimitInterceptor} to the
 * unauthenticated invite/registration endpoints (made public in
 * SecurityConfig), so they are at least rate limited per client IP.
 */
@Configuration
public class WebConfig implements WebMvcConfigurer {
// Only the public auth endpoints are rate limited; all other routes are
// behind authentication and are not registered here.
@Override
public void addInterceptors(InterceptorRegistry registry) {
registry.addInterceptor(new RateLimitInterceptor())
.addPathPatterns("/api/auth/invite/**", "/api/auth/register");
}
}

View File

@@ -6,7 +6,6 @@ import org.raddatz.familienarchiv.security.RequirePermission;
import org.raddatz.familienarchiv.service.DocumentService; import org.raddatz.familienarchiv.service.DocumentService;
import org.raddatz.familienarchiv.service.DocumentVersionService; import org.raddatz.familienarchiv.service.DocumentVersionService;
import org.raddatz.familienarchiv.service.MassImportService; import org.raddatz.familienarchiv.service.MassImportService;
import org.raddatz.familienarchiv.service.ThumbnailBackfillService;
import org.springframework.http.ResponseEntity; import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.PostMapping;
@@ -24,7 +23,6 @@ public class AdminController {
private final MassImportService massImportService; private final MassImportService massImportService;
private final DocumentService documentService; private final DocumentService documentService;
private final DocumentVersionService documentVersionService; private final DocumentVersionService documentVersionService;
private final ThumbnailBackfillService thumbnailBackfillService;
@PostMapping("/trigger-import") @PostMapping("/trigger-import")
public ResponseEntity<MassImportService.ImportStatus> triggerMassImport() { public ResponseEntity<MassImportService.ImportStatus> triggerMassImport() {
@@ -49,15 +47,4 @@ public class AdminController {
int count = documentService.backfillFileHashes(); int count = documentService.backfillFileHashes();
return ResponseEntity.ok(new BackfillResult(count)); return ResponseEntity.ok(new BackfillResult(count));
} }
@PostMapping("/generate-thumbnails")
public ResponseEntity<ThumbnailBackfillService.BackfillStatus> generateThumbnails() {
thumbnailBackfillService.runBackfillAsync();
return ResponseEntity.accepted().body(thumbnailBackfillService.getStatus());
}
@GetMapping("/thumbnail-status")
public ResponseEntity<ThumbnailBackfillService.BackfillStatus> thumbnailStatus() {
return ResponseEntity.ok(thumbnailBackfillService.getStatus());
}
} }

View File

@@ -72,7 +72,7 @@ public class AnnotationController {
private UUID resolveUserId(Authentication authentication) { private UUID resolveUserId(Authentication authentication) {
if (authentication == null || !authentication.isAuthenticated()) return null; if (authentication == null || !authentication.isAuthenticated()) return null;
try { try {
AppUser user = userService.findByEmail(authentication.getName()); AppUser user = userService.findByUsername(authentication.getName());
return user != null ? user.getId() : null; return user != null ? user.getId() : null;
} catch (Exception e) { } catch (Exception e) {
log.warn("Could not resolve user for annotation: {}", e.getMessage()); log.warn("Could not resolve user for annotation: {}", e.getMessage());

View File

@@ -1,18 +1,14 @@
package org.raddatz.familienarchiv.controller; package org.raddatz.familienarchiv.controller;
import jakarta.validation.Valid;
import org.raddatz.familienarchiv.dto.ForgotPasswordRequest; import org.raddatz.familienarchiv.dto.ForgotPasswordRequest;
import org.raddatz.familienarchiv.dto.InvitePrefillDTO;
import org.raddatz.familienarchiv.dto.RegisterRequest;
import org.raddatz.familienarchiv.dto.ResetPasswordRequest; import org.raddatz.familienarchiv.dto.ResetPasswordRequest;
import org.raddatz.familienarchiv.model.AppUser;
import org.raddatz.familienarchiv.model.InviteToken;
import org.raddatz.familienarchiv.service.InviteService;
import org.raddatz.familienarchiv.service.PasswordResetService; import org.raddatz.familienarchiv.service.PasswordResetService;
import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity; import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*; import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
@@ -22,7 +18,6 @@ import lombok.RequiredArgsConstructor;
public class AuthController { public class AuthController {
private final PasswordResetService passwordResetService; private final PasswordResetService passwordResetService;
private final InviteService inviteService;
@Value("${app.base-url:http://localhost:3000}") @Value("${app.base-url:http://localhost:3000}")
private String appBaseUrl; private String appBaseUrl;
@@ -39,20 +34,4 @@ public class AuthController {
passwordResetService.resetPassword(request); passwordResetService.resetPassword(request);
return ResponseEntity.noContent().build(); return ResponseEntity.noContent().build();
} }
/**
 * Returns the prefill data (first name, last name, email) stored on a valid
 * invite token, so the registration form can be pre-populated.
 * Validation of the code is delegated to {@code inviteService.validateCode};
 * presumably it throws for unknown/expired codes — confirm in InviteService.
 */
@GetMapping("/invite/{code}")
public InvitePrefillDTO getInvitePrefill(@PathVariable String code) {
InviteToken token = inviteService.validateCode(code);
return new InvitePrefillDTO(
token.getPrefillFirstName(),
token.getPrefillLastName(),
token.getPrefillEmail()
);
}
/**
 * Redeems an invite and creates the new user account; responds with
 * 201 Created and the created {@link AppUser} body.
 * NOTE(review): the returned AppUser is serialized as-is — confirm password
 * hash and other sensitive fields are excluded from JSON.
 */
@PostMapping("/register")
public ResponseEntity<AppUser> register(@Valid @RequestBody RegisterRequest request) {
AppUser user = inviteService.redeemInvite(request);
return ResponseEntity.status(HttpStatus.CREATED).body(user);
}
} }

View File

@@ -24,6 +24,67 @@ public class CommentController {
private final CommentService commentService; private final CommentService commentService;
private final UserService userService; private final UserService userService;
// ─── General document comments ────────────────────────────────────────────
/** Lists all general (document-level) comments for the given document. */
@GetMapping("/api/documents/{documentId}/comments")
public List<DocumentComment> getDocumentComments(@PathVariable UUID documentId) {
return commentService.getCommentsForDocument(documentId);
}
/**
 * Posts a new top-level comment on a document (annotationId is null for
 * document-level comments). Requires ANNOTATE_ALL or WRITE_ALL.
 * The author may resolve to null if the user lookup fails (see resolveUser).
 */
@PostMapping("/api/documents/{documentId}/comments")
@ResponseStatus(HttpStatus.CREATED)
@RequirePermission({Permission.ANNOTATE_ALL, Permission.WRITE_ALL})
public DocumentComment postDocumentComment(
@PathVariable UUID documentId,
@RequestBody CreateCommentDTO dto,
Authentication authentication) {
AppUser author = resolveUser(authentication);
return commentService.postComment(documentId, null, dto.getContent(), dto.getMentionedUserIds(), author);
}
/**
 * Posts a reply to an existing document-level comment.
 * Requires ANNOTATE_ALL or WRITE_ALL.
 */
@PostMapping("/api/documents/{documentId}/comments/{commentId}/replies")
@ResponseStatus(HttpStatus.CREATED)
@RequirePermission({Permission.ANNOTATE_ALL, Permission.WRITE_ALL})
public DocumentComment replyToDocumentComment(
@PathVariable UUID documentId,
@PathVariable UUID commentId,
@RequestBody CreateCommentDTO dto,
Authentication authentication) {
AppUser author = resolveUser(authentication);
return commentService.replyToComment(documentId, commentId, dto.getContent(), dto.getMentionedUserIds(), author);
}
// ─── Annotation comments ──────────────────────────────────────────────────
/**
 * Lists all comments attached to one annotation.
 * NOTE(review): the {documentId} path segment is not bound or validated
 * against the annotation — the lookup is keyed by annotationId alone.
 * Confirm this is intentional.
 */
@GetMapping("/api/documents/{documentId}/annotations/{annotationId}/comments")
public List<DocumentComment> getAnnotationComments(@PathVariable UUID annotationId) {
return commentService.getCommentsForAnnotation(annotationId);
}
/**
 * Posts a new comment on a specific annotation of a document.
 * Requires ANNOTATE_ALL or WRITE_ALL.
 */
@PostMapping("/api/documents/{documentId}/annotations/{annotationId}/comments")
@ResponseStatus(HttpStatus.CREATED)
@RequirePermission({Permission.ANNOTATE_ALL, Permission.WRITE_ALL})
public DocumentComment postAnnotationComment(
@PathVariable UUID documentId,
@PathVariable UUID annotationId,
@RequestBody CreateCommentDTO dto,
Authentication authentication) {
AppUser author = resolveUser(authentication);
return commentService.postComment(documentId, annotationId, dto.getContent(), dto.getMentionedUserIds(), author);
}
/**
 * Posts a reply to a comment on an annotation.
 * NOTE(review): the {annotationId} path segment is not bound here; the call
 * delegates to the same replyToComment(documentId, commentId, ...) as the
 * document-level reply endpoint, so the reply is keyed by commentId only —
 * confirm the annotation association is derived from the parent comment.
 */
@PostMapping("/api/documents/{documentId}/annotations/{annotationId}/comments/{commentId}/replies")
@ResponseStatus(HttpStatus.CREATED)
@RequirePermission({Permission.ANNOTATE_ALL, Permission.WRITE_ALL})
public DocumentComment replyToAnnotationComment(
@PathVariable UUID documentId,
@PathVariable UUID commentId,
@RequestBody CreateCommentDTO dto,
Authentication authentication) {
AppUser author = resolveUser(authentication);
return commentService.replyToComment(documentId, commentId, dto.getContent(), dto.getMentionedUserIds(), author);
}
// ─── Block (transcription) comments ──────────────────────────────────────── // ─── Block (transcription) comments ────────────────────────────────────────
@GetMapping("/api/documents/{documentId}/transcription-blocks/{blockId}/comments") @GetMapping("/api/documents/{documentId}/transcription-blocks/{blockId}/comments")
@@ -83,7 +144,7 @@ public class CommentController {
private AppUser resolveUser(Authentication authentication) { private AppUser resolveUser(Authentication authentication) {
if (authentication == null || !authentication.isAuthenticated()) return null; if (authentication == null || !authentication.isAuthenticated()) return null;
try { try {
return userService.findByEmail(authentication.getName()); return userService.findByUsername(authentication.getName());
} catch (Exception e) { } catch (Exception e) {
log.warn("Could not resolve user for comment: {}", e.getMessage()); log.warn("Could not resolve user for comment: {}", e.getMessage());
return null; return null;

View File

@@ -3,7 +3,6 @@ package org.raddatz.familienarchiv.controller;
import java.io.IOException; import java.io.IOException;
import java.time.LocalDate; import java.time.LocalDate;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Optional; import java.util.Optional;
@@ -14,21 +13,8 @@ import java.util.UUID;
import io.swagger.v3.oas.annotations.Parameter; import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.responses.ApiResponse;
import jakarta.validation.Valid;
import jakarta.validation.constraints.Max;
import jakarta.validation.constraints.Min;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.validation.annotation.Validated;
import org.raddatz.familienarchiv.dto.BatchMetadataRequest;
import org.raddatz.familienarchiv.dto.BulkEditError;
import org.raddatz.familienarchiv.dto.BulkEditResult;
import org.raddatz.familienarchiv.dto.DocumentBatchMetadataDTO;
import org.raddatz.familienarchiv.dto.DocumentBatchSummary;
import org.raddatz.familienarchiv.dto.DocumentBulkEditDTO;
import org.raddatz.familienarchiv.dto.DocumentSearchResult; import org.raddatz.familienarchiv.dto.DocumentSearchResult;
import org.raddatz.familienarchiv.dto.DocumentUpdateDTO; import org.raddatz.familienarchiv.dto.DocumentUpdateDTO;
import org.raddatz.familienarchiv.dto.TagOperator;
import org.raddatz.familienarchiv.dto.DocumentVersionSummary; import org.raddatz.familienarchiv.dto.DocumentVersionSummary;
import org.raddatz.familienarchiv.dto.IncompleteDocumentDTO; import org.raddatz.familienarchiv.dto.IncompleteDocumentDTO;
import org.raddatz.familienarchiv.exception.DomainException; import org.raddatz.familienarchiv.exception.DomainException;
@@ -38,16 +24,12 @@ import org.raddatz.familienarchiv.dto.DocumentSort;
import org.raddatz.familienarchiv.model.DocumentStatus; import org.raddatz.familienarchiv.model.DocumentStatus;
import org.raddatz.familienarchiv.model.TrainingLabel; import org.raddatz.familienarchiv.model.TrainingLabel;
import org.raddatz.familienarchiv.model.DocumentVersion; import org.raddatz.familienarchiv.model.DocumentVersion;
import org.raddatz.familienarchiv.model.AppUser;
import org.raddatz.familienarchiv.security.Permission; import org.raddatz.familienarchiv.security.Permission;
import org.raddatz.familienarchiv.security.RequirePermission; import org.raddatz.familienarchiv.security.RequirePermission;
import org.raddatz.familienarchiv.security.SecurityUtils;
import org.raddatz.familienarchiv.service.DocumentService; import org.raddatz.familienarchiv.service.DocumentService;
import org.raddatz.familienarchiv.service.DocumentVersionService; import org.raddatz.familienarchiv.service.DocumentVersionService;
import org.raddatz.familienarchiv.service.FileService; import org.raddatz.familienarchiv.service.FileService;
import org.raddatz.familienarchiv.service.UserService;
import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort;
import org.springframework.security.core.Authentication;
import org.springframework.http.HttpHeaders; import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType; import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity; import org.springframework.http.ResponseEntity;
@@ -75,13 +57,11 @@ import lombok.extern.slf4j.Slf4j;
@RequestMapping("/api/documents") @RequestMapping("/api/documents")
@RequiredArgsConstructor @RequiredArgsConstructor
@Slf4j @Slf4j
@Validated
public class DocumentController { public class DocumentController {
private final DocumentService documentService; private final DocumentService documentService;
private final DocumentVersionService documentVersionService; private final DocumentVersionService documentVersionService;
private final FileService fileService; private final FileService fileService;
private final UserService userService;
// --- DOWNLOAD --- // --- DOWNLOAD ---
@GetMapping("/{id}/file") @GetMapping("/{id}/file")
@@ -108,31 +88,6 @@ public class DocumentController {
} }
} }
// --- THUMBNAIL ---
/**
 * Streams the JPEG thumbnail of a document from object storage.
 * Responds 404 (FILE_NOT_FOUND) both when the document has no thumbnail key
 * and when the key exists but the object is missing in storage.
 */
@GetMapping("/{id}/thumbnail")
public ResponseEntity<InputStreamResource> getDocumentThumbnail(@PathVariable UUID id) {
Document doc = documentService.getDocumentById(id);
if (doc.getThumbnailKey() == null) {
throw DomainException.notFound(ErrorCode.FILE_NOT_FOUND, "No thumbnail for document: " + id);
}
try {
FileService.S3FileDownload download = fileService.downloadFile(doc.getThumbnailKey());
return ResponseEntity.ok()
.contentType(MediaType.IMAGE_JPEG)
// `private` (not `public`) prevents shared caches from serving one user's
// thumbnail to another (CWE-525). `immutable` is safe because the URL
// carries a ?v=<thumbnailGeneratedAt> cache-buster that changes whenever
// the underlying file is replaced.
.header(HttpHeaders.CACHE_CONTROL, "private, max-age=31536000, immutable")
.body(download.resource());
} catch (FileService.StorageFileNotFoundException e) {
throw DomainException.notFound(ErrorCode.FILE_NOT_FOUND,
"Thumbnail missing in storage: " + doc.getThumbnailKey());
}
}
// --- METADATA --- // --- METADATA ---
@GetMapping("/{id}") @GetMapping("/{id}")
public Document getDocument(@PathVariable UUID id) { public Document getDocument(@PathVariable UUID id) {
@@ -156,10 +111,9 @@ public class DocumentController {
public Document updateDocument( public Document updateDocument(
@PathVariable UUID id, @PathVariable UUID id,
@ModelAttribute DocumentUpdateDTO dto, @ModelAttribute DocumentUpdateDTO dto,
@RequestPart(value = "file", required = false) MultipartFile file, @RequestPart(value = "file", required = false) MultipartFile file) {
Authentication authentication) {
try { try {
return documentService.updateDocument(id, dto, file, requireUserId(authentication)); return documentService.updateDocument(id, dto, file);
} catch (IOException e) { } catch (IOException e) {
throw DomainException.internal(ErrorCode.FILE_UPLOAD_FAILED, "Failed to upload file: " + e.getMessage()); throw DomainException.internal(ErrorCode.FILE_UPLOAD_FAILED, "Failed to upload file: " + e.getMessage());
} }
@@ -174,35 +128,18 @@ public class DocumentController {
return ResponseEntity.noContent().build(); return ResponseEntity.noContent().build();
} }
// --- ATTACH FILE --- // --- QUICK UPLOAD ---
private static final Set<String> ALLOWED_CONTENT_TYPES = Set.of( private static final Set<String> ALLOWED_CONTENT_TYPES = Set.of(
"application/pdf", "image/jpeg", "image/png", "image/tiff"); "application/pdf", "image/jpeg", "image/png", "image/tiff");
/**
 * Attaches (or replaces) the binary file of an existing document.
 * Rejects anything outside ALLOWED_CONTENT_TYPES with 400 before touching
 * storage. Requires WRITE_ALL; the acting user is recorded for auditing.
 */
@PostMapping(value = "/{id}/file", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
@RequirePermission(Permission.WRITE_ALL)
public Document attachFile(
@PathVariable UUID id,
@RequestPart("file") MultipartFile file,
Authentication authentication) {
// Content type comes from the client and may be absent — treat null as unsupported.
String contentType = file.getContentType();
if (contentType == null || !ALLOWED_CONTENT_TYPES.contains(contentType)) {
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "Unsupported file type: " + contentType);
}
return documentService.attachFile(id, file, requireUserId(authentication));
}
// --- QUICK UPLOAD ---
public record UploadError(String filename, String code) {} public record UploadError(String filename, String code) {}
public record QuickUploadResult(List<Document> created, List<Document> updated, List<UploadError> errors) {} public record QuickUploadResult(List<Document> created, List<Document> updated, List<UploadError> errors) {}
@PostMapping(value = "/quick-upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE) @PostMapping(value = "/quick-upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
@RequirePermission(Permission.WRITE_ALL) @RequirePermission(Permission.WRITE_ALL)
public QuickUploadResult quickUpload( public QuickUploadResult quickUpload(
@RequestPart(value = "files", required = false) List<MultipartFile> files, @RequestPart(value = "files", required = false) List<MultipartFile> files) {
@RequestPart(value = "metadata", required = false) DocumentBatchMetadataDTO metadata,
Authentication authentication) {
List<Document> created = new ArrayList<>(); List<Document> created = new ArrayList<>();
List<Document> updated = new ArrayList<>(); List<Document> updated = new ArrayList<>();
List<UploadError> errors = new ArrayList<>(); List<UploadError> errors = new ArrayList<>();
@@ -211,21 +148,13 @@ public class DocumentController {
return new QuickUploadResult(created, updated, errors); return new QuickUploadResult(created, updated, errors);
} }
documentService.validateBatch(files.size(), metadata); for (MultipartFile file : files) {
UUID actorId = requireUserId(authentication);
long totalBytes = files.stream().mapToLong(MultipartFile::getSize).sum();
for (int i = 0; i < files.size(); i++) {
MultipartFile file = files.get(i);
if (!ALLOWED_CONTENT_TYPES.contains(file.getContentType())) { if (!ALLOWED_CONTENT_TYPES.contains(file.getContentType())) {
errors.add(new UploadError(file.getOriginalFilename(), "UNSUPPORTED_FILE_TYPE")); errors.add(new UploadError(file.getOriginalFilename(), "UNSUPPORTED_FILE_TYPE"));
continue; continue;
} }
try { try {
DocumentService.StoreResult result = metadata != null DocumentService.StoreResult result = documentService.storeDocument(file);
? documentService.storeDocumentWithBatchMetadata(file, metadata, i, actorId)
: documentService.storeDocument(file, actorId);
if (result.isNew()) { if (result.isNew()) {
created.add(result.document()); created.add(result.document());
} else { } else {
@@ -237,129 +166,33 @@ public class DocumentController {
} }
} }
log.info("quickUpload actor={} files={} totalBytes={} withMetadata={} created={} updated={} errors={}",
actorId, files.size(), totalBytes, metadata != null,
created.size(), updated.size(), errors.size());
return new QuickUploadResult(created, updated, errors); return new QuickUploadResult(created, updated, errors);
} }
// --- BULK EDIT ---
/** Upper bound on document IDs accepted by a single bulk-edit or
 * batch-metadata request; larger requests are rejected with
 * BULK_EDIT_TOO_MANY_IDS. */
private static final int BULK_EDIT_MAX_IDS = 500;
/** Hard cap for {@code GET /api/documents/ids}: prevents an unfiltered
* call from materialising the entire {@code documents} table into JSON.
* Generous enough for real-world "Alle X editieren" against the family
* archive's bounded scale (~1500 docs today, expected growth to ~5k). */
private static final int BULK_EDIT_FILTER_MAX_IDS = 5000;
/**
 * Applies one edit payload to many documents, best-effort: each document is
 * updated independently and failures are collected per ID instead of
 * aborting the batch. Returns the count of successful updates plus the
 * per-document errors. Caps the batch at BULK_EDIT_MAX_IDS.
 */
@PatchMapping("/bulk")
@RequirePermission(Permission.WRITE_ALL)
public BulkEditResult patchBulk(
@RequestBody @Valid DocumentBulkEditDTO dto,
Authentication authentication) {
if (dto.getDocumentIds() == null || dto.getDocumentIds().isEmpty()) {
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "documentIds is required");
}
if (dto.getDocumentIds().size() > BULK_EDIT_MAX_IDS) {
throw DomainException.badRequest(ErrorCode.BULK_EDIT_TOO_MANY_IDS,
"Maximum " + BULK_EDIT_MAX_IDS + " documents per request, got: " + dto.getDocumentIds().size());
}
UUID actorId = requireUserId(authentication);
int updated = 0;
List<BulkEditError> errors = new ArrayList<>();
// Dedupe duplicate document IDs while preserving submission order. A
// double-click on "Alle X editieren" would otherwise hit each document
// twice and inflate the `updated` count returned to the user.
LinkedHashSet<UUID> uniqueIds = new LinkedHashSet<>(dto.getDocumentIds());
for (UUID id : uniqueIds) {
try {
documentService.applyBulkEditToDocument(id, dto, actorId);
updated++;
} catch (DomainException e) {
// Domain errors carry a user-presentable message; sanitize before reuse.
errors.add(new BulkEditError(id, sanitizeForLog(e.getMessage())));
} catch (Exception e) {
// Unexpected failures are not leaked to the client, only logged.
errors.add(new BulkEditError(id, "Internal error"));
log.warn("Bulk edit failed for document {}: {}", id, sanitizeForLog(e.getMessage()));
}
}
log.info("bulkEdit actor={} documentIds={} unique={} updated={} errors={}",
actorId, dto.getDocumentIds().size(), uniqueIds.size(), updated, errors.size());
return new BulkEditResult(updated, errors);
}
/**
 * Neutralises CR/LF characters in free-form text (e.g.
 * {@link Throwable#getMessage()}) before it is interpolated into a log line,
 * defending against CWE-117 log forging. Null-safe: returns null for null.
 */
private static String sanitizeForLog(String s) {
    if (s == null) {
        return null;
    }
    return s.replace('\r', '_').replace('\n', '_');
}
/**
 * Returns the IDs of all documents matching the given filter — the
 * "select everything that matches" half of the bulk-edit flow. Rejects
 * filters matching more than BULK_EDIT_FILTER_MAX_IDS documents so an
 * unfiltered call cannot materialise the whole table.
 */
@GetMapping("/ids")
@RequirePermission(Permission.WRITE_ALL)
public List<UUID> getDocumentIds(
@RequestParam(required = false) String q,
@RequestParam(required = false) LocalDate from,
@RequestParam(required = false) LocalDate to,
@RequestParam(required = false) UUID senderId,
@RequestParam(required = false) UUID receiverId,
@RequestParam(required = false, name = "tag") List<String> tags,
@RequestParam(required = false) String tagQ,
@RequestParam(required = false) DocumentStatus status,
@RequestParam(required = false) String tagOp,
Authentication authentication) {
// Any tagOp other than "OR" (case-insensitive) falls back to AND.
TagOperator operator = "OR".equalsIgnoreCase(tagOp) ? TagOperator.OR : TagOperator.AND;
List<UUID> ids = documentService.findIdsForFilter(q, from, to, senderId, receiverId, tags, tagQ, status, operator);
if (ids.size() > BULK_EDIT_FILTER_MAX_IDS) {
throw DomainException.badRequest(ErrorCode.BULK_EDIT_TOO_MANY_IDS,
"Filter matches " + ids.size() + " documents — refine filter (max " + BULK_EDIT_FILTER_MAX_IDS + ")");
}
UUID actorId = requireUserId(authentication);
log.info("documentIds actor={} matched={}", actorId, ids.size());
return ids;
}
/**
 * Returns lightweight metadata summaries for a set of document IDs
 * (POST body instead of query string so large ID lists fit). Capped at
 * BULK_EDIT_MAX_IDS per request; requires READ_ALL.
 */
@PostMapping(value = "/batch-metadata", consumes = MediaType.APPLICATION_JSON_VALUE)
@RequirePermission(Permission.READ_ALL)
public List<DocumentBatchSummary> batchMetadata(@RequestBody @Valid BatchMetadataRequest request, Authentication authentication) {
if (request == null || request.ids() == null || request.ids().isEmpty()) {
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "ids is required");
}
if (request.ids().size() > BULK_EDIT_MAX_IDS) {
throw DomainException.badRequest(ErrorCode.BULK_EDIT_TOO_MANY_IDS,
"Maximum " + BULK_EDIT_MAX_IDS + " ids per request, got: " + request.ids().size());
}
UUID actorId = requireUserId(authentication);
log.info("batchMetadata actor={} ids={}", actorId, request.ids().size());
return documentService.batchMetadata(request.ids());
}
@GetMapping("/incomplete-count") @GetMapping("/incomplete-count")
@RequirePermission(Permission.WRITE_ALL)
public Map<String, Long> getIncompleteCount() { public Map<String, Long> getIncompleteCount() {
return Map.of("count", documentService.getIncompleteCount()); return Map.of("count", documentService.getIncompleteCount());
} }
@GetMapping("/incomplete") @GetMapping("/incomplete")
@RequirePermission(Permission.WRITE_ALL)
public List<IncompleteDocumentDTO> getIncomplete( public List<IncompleteDocumentDTO> getIncomplete(
@Parameter(description = "Maximum number of results (server caps at 200)") @Parameter(description = "Maximum number of results") @RequestParam(defaultValue = "10") int size) {
@RequestParam(defaultValue = "50") int size) { return documentService.findIncompleteDocuments(size);
return documentService.findIncompleteDocuments(Math.min(size, 200));
} }
@GetMapping("/incomplete/next") @GetMapping("/incomplete/next")
@RequirePermission(Permission.WRITE_ALL)
public ResponseEntity<Document> getNextIncomplete(@RequestParam UUID excludeId) { public ResponseEntity<Document> getNextIncomplete(@RequestParam UUID excludeId) {
return documentService.findNextIncompleteDocument(excludeId) return documentService.findNextIncompleteDocument(excludeId)
.map(ResponseEntity::ok) .map(ResponseEntity::ok)
.orElse(ResponseEntity.noContent().build()); .orElse(ResponseEntity.noContent().build());
} }
/**
 * Returns the most recently active documents (default 5).
 * NOTE(review): `size` is passed through unclamped — unlike the incomplete
 * endpoints there is no server-side cap; confirm the service bounds it.
 */
@GetMapping("/recent-activity")
public ResponseEntity<List<Document>> getRecentActivity(
@RequestParam(defaultValue = "5") int size) {
return ResponseEntity.ok(documentService.getRecentActivity(size));
}
@GetMapping("/search") @GetMapping("/search")
public ResponseEntity<DocumentSearchResult> search( public ResponseEntity<DocumentSearchResult> search(
@RequestParam(required = false) String q, @RequestParam(required = false) String q,
@@ -371,21 +204,12 @@ public class DocumentController {
@RequestParam(required = false) String tagQ, @RequestParam(required = false) String tagQ,
@Parameter(description = "Filter by document status") @RequestParam(required = false) DocumentStatus status, @Parameter(description = "Filter by document status") @RequestParam(required = false) DocumentStatus status,
@Parameter(description = "Sort field") @RequestParam(required = false) DocumentSort sort, @Parameter(description = "Sort field") @RequestParam(required = false) DocumentSort sort,
@Parameter(description = "Sort direction: ASC or DESC") @RequestParam(required = false, defaultValue = "DESC") String dir, @Parameter(description = "Sort direction: ASC or DESC") @RequestParam(required = false, defaultValue = "DESC") String dir) {
@Parameter(description = "Tag operator: AND (default) or OR") @RequestParam(required = false) String tagOp,
// @Max on page guards against overflow when pageable.getOffset() is computed
// as page * size — Integer.MAX_VALUE * 50 would wrap to a negative long, which
// Hibernate cheerfully turns into an invalid SQL OFFSET.
@Parameter(description = "Page number (0-indexed)") @RequestParam(defaultValue = "0") @Min(0) @Max(100_000) int page,
@Parameter(description = "Page size (max 100)") @RequestParam(defaultValue = "50") @Min(1) @Max(100) int size) {
if (!"ASC".equalsIgnoreCase(dir) && !"DESC".equalsIgnoreCase(dir)) { if (!"ASC".equalsIgnoreCase(dir) && !"DESC".equalsIgnoreCase(dir)) {
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "dir must be ASC or DESC"); throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "dir must be ASC or DESC");
} }
// tagOp is a raw String at the HTTP boundary; any value other than "OR" (case-insensitive) List<Document> results = documentService.searchDocuments(q, from, to, senderId, receiverId, tags, tagQ, status, sort, dir);
// defaults to AND, which matches the frontend default and keeps old clients working. return ResponseEntity.ok(DocumentSearchResult.of(results));
TagOperator operator = "OR".equalsIgnoreCase(tagOp) ? TagOperator.OR : TagOperator.AND;
Pageable pageable = PageRequest.of(page, size);
return ResponseEntity.ok(documentService.searchDocuments(q, from, to, senderId, receiverId, tags, tagQ, status, sort, dir, operator, pageable));
} }
// --- TRAINING LABELS --- // --- TRAINING LABELS ---
@@ -434,8 +258,4 @@ public class DocumentController {
Sort sort = Sort.by(Sort.Direction.fromString(dir.toUpperCase()), "documentDate"); Sort sort = Sort.by(Sort.Direction.fromString(dir.toUpperCase()), "documentDate");
return documentService.getConversationFiltered(senderId, receiverId, from, to, sort); return documentService.getConversationFiltered(senderId, receiverId, from, to, sort);
} }
/** Resolves the authenticated user's ID via SecurityUtils; presumably
 * throws when unauthenticated or unknown — confirm in SecurityUtils. */
private UUID requireUserId(Authentication authentication) {
return SecurityUtils.requireUserId(authentication, userService);
}
} }

View File

@@ -1,57 +0,0 @@
package org.raddatz.familienarchiv.controller;
import lombok.RequiredArgsConstructor;
import org.raddatz.familienarchiv.dto.CreateInviteRequest;
import org.raddatz.familienarchiv.dto.InviteListItemDTO;
import org.raddatz.familienarchiv.model.AppUser;
import org.raddatz.familienarchiv.security.Permission;
import org.raddatz.familienarchiv.security.RequirePermission;
import org.raddatz.familienarchiv.service.InviteService;
import org.raddatz.familienarchiv.service.UserService;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.core.annotation.AuthenticationPrincipal;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.UUID;
/**
 * Admin-only management of invite tokens: list active (or all) invites,
 * create a new invite on behalf of the current admin, and revoke one.
 * All endpoints require the ADMIN_USER permission.
 */
@RestController
@RequestMapping("/api/invites")
@RequiredArgsConstructor
public class InviteController {
private final InviteService inviteService;
private final UserService userService;
// Base URL used to render full invite links in the list/create responses.
@Value("${app.base-url:http://localhost:3000}")
private String appBaseUrl;
// status=all returns every invite; any other value (default "active") filters
// to active-only.
@GetMapping
@RequirePermission(Permission.ADMIN_USER)
public List<InviteListItemDTO> listInvites(
@RequestParam(value = "status", defaultValue = "active") String status) {
boolean activeOnly = !"all".equalsIgnoreCase(status);
return inviteService.listInvites(activeOnly, appBaseUrl);
}
// Creates an invite attributed to the authenticated admin (looked up by the
// principal's username, which this app treats as the email).
@PostMapping
@RequirePermission(Permission.ADMIN_USER)
public ResponseEntity<InviteListItemDTO> createInvite(
@RequestBody CreateInviteRequest request,
@AuthenticationPrincipal UserDetails principal) {
AppUser creator = userService.findByEmail(principal.getUsername());
InviteListItemDTO created = inviteService.toListItemDTO(
inviteService.createInvite(request, creator), appBaseUrl);
return ResponseEntity.status(HttpStatus.CREATED).body(created);
}
// Revokes (invalidates) an invite; responds 204 on success.
@DeleteMapping("/{id}")
@RequirePermission(Permission.ADMIN_USER)
public ResponseEntity<Void> revokeInvite(@PathVariable UUID id) {
inviteService.revokeInvite(id);
return ResponseEntity.noContent().build();
}
}

View File

@@ -100,6 +100,6 @@ public class NotificationController {
// ─── private helpers ────────────────────────────────────────────────────── // ─── private helpers ──────────────────────────────────────────────────────
private AppUser resolveUser(Authentication authentication) { private AppUser resolveUser(Authentication authentication) {
return userService.findByEmail(authentication.getName()); return userService.findByUsername(authentication.getName());
} }
} }

View File

@@ -4,10 +4,7 @@ import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.raddatz.familienarchiv.dto.BatchOcrDTO; import org.raddatz.familienarchiv.dto.BatchOcrDTO;
import org.raddatz.familienarchiv.dto.OcrStatusDTO; import org.raddatz.familienarchiv.dto.OcrStatusDTO;
import org.raddatz.familienarchiv.dto.TrainingHistoryResponse;
import org.raddatz.familienarchiv.dto.TrainingInfoResponse;
import org.raddatz.familienarchiv.dto.TriggerOcrDTO; import org.raddatz.familienarchiv.dto.TriggerOcrDTO;
import org.raddatz.familienarchiv.dto.TriggerSenderTrainingDTO;
import org.raddatz.familienarchiv.model.AppUser; import org.raddatz.familienarchiv.model.AppUser;
import org.raddatz.familienarchiv.model.OcrJob; import org.raddatz.familienarchiv.model.OcrJob;
import org.raddatz.familienarchiv.model.OcrTrainingRun; import org.raddatz.familienarchiv.model.OcrTrainingRun;
@@ -18,7 +15,6 @@ import org.raddatz.familienarchiv.service.OcrProgressService;
import org.raddatz.familienarchiv.service.OcrService; import org.raddatz.familienarchiv.service.OcrService;
import org.raddatz.familienarchiv.service.OcrTrainingService; import org.raddatz.familienarchiv.service.OcrTrainingService;
import org.raddatz.familienarchiv.service.SegmentationTrainingExportService; import org.raddatz.familienarchiv.service.SegmentationTrainingExportService;
import org.raddatz.familienarchiv.service.SenderModelService;
import org.raddatz.familienarchiv.service.TrainingDataExportService; import org.raddatz.familienarchiv.service.TrainingDataExportService;
import org.raddatz.familienarchiv.service.UserService; import org.raddatz.familienarchiv.service.UserService;
import org.springframework.http.HttpHeaders; import org.springframework.http.HttpHeaders;
@@ -46,7 +42,6 @@ public class OcrController {
private final TrainingDataExportService trainingDataExportService; private final TrainingDataExportService trainingDataExportService;
private final SegmentationTrainingExportService segmentationTrainingExportService; private final SegmentationTrainingExportService segmentationTrainingExportService;
private final OcrTrainingService ocrTrainingService; private final OcrTrainingService ocrTrainingService;
private final SenderModelService senderModelService;
@PostMapping("/api/documents/{documentId}/ocr") @PostMapping("/api/documents/{documentId}/ocr")
@ResponseStatus(HttpStatus.ACCEPTED) @ResponseStatus(HttpStatus.ACCEPTED)
@@ -135,33 +130,14 @@ public class OcrController {
@GetMapping("/api/ocr/training-info") @GetMapping("/api/ocr/training-info")
@RequirePermission(Permission.ADMIN) @RequirePermission(Permission.ADMIN)
public TrainingInfoResponse getTrainingInfo() { public OcrTrainingService.TrainingInfoResponse getTrainingInfo() {
return ocrTrainingService.getTrainingInfo(); return ocrTrainingService.getTrainingInfo();
} }
@GetMapping("/api/ocr/training-info/global")
@RequirePermission(Permission.ADMIN)
public TrainingHistoryResponse getGlobalTrainingHistory() {
return ocrTrainingService.getGlobalTrainingHistory();
}
@GetMapping("/api/ocr/training-info/{personId}")
@RequirePermission(Permission.ADMIN)
public TrainingHistoryResponse getSenderTrainingHistory(@PathVariable UUID personId) {
return ocrTrainingService.getSenderTrainingHistory(personId);
}
@PostMapping("/api/ocr/train-sender")
@ResponseStatus(HttpStatus.ACCEPTED)
@RequirePermission(Permission.ADMIN)
public OcrTrainingRun triggerSenderTraining(@Valid @RequestBody TriggerSenderTrainingDTO dto) {
return senderModelService.triggerManualSenderTraining(dto.personId());
}
private UUID resolveUserId(Authentication authentication) { private UUID resolveUserId(Authentication authentication) {
if (authentication == null || !authentication.isAuthenticated()) return null; if (authentication == null || !authentication.isAuthenticated()) return null;
try { try {
AppUser user = userService.findByEmail(authentication.getName()); AppUser user = userService.findByUsername(authentication.getName());
return user != null ? user.getId() : null; return user != null ? user.getId() : null;
} catch (Exception e) { } catch (Exception e) {
log.warn("Failed to resolve user ID for authentication: {}", authentication.getName(), e); log.warn("Failed to resolve user ID for authentication: {}", authentication.getName(), e);

View File

@@ -63,33 +63,27 @@ public class PersonController {
@PostMapping @PostMapping
@RequirePermission(Permission.WRITE_ALL) @RequirePermission(Permission.WRITE_ALL)
public ResponseEntity<Person> createPerson(@Valid @RequestBody PersonUpdateDTO dto) { public ResponseEntity<Person> createPerson(@Valid @RequestBody PersonUpdateDTO dto) {
validatePersonNames(dto); if (dto.getFirstName() == null || dto.getFirstName().isBlank()
if (dto.getFirstName() != null) dto.setFirstName(dto.getFirstName().trim()); || dto.getLastName() == null || dto.getLastName().isBlank()) {
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "Vor- und Nachname sind Pflichtfelder");
}
dto.setFirstName(dto.getFirstName().trim());
dto.setLastName(dto.getLastName().trim()); dto.setLastName(dto.getLastName().trim());
if (dto.getTitle() != null) dto.setTitle(dto.getTitle().trim());
return ResponseEntity.ok(personService.createPerson(dto)); return ResponseEntity.ok(personService.createPerson(dto));
} }
@PutMapping("/{id}") @PutMapping("/{id}")
@RequirePermission(Permission.WRITE_ALL) @RequirePermission(Permission.WRITE_ALL)
public ResponseEntity<Person> updatePerson(@PathVariable UUID id, @Valid @RequestBody PersonUpdateDTO dto) { public ResponseEntity<Person> updatePerson(@PathVariable UUID id, @Valid @RequestBody PersonUpdateDTO dto) {
validatePersonNames(dto); if (dto.getFirstName() == null || dto.getFirstName().isBlank()
if (dto.getFirstName() != null) dto.setFirstName(dto.getFirstName().trim()); || dto.getLastName() == null || dto.getLastName().isBlank()) {
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "Vor- und Nachname sind Pflichtfelder");
}
dto.setFirstName(dto.getFirstName().trim());
dto.setLastName(dto.getLastName().trim()); dto.setLastName(dto.getLastName().trim());
if (dto.getTitle() != null) dto.setTitle(dto.getTitle().trim());
return ResponseEntity.ok(personService.updatePerson(id, dto)); return ResponseEntity.ok(personService.updatePerson(id, dto));
} }
private void validatePersonNames(PersonUpdateDTO dto) {
if (dto.getLastName() == null || dto.getLastName().isBlank()) {
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "Nachname ist Pflichtfeld");
}
if (dto.getPersonType() == org.raddatz.familienarchiv.model.PersonType.PERSON
&& (dto.getFirstName() == null || dto.getFirstName().isBlank())) {
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "Vorname ist Pflichtfeld");
}
}
@PostMapping("/{id}/merge") @PostMapping("/{id}/merge")
@ResponseStatus(HttpStatus.NO_CONTENT) @ResponseStatus(HttpStatus.NO_CONTENT)
@RequirePermission(Permission.WRITE_ALL) @RequirePermission(Permission.WRITE_ALL)

View File

@@ -1,29 +1,23 @@
package org.raddatz.familienarchiv.controller; package org.raddatz.familienarchiv.controller;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.UUID; import java.util.UUID;
import org.raddatz.familienarchiv.dto.MergeTagDTO;
import org.raddatz.familienarchiv.dto.TagTreeNodeDTO;
import org.raddatz.familienarchiv.dto.TagUpdateDTO;
import org.raddatz.familienarchiv.model.Tag; import org.raddatz.familienarchiv.model.Tag;
import org.raddatz.familienarchiv.security.Permission; import org.raddatz.familienarchiv.security.Permission;
import org.raddatz.familienarchiv.security.RequirePermission; import org.raddatz.familienarchiv.security.RequirePermission;
import org.raddatz.familienarchiv.service.DocumentService; import org.raddatz.familienarchiv.service.DocumentService;
import org.raddatz.familienarchiv.service.TagService; import org.raddatz.familienarchiv.service.TagService;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity; import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping; import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController; import org.springframework.web.bind.annotation.RestController;
import jakarta.validation.Valid;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
@@ -37,8 +31,8 @@ public class TagController {
@PutMapping("/{id}") @PutMapping("/{id}")
@RequirePermission(Permission.ADMIN_TAG) @RequirePermission(Permission.ADMIN_TAG)
public ResponseEntity<Tag> updateTag(@PathVariable UUID id, @RequestBody TagUpdateDTO dto) { public ResponseEntity<Tag> updateTag(@PathVariable UUID id, @RequestBody Map<String, String> payload) {
return ResponseEntity.ok(tagService.update(id, dto)); return ResponseEntity.ok(tagService.update(id, payload.get("name")));
} }
@DeleteMapping("/{id}") @DeleteMapping("/{id}")
@@ -52,22 +46,4 @@ public class TagController {
public List<Tag> searchTags(@RequestParam(defaultValue = "") String query) { public List<Tag> searchTags(@RequestParam(defaultValue = "") String query) {
return tagService.search(query); return tagService.search(query);
} }
@GetMapping("/tree")
public List<TagTreeNodeDTO> getTagTree() {
return tagService.getTagTree();
}
@PostMapping("/{id}/merge")
@RequirePermission(Permission.ADMIN_TAG)
public ResponseEntity<Tag> mergeTag(@PathVariable UUID id, @Valid @RequestBody MergeTagDTO dto) {
return ResponseEntity.ok(tagService.mergeTags(id, dto.targetId()));
}
@DeleteMapping("/{id}/subtree")
@ResponseStatus(HttpStatus.NO_CONTENT)
@RequirePermission(Permission.ADMIN_TAG)
public void deleteSubtree(@PathVariable UUID id) {
tagService.deleteWithDescendants(id);
}
} }

View File

@@ -5,11 +5,12 @@ import lombok.extern.slf4j.Slf4j;
import org.raddatz.familienarchiv.dto.CreateTranscriptionBlockDTO; import org.raddatz.familienarchiv.dto.CreateTranscriptionBlockDTO;
import org.raddatz.familienarchiv.dto.ReorderTranscriptionBlocksDTO; import org.raddatz.familienarchiv.dto.ReorderTranscriptionBlocksDTO;
import org.raddatz.familienarchiv.dto.UpdateTranscriptionBlockDTO; import org.raddatz.familienarchiv.dto.UpdateTranscriptionBlockDTO;
import org.raddatz.familienarchiv.exception.DomainException;
import org.raddatz.familienarchiv.model.AppUser;
import org.raddatz.familienarchiv.model.TranscriptionBlock; import org.raddatz.familienarchiv.model.TranscriptionBlock;
import org.raddatz.familienarchiv.model.TranscriptionBlockVersion; import org.raddatz.familienarchiv.model.TranscriptionBlockVersion;
import org.raddatz.familienarchiv.security.Permission; import org.raddatz.familienarchiv.security.Permission;
import org.raddatz.familienarchiv.security.RequirePermission; import org.raddatz.familienarchiv.security.RequirePermission;
import org.raddatz.familienarchiv.security.SecurityUtils;
import org.raddatz.familienarchiv.service.TranscriptionService; import org.raddatz.familienarchiv.service.TranscriptionService;
import org.raddatz.familienarchiv.service.UserService; import org.raddatz.familienarchiv.service.UserService;
import org.springframework.http.HttpStatus; import org.springframework.http.HttpStatus;
@@ -84,19 +85,8 @@ public class TranscriptionBlockController {
@RequirePermission(Permission.WRITE_ALL) @RequirePermission(Permission.WRITE_ALL)
public TranscriptionBlock reviewBlock( public TranscriptionBlock reviewBlock(
@PathVariable UUID documentId, @PathVariable UUID documentId,
@PathVariable UUID blockId, @PathVariable UUID blockId) {
Authentication authentication) { return transcriptionService.reviewBlock(documentId, blockId);
UUID userId = requireUserId(authentication);
return transcriptionService.reviewBlock(documentId, blockId, userId);
}
@PutMapping("/review-all")
@RequirePermission(Permission.WRITE_ALL)
public List<TranscriptionBlock> markAllBlocksReviewed(
@PathVariable UUID documentId,
Authentication authentication) {
UUID userId = requireUserId(authentication);
return transcriptionService.markAllBlocksReviewed(documentId, userId);
} }
@GetMapping("/{blockId}/history") @GetMapping("/{blockId}/history")
@@ -108,6 +98,13 @@ public class TranscriptionBlockController {
} }
private UUID requireUserId(Authentication authentication) { private UUID requireUserId(Authentication authentication) {
return SecurityUtils.requireUserId(authentication, userService); if (authentication == null || !authentication.isAuthenticated()) {
throw DomainException.unauthorized("Authentication required");
}
AppUser user = userService.findByUsername(authentication.getName());
if (user == null) {
throw DomainException.unauthorized("User not found");
}
return user.getId();
} }
} }

View File

@@ -1,47 +0,0 @@
package org.raddatz.familienarchiv.controller;
import lombok.RequiredArgsConstructor;
import org.raddatz.familienarchiv.dto.TranscriptionQueueItemDTO;
import org.raddatz.familienarchiv.dto.TranscriptionWeeklyStatsDTO;
import org.raddatz.familienarchiv.security.Permission;
import org.raddatz.familienarchiv.security.RequirePermission;
import org.raddatz.familienarchiv.service.TranscriptionQueueService;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
 * Serves the three Mission Control Strip columns for the dashboard
 * (segmentation, transcription, ready-to-read) plus the weekly stats
 * shown alongside them.
 *
 * <p>Guarded class-wide by {@code READ_ALL} — the same guard as the
 * rest of the archive.
 */
@RestController
@RequestMapping("/api/transcription")
@RequiredArgsConstructor
@RequirePermission(Permission.READ_ALL)
public class TranscriptionQueueController {

    private final TranscriptionQueueService transcriptionQueueService;

    /** Column 1: documents queued for segmentation. */
    @GetMapping("/segmentation-queue")
    public ResponseEntity<List<TranscriptionQueueItemDTO>> getSegmentationQueue() {
        List<TranscriptionQueueItemDTO> items = transcriptionQueueService.getSegmentationQueue();
        return ResponseEntity.ok(items);
    }

    /** Column 2: documents queued for transcription. */
    @GetMapping("/transcription-queue")
    public ResponseEntity<List<TranscriptionQueueItemDTO>> getTranscriptionQueue() {
        List<TranscriptionQueueItemDTO> items = transcriptionQueueService.getTranscriptionQueue();
        return ResponseEntity.ok(items);
    }

    /** Column 3: documents that are ready to read. */
    @GetMapping("/ready-to-read")
    public ResponseEntity<List<TranscriptionQueueItemDTO>> getReadyToRead() {
        List<TranscriptionQueueItemDTO> items = transcriptionQueueService.getReadyToReadQueue();
        return ResponseEntity.ok(items);
    }

    /** Aggregate weekly stats displayed with the strip. */
    @GetMapping("/weekly-stats")
    public ResponseEntity<TranscriptionWeeklyStatsDTO> getWeeklyStats() {
        TranscriptionWeeklyStatsDTO stats = transcriptionQueueService.getWeeklyStats();
        return ResponseEntity.ok(stats);
    }
}

View File

@@ -4,7 +4,6 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.UUID; import java.util.UUID;
import jakarta.validation.Valid;
import org.raddatz.familienarchiv.dto.AdminUpdateUserRequest; import org.raddatz.familienarchiv.dto.AdminUpdateUserRequest;
import org.raddatz.familienarchiv.dto.ChangePasswordDTO; import org.raddatz.familienarchiv.dto.ChangePasswordDTO;
import org.raddatz.familienarchiv.dto.CreateUserRequest; import org.raddatz.familienarchiv.dto.CreateUserRequest;
@@ -39,7 +38,7 @@ public class UserController {
if (authentication == null || !authentication.isAuthenticated()) { if (authentication == null || !authentication.isAuthenticated()) {
return ResponseEntity.status(HttpStatus.UNAUTHORIZED).build(); return ResponseEntity.status(HttpStatus.UNAUTHORIZED).build();
} }
AppUser user = userService.findByEmail(authentication.getName()); AppUser user = userService.findByUsername(authentication.getName());
user.setPassword(null); user.setPassword(null);
return ResponseEntity.ok(user); return ResponseEntity.ok(user);
} }
@@ -47,7 +46,7 @@ public class UserController {
@PutMapping("users/me") @PutMapping("users/me")
public ResponseEntity<AppUser> updateProfile(Authentication authentication, public ResponseEntity<AppUser> updateProfile(Authentication authentication,
@RequestBody UpdateProfileDTO dto) { @RequestBody UpdateProfileDTO dto) {
AppUser current = userService.findByEmail(authentication.getName()); AppUser current = userService.findByUsername(authentication.getName());
AppUser updated = userService.updateProfile(current.getId(), dto); AppUser updated = userService.updateProfile(current.getId(), dto);
updated.setPassword(null); updated.setPassword(null);
return ResponseEntity.ok(updated); return ResponseEntity.ok(updated);
@@ -57,7 +56,7 @@ public class UserController {
@ResponseStatus(HttpStatus.NO_CONTENT) @ResponseStatus(HttpStatus.NO_CONTENT)
public void changePassword(Authentication authentication, public void changePassword(Authentication authentication,
@RequestBody ChangePasswordDTO dto) { @RequestBody ChangePasswordDTO dto) {
AppUser current = userService.findByEmail(authentication.getName()); AppUser current = userService.findByUsername(authentication.getName());
userService.changePassword(current.getId(), dto); userService.changePassword(current.getId(), dto);
} }
@@ -78,31 +77,24 @@ public class UserController {
@PostMapping("/users") @PostMapping("/users")
@RequirePermission(Permission.ADMIN_USER) @RequirePermission(Permission.ADMIN_USER)
public ResponseEntity<AppUser> createUser(Authentication authentication, public ResponseEntity<AppUser> createUser(@RequestBody CreateUserRequest request) {
@Valid @RequestBody CreateUserRequest request) { return ResponseEntity.ok(userService.createUserOrUpdate(request));
return ResponseEntity.ok(userService.createUserOrUpdate(actorId(authentication), request));
} }
@PutMapping("/users/{id}") @PutMapping("/users/{id}")
@RequirePermission(Permission.ADMIN_USER) @RequirePermission(Permission.ADMIN_USER)
public ResponseEntity<AppUser> adminUpdateUser(Authentication authentication, public ResponseEntity<AppUser> adminUpdateUser(@PathVariable UUID id,
@PathVariable UUID id,
@RequestBody AdminUpdateUserRequest dto) { @RequestBody AdminUpdateUserRequest dto) {
AppUser updated = userService.adminUpdateUser(actorId(authentication), id, dto); AppUser updated = userService.adminUpdateUser(id, dto);
updated.setPassword(null); updated.setPassword(null);
return ResponseEntity.ok(updated); return ResponseEntity.ok(updated);
} }
@DeleteMapping("/users/{id}") @DeleteMapping("/users/{id}")
@RequirePermission(Permission.ADMIN_USER) @RequirePermission(Permission.ADMIN_USER)
public ResponseEntity<Void> deleteUser(Authentication authentication, public ResponseEntity<Void> deleteUser(@PathVariable UUID id) {
@PathVariable UUID id) { userService.deleteUser(id);
userService.deleteUser(actorId(authentication), id);
return ResponseEntity.ok().build(); return ResponseEntity.ok().build();
} }
private UUID actorId(Authentication auth) {
return userService.findByEmail(auth.getName()).getId();
}
} }

View File

@@ -1,33 +0,0 @@
package org.raddatz.familienarchiv.dashboard;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.annotation.Nullable;
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
import org.raddatz.familienarchiv.audit.AuditKind;
import java.time.OffsetDateTime;
import java.util.UUID;
/**
 * One entry of the dashboard activity feed.
 *
 * <p>An entry may represent a single audit event or a rollup of several similar
 * events; {@code count} carries the number of underlying events and
 * {@code happenedAtUntil} the end of the rollup window (null for single events).
 * NOTE(review): rollup semantics inferred from DashboardService — confirm there.
 */
public record ActivityFeedItemDTO(
        // Audit event kind this entry was derived from.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) AuditKind kind,
        // Acting user; null when no actor is associated (e.g. system events).
        @Nullable ActivityActorDTO actor,
        // Document the activity happened on.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID documentId,
        // Title of that document; empty when the title could not be resolved.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) String documentTitle,
        // Timestamp of the (first) event, at UTC offset.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) OffsetDateTime happenedAt,
        // Whether the requesting user was mentioned.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) boolean youMentioned,
        // Whether the requesting user participated.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) boolean youParticipated,
        // Number of underlying events represented by this entry.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) int count,
        // End of the rollup window; null for non-rolled-up entries.
        @Nullable OffsetDateTime happenedAtUntil,
        @Nullable
        @Schema(
                requiredMode = Schema.RequiredMode.NOT_REQUIRED,
                description = "Deep-link target comment; populated only for COMMENT_ADDED and MENTION_CREATED kinds."
        )
        UUID commentId,
        @Nullable
        @Schema(
                requiredMode = Schema.RequiredMode.NOT_REQUIRED,
                description = "Annotation associated with the comment; populated only for COMMENT_ADDED and MENTION_CREATED kinds."
        )
        UUID annotationId
) {}

View File

@@ -1,51 +0,0 @@
package org.raddatz.familienarchiv.dashboard;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.ArraySchema;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.RequiredArgsConstructor;
import org.raddatz.familienarchiv.audit.AuditKind;
import org.raddatz.familienarchiv.security.Permission;
import org.raddatz.familienarchiv.security.RequirePermission;
import org.raddatz.familienarchiv.security.SecurityUtils;
import org.raddatz.familienarchiv.service.UserService;
import org.springframework.security.core.Authentication;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.Set;
import java.util.UUID;
/**
 * Dashboard endpoints: the "resume where you left off" card, the weekly
 * pulse stats, and the recent-activity feed. All routes require {@code READ_ALL}.
 */
@RestController
@RequestMapping("/api/dashboard")
@RequirePermission(Permission.READ_ALL)
@RequiredArgsConstructor
public class DashboardController {

    /** Hard upper bound on the activity feed page size. */
    private static final int MAX_ACTIVITY_LIMIT = 40;

    private final DashboardService dashboardService;
    private final UserService userService;

    /** Returns the resume card for the authenticated user. */
    @GetMapping("/resume")
    public DashboardResumeDTO getResume(Authentication authentication) {
        return dashboardService.getResume(currentUserId(authentication));
    }

    /** Returns this week's pulse stats for the authenticated user. */
    @GetMapping("/pulse")
    public DashboardPulseDTO getPulse(Authentication authentication) {
        return dashboardService.getPulse(currentUserId(authentication));
    }

    /**
     * Returns the recent-activity feed.
     *
     * @param limit requested page size; clamped to {@value #MAX_ACTIVITY_LIMIT}
     * @param kinds optional kind filter; absent/empty falls back to all rollup-eligible kinds
     */
    @GetMapping("/activity")
    public List<ActivityFeedItemDTO> getActivity(
            Authentication authentication,
            @RequestParam(defaultValue = "7") int limit,
            @Parameter(description = "Filter by audit kinds; omit for all rollup-eligible kinds",
                    array = @ArraySchema(schema = @Schema(implementation = AuditKind.class)))
            @RequestParam(required = false) Set<AuditKind> kinds) {
        UUID userId = currentUserId(authentication);
        Set<AuditKind> effectiveKinds =
                (kinds == null || kinds.isEmpty()) ? AuditKind.ROLLUP_ELIGIBLE : kinds;
        int cappedLimit = Math.min(limit, MAX_ACTIVITY_LIMIT);
        return dashboardService.getActivity(userId, cappedLimit, effectiveKinds);
    }

    /** Resolves the authenticated user's id, failing the request when unauthenticated. */
    private UUID currentUserId(Authentication authentication) {
        return SecurityUtils.requireUserId(authentication, userService);
    }
}

View File

@@ -1,15 +0,0 @@
package org.raddatz.familienarchiv.dashboard;
import io.swagger.v3.oas.annotations.media.Schema;
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
import java.util.List;
/**
 * Weekly contribution stats for the dashboard "pulse" widget. Counters are
 * computed from the audit log for the current week (UTC, Monday-anchored —
 * see DashboardService#getPulse).
 */
public record DashboardPulseDTO(
        // Page count this week across all users.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) int pages,
        // Annotation events this week. NOTE(review): exact event mapping lives in AuditLogQueryService.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) int annotated,
        // Transcription events this week.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) int transcribed,
        // Upload events this week.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) int uploaded,
        // The requesting user's own page count.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) int yourPages,
        // Distinct recent contributors (at most six), for the avatar row.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) List<ActivityActorDTO> contributors
) {}

View File

@@ -1,19 +0,0 @@
package org.raddatz.familienarchiv.dashboard;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.annotation.Nullable;
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
import java.util.List;
import java.util.UUID;
/**
 * Payload for the dashboard "resume where you left off" card: the most
 * recently touched document plus progress and collaborator info.
 */
public record DashboardResumeDTO(
        // Document to resume.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID documentId,
        // Document title.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) String title,
        // Summary line ("Sender an Empfänger · Datum" style); may be empty.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) String caption,
        // Truncated text of the first non-blank transcription block; may be empty.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) String excerpt,
        // Total number of transcription blocks in the document.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) int totalBlocks,
        // Review progress in percent (reviewed blocks / total blocks).
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) int pct,
        // Thumbnail URL; null when the document has none.
        @Nullable String thumbnailUrl,
        // Up to five distinct block editors, for the avatar row.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) List<ActivityActorDTO> collaborators
) {}

View File

@@ -1,203 +0,0 @@
package org.raddatz.familienarchiv.dashboard;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
import org.raddatz.familienarchiv.audit.ActivityFeedRow;
import org.raddatz.familienarchiv.audit.AuditKind;
import org.raddatz.familienarchiv.audit.AuditLogQueryService;
import org.raddatz.familienarchiv.audit.PulseStatsRow;
import org.raddatz.familienarchiv.model.AppUser;
import org.raddatz.familienarchiv.model.Document;
import org.raddatz.familienarchiv.model.Person;
import org.raddatz.familienarchiv.model.TranscriptionBlock;
import org.raddatz.familienarchiv.service.CommentService;
import org.raddatz.familienarchiv.service.DocumentService;
import org.raddatz.familienarchiv.service.TranscriptionService;
import org.raddatz.familienarchiv.service.UserService;
import org.springframework.stereotype.Service;
import java.time.DayOfWeek;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.temporal.TemporalAdjusters;
import java.util.*;
import java.util.stream.Stream;
import java.util.stream.Collectors;
/**
 * Aggregates the data shown on the dashboard: the "resume" card, the weekly
 * pulse widget and the activity feed. Read-only; all persistence access is
 * delegated to the audit-log query service and the domain services.
 */
@Service
@RequiredArgsConstructor
@Slf4j
public class DashboardService {

    /** Maximum excerpt length shown on the resume card. */
    private static final int EXCERPT_MAX_LENGTH = 200;
    /** Maximum number of collaborator avatars on the resume card. */
    private static final int MAX_COLLABORATORS = 5;
    /** Maximum number of contributor avatars in the pulse widget. */
    private static final int MAX_CONTRIBUTORS = 6;
    /** Number of recent feed rows scanned when collecting pulse contributors. */
    private static final int PULSE_FEED_SCAN = 50;

    private final AuditLogQueryService auditLogQueryService;
    private final DocumentService documentService;
    private final TranscriptionService transcriptionService;
    private final UserService userService;
    private final CommentService commentService;

    /**
     * Builds the "resume where you left off" card for the given user.
     *
     * @return the resume DTO, or {@code null} when the user has no recent
     *         document or the referenced document can no longer be loaded
     */
    public DashboardResumeDTO getResume(UUID userId) {
        Optional<UUID> docIdOpt = auditLogQueryService.findMostRecentDocumentForUser(userId);
        if (docIdOpt.isEmpty()) return null;
        UUID docId = docIdOpt.get();

        Document doc;
        try {
            doc = documentService.getDocumentById(docId);
        } catch (Exception e) {
            // The audit log may reference a document that was deleted meanwhile.
            log.warn("Resume: document {} not found for user {}", docId, userId);
            return null;
        }

        List<TranscriptionBlock> blocks = transcriptionService.listBlocks(docId);

        // First non-blank block (by sort order), truncated for display.
        // Fix: the original appended an empty string after truncation ("+ \"\"") —
        // a dead concatenation; the truncation marker had evidently been lost.
        String excerpt = blocks.stream()
                .filter(b -> b.getText() != null && !b.getText().isBlank())
                .min(Comparator.comparingInt(TranscriptionBlock::getSortOrder))
                .map(b -> b.getText().length() > EXCERPT_MAX_LENGTH
                        ? b.getText().substring(0, EXCERPT_MAX_LENGTH) + "…"
                        : b.getText())
                .orElse("");

        int totalBlocks = blocks.size();
        long reviewedBlocks = blocks.stream().filter(TranscriptionBlock::isReviewed).count();
        int pct = totalBlocks > 0 ? (int) (reviewedBlocks * 100L / totalBlocks) : 0;

        // Up to MAX_COLLABORATORS distinct block editors; ids that no longer
        // resolve to a user are skipped (limit applies before lookup, as before).
        List<ActivityActorDTO> collaborators = blocks.stream()
                .map(TranscriptionBlock::getUpdatedBy)
                .filter(Objects::nonNull)
                .distinct()
                .limit(MAX_COLLABORATORS)
                .map(this::lookupActor)
                .filter(Objects::nonNull)
                .toList();

        return new DashboardResumeDTO(docId, doc.getTitle(), buildCaption(doc), excerpt,
                totalBlocks, pct, doc.getThumbnailUrl(), collaborators);
    }

    /**
     * Computes the weekly pulse stats for the given user. The week starts on
     * Monday 00:00 UTC.
     */
    public DashboardPulseDTO getPulse(UUID userId) {
        OffsetDateTime weekStart = OffsetDateTime.now(ZoneOffset.UTC)
                .with(TemporalAdjusters.previousOrSame(DayOfWeek.MONDAY))
                .withHour(0).withMinute(0).withSecond(0).withNano(0);
        PulseStatsRow stats = auditLogQueryService.getPulseStats(weekStart, userId);

        // Distinct contributor avatars, sampled from the most recent feed rows.
        List<ActivityFeedRow> feed = auditLogQueryService.findActivityFeed(userId, PULSE_FEED_SCAN);
        List<ActivityActorDTO> contributors = feed.stream()
                .filter(r -> r.getActorId() != null)
                .map(r -> new ActivityActorDTO(r.getActorInitials(), r.getActorColor(), r.getActorName()))
                .filter(a -> !a.initials().isBlank())
                .distinct()
                .limit(MAX_CONTRIBUTORS)
                .toList();

        return new DashboardPulseDTO(
                (int) stats.getPages(),
                (int) stats.getAnnotated(),
                (int) stats.getTranscribed(),
                (int) stats.getUploaded(),
                (int) stats.getYourPages(),
                contributors
        );
    }

    /**
     * Loads the activity feed for a user, enriching each row with the document
     * title and — for comment events — the owning annotation id.
     *
     * @param currentUserId user whose perspective (mentions/participation) applies
     * @param limit         maximum number of feed rows
     * @param kinds         audit kinds to include
     */
    public List<ActivityFeedItemDTO> getActivity(UUID currentUserId, int limit, Set<AuditKind> kinds) {
        List<ActivityFeedRow> rows = auditLogQueryService.findActivityFeed(currentUserId, limit, kinds);

        // Bulk-load document titles; titles are cosmetic, so a failure degrades
        // to empty strings instead of failing the whole feed.
        List<UUID> docIds = rows.stream()
                .map(ActivityFeedRow::getDocumentId)
                .filter(Objects::nonNull)
                .distinct()
                .toList();
        Map<UUID, String> titleCache = new HashMap<>();
        try {
            documentService.getDocumentsByIds(docIds)
                    .forEach(d -> titleCache.put(d.getId(), d.getTitle()));
        } catch (Exception e) {
            log.warn("Activity: failed to bulk-load document titles", e);
        }

        // Resolve comment -> annotation links in one batch for deep-linking.
        List<UUID> commentIds = rows.stream()
                .map(ActivityFeedRow::getCommentId)
                .filter(Objects::nonNull)
                .distinct()
                .toList();
        Map<UUID, UUID> annotationByComment = commentIds.isEmpty()
                ? Map.of()
                : commentService.findAnnotationIdsByIds(commentIds);

        return rows.stream().map(row -> {
            ActivityActorDTO actor = row.getActorId() != null
                    ? new ActivityActorDTO(row.getActorInitials(), row.getActorColor(), row.getActorName())
                    : null;
            String docTitle = titleCache.getOrDefault(row.getDocumentId(), "");
            OffsetDateTime happenedAtUntil = row.getHappenedAtUntil() != null
                    ? row.getHappenedAtUntil().atOffset(ZoneOffset.UTC)
                    : null;
            UUID commentId = row.getCommentId();
            UUID annotationId = commentId != null ? annotationByComment.get(commentId) : null;
            return new ActivityFeedItemDTO(
                    AuditKind.valueOf(row.getKind()), // AuditKind is imported; FQN was redundant
                    actor,
                    row.getDocumentId(),
                    docTitle,
                    row.getHappenedAt().atOffset(ZoneOffset.UTC),
                    row.isYouMentioned(),
                    row.isYouParticipated(),
                    row.getCount(),
                    happenedAtUntil,
                    commentId,
                    annotationId
            );
        }).toList();
    }

    /** Resolves a user id to an actor DTO, or {@code null} when the user cannot be loaded. */
    private ActivityActorDTO lookupActor(UUID userId) {
        try {
            return toActorDTO(userService.getById(userId));
        } catch (Exception e) {
            log.debug("Could not resolve collaborator {}", userId, e);
            return null;
        }
    }

    /** Builds the caption line ("Sender an Empfänger · Datum") from available metadata. */
    private String buildCaption(Document doc) {
        StringBuilder sb = new StringBuilder();
        if (doc.getSender() != null) sb.append(personName(doc.getSender()));
        if (!doc.getReceivers().isEmpty()) {
            String receivers = doc.getReceivers().stream()
                    .map(this::personName).collect(Collectors.joining(", "));
            if (!sb.isEmpty()) sb.append(" an ");
            sb.append(receivers);
        }
        if (doc.getDocumentDate() != null) {
            if (!sb.isEmpty()) sb.append(" · ");
            sb.append(doc.getDocumentDate());
        }
        return sb.toString();
    }

    /** Full name of a person, degrading gracefully when name parts are missing. */
    private String personName(Person p) {
        if (p == null) return "";
        if (p.getFirstName() != null && p.getLastName() != null) return p.getFirstName() + " " + p.getLastName();
        if (p.getFirstName() != null) return p.getFirstName();
        if (p.getLastName() != null) return p.getLastName();
        return "";
    }

    /** Maps a user to avatar data: uppercase initials, falling back to the email's first letter. */
    private ActivityActorDTO toActorDTO(AppUser u) {
        String initials = "";
        if (u.getFirstName() != null && !u.getFirstName().isBlank())
            initials += u.getFirstName().charAt(0);
        if (u.getLastName() != null && !u.getLastName().isBlank())
            initials += u.getLastName().charAt(0);
        if (initials.isBlank() && u.getEmail() != null)
            initials = u.getEmail().substring(0, 1).toUpperCase();
        String fullName = Stream.of(u.getFirstName(), u.getLastName())
                .filter(Objects::nonNull)
                .collect(Collectors.joining(" "));
        return new ActivityActorDTO(initials.toUpperCase(), u.getColor(), fullName);
    }
}

View File

@@ -1,9 +0,0 @@
package org.raddatz.familienarchiv.dto;
import java.util.List;
import java.util.UUID;
import io.swagger.v3.oas.annotations.media.Schema;
/** Request body carrying the document ids a batch metadata operation targets. */
public record BatchMetadataRequest(
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) List<UUID> ids) {}

View File

@@ -1,9 +0,0 @@
package org.raddatz.familienarchiv.dto;
import java.util.UUID;
import io.swagger.v3.oas.annotations.media.Schema;
/** A single per-document failure from a bulk edit: the offending id plus a human-readable message. */
public record BulkEditError(
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID id,
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) String message) {}

View File

@@ -1,9 +0,0 @@
package org.raddatz.familienarchiv.dto;
import java.util.List;
import io.swagger.v3.oas.annotations.media.Schema;
/**
 * Outcome of a bulk edit: how many documents were updated plus the
 * per-document errors for those that were not.
 */
public record BulkEditResult(
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) int updated,
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) List<BulkEditError> errors) {}

View File

@@ -1,18 +0,0 @@
package org.raddatz.familienarchiv.dto;
import lombok.Data;
import java.time.LocalDateTime;
import java.util.List;
import java.util.UUID;
/**
 * Admin request to create a registration invite. All fields are optional;
 * null means "no limit" / "no prefill" for the respective setting.
 */
@Data
public class CreateInviteRequest {
// Free-text label shown in the admin invite list.
private String label;
// Maximum number of registrations; null = unlimited.
private Integer maxUses;
// Values pre-filled into the registration form for the invitee.
private String prefillFirstName;
private String prefillLastName;
private String prefillEmail;
// Groups the registered user will be assigned to.
private List<UUID> groupIds;
// Expiry timestamp; null = never expires.
private LocalDateTime expiresAt;
}

View File

@@ -1,8 +1,6 @@
package org.raddatz.familienarchiv.dto; package org.raddatz.familienarchiv.dto;
import jakarta.validation.constraints.Email;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.Pattern;
import lombok.Data; import lombok.Data;
import java.time.LocalDate; import java.time.LocalDate;
@@ -11,9 +9,7 @@ import java.util.UUID;
@Data @Data
public class CreateUserRequest { public class CreateUserRequest {
@NotBlank private String username;
@Email
@Pattern(regexp = "^[^:]+$", message = "Email must not contain a colon")
private String email; private String email;
private String initialPassword; private String initialPassword;
private List<UUID> groupIds; private List<UUID> groupIds;

View File

@@ -1,18 +0,0 @@
package org.raddatz.familienarchiv.dto;
import lombok.Data;
import java.time.LocalDate;
import java.util.List;
import java.util.UUID;
/**
 * Metadata applied to a batch of documents in one request. Null fields are
 * presumably left unchanged — confirm against the handling service.
 */
@Data
public class DocumentBatchMetadataDTO {
// One title per document in the batch — TODO confirm ordering contract.
private List<String> titles;
private UUID senderId;
private List<UUID> receiverIds;
private LocalDate documentDate;
private String location;
private List<String> tagNames;
// Marks the documents' metadata as complete/incomplete.
private Boolean metadataComplete;
}

View File

@@ -1,10 +0,0 @@
package org.raddatz.familienarchiv.dto;
import java.util.UUID;
import io.swagger.v3.oas.annotations.media.Schema;
/**
 * Minimal per-document summary for batch views: ID, title, and the URL of
 * the document's PDF.
 */
public record DocumentBatchSummary(
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID id,
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) String title,
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) String pdfUrl) {}

View File

@@ -1,60 +0,0 @@
package org.raddatz.familienarchiv.dto;
import java.util.List;
import java.util.UUID;
import jakarta.validation.constraints.Size;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Request body for {@code PATCH /api/documents/bulk}. Field semantics:
 * <ul>
 *   <li>{@code tagNames} and {@code receiverIds} are <b>additive</b> —
 *       merged into each document's existing set, never replacing it.</li>
 *   <li>{@code senderId}, {@code documentLocation}, {@code archiveBox},
 *       {@code archiveFolder} are <b>replace-on-non-blank</b> — null/blank
 *       fields are skipped, anything else overwrites.</li>
 * </ul>
 *
 * <p>Kept as a Lombok {@code @Data} POJO (not a record) for symmetry with
 * the existing {@code DocumentUpdateDTO} and to keep test setup terse —
 * the per-feature DTOs introduced alongside this one ({@link BulkEditError},
 * {@link BulkEditResult}, {@link BatchMetadataRequest},
 * {@link DocumentBatchSummary}) <i>are</i> records because they have no
 * test-side mutation. Tracked in the cycle-1 review for follow-up.
 *
 * <p>Bean-validation caps below defend against payload-amplification: the
 * 1 MiB SvelteKit proxy cap allows ~26k UUIDs through to the backend, and
 * Jetty's default body limit is 8 MB. {@code @Size} guards catch malformed
 * clients without depending on those outer bounds.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class DocumentBulkEditDTO {
// No @Size cap here on purpose: the controller's BULK_EDIT_MAX_IDS check
// returns the typed BULK_EDIT_TOO_MANY_IDS error code, which the frontend
// maps to a localised "Maximal 500 …" message via Paraglide. A bean-
// validation @Size would short-circuit that with a generic VALIDATION_ERROR.
private List<UUID> documentIds;
// Additive (see class javadoc): merged into each document's tag set.
@Size(max = 200, message = "tagNames must not exceed 200 entries")
private List<@Size(max = 200, message = "tagName must not exceed 200 chars") String> tagNames;
// Replace-on-non-null: a UUID cannot be "blank", so null means "skip".
private UUID senderId;
// Additive (see class javadoc): merged into each document's receiver set.
@Size(max = 200, message = "receiverIds must not exceed 200 entries")
private List<UUID> receiverIds;
// Replace-on-non-blank fields (see class javadoc).
@Size(max = 255, message = "documentLocation must not exceed 255 chars")
private String documentLocation;
@Size(max = 255, message = "archiveBox must not exceed 255 chars")
private String archiveBox;
@Size(max = 255, message = "archiveFolder must not exceed 255 chars")
private String archiveFolder;
}

View File

@@ -1,18 +0,0 @@
package org.raddatz.familienarchiv.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
import org.raddatz.familienarchiv.model.Document;
import java.util.List;
/**
 * One row of a full-text search result: the matched document, its per-query
 * match signals, a metadata-completion percentage, and the contributing users.
 */
public record DocumentSearchItem(
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
        Document document,
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
        SearchMatchData matchData,
        // Percentage of metadata fields filled in — presumably 0–100; confirm
        // against the service computing it.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
        int completionPercentage,
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
        List<ActivityActorDTO> contributors
) {}

View File

@@ -1,38 +1,16 @@
package org.raddatz.familienarchiv.dto; package org.raddatz.familienarchiv.dto;
import io.swagger.v3.oas.annotations.media.Schema; import org.raddatz.familienarchiv.model.Document;
import org.springframework.data.domain.Pageable;
import java.util.List; import java.util.List;
public record DocumentSearchResult( public record DocumentSearchResult(List<Document> documents, long total) {
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
List<DocumentSearchItem> items,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
long totalElements,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
int pageNumber,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
int pageSize,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
int totalPages
) {
/** /**
* Single-page convenience factory used by empty-result shortcuts and by tests that * Creates a result where total equals the list size.
* don't care about paging. Treats the whole list as page 0 of itself. * No pagination yet — the full matched set is always returned.
* When pagination is added, total must come from a DB COUNT query, not list.size().
*/ */
public static DocumentSearchResult of(List<DocumentSearchItem> items) { public static DocumentSearchResult of(List<Document> documents) {
int size = items.size(); return new DocumentSearchResult(documents, documents.size());
return new DocumentSearchResult(items, size, 0, size, size == 0 ? 0 : 1);
}
/**
* Paged factory used by the service when it has a real Pageable + full match count
* (e.g. from Spring's Page<T> or from an in-memory sort-then-slice).
*/
public static DocumentSearchResult paged(List<DocumentSearchItem> slice, Pageable pageable, long totalElements) {
int pageSize = pageable.getPageSize();
int totalPages = pageSize == 0 ? 0 : (int) ((totalElements + pageSize - 1) / pageSize);
return new DocumentSearchResult(slice, totalElements, pageable.getPageNumber(), pageSize, totalPages);
} }
} }

View File

@@ -13,8 +13,6 @@ public class DocumentUpdateDTO {
private LocalDate documentDate; private LocalDate documentDate;
private String location; private String location;
private String documentLocation; private String documentLocation;
private String archiveBox;
private String archiveFolder;
private String transcription; private String transcription;
private String summary; private String summary;
private UUID senderId; private UUID senderId;

View File

@@ -2,11 +2,9 @@ package org.raddatz.familienarchiv.dto;
import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.media.Schema;
import java.time.LocalDateTime;
import java.util.UUID; import java.util.UUID;
public record IncompleteDocumentDTO( public record IncompleteDocumentDTO(
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID id, @Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID id,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String title, @Schema(requiredMode = Schema.RequiredMode.REQUIRED) String title
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) LocalDateTime uploadedAt
) {} ) {}

View File

@@ -1,35 +0,0 @@
package org.raddatz.familienarchiv.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.time.LocalDateTime;
import java.util.UUID;
/**
 * Admin-facing list row for a registration invite. Nullable fields mirror the
 * optional settings on the invite (label, use limit, expiry, shareable URL).
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class InviteListItemDTO {
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private UUID id;
// Raw invite code.
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private String code;
// Formatted variant of the code for display — TODO confirm formatting rule.
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private String displayCode;
private String label;
// Number of completed registrations via this invite.
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private int useCount;
// null = unlimited uses.
private Integer maxUses;
// null = never expires.
private LocalDateTime expiresAt;
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private boolean revoked;
// Derived status string for the UI — values set by the mapping service.
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private String status;
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private LocalDateTime createdAt;
// Full registration URL containing the code, if exposed to this caller.
private String shareableUrl;
}

View File

@@ -1,18 +0,0 @@
package org.raddatz.familienarchiv.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Prefill values an invite carries for the registration form
 * (first name, last name, email).
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class InvitePrefillDTO {
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private String firstName;
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private String lastName;
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private String email;
}

View File

@@ -1,14 +0,0 @@
package org.raddatz.familienarchiv.dto;
import io.swagger.v3.oas.annotations.media.Schema;
/**
 * Character-level offset of a highlighted term within a text field.
 * Offsets are Java {@code String} character positions (UTF-16 code units),
 * which are identical to JavaScript string positions — consistent end-to-end
 * for all German BMP characters (ä, ö, ü, ß, etc.).
 */
public record MatchOffset(
        // Index of the first highlighted character.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) int start,
        // Number of highlighted characters from {@code start}.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) int length
) {}

View File

@@ -1,6 +0,0 @@
package org.raddatz.familienarchiv.dto;
import jakarta.validation.constraints.NotNull;
import java.util.UUID;
/** Request body for merging one tag into another: the surviving target tag's ID. */
public record MergeTagDTO(@NotNull UUID targetId) {}

View File

@@ -1,14 +1,10 @@
package org.raddatz.familienarchiv.dto; package org.raddatz.familienarchiv.dto;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size; import jakarta.validation.constraints.Size;
import lombok.Data; import lombok.Data;
import org.raddatz.familienarchiv.model.PersonType;
@Data @Data
public class PersonUpdateDTO { public class PersonUpdateDTO {
@NotNull
private PersonType personType;
@Size(max = 50) @Size(max = 50)
private String title; private String title;
@Size(max = 100) @Size(max = 100)

View File

@@ -1,19 +0,0 @@
package org.raddatz.familienarchiv.dto;
import jakarta.validation.constraints.Email;
import jakarta.validation.constraints.NotBlank;
import lombok.Data;
/**
 * Self-registration request submitted with an invite code.
 * Code, email, and password are mandatory; the name fields are optional.
 */
@Data
public class RegisterRequest {
// The invite code authorizing this registration.
@NotBlank
private String code;
@NotBlank
@Email
private String email;
// Plaintext password; hashed server-side before storage.
@NotBlank
private String password;
private String firstName;
private String lastName;
// Mention-notification preference; opt-out (defaults to enabled).
private boolean notifyOnMention = true;
}

View File

@@ -1,67 +0,0 @@
package org.raddatz.familienarchiv.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import java.util.List;
import java.util.UUID;
/**
 * Match signals for a single document in a full-text search result.
 * All fields are non-null except {@code transcriptionSnippet} and {@code summarySnippet},
 * which are null when the respective field did not match the query.
 */
public record SearchMatchData(
        /**
         * Best-ranked matching transcription line, or null if no block matched.
         */
        String transcriptionSnippet,
        /**
         * Character offsets of highlighted terms within the document title.
         * Empty when the title did not contribute to the match.
         */
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
        List<MatchOffset> titleOffsets,
        /**
         * True when the sender's name matched the query.
         */
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
        boolean senderMatched,
        /**
         * IDs of receiver persons whose names matched the query.
         */
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
        List<UUID> matchedReceiverIds,
        /**
         * IDs of tags whose names matched the query.
         */
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
        List<UUID> matchedTagIds,
        /**
         * Character offsets of highlighted terms within the transcription snippet.
         * Empty when no transcription block matched or the snippet has no highlights.
         */
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
        List<MatchOffset> snippetOffsets,
        /**
         * Highlighted summary excerpt, or null if the summary did not match the query.
         */
        String summarySnippet,
        /**
         * Character offsets of highlighted terms within the summary snippet.
         * Empty when the summary did not match or has no highlights.
         */
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
        List<MatchOffset> summaryOffsets
) {
    /** Canonical "no match data" value for a single document: null snippets, empty lists, no sender match. */
    public static SearchMatchData empty() {
        return new SearchMatchData(null, List.of(), false, List.of(), List.of(), List.of(), null, List.of());
    }
}

View File

@@ -1,9 +0,0 @@
package org.raddatz.familienarchiv.dto;
/** Determines how multiple selected tag filters are combined in a document search. */
public enum TagOperator {
    /** Every tag set must match (default when the client sends no operator). */
    AND,
    /** At least one tag set must match. */
    OR
}

View File

@@ -1,14 +0,0 @@
package org.raddatz.familienarchiv.dto;
import java.util.List;
import java.util.UUID;
import io.swagger.v3.oas.annotations.media.Schema;
/**
 * One node of the hierarchical tag tree, including its direct children and
 * how many documents carry this tag.
 */
public record TagTreeNodeDTO(
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID id,
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) String name,
        // Color token; nullable — presumably only set on root tags, as on the entity.
        String color,
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) int documentCount,
        List<TagTreeNodeDTO> children,
        @Schema(description = "Parent tag ID, null for root tags") UUID parentId) {}

View File

@@ -1,5 +0,0 @@
package org.raddatz.familienarchiv.dto;
import java.util.UUID;
/** Patch body for a tag: new name, parent (null for root level), and color token. */
public record TagUpdateDTO(String name, UUID parentId, String color) {}

View File

@@ -1,11 +0,0 @@
package org.raddatz.familienarchiv.dto;
import org.raddatz.familienarchiv.model.OcrTrainingRun;
import java.util.List;
import java.util.Map;
/**
 * OCR training history: past runs plus a lookup of person display names
 * (map keys are presumably person IDs as strings — verify against the caller).
 */
public record TrainingHistoryResponse(
        List<OcrTrainingRun> runs,
        Map<String, String> personNames
) {}

View File

@@ -1,19 +0,0 @@
package org.raddatz.familienarchiv.dto;
import org.raddatz.familienarchiv.model.OcrTrainingRun;
import org.raddatz.familienarchiv.model.SenderModel;
import java.util.List;
import java.util.Map;
/**
 * Dashboard snapshot for OCR training: available training material counts,
 * OCR-service reachability, run history, and per-sender models.
 */
public record TrainingInfoResponse(
        int availableBlocks,
        int totalOcrBlocks,
        int availableDocuments,
        int availableSegBlocks,
        boolean ocrServiceAvailable,
        OcrTrainingRun lastRun,
        List<OcrTrainingRun> runs,
        // Presumably person-ID → display name, as in TrainingHistoryResponse.
        Map<String, String> personNames,
        List<SenderModel> senderModels
) {}

View File

@@ -1,19 +0,0 @@
package org.raddatz.familienarchiv.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
import java.time.LocalDate;
import java.util.List;
import java.util.UUID;
/**
 * One document in the transcription work queue, with per-stage block counts
 * and the users who have contributed so far.
 */
public record TranscriptionQueueItemDTO(
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID id,
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) String title,
        LocalDate documentDate,
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) int annotationCount,
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) int textedBlockCount,
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) int reviewedBlockCount,
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) List<ActivityActorDTO> contributors,
        // True when the contributor list was truncated for display.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) boolean hasMoreContributors
) {}

View File

@@ -1,13 +0,0 @@
package org.raddatz.familienarchiv.dto;
import io.swagger.v3.oas.annotations.media.Schema;
/**
 * Weekly activity pulse for the Mission Control Strip column headers.
 * Counts documents that received new work in each pipeline stage
 * during the last 7 days.
 */
public record TranscriptionWeeklyStatsDTO(
        // Documents with new segmentation work in the last 7 days.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) long segmentationCount,
        // Documents with new transcription work in the last 7 days.
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED) long transcriptionCount
) {}

View File

@@ -1,12 +0,0 @@
package org.raddatz.familienarchiv.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotNull;
import java.util.UUID;
/** Request body to start an OCR training run for one sender (person). */
public record TriggerSenderTrainingDTO(
        @NotNull
        @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
        UUID personId
) {}

View File

@@ -13,8 +13,6 @@ public enum ErrorCode {
PERSON_NOT_FOUND, PERSON_NOT_FOUND,
/** A person name alias with the given ID does not exist. 404 */ /** A person name alias with the given ID does not exist. 404 */
ALIAS_NOT_FOUND, ALIAS_NOT_FOUND,
/** The submitted personType value is not allowed (e.g. SKIP is import-only). 400 */
INVALID_PERSON_TYPE,
// --- Documents --- // --- Documents ---
/** A document with the given ID does not exist. 404 */ /** A document with the given ID does not exist. 404 */
@@ -40,20 +38,6 @@ public enum ErrorCode {
/** A mass import is already in progress; only one can run at a time. 409 */ /** A mass import is already in progress; only one can run at a time. 409 */
IMPORT_ALREADY_RUNNING, IMPORT_ALREADY_RUNNING,
// --- Thumbnails ---
/** A thumbnail backfill is already in progress; only one can run at a time. 409 */
THUMBNAIL_BACKFILL_ALREADY_RUNNING,
// --- Invites ---
/** The invite code does not exist. 404 */
INVITE_NOT_FOUND,
/** The invite has already reached its use limit. 409 */
INVITE_EXHAUSTED,
/** The invite has been revoked by an admin. 409 */
INVITE_REVOKED,
/** The invite has passed its expiry date. 410 */
INVITE_EXPIRED,
// --- Auth --- // --- Auth ---
/** The request is not authenticated. 401 */ /** The request is not authenticated. 401 */
UNAUTHORIZED, UNAUTHORIZED,
@@ -93,28 +77,10 @@ public enum ErrorCode {
OCR_PROCESSING_FAILED, OCR_PROCESSING_FAILED,
/** A training run is already in progress. 409 */ /** A training run is already in progress. 409 */
TRAINING_ALREADY_RUNNING, TRAINING_ALREADY_RUNNING,
/** Internal inconsistency: expected training run row was not found after creation. 500 */
OCR_TRAINING_CONFLICT,
// --- Tags ---
/** A tag with the given ID does not exist. 404 */
TAG_NOT_FOUND,
/** The supplied color token is not in the allowed palette. 400 */
INVALID_TAG_COLOR,
/** Setting this parent would create a cycle in the tag hierarchy. 400 */
TAG_CYCLE_DETECTED,
/** Merge source and target are the same tag. 400 */
TAG_MERGE_SELF,
/** The merge target is a descendant of the source tag. 400 */
TAG_MERGE_INVALID_TARGET,
// --- Generic --- // --- Generic ---
/** Request validation failed (missing or malformed fields). 400 */ /** Request validation failed (missing or malformed fields). 400 */
VALIDATION_ERROR, VALIDATION_ERROR,
/** Batch upload exceeds the maximum allowed file count per request. 400 */
BATCH_TOO_LARGE,
/** Bulk edit request exceeds the per-request document ID cap. 400 */
BULK_EDIT_TOO_MANY_IDS,
/** An unexpected server-side error occurred. 500 */ /** An unexpected server-side error occurred. 500 */
INTERNAL_ERROR, INTERNAL_ERROR,
} }

View File

@@ -1,9 +1,6 @@
package org.raddatz.familienarchiv.model; package org.raddatz.familienarchiv.model;
import jakarta.persistence.*; import jakarta.persistence.*;
import jakarta.validation.constraints.Email;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.Pattern;
import lombok.*; import lombok.*;
import org.hibernate.annotations.CreationTimestamp; import org.hibernate.annotations.CreationTimestamp;
@@ -19,12 +16,8 @@ import java.util.HashSet;
import java.util.Set; import java.util.Set;
import java.util.UUID; import java.util.UUID;
import jakarta.persistence.PostLoad;
import jakarta.persistence.PrePersist;
import jakarta.persistence.PreUpdate;
@Entity @Entity
@Table(name = "users") @Table(name = "users") // Tabellenname in Postgres
@Data @Data
@NoArgsConstructor @NoArgsConstructor
@AllArgsConstructor @AllArgsConstructor
@@ -37,26 +30,26 @@ public class AppUser {
private UUID id; private UUID id;
@Column(unique = true, nullable = false) @Column(unique = true, nullable = false)
@NotBlank
@Email
@Pattern(regexp = "^[^:]+$", message = "Email must not contain a colon")
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private String email; private String username;
@Column(nullable = false) @Column(nullable = false)
@JsonProperty(access = JsonProperty.Access.WRITE_ONLY) @JsonProperty(access = JsonProperty.Access.WRITE_ONLY)
private String password; private String password; // Wird verschlüsselt gespeichert (BCrypt)
private String firstName; private String firstName;
private String lastName; private String lastName;
private LocalDate birthDate; private LocalDate birthDate;
@Column(unique = true)
private String email;
@Column(columnDefinition = "TEXT") @Column(columnDefinition = "TEXT")
private String contact; private String contact;
@Builder.Default @Builder.Default
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private boolean enabled = true; private boolean enabled = true; // Um User zu sperren ohne sie zu löschen
@Column(nullable = false) @Column(nullable = false)
@Builder.Default @Builder.Default
@@ -68,6 +61,7 @@ public class AppUser {
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private boolean notifyOnMention = false; private boolean notifyOnMention = false;
// Ein User kann in mehreren Gruppen sein
@ManyToMany(fetch = FetchType.EAGER) @ManyToMany(fetch = FetchType.EAGER)
@JoinTable(name = "users_groups", joinColumns = @JoinColumn(name = "user_id"), inverseJoinColumns = @JoinColumn(name = "group_id")) @JoinTable(name = "users_groups", joinColumns = @JoinColumn(name = "user_id"), inverseJoinColumns = @JoinColumn(name = "group_id"))
@Builder.Default @Builder.Default
@@ -78,48 +72,31 @@ public class AppUser {
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private LocalDateTime createdAt; private LocalDateTime createdAt;
@Column(nullable = false)
@Builder.Default
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private String color = "";
private static final String[] PALETTE = {
"#7a4f9a", "#5a8a6a", "#3060b0", "#a0522d", "#c0446e", "#c17a00", "#0e7490", "#1d4ed8"
};
public static String computeColor(UUID id) {
return PALETTE[Math.abs(id.hashCode()) % PALETTE.length];
}
@PrePersist
@PreUpdate
@PostLoad
void deriveColor() {
if (id != null && (color == null || color.isEmpty())) {
this.color = computeColor(id);
}
}
public boolean hasPermission(String permission) { public boolean hasPermission(String permission) {
if (groups == null || groups.isEmpty()) { if (groups == null || groups.isEmpty()) {
return false; return false;
} }
return this.groups.stream().anyMatch(group -> group.getPermissions().contains(permission)); return this.groups.stream().anyMatch(group -> group.getPermissions().contains(permission));
} }
public AppUser updateFromRequest(CreateUserRequest request, PasswordEncoder passwordEncoder, Set<UserGroup> groups) { public AppUser updateFromRequest(CreateUserRequest request, PasswordEncoder passwordEncoder, Set<UserGroup> groups) {
if (request.getEmail() != null && !request.getEmail().isBlank()) { if (request.getUsername() != null && !request.getUsername().isBlank()) {
this.email = request.getEmail(); this.username = request.getUsername();
}
if (request.getInitialPassword() != null && !request.getInitialPassword().isBlank()) {
this.password = passwordEncoder.encode(request.getInitialPassword());
}
if (groups != null && !groups.isEmpty()) {
this.groups = groups;
}
return this;
} }
if (request.getEmail() != null && !request.getEmail().isBlank()) {
this.email = request.getEmail();
}
if (request.getInitialPassword() != null && !request.getInitialPassword().isBlank()) {
this.password = passwordEncoder.encode(request.getInitialPassword());
}
if (groups != null && !groups.isEmpty()) {
this.groups = groups;
}
return this;
}
} }

View File

@@ -6,11 +6,8 @@ import org.hibernate.annotations.CreationTimestamp;
import org.hibernate.annotations.UpdateTimestamp; import org.hibernate.annotations.UpdateTimestamp;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.media.Schema;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.time.LocalDate; import java.time.LocalDate;
import java.time.LocalDateTime; import java.time.LocalDateTime;
import java.util.HashSet; import java.util.HashSet;
@@ -46,20 +43,6 @@ public class Document {
@Column(name = "file_hash", length = 64) @Column(name = "file_hash", length = 64)
private String fileHash; private String fileHash;
// S3 key of the generated thumbnail (e.g. "thumbnails/{docId}.jpg"); null until generated
@Column(name = "thumbnail_key")
private String thumbnailKey;
@Column(name = "thumbnail_generated_at")
private LocalDateTime thumbnailGeneratedAt;
@Enumerated(EnumType.STRING)
@Column(name = "thumbnail_aspect", length = 16)
private ThumbnailAspect thumbnailAspect;
@Column(name = "page_count")
private Integer pageCount;
// Originaler Dateiname beim Upload (z.B. "Brief_Oma_1940.pdf") // Originaler Dateiname beim Upload (z.B. "Brief_Oma_1940.pdf")
@Column(name = "original_filename", nullable = false) @Column(name = "original_filename", nullable = false)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
@@ -134,19 +117,4 @@ public class Document {
@Enumerated(EnumType.STRING) @Enumerated(EnumType.STRING)
@Builder.Default @Builder.Default
private Set<TrainingLabel> trainingLabels = new HashSet<>(); private Set<TrainingLabel> trainingLabels = new HashSet<>();
// The `?v={thumbnailGeneratedAt}` cache-buster is load-bearing: the thumbnail
// endpoint sends `Cache-Control: private, max-age=31536000, immutable`
// (DocumentController.getDocumentThumbnail). `immutable` is only safe because
// this URL changes whenever the underlying file does. Dropping the query param
// would let browsers serve a stale thumbnail for a year after the file is
// replaced, and shared caches could leak one user's thumbnail to another
// (CWE-525).
@JsonProperty("thumbnailUrl")
public String getThumbnailUrl() {
if (thumbnailKey == null) return null;
String base = "/api/documents/" + id + "/thumbnail";
if (thumbnailGeneratedAt == null) return base;
return base + "?v=" + URLEncoder.encode(thumbnailGeneratedAt.toString(), StandardCharsets.UTF_8);
}
} }

View File

@@ -1,76 +0,0 @@
package org.raddatz.familienarchiv.model;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.persistence.*;
import lombok.*;
import org.hibernate.annotations.CreationTimestamp;
import java.time.LocalDateTime;
import java.util.HashSet;
import java.util.Set;
import java.util.UUID;
/**
 * Persistent registration invite. An invite is usable while it is not
 * revoked, has uses remaining, and has not passed its expiry date —
 * see {@link #isActive()}.
 */
@Entity
@Table(name = "invite_tokens")
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class InviteToken {
@Id
@GeneratedValue(strategy = GenerationType.UUID)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private UUID id;
// Short unique invite code (max 10 chars) entered by the registrant.
@Column(nullable = false, unique = true, length = 10)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private String code;
// Free-text admin label.
private String label;
// null = unlimited uses.
private Integer maxUses;
// Completed registrations via this invite.
@Column(nullable = false)
@Builder.Default
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private int useCount = 0;
// Values pre-filled into the registration form.
private String prefillFirstName;
private String prefillLastName;
private String prefillEmail;
// Groups granted to the user on registration.
@ElementCollection(fetch = FetchType.EAGER)
@CollectionTable(name = "invite_token_group_ids", joinColumns = @JoinColumn(name = "invite_token_id"))
@Column(name = "group_id")
@Builder.Default
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private Set<UUID> groupIds = new HashSet<>();
// null = never expires.
private LocalDateTime expiresAt;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "created_by", nullable = false)
private AppUser createdBy;
@CreationTimestamp
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private LocalDateTime createdAt;
@Column(nullable = false)
@Builder.Default
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private boolean revoked = false;
/** True when the use limit (if any) has been reached. */
public boolean isExhausted() {
return maxUses != null && useCount >= maxUses;
}
/** True when the expiry date (if any) lies in the past. */
public boolean isExpired() {
return expiresAt != null && expiresAt.isBefore(LocalDateTime.now());
}
/** True when the invite can still be used: not revoked, not exhausted, not expired. */
public boolean isActive() {
return !revoked && !isExhausted() && !isExpired();
}
}

View File

@@ -59,9 +59,6 @@ public class OcrTrainingRun {
@Column(name = "triggered_by") @Column(name = "triggered_by")
private UUID triggeredBy; private UUID triggeredBy;
@Column(name = "person_id")
private UUID personId;
@CreationTimestamp @CreationTimestamp
@Column(name = "created_at", nullable = false, updatable = false) @Column(name = "created_at", nullable = false, updatable = false)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) @Schema(requiredMode = Schema.RequiredMode.REQUIRED)

View File

@@ -1,56 +0,0 @@
package org.raddatz.familienarchiv.model;
import com.fasterxml.jackson.annotation.JsonIgnore;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.persistence.*;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.hibernate.annotations.CreationTimestamp;
import org.hibernate.annotations.UpdateTimestamp;
import java.time.Instant;
import java.util.UUID;
/**
 * Per-sender OCR model record: one row per person, pointing at the trained
 * model artifact plus quality metrics from its last training.
 */
@Entity
@Table(name = "sender_models")
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class SenderModel {
@Id
@GeneratedValue(strategy = GenerationType.UUID)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private UUID id;
// Unique: at most one model per person.
@Column(name = "person_id", nullable = false, unique = true)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private UUID personId;
// Storage path of the model artifact; hidden from JSON responses.
@JsonIgnore
@Column(name = "model_path", nullable = false)
private String modelPath;
// Quality metrics from the latest training run; nullable if not computed.
@Column
private Double accuracy;
// Character error rate — lower is better.
@Column
private Double cer;
// How many corrected lines existed when this model was trained.
@Column(name = "corrected_lines_at_training", nullable = false)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private int correctedLinesAtTraining;
@CreationTimestamp
@Column(name = "created_at", nullable = false, updatable = false)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private Instant createdAt;
@UpdateTimestamp
@Column(name = "updated_at", nullable = false)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private Instant updatedAt;
}

View File

@@ -20,11 +20,4 @@ public class Tag {
@Column(unique = true, nullable = false) @Column(unique = true, nullable = false)
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) @Schema(requiredMode = Schema.RequiredMode.REQUIRED)
private String name; private String name;
/** UUID of the parent tag, or null for root-level tags. */
@Column(name = "parent_id")
private UUID parentId;
/** Color token name (e.g. "sage"), only set on root-level tags. Null means no color. */
private String color;
} }

View File

@@ -1,6 +0,0 @@
package org.raddatz.familienarchiv.model;
/** Orientation of a document's generated thumbnail image. */
public enum ThumbnailAspect {
    /** Taller than wide. */
    PORTRAIT,
    /** Wider than tall. */
    LANDSCAPE
}

View File

@@ -1,7 +1,6 @@
package org.raddatz.familienarchiv.model; package org.raddatz.familienarchiv.model;
public enum TrainingStatus { public enum TrainingStatus {
QUEUED,
RUNNING, RUNNING,
DONE, DONE,
FAILED FAILED

View File

@@ -13,10 +13,11 @@ import java.util.UUID;
@Repository @Repository
public interface AppUserRepository extends JpaRepository<AppUser, UUID> { public interface AppUserRepository extends JpaRepository<AppUser, UUID> {
Optional<AppUser> findByUsername(String username);
Optional<AppUser> findByEmail(String email); Optional<AppUser> findByEmail(String email);
@Query("SELECT u FROM AppUser u WHERE " + @Query("SELECT u FROM AppUser u WHERE " +
"LOWER(u.email) LIKE LOWER(CONCAT('%', :q, '%')) " + "LOWER(COALESCE(u.firstName, '') || ' ' || COALESCE(u.lastName, '')) LIKE LOWER(CONCAT('%', :q, '%')) " +
"OR LOWER(COALESCE(u.firstName, '') || ' ' || COALESCE(u.lastName, '')) LIKE LOWER(CONCAT('%', :q, '%'))") "OR LOWER(u.username) LIKE LOWER(CONCAT('%', :q, '%'))")
List<AppUser> searchByEmailOrName(@Param("q") String q, Pageable pageable); List<AppUser> searchByNameOrUsername(@Param("q") String q, Pageable pageable);
} }

View File

@@ -8,6 +8,10 @@ import java.util.UUID;
public interface CommentRepository extends JpaRepository<DocumentComment, UUID> { public interface CommentRepository extends JpaRepository<DocumentComment, UUID> {
List<DocumentComment> findByDocumentIdAndAnnotationIdIsNullAndParentIdIsNull(UUID documentId);
List<DocumentComment> findByAnnotationIdAndParentIdIsNull(UUID annotationId);
List<DocumentComment> findByParentId(UUID parentId); List<DocumentComment> findByParentId(UUID parentId);
List<DocumentComment> findByBlockIdAndParentIdIsNull(UUID blockId); List<DocumentComment> findByBlockIdAndParentIdIsNull(UUID blockId);

View File

@@ -1,8 +0,0 @@
package org.raddatz.familienarchiv.repository;
import java.util.UUID;
/**
 * Spring Data projection for per-document transcription completion statistics.
 * Getter names must exactly match the column aliases of the native query that
 * produces the rows.
 */
public interface CompletionStatsRow {
// Document the row belongs to.
UUID getDocumentId();
// Percentage of reviewed blocks, rounded to an int in the 0–100 range.
int getCompletionPercentage();
}

View File

@@ -46,8 +46,6 @@ public interface DocumentRepository extends JpaRepository<Document, UUID>, JpaSp
List<Document> findByFileHashIsNullAndFilePathIsNotNull(); List<Document> findByFileHashIsNullAndFilePathIsNotNull();
List<Document> findByFilePathIsNotNullAndThumbnailKeyIsNull();
@Query("SELECT d.id, d.title FROM Document d WHERE d.id IN :ids") @Query("SELECT d.id, d.title FROM Document d WHERE d.id IN :ids")
List<Object[]> findIdAndTitleByIdIn(@Param("ids") Collection<UUID> ids); List<Object[]> findIdAndTitleByIdIn(@Param("ids") Collection<UUID> ids);
@@ -85,157 +83,10 @@ public interface DocumentRepository extends JpaRepository<Document, UUID>, JpaSp
@Query(nativeQuery = true, value = """ @Query(nativeQuery = true, value = """
SELECT d.id FROM documents d SELECT d.id FROM documents d
CROSS JOIN LATERAL ( WHERE d.search_vector @@ websearch_to_tsquery('german', :query)
SELECT CASE WHEN websearch_to_tsquery('german', :query)::text <> '' ORDER BY ts_rank(d.search_vector, websearch_to_tsquery('german', :query)) DESC,
THEN to_tsquery('simple', regexp_replace(
websearch_to_tsquery('german', :query)::text,
'''([^'']+)''',
'''\\1'':*',
'g'))
END AS pq
) q
WHERE d.search_vector @@ q.pq
ORDER BY ts_rank(d.search_vector, q.pq) DESC,
d.meta_date DESC NULLS LAST d.meta_date DESC NULLS LAST
""") """)
List<UUID> findRankedIdsByFts(@Param("query") String query); List<UUID> findRankedIdsByFts(@Param("query") String query);
/**
 * Returns match-enrichment data for a set of documents identified by their IDs.
 * Each row contains (in column order):
 * <ol>
 * <li>UUID — document id</li>
 * <li>String — title headline with \x01/\x02 delimiters around matched terms</li>
 * <li>String — best-ranked transcription snippet with \x01/\x02 delimiters, or null</li>
 * <li>Boolean — whether the sender's name matched the query</li>
 * <li>String — comma-separated matched receiver UUIDs, or null</li>
 * <li>String — comma-separated matched tag UUIDs, or null</li>
 * <li>String — summary snippet with \x01/\x02 delimiters, or null if summary didn't match</li>
 * </ol>
 * Short-circuit before calling this method when {@code ids} is empty or {@code query} is blank.
 * <p>The CROSS JOIN LATERAL subquery rewrites the websearch tsquery so every
 * quoted term gets a {@code :*} suffix — all matching is therefore prefix
 * matching, and partial words match too.
 */
@Query(nativeQuery = true, value = """
SELECT
d.id,
ts_headline('german', d.title, q.pq,
'StartSel=' || chr(1) || ',StopSel=' || chr(2) || ',HighlightAll=true')
AS title_headline,
CASE WHEN best_block.text IS NOT NULL THEN
ts_headline('german', best_block.text, q.pq,
'StartSel=' || chr(1) || ',StopSel=' || chr(2) || ',MaxWords=50,MinWords=20')
END AS transcription_snippet,
(s.id IS NOT NULL AND
to_tsvector('german', COALESCE(s.first_name, '') || ' ' || COALESCE(s.last_name, ''))
@@ q.pq)
AS sender_matched,
(SELECT string_agg(r.id::text, ',')
FROM document_receivers dr
JOIN persons r ON r.id = dr.person_id
WHERE dr.document_id = d.id
AND to_tsvector('german', COALESCE(r.first_name, '') || ' ' || r.last_name)
@@ q.pq
) AS matched_receiver_ids,
(SELECT string_agg(t.id::text, ',')
FROM document_tags dt
JOIN tag t ON t.id = dt.tag_id
WHERE dt.document_id = d.id
AND to_tsvector('german', t.name) @@ q.pq
) AS matched_tag_ids,
CASE WHEN d.summary IS NOT NULL AND d.summary <> ''
AND to_tsvector('german', d.summary) @@ q.pq
THEN ts_headline('german', d.summary, q.pq,
'StartSel=' || chr(1) || ',StopSel=' || chr(2) || ',MaxWords=50,MinWords=20')
END AS summary_snippet
FROM documents d
CROSS JOIN LATERAL (
SELECT CASE WHEN websearch_to_tsquery('german', :query)::text <> ''
THEN to_tsquery('simple', regexp_replace(
websearch_to_tsquery('german', :query)::text,
'''([^'']+)''',
'''\\1'':*',
'g'))
END AS pq
) q
LEFT JOIN persons s ON s.id = d.sender_id
LEFT JOIN LATERAL (
SELECT tb.text
FROM transcription_blocks tb
WHERE tb.document_id = d.id
AND to_tsvector('german', tb.text) @@ q.pq
ORDER BY ts_rank(to_tsvector('german', tb.text), q.pq) DESC
LIMIT 1
) best_block ON true
WHERE d.id IN :ids
""")
List<Object[]> findEnrichmentData(@Param("ids") Collection<UUID> ids, @Param("query") String query);
// --- Mission Control Strip queues ---

/**
 * Segmentierung column: documents (excluding placeholders) that have no
 * annotations yet. The three count columns are hard-coded to 0 because no
 * annotation or block work exists for these documents.
 * Ordering hashes the document id together with the current ISO week number,
 * yielding a deterministic shuffle that reseeds once per week.
 */
@Query(nativeQuery = true, value = """
SELECT d.id, d.title, d.meta_date AS documentDate,
0 AS annotationCount, 0 AS textedBlockCount, 0 AS reviewedBlockCount
FROM documents d
WHERE d.status NOT IN ('PLACEHOLDER')
AND NOT EXISTS (SELECT 1 FROM document_annotations da WHERE da.document_id = d.id)
ORDER BY HASHTEXT(d.id::text || EXTRACT(WEEK FROM NOW())::int::text)
LIMIT :limit
""")
List<TranscriptionQueueProjection> findSegmentationQueue(@Param("limit") int limit);
/**
 * Transkription column: documents that have at least one annotation but whose
 * review ratio (reviewed blocks / annotation count) is still below 90 %.
 * Primary ordering favors documents with the most texted blocks; ties fall back
 * to the weekly-seeded hash shuffle.
 * NOTE(review): the ratio's denominator is the annotation count, not the block
 * count — if blocks and annotations are not strictly 1:1 the ratio can exceed
 * 1.0; confirm this matches the intended completion semantics.
 */
@Query(nativeQuery = true, value = """
SELECT d.id, d.title, d.meta_date AS documentDate,
COUNT(DISTINCT da.id) AS annotationCount,
COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END) AS textedBlockCount,
COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END) AS reviewedBlockCount
FROM documents d
JOIN document_annotations da ON da.document_id = d.id
LEFT JOIN transcription_blocks tb ON tb.document_id = d.id
GROUP BY d.id, d.title, d.meta_date
HAVING COUNT(DISTINCT da.id) > 0
AND (
COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END)::float /
COUNT(DISTINCT da.id)
) < 0.90
ORDER BY COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END) DESC,
HASHTEXT(d.id::text || EXTRACT(WEEK FROM NOW())::int::text)
LIMIT :limit
""")
List<TranscriptionQueueProjection> findTranscriptionQueue(@Param("limit") int limit);
/**
 * Lesefertig column: documents whose reviewed-blocks-to-annotations ratio is at
 * least 90 %, ordered by that ratio descending (most complete first).
 * NOTE(review): same denominator caveat as the Transkription queue — reviewed
 * block count is divided by the annotation count; confirm blocks map 1:1 to
 * annotations, otherwise the 90 % threshold shifts.
 */
@Query(nativeQuery = true, value = """
SELECT d.id, d.title, d.meta_date AS documentDate,
COUNT(DISTINCT da.id) AS annotationCount,
COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END) AS textedBlockCount,
COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END) AS reviewedBlockCount
FROM documents d
JOIN document_annotations da ON da.document_id = d.id
LEFT JOIN transcription_blocks tb ON tb.document_id = d.id
GROUP BY d.id, d.title, d.meta_date
HAVING COUNT(DISTINCT da.id) > 0
AND (
COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END)::float /
COUNT(DISTINCT da.id)
) >= 0.90
ORDER BY (
COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END)::float /
COUNT(DISTINCT da.id)
) DESC
LIMIT :limit
""")
List<TranscriptionQueueProjection> findReadyToReadQueue(@Param("limit") int limit);
/**
 * Weekly pulse: counts of distinct documents that received new annotations
 * (segmentation work) or newly texted transcription blocks within the last
 * seven days. Column aliases must match {@code TranscriptionWeeklyStatsProjection}.
 */
@Query(nativeQuery = true, value = """
SELECT
(SELECT COUNT(DISTINCT da.document_id) FROM document_annotations da
WHERE da.created_at >= NOW() - INTERVAL '7 days') AS segmentationCount,
(SELECT COUNT(DISTINCT tb.document_id) FROM transcription_blocks tb
WHERE tb.created_at >= NOW() - INTERVAL '7 days'
AND tb.text IS NOT NULL AND tb.text <> '') AS transcriptionCount
""")
TranscriptionWeeklyStatsProjection findWeeklyStats();
} }

View File

@@ -4,7 +4,6 @@ import jakarta.persistence.criteria.*;
import java.time.LocalDate; import java.time.LocalDate;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Set;
import java.util.UUID; import java.util.UUID;
import org.raddatz.familienarchiv.model.Document; import org.raddatz.familienarchiv.model.Document;
@@ -55,64 +54,34 @@ public class DocumentSpecifications {
return (root, query, cb) -> status == null ? null : cb.equal(root.get("status"), status); return (root, query, cb) -> status == null ? null : cb.equal(root.get("status"), status);
} }
/** // Filtert nach Schlagworten (UND-Verknüpfung, exakter Match)
* Filtert nach vorausgeweiteten Tag-ID-Sets mit AND- oder OR-Logik. public static Specification<Document> hasTags(List<String> tags) {
*
* <p>AND (useOr=false): Das Dokument muss mindestens einen Tag aus <em>jedem</em> Set besitzen.
* <p>OR (useOr=true): Das Dokument muss mindestens einen Tag aus der Vereinigung aller Sets besitzen.
*
* <p>Jedes Set repräsentiert einen ausgewählten Tag inklusive aller seiner Nachkommen
* (vorausgeweitet durch {@code TagRepository.findDescendantIdsByName}).
*/
public static Specification<Document> hasTags(List<Set<UUID>> tagIdSets, boolean useOr) {
return (root, query, cb) -> { return (root, query, cb) -> {
if (tagIdSets == null || tagIdSets.isEmpty()) if (tags == null || tags.isEmpty())
return null; return null;
if (!useOr) {
// AND mode: an empty set means the tag resolved to no IDs (doesn't exist) —
// no document can satisfy the condition, so return no results immediately.
boolean hasEmptySet = tagIdSets.stream().anyMatch(s -> s == null || s.isEmpty());
if (hasEmptySet) return cb.disjunction();
}
List<Set<UUID>> nonEmpty = tagIdSets.stream()
.filter(s -> s != null && !s.isEmpty())
.toList();
if (nonEmpty.isEmpty()) return null;
if (useOr) {
Set<UUID> union = new java.util.HashSet<>();
nonEmpty.forEach(union::addAll);
return documentHasTagIn(root, query, cb, union);
}
// AND: one EXISTS subquery per set
List<Predicate> predicates = new ArrayList<>(); List<Predicate> predicates = new ArrayList<>();
for (Set<UUID> ids : nonEmpty) {
predicates.add(documentHasTagIn(root, query, cb, ids)); for (String tagName : tags) {
if (!StringUtils.hasText(tagName)) continue;
Subquery<Long> subquery = query.subquery(Long.class);
Root<Document> subRoot = subquery.from(Document.class);
Join<Document, Tag> subTags = subRoot.join("tags");
subquery.select(subRoot.get("id"))
.where(
cb.equal(subRoot.get("id"), root.get("id")),
cb.equal(cb.lower(subTags.get("name")), tagName.trim().toLowerCase())
);
predicates.add(cb.exists(subquery));
} }
return cb.and(predicates.toArray(new Predicate[0])); return cb.and(predicates.toArray(new Predicate[0]));
}; };
} }
private static Predicate documentHasTagIn(
Root<Document> root,
jakarta.persistence.criteria.CriteriaQuery<?> query,
jakarta.persistence.criteria.CriteriaBuilder cb,
Set<UUID> tagIds) {
Subquery<UUID> subquery = query.subquery(UUID.class);
Root<Document> subRoot = subquery.from(Document.class);
Join<Document, Tag> subTags = subRoot.join("tags");
subquery.select(subRoot.get("id"))
.where(
cb.equal(subRoot.get("id"), root.get("id")),
subTags.get("id").in(tagIds)
);
return cb.exists(subquery);
}
// Filtert nach partiellem Tag-Namen (ILIKE) — für Live-Tag-Suche // Filtert nach partiellem Tag-Namen (ILIKE) — für Live-Tag-Suche
public static Specification<Document> hasTagPartial(String tagQ) { public static Specification<Document> hasTagPartial(String tagQ) {
return (root, query, cb) -> { return (root, query, cb) -> {

View File

@@ -1,27 +0,0 @@
package org.raddatz.familienarchiv.repository;
import jakarta.persistence.LockModeType;
import org.raddatz.familienarchiv.model.InviteToken;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Lock;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
/** Repository for invite tokens used during user registration. */
public interface InviteTokenRepository extends JpaRepository<InviteToken, UUID> {

/** Plain lookup of a token by its code. */
Optional<InviteToken> findByCode(String code);

/**
 * Lookup by code under a pessimistic write lock, so concurrent redemptions of
 * the same code serialize on the row — presumably so useCount can be
 * incremented without races; confirm against the redeeming caller.
 */
@Lock(LockModeType.PESSIMISTIC_WRITE)
@Query("SELECT t FROM InviteToken t WHERE t.code = :code")
Optional<InviteToken> findByCodeForUpdate(@Param("code") String code);

/**
 * Tokens that are still redeemable: not revoked, not expired (null expiresAt
 * means no expiry), and not exhausted (null maxUses means unlimited use),
 * newest first.
 */
@Query("SELECT t FROM InviteToken t WHERE t.revoked = false AND (t.expiresAt IS NULL OR t.expiresAt > CURRENT_TIMESTAMP) AND (t.maxUses IS NULL OR t.useCount < t.maxUses) ORDER BY t.createdAt DESC")
List<InviteToken> findActive();

/** All tokens regardless of state, newest first. */
@Query("SELECT t FROM InviteToken t ORDER BY t.createdAt DESC")
List<InviteToken> findAllOrderedByCreatedAt();
}

View File

@@ -12,15 +12,5 @@ public interface OcrTrainingRunRepository extends JpaRepository<OcrTrainingRun,
Optional<OcrTrainingRun> findFirstByStatus(TrainingStatus status); Optional<OcrTrainingRun> findFirstByStatus(TrainingStatus status);
Optional<OcrTrainingRun> findFirstByStatusOrderByCreatedAtAsc(TrainingStatus status); List<OcrTrainingRun> findTop10ByOrderByCreatedAtDesc();
Optional<OcrTrainingRun> findFirstByPersonIdAndStatus(UUID personId, TrainingStatus status);
boolean existsByPersonIdAndStatus(UUID personId, TrainingStatus status);
List<OcrTrainingRun> findTop20ByOrderByCreatedAtDesc();
List<OcrTrainingRun> findByPersonIdIsNullOrderByCreatedAtDesc();
List<OcrTrainingRun> findByPersonIdOrderByCreatedAtDesc(UUID personId);
} }

View File

@@ -1,12 +0,0 @@
package org.raddatz.familienarchiv.repository;
import org.raddatz.familienarchiv.model.SenderModel;
import org.springframework.data.jpa.repository.JpaRepository;
import java.util.Optional;
import java.util.UUID;
/** Repository for {@code SenderModel} rows; at most one model exists per person. */
public interface SenderModelRepository extends JpaRepository<SenderModel, UUID> {
/** Finds the model owned by the given person, if one has been trained. */
Optional<SenderModel> findByPersonId(UUID personId);
}

View File

@@ -1,126 +1,13 @@
package org.raddatz.familienarchiv.repository; package org.raddatz.familienarchiv.repository;
import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import java.util.UUID; import java.util.UUID;
import org.raddatz.familienarchiv.model.Tag; import org.raddatz.familienarchiv.model.Tag;
import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
public interface TagRepository extends JpaRepository<Tag, UUID> { public interface TagRepository extends JpaRepository<Tag, UUID> {
/** Typed projection for document-count aggregation results. */
interface TagCount {
UUID getTagId();
Long getCount();
}
Optional<Tag> findByNameIgnoreCase(String name); Optional<Tag> findByNameIgnoreCase(String name);
List<Tag> findByNameContainingIgnoreCase(String name); List<Tag> findByNameContainingIgnoreCase(String name);
}
/**
* Returns the IDs of all ancestors of the given tag (parent, grandparent, …)
* via a recursive CTE. Used for cycle detection before assigning a new parent.
* Includes a depth guard of 50 levels to prevent runaway queries.
*/
@Query(value = """
WITH RECURSIVE ancestors AS (
SELECT parent_id, 0 AS depth
FROM tag
WHERE id = :tagId AND parent_id IS NOT NULL
UNION ALL
SELECT t.parent_id, a.depth + 1
FROM tag t
JOIN ancestors a ON t.id = a.parent_id
WHERE t.parent_id IS NOT NULL AND a.depth < 50
)
SELECT parent_id FROM ancestors
""", nativeQuery = true)
List<UUID> findAncestorIds(@Param("tagId") UUID tagId);
/**
* Returns the IDs of the tag with the given name AND all of its descendants
* via a recursive CTE. Used to expand a selected tag to inclusive hierarchy results.
* Includes a depth guard of 50 levels to prevent runaway queries.
*/
@Query(value = """
WITH RECURSIVE descendants AS (
SELECT id, 0 AS depth FROM tag WHERE LOWER(name) = LOWER(:name)
UNION ALL
SELECT t.id, d.depth + 1 FROM tag t
JOIN descendants d ON t.parent_id = d.id
WHERE d.depth < 50
)
SELECT id FROM descendants
""", nativeQuery = true)
List<UUID> findDescendantIdsByName(@Param("name") String name);
/**
* Returns the IDs of the tag with the given ID AND all of its descendants
* via a recursive CTE. Used for merge validation and subtree delete.
* Includes a depth guard of 50 levels to prevent runaway queries.
*/
@Query(value = """
WITH RECURSIVE descendants AS (
SELECT id, 0 AS depth FROM tag WHERE id = :tagId
UNION ALL
SELECT t.id, d.depth + 1 FROM tag t
JOIN descendants d ON t.parent_id = d.id
WHERE d.depth < 50
)
SELECT id FROM descendants
""", nativeQuery = true)
List<UUID> findDescendantIds(@Param("tagId") UUID tagId);
/**
* Reassigns document_tags rows from source to target, skipping rows where
* the target tag is already present (to avoid PK conflicts).
*/
@Modifying(clearAutomatically = true)
@Query(value = """
UPDATE document_tags
SET tag_id = :targetId
WHERE tag_id = :sourceId
AND NOT EXISTS (
SELECT 1 FROM document_tags d2
WHERE d2.document_id = document_tags.document_id
AND d2.tag_id = :targetId
)
""", nativeQuery = true)
void reassignDocumentTags(@Param("sourceId") UUID sourceId, @Param("targetId") UUID targetId);
/**
* Removes all document_tags rows for the given tag.
*/
@Modifying(clearAutomatically = true)
@Query(value = "DELETE FROM document_tags WHERE tag_id = :tagId", nativeQuery = true)
void deleteDocumentTagsByTagId(@Param("tagId") UUID tagId);
/**
* Removes all document_tags rows for the given collection of tag IDs.
* Caller must guard against an empty collection — PostgreSQL rejects IN ().
*/
@Modifying(clearAutomatically = true)
@Query(value = "DELETE FROM document_tags WHERE tag_id IN :ids", nativeQuery = true)
void deleteDocumentTagsByTagIds(@Param("ids") Collection<UUID> ids);
/**
* Re-parents all direct children of sourceId to targetId.
*/
@Modifying(clearAutomatically = true)
@Query(value = "UPDATE tag SET parent_id = :targetId WHERE parent_id = :sourceId", nativeQuery = true)
void reparentChildren(@Param("sourceId") UUID sourceId, @Param("targetId") UUID targetId);
/**
* Returns (tagId, count) pairs for all tags that appear in document_tags.
* Used to populate documentCount in the tag tree without N+1 queries.
*/
@Query(value = "SELECT tag_id AS tagId, COUNT(*) AS count FROM document_tags GROUP BY tag_id", nativeQuery = true)
List<TagCount> findDocumentCountsPerTag();
}

View File

@@ -3,26 +3,13 @@ package org.raddatz.familienarchiv.repository;
import org.raddatz.familienarchiv.model.TranscriptionBlock; import org.raddatz.familienarchiv.model.TranscriptionBlock;
import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query; import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import java.util.UUID; import java.util.UUID;
public interface TranscriptionBlockRepository extends JpaRepository<TranscriptionBlock, UUID> { public interface TranscriptionBlockRepository extends JpaRepository<TranscriptionBlock, UUID> {
/**
 * Computes the reviewed-block percentage (rounded int, 0–100) per document for
 * the given ids. Documents without any transcription blocks produce no row.
 * NOTE(review): pass a non-empty collection — PostgreSQL rejects an empty
 * IN () list; confirm all callers guard against this.
 */
@Query(value = """
SELECT
b.document_id AS documentId,
ROUND(COUNT(*) FILTER (WHERE b.reviewed = true) * 100.0 / COUNT(*))::int AS completionPercentage
FROM transcription_blocks b
WHERE b.document_id IN :documentIds
GROUP BY b.document_id
""", nativeQuery = true)
List<CompletionStatsRow> findCompletionStatsForDocuments(
@Param("documentIds") Collection<UUID> documentIds);
List<TranscriptionBlock> findByDocumentIdOrderBySortOrderAsc(UUID documentId); List<TranscriptionBlock> findByDocumentIdOrderBySortOrderAsc(UUID documentId);
Optional<TranscriptionBlock> findByIdAndDocumentId(UUID id, UUID documentId); Optional<TranscriptionBlock> findByIdAndDocumentId(UUID id, UUID documentId);
@@ -50,22 +37,4 @@ public interface TranscriptionBlockRepository extends JpaRepository<Transcriptio
AND 'KURRENT_SEGMENTATION' MEMBER OF d.trainingLabels AND 'KURRENT_SEGMENTATION' MEMBER OF d.trainingLabels
""") """)
List<TranscriptionBlock> findSegmentationBlocks(); List<TranscriptionBlock> findSegmentationBlocks();
/**
 * Counts manually entered transcription blocks on Kurrent-handwriting documents
 * whose sender is the given person.
 */
@Query("""
SELECT COUNT(b) FROM TranscriptionBlock b
JOIN Document d ON d.id = b.documentId
WHERE b.source = 'MANUAL'
AND d.sender.id = :personId
AND d.scriptType = 'HANDWRITING_KURRENT'
""")
long countManualKurrentBlocksByPerson(@Param("personId") UUID personId);
/**
 * Returns the blocks counted by {@code countManualKurrentBlocksByPerson}:
 * manual transcriptions on Kurrent-handwriting documents of the given sender.
 */
@Query("""
SELECT b FROM TranscriptionBlock b
JOIN Document d ON d.id = b.documentId
WHERE b.source = 'MANUAL'
AND d.sender.id = :personId
AND d.scriptType = 'HANDWRITING_KURRENT'
""")
List<TranscriptionBlock> findManualKurrentBlocksByPerson(@Param("personId") UUID personId);
} }

View File

@@ -1,17 +0,0 @@
package org.raddatz.familienarchiv.repository;
import java.time.LocalDate;
import java.util.UUID;
/**
 * Spring Data projection for a single row in one of the three Mission Control Strip queues.
 * Column aliases in the native SQL queries must match these getter names exactly.
 */
public interface TranscriptionQueueProjection {
// Document id.
UUID getId();
// Document title.
String getTitle();
// Aliased from documents.meta_date; can be null for undated documents.
LocalDate getDocumentDate();
// Distinct annotations on the document (hard-coded 0 in the segmentation queue).
int getAnnotationCount();
// Distinct blocks with non-empty text (hard-coded 0 in the segmentation queue).
int getTextedBlockCount();
// Distinct blocks marked reviewed (hard-coded 0 in the segmentation queue).
int getReviewedBlockCount();
}

View File

@@ -1,10 +0,0 @@
package org.raddatz.familienarchiv.repository;
/**
 * Spring Data projection for the weekly activity pulse stats.
 * Column aliases in the native SQL query must match these getter names exactly.
 */
public interface TranscriptionWeeklyStatsProjection {
// Distinct documents that received new annotations in the last 7 days.
long getSegmentationCount();
// Distinct documents that received newly texted blocks in the last 7 days.
long getTranscriptionCount();
}

View File

@@ -1,24 +0,0 @@
package org.raddatz.familienarchiv.security;
import org.raddatz.familienarchiv.exception.DomainException;
import org.raddatz.familienarchiv.model.AppUser;
import org.raddatz.familienarchiv.service.UserService;
import org.springframework.security.core.Authentication;
import java.util.UUID;
/**
 * Static helpers for resolving the current user from a Spring Security context.
 * Not instantiable.
 */
public final class SecurityUtils {

private SecurityUtils() {
}

/**
 * Resolves the id of the currently authenticated user or fails with an
 * unauthorized {@code DomainException}.
 * NOTE(review): assumes the principal name returned by
 * {@code authentication.getName()} is the user's email — confirm this matches
 * how the UserDetailsService builds the principal.
 *
 * @param authentication current authentication, may be null when anonymous
 * @param userService    lookup service for application users
 * @return id of the resolved user
 */
public static UUID requireUserId(Authentication authentication, UserService userService) {
boolean authenticated = authentication != null && authentication.isAuthenticated();
if (!authenticated) {
throw DomainException.unauthorized("Authentication required");
}
AppUser account = userService.findByEmail(authentication.getName());
if (account == null) {
throw DomainException.unauthorized("User not found");
}
return account.getId();
}
}

View File

@@ -2,8 +2,6 @@ package org.raddatz.familienarchiv.service;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.raddatz.familienarchiv.audit.AuditKind;
import org.raddatz.familienarchiv.audit.AuditService;
import org.raddatz.familienarchiv.dto.CreateAnnotationDTO; import org.raddatz.familienarchiv.dto.CreateAnnotationDTO;
import org.raddatz.familienarchiv.dto.UpdateAnnotationDTO; import org.raddatz.familienarchiv.dto.UpdateAnnotationDTO;
import org.raddatz.familienarchiv.exception.DomainException; import org.raddatz.familienarchiv.exception.DomainException;
@@ -16,7 +14,6 @@ import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.UUID; import java.util.UUID;
@Slf4j @Slf4j
@@ -26,7 +23,6 @@ public class AnnotationService {
private final AnnotationRepository annotationRepository; private final AnnotationRepository annotationRepository;
private final TranscriptionBlockRepository blockRepository; private final TranscriptionBlockRepository blockRepository;
private final AuditService auditService;
public List<DocumentAnnotation> listAnnotations(UUID documentId) { public List<DocumentAnnotation> listAnnotations(UUID documentId) {
return annotationRepository.findByDocumentId(documentId); return annotationRepository.findByDocumentId(documentId);
@@ -46,10 +42,7 @@ public class AnnotationService {
.createdBy(userId) .createdBy(userId)
.build(); .build();
DocumentAnnotation saved = annotationRepository.save(annotation); return annotationRepository.save(annotation);
auditService.logAfterCommit(AuditKind.ANNOTATION_CREATED, userId, saved.getDocumentId(),
Map.of("pageNumber", saved.getPageNumber()));
return saved;
} }
@Transactional @Transactional

View File

@@ -1,23 +1,17 @@
package org.raddatz.familienarchiv.service; package org.raddatz.familienarchiv.service;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import org.raddatz.familienarchiv.audit.AuditKind;
import org.raddatz.familienarchiv.audit.AuditService;
import org.raddatz.familienarchiv.dto.MentionDTO; import org.raddatz.familienarchiv.dto.MentionDTO;
import org.raddatz.familienarchiv.exception.DomainException; import org.raddatz.familienarchiv.exception.DomainException;
import org.raddatz.familienarchiv.exception.ErrorCode; import org.raddatz.familienarchiv.exception.ErrorCode;
import org.raddatz.familienarchiv.model.AppUser; import org.raddatz.familienarchiv.model.AppUser;
import org.raddatz.familienarchiv.model.DocumentComment; import org.raddatz.familienarchiv.model.DocumentComment;
import org.raddatz.familienarchiv.model.TranscriptionBlock;
import org.raddatz.familienarchiv.repository.CommentRepository; import org.raddatz.familienarchiv.repository.CommentRepository;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashSet; import java.util.LinkedHashSet;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.UUID; import java.util.UUID;
@@ -28,16 +22,16 @@ public class CommentService {
private final CommentRepository commentRepository; private final CommentRepository commentRepository;
private final UserService userService; private final UserService userService;
private final NotificationService notificationService; private final NotificationService notificationService;
private final AuditService auditService;
private final TranscriptionService transcriptionService;
public Map<UUID, UUID> findAnnotationIdsByIds(Collection<UUID> commentIds) { public List<DocumentComment> getCommentsForDocument(UUID documentId) {
if (commentIds == null || commentIds.isEmpty()) return Map.of(); List<DocumentComment> roots =
Map<UUID, UUID> result = new HashMap<>(); commentRepository.findByDocumentIdAndAnnotationIdIsNullAndParentIdIsNull(documentId);
for (DocumentComment c : commentRepository.findAllById(commentIds)) { return withRepliesAndMentions(roots);
if (c.getAnnotationId() != null) result.put(c.getId(), c.getAnnotationId()); }
}
return result; public List<DocumentComment> getCommentsForAnnotation(UUID annotationId) {
List<DocumentComment> roots = commentRepository.findByAnnotationIdAndParentIdIsNull(annotationId);
return withRepliesAndMentions(roots);
} }
public List<DocumentComment> getCommentsForBlock(UUID blockId) { public List<DocumentComment> getCommentsForBlock(UUID blockId) {
@@ -48,11 +42,9 @@ public class CommentService {
@Transactional @Transactional
public DocumentComment postBlockComment(UUID documentId, UUID blockId, String content, public DocumentComment postBlockComment(UUID documentId, UUID blockId, String content,
List<UUID> mentionedUserIds, AppUser author) { List<UUID> mentionedUserIds, AppUser author) {
TranscriptionBlock block = transcriptionService.getBlock(documentId, blockId);
DocumentComment comment = DocumentComment.builder() DocumentComment comment = DocumentComment.builder()
.documentId(documentId) .documentId(documentId)
.blockId(blockId) .blockId(blockId)
.annotationId(block.getAnnotationId())
.content(content) .content(content)
.authorId(author.getId()) .authorId(author.getId())
.authorName(resolveAuthorName(author)) .authorName(resolveAuthorName(author))
@@ -61,7 +53,23 @@ public class CommentService {
DocumentComment saved = commentRepository.save(comment); DocumentComment saved = commentRepository.save(comment);
withMentionDTOs(saved); withMentionDTOs(saved);
notificationService.notifyMentions(mentionedUserIds, saved); notificationService.notifyMentions(mentionedUserIds, saved);
logCommentPosted(author, documentId, saved, mentionedUserIds); return saved;
}
@Transactional
public DocumentComment postComment(UUID documentId, UUID annotationId, String content,
List<UUID> mentionedUserIds, AppUser author) {
DocumentComment comment = DocumentComment.builder()
.documentId(documentId)
.annotationId(annotationId)
.content(content)
.authorId(author.getId())
.authorName(resolveAuthorName(author))
.build();
saveMentions(comment, mentionedUserIds);
DocumentComment saved = commentRepository.save(comment);
withMentionDTOs(saved);
notificationService.notifyMentions(mentionedUserIds, saved);
return saved; return saved;
} }
@@ -93,7 +101,6 @@ public class CommentService {
participantIds.remove(author.getId()); participantIds.remove(author.getId());
notificationService.notifyReply(saved, participantIds); notificationService.notifyReply(saved, participantIds);
notificationService.notifyMentions(mentionedUserIds, saved); notificationService.notifyMentions(mentionedUserIds, saved);
logCommentPosted(author, documentId, saved, mentionedUserIds);
return saved; return saved;
} }
@@ -164,22 +171,11 @@ public class CommentService {
ErrorCode.COMMENT_NOT_FOUND, "Comment not found: " + commentId)); ErrorCode.COMMENT_NOT_FOUND, "Comment not found: " + commentId));
} }
private void logCommentPosted(AppUser author, UUID documentId, DocumentComment saved, List<UUID> mentionedUserIds) {
UUID actorId = author != null ? author.getId() : null;
String commentId = saved.getId().toString();
auditService.logAfterCommit(AuditKind.COMMENT_ADDED, actorId, documentId, Map.of("commentId", commentId));
if (mentionedUserIds != null) {
mentionedUserIds.forEach(mentionedUserId ->
auditService.logAfterCommit(AuditKind.MENTION_CREATED, actorId, documentId,
Map.of("commentId", commentId, "mentionedUserId", mentionedUserId.toString())));
}
}
private String resolveAuthorName(AppUser author) { private String resolveAuthorName(AppUser author) {
String first = author.getFirstName(); String first = author.getFirstName();
String last = author.getLastName(); String last = author.getLastName();
if ((first == null || first.isBlank()) && (last == null || last.isBlank())) { if ((first == null || first.isBlank()) && (last == null || last.isBlank())) {
return author.getEmail(); return author.getUsername();
} }
return ((first != null ? first : "") + " " + (last != null ? last : "")).strip(); return ((first != null ? first : "") + " " + (last != null ? last : "")).strip();
} }

View File

@@ -29,22 +29,24 @@ public class CustomUserDetailsService implements UserDetailsService {
private final AppUserRepository userRepository; private final AppUserRepository userRepository;
@Override @Override
public UserDetails loadUserByUsername(String email) throws UsernameNotFoundException { public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
AppUser appUser = userRepository.findByEmail(email) AppUser appUser = userRepository.findByUsername(username)
.orElseThrow(() -> new UsernameNotFoundException("User nicht gefunden: " + email)); .orElseThrow(() -> new UsernameNotFoundException("User nicht gefunden: " + username));
// Collect all permissions from all groups; warn about any that don't match a known Permission enum value
var authorities = appUser.getGroups().stream() var authorities = appUser.getGroups().stream()
.flatMap(group -> group.getPermissions().stream()) .flatMap(group -> group.getPermissions().stream())
.peek(p -> { .peek(p -> {
if (!KNOWN_PERMISSIONS.contains(p)) { if (!KNOWN_PERMISSIONS.contains(p)) {
log.warn("Unknown permission '{}' found in database for user '{}' — it will be granted but never matched by @RequirePermission", p, appUser.getEmail()); log.warn("Unknown permission '{}' found in database for user '{}' — it will be granted but never matched by @RequirePermission", p, appUser.getUsername());
} }
}) })
.map(SimpleGrantedAuthority::new) .map(SimpleGrantedAuthority::new)
.collect(Collectors.toSet()); .collect(Collectors.toSet());
// Rückgabe des Standard Spring Security User Objekts
return new User( return new User(
appUser.getEmail(), appUser.getUsername(),
appUser.getPassword(), appUser.getPassword(),
appUser.isEnabled(), appUser.isEnabled(),
true, true, true, true, true, true,

View File

@@ -3,31 +3,17 @@ package org.raddatz.familienarchiv.service;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
import org.raddatz.familienarchiv.audit.AuditKind;
import org.raddatz.familienarchiv.audit.AuditLogQueryService;
import org.raddatz.familienarchiv.audit.AuditService;
import org.raddatz.familienarchiv.dto.DocumentBatchMetadataDTO;
import org.raddatz.familienarchiv.dto.DocumentBatchSummary;
import org.raddatz.familienarchiv.dto.DocumentBulkEditDTO;
import org.raddatz.familienarchiv.dto.DocumentSearchItem;
import org.raddatz.familienarchiv.dto.DocumentSearchResult;
import org.raddatz.familienarchiv.dto.DocumentSort;
import org.raddatz.familienarchiv.dto.DocumentUpdateDTO; import org.raddatz.familienarchiv.dto.DocumentUpdateDTO;
import org.raddatz.familienarchiv.dto.IncompleteDocumentDTO; import org.raddatz.familienarchiv.dto.IncompleteDocumentDTO;
import org.raddatz.familienarchiv.dto.MatchOffset;
import org.raddatz.familienarchiv.dto.SearchMatchData;
import org.raddatz.familienarchiv.dto.TagOperator;
import org.raddatz.familienarchiv.model.Document; import org.raddatz.familienarchiv.model.Document;
import org.raddatz.familienarchiv.dto.DocumentSort;
import org.raddatz.familienarchiv.model.DocumentStatus; import org.raddatz.familienarchiv.model.DocumentStatus;
import org.raddatz.familienarchiv.model.ScriptType; import org.raddatz.familienarchiv.model.ScriptType;
import org.raddatz.familienarchiv.model.TrainingLabel; import org.raddatz.familienarchiv.model.TrainingLabel;
import org.raddatz.familienarchiv.model.Person; import org.raddatz.familienarchiv.model.Person;
import org.raddatz.familienarchiv.model.Tag; import org.raddatz.familienarchiv.model.Tag;
import org.raddatz.familienarchiv.repository.DocumentRepository; import org.raddatz.familienarchiv.repository.DocumentRepository;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort;
import org.springframework.data.jpa.domain.Specification; import org.springframework.data.jpa.domain.Specification;
import org.raddatz.familienarchiv.exception.DomainException; import org.raddatz.familienarchiv.exception.DomainException;
@@ -66,10 +52,6 @@ public class DocumentService {
private final TagService tagService; private final TagService tagService;
private final DocumentVersionService documentVersionService; private final DocumentVersionService documentVersionService;
private final AnnotationService annotationService; private final AnnotationService annotationService;
private final AuditService auditService;
private final TranscriptionBlockQueryService transcriptionBlockQueryService;
private final AuditLogQueryService auditLogQueryService;
private final ThumbnailAsyncRunner thumbnailAsyncRunner;
public record StoreResult(Document document, boolean isNew) {} public record StoreResult(Document document, boolean isNew) {}
@@ -89,7 +71,7 @@ public class DocumentService {
* - Wenn NEIN: Erstellt neuen Eintrag — isNew = true. * - Wenn NEIN: Erstellt neuen Eintrag — isNew = true.
*/ */
@Transactional @Transactional
public StoreResult storeDocument(MultipartFile file, UUID actorId) throws IOException { public StoreResult storeDocument(MultipartFile file) throws IOException {
String originalFilename = file.getOriginalFilename(); String originalFilename = file.getOriginalFilename();
// 1. Check for existing record (findFirst to survive duplicate filenames in the DB) // 1. Check for existing record (findFirst to survive duplicate filenames in the DB)
@@ -122,63 +104,11 @@ public class DocumentService {
document.setFilePath(upload.s3Key()); document.setFilePath(upload.s3Key());
document.setFileHash(upload.fileHash()); document.setFileHash(upload.fileHash());
document.setContentType(file.getContentType()); document.setContentType(file.getContentType());
boolean wasPlaceholder = document.getStatus() == DocumentStatus.PLACEHOLDER; if (document.getStatus() == DocumentStatus.PLACEHOLDER) {
if (wasPlaceholder) {
document.setStatus(DocumentStatus.UPLOADED); document.setStatus(DocumentStatus.UPLOADED);
} }
Document saved = documentRepository.save(document); return new StoreResult(documentRepository.save(document), isNew);
if (wasPlaceholder) {
auditService.logAfterCommit(AuditKind.FILE_UPLOADED, actorId, saved.getId(), null);
}
thumbnailAsyncRunner.dispatchAfterCommit(saved.getId());
return new StoreResult(saved, isNew);
}
/**
 * Validates an upload batch before any file is persisted.
 * Rejects batches over the hard cap and metadata whose title list is longer
 * than the file list.
 */
public void validateBatch(int fileCount, DocumentBatchMetadataDTO metadata) {
    // 50-file hard cap keeps FormData requests at a manageable size and protects against runaway bulk uploads.
    if (fileCount > 50) {
        throw DomainException.badRequest(ErrorCode.BATCH_TOO_LARGE, "Batch exceeds maximum of 50 files per request");
    }
    boolean titlesExceedFiles = metadata != null
            && metadata.getTitles() != null
            && metadata.getTitles().size() > fileCount;
    if (titlesExceedFiles) {
        throw DomainException.badRequest(ErrorCode.VALIDATION_ERROR, "titles count must not exceed files count");
    }
}
/**
 * Stores one file of an upload batch and applies the batch-level metadata
 * entry belonging to the file at {@code fileIndex}. Returns the persisted
 * document together with the is-new flag from the underlying store call.
 */
@Transactional
public StoreResult storeDocumentWithBatchMetadata(
        MultipartFile file, DocumentBatchMetadataDTO metadata, int fileIndex, UUID actorId) throws IOException {
    StoreResult stored = storeDocument(file, actorId);
    Document enriched = applyBatchMetadata(stored.document(), metadata, fileIndex);
    return new StoreResult(documentRepository.save(enriched), stored.isNew());
}
private Document applyBatchMetadata(Document doc, DocumentBatchMetadataDTO metadata, int fileIndex) {
if (metadata.getTitles() != null && fileIndex < metadata.getTitles().size()) {
doc.setTitle(metadata.getTitles().get(fileIndex));
}
if (metadata.getSenderId() != null) {
doc.setSender(personService.getById(metadata.getSenderId()));
}
if (metadata.getReceiverIds() != null && !metadata.getReceiverIds().isEmpty()) {
doc.setReceivers(new HashSet<>(personService.getAllById(metadata.getReceiverIds())));
}
if (metadata.getDocumentDate() != null) {
doc.setDocumentDate(metadata.getDocumentDate());
}
if (metadata.getLocation() != null) {
doc.setLocation(metadata.getLocation());
}
if (metadata.getMetadataComplete() != null) {
doc.setMetadataComplete(metadata.getMetadataComplete());
}
if (metadata.getTagNames() != null && !metadata.getTagNames().isEmpty()) {
UUID docId = doc.getId();
updateDocumentTags(docId, metadata.getTagNames());
doc = documentRepository.findById(docId)
.orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Not found after batch metadata: " + docId));
}
return doc;
} }
@Transactional @Transactional
@@ -240,8 +170,7 @@ public class DocumentService {
} }
// Datei // Datei
boolean fileUploaded = file != null && !file.isEmpty(); if (file != null && !file.isEmpty()) {
if (fileUploaded) {
FileService.UploadResult upload = fileService.uploadFile(file, file.getOriginalFilename()); FileService.UploadResult upload = fileService.uploadFile(file, file.getOriginalFilename());
doc.setFilePath(upload.s3Key()); doc.setFilePath(upload.s3Key());
doc.setFileHash(upload.fileHash()); doc.setFileHash(upload.fileHash());
@@ -251,19 +180,14 @@ public class DocumentService {
Document finalDoc = documentRepository.save(doc); Document finalDoc = documentRepository.save(doc);
documentVersionService.recordVersion(finalDoc); documentVersionService.recordVersion(finalDoc);
if (fileUploaded) {
thumbnailAsyncRunner.dispatchAfterCommit(finalDoc.getId());
}
return finalDoc; return finalDoc;
} }
@Transactional @Transactional
public Document updateDocument(UUID id, DocumentUpdateDTO dto, MultipartFile newFile, UUID actorId) throws IOException { public Document updateDocument(UUID id, DocumentUpdateDTO dto, MultipartFile newFile) throws IOException {
Document doc = documentRepository.findById(id) Document doc = documentRepository.findById(id)
.orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Document not found: " + id)); .orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Document not found: " + id));
DocumentStatus statusBefore = doc.getStatus();
// 1. Einfache Felder Update // 1. Einfache Felder Update
doc.setTitle(dto.getTitle()); doc.setTitle(dto.getTitle());
doc.setDocumentDate(dto.getDocumentDate()); doc.setDocumentDate(dto.getDocumentDate());
@@ -271,8 +195,6 @@ public class DocumentService {
doc.setTranscription(dto.getTranscription()); doc.setTranscription(dto.getTranscription());
doc.setSummary(dto.getSummary()); doc.setSummary(dto.getSummary());
doc.setDocumentLocation(dto.getDocumentLocation()); doc.setDocumentLocation(dto.getDocumentLocation());
doc.setArchiveBox(dto.getArchiveBox());
doc.setArchiveFolder(dto.getArchiveFolder());
List<String> tags = new ArrayList<>(); List<String> tags = new ArrayList<>();
if (dto.getTags() != null && !dto.getTags().isBlank()) { if (dto.getTags() != null && !dto.getTags().isBlank()) {
@@ -308,8 +230,7 @@ public class DocumentService {
} }
// 4. Datei austauschen (nur wenn eine neue ausgewählt wurde) // 4. Datei austauschen (nur wenn eine neue ausgewählt wurde)
boolean fileReplaced = newFile != null && !newFile.isEmpty(); if (newFile != null && !newFile.isEmpty()) {
if (fileReplaced) {
FileService.UploadResult upload = fileService.uploadFile(newFile, newFile.getOriginalFilename()); FileService.UploadResult upload = fileService.uploadFile(newFile, newFile.getOriginalFilename());
doc.setFilePath(upload.s3Key()); doc.setFilePath(upload.s3Key());
doc.setFileHash(upload.fileHash()); doc.setFileHash(upload.fileHash());
@@ -320,161 +241,26 @@ public class DocumentService {
Document saved = documentRepository.save(doc); Document saved = documentRepository.save(doc);
documentVersionService.recordVersion(saved); documentVersionService.recordVersion(saved);
if (saved.getStatus() != statusBefore) {
auditService.logAfterCommit(AuditKind.STATUS_CHANGED, actorId, saved.getId(),
Map.of("oldStatus", statusBefore.name(), "newStatus", saved.getStatus().name()));
} else {
auditService.logAfterCommit(AuditKind.METADATA_UPDATED, actorId, saved.getId(), null);
}
if (fileReplaced) {
thumbnailAsyncRunner.dispatchAfterCommit(saved.getId());
}
return saved; return saved;
} }
public Document updateDocumentTags(UUID docId, List<String> tagNames) { public Document updateDocumentTags(UUID docId, List<String> tagNames) {
Document doc = documentRepository.findById(docId) Document doc = documentRepository.findById(docId)
.orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Document not found: " + docId)); .orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Document not found: " + docId));
doc.setTags(resolveTags(tagNames));
return documentRepository.save(doc);
}
/** Set<Tag> newTags = new HashSet<>();
* Resolves a list of tag-name strings to {@link Tag} entities, trimming
* whitespace and skipping blank entries. Single source of truth for
* "name string → Tag" so the find-or-create policy stays consistent
* across single-doc updates ({@link #updateDocumentTags}), bulk edits
* ({@link #applyBulkEditToDocument}), and the upload-batch path
* ({@code applyBatchMetadata}).
*/
private Set<Tag> resolveTags(List<String> tagNames) {
if (tagNames == null || tagNames.isEmpty()) return new HashSet<>();
Set<Tag> resolved = new HashSet<>();
for (String name : tagNames) { for (String name : tagNames) {
// Clean the string
String cleanName = name.trim(); String cleanName = name.trim();
if (cleanName.isEmpty()) continue; if (cleanName.isEmpty())
resolved.add(tagService.findOrCreate(cleanName)); continue;
}
return resolved;
}
/** newTags.add(tagService.findOrCreate(cleanName));
* Returns all document IDs matching the given filter parameters, ignoring
* pagination. Used by the bulk-edit "Alle X editieren" fast path so the
* frontend can replace the selection with every match across pages in one
* round-trip.
*/
@Transactional(readOnly = true)
public List<UUID> findIdsForFilter(String text, LocalDate from, LocalDate to, UUID sender, UUID receiver,
List<String> tags, String tagQ, DocumentStatus status, TagOperator tagOperator) {
boolean hasText = StringUtils.hasText(text);
List<UUID> rankedIds = null;
if (hasText) {
rankedIds = documentRepository.findRankedIdsByFts(text);
if (rankedIds.isEmpty()) return List.of();
} }
Specification<Document> spec = buildSearchSpec( doc.setTags(newTags);
hasText, rankedIds, from, to, sender, receiver, tags, tagQ, status, tagOperator); return documentRepository.save(doc);
return documentRepository.findAll(spec).stream().map(Document::getId).toList();
}
/**
 * Single source of truth for the search Specification chain, shared by
 * {@link #searchDocuments} (paged + sorted) and {@link #findIdsForFilter}
 * (uncapped, ID-only). Callers perform their own FTS short-circuit when the
 * full-text query returned no rows; {@code ftsIds} is only consulted when
 * {@code hasText} is true.
 */
private Specification<Document> buildSearchSpec(boolean hasText, List<UUID> ftsIds,
                                                LocalDate from, LocalDate to,
                                                UUID sender, UUID receiver,
                                                List<String> tags, String tagQ,
                                                DocumentStatus status, TagOperator tagOperator) {
    // Tag names are expanded to descendant-ID sets so a parent tag matches its whole subtree.
    List<Set<UUID>> descendantIdSets = tagService.expandTagNamesToDescendantIdSets(tags);
    Specification<Document> textSpec;
    if (hasText) {
        textSpec = hasIds(ftsIds);
    } else {
        textSpec = (root, query, cb) -> null; // no-op predicate when no text search is active
    }
    return Specification.where(textSpec)
            .and(isBetween(from, to))
            .and(hasSender(sender))
            .and(hasReceiver(receiver))
            .and(hasTags(descendantIdSets, tagOperator == TagOperator.OR))
            .and(hasTagPartial(tagQ))
            .and(hasStatus(status));
}
/**
 * Returns lightweight summaries (id, title, server PDF URL) for the given
 * document IDs. Unknown IDs are silently dropped — the consumer is the
 * bulk-edit page's left strip, where missing previews are already obvious;
 * surfacing them as errors here adds no value. Falls back to the original
 * filename when a document has no title.
 */
@Transactional(readOnly = true)
public List<DocumentBatchSummary> batchMetadata(List<UUID> ids) {
    if (ids == null || ids.isEmpty()) {
        return List.of();
    }
    List<DocumentBatchSummary> summaries = new ArrayList<>();
    for (Document d : documentRepository.findAllById(ids)) {
        String title = (d.getTitle() != null) ? d.getTitle() : d.getOriginalFilename();
        summaries.add(new DocumentBatchSummary(d.getId(), title, "/api/documents/" + d.getId() + "/file"));
    }
    return List.copyOf(summaries);
}
/**
* Applies a bulk-edit DTO to a single document atomically.
* Tags and receivers are additive (merged into existing sets); sender and the
* three location fields are replace-on-non-blank (null/blank means "no change").
* Wrapped in its own transaction so a failure on one document never partially
* mutates another in the controller's batch loop.
*
* Each successful update emits a {@link AuditKind#METADATA_UPDATED} audit
* event tagged {@code source=BULK_EDIT} and writes a row to
* {@code document_versions} so the family archive's "who changed what"
* trail stays complete across both single- and bulk-doc edit paths.
*
* NOTE on N+1: tag and person resolution happens per-document. With 500
* documents × 10 tags this fans out to ~5000 tag-resolve queries per
* request. Acceptable today because the family archive is bounded at
* ~1500 documents total. Tracked as a perf follow-up.
*/
@Transactional
public Document applyBulkEditToDocument(UUID id, DocumentBulkEditDTO dto, UUID actorId) {
Document doc = documentRepository.findById(id)
.orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Document not found: " + id));
if (dto.getTagNames() != null && !dto.getTagNames().isEmpty()) {
Set<Tag> merged = new HashSet<>(doc.getTags());
merged.addAll(resolveTags(dto.getTagNames()));
doc.setTags(merged);
}
if (dto.getSenderId() != null) {
doc.setSender(personService.getById(dto.getSenderId()));
}
if (dto.getReceiverIds() != null && !dto.getReceiverIds().isEmpty()) {
Set<Person> merged = new HashSet<>(doc.getReceivers());
merged.addAll(personService.getAllById(dto.getReceiverIds()));
doc.setReceivers(merged);
}
if (StringUtils.hasText(dto.getDocumentLocation())) {
doc.setDocumentLocation(dto.getDocumentLocation());
}
if (StringUtils.hasText(dto.getArchiveBox())) {
doc.setArchiveBox(dto.getArchiveBox());
}
if (StringUtils.hasText(dto.getArchiveFolder())) {
doc.setArchiveFolder(dto.getArchiveFolder());
}
Document saved = documentRepository.save(doc);
documentVersionService.recordVersion(saved);
auditService.logAfterCommit(AuditKind.METADATA_UPDATED, actorId, saved.getId(),
Map.of("source", "BULK_EDIT"));
return saved;
} }
/** /**
@@ -496,33 +282,6 @@ public class DocumentService {
return documentRepository.save(doc); return documentRepository.save(doc);
} }
@Transactional
public Document attachFile(UUID id, MultipartFile file, UUID actorId) {
Document doc = documentRepository.findById(id)
.orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Document not found: " + id));
FileService.UploadResult upload;
try {
upload = fileService.uploadFile(file, file.getOriginalFilename());
} catch (IOException e) {
throw DomainException.internal(ErrorCode.FILE_UPLOAD_FAILED, "Failed to upload file: " + e.getMessage());
}
doc.setFilePath(upload.s3Key());
doc.setFileHash(upload.fileHash());
doc.setOriginalFilename(file.getOriginalFilename());
doc.setContentType(file.getContentType());
boolean wasPlaceholder = doc.getStatus() == DocumentStatus.PLACEHOLDER;
if (wasPlaceholder) {
doc.setStatus(DocumentStatus.UPLOADED);
}
Document saved = documentRepository.save(doc);
documentVersionService.recordVersion(saved);
thumbnailAsyncRunner.dispatchAfterCommit(saved.getId());
if (wasPlaceholder) {
auditService.logAfterCommit(AuditKind.FILE_UPLOADED, actorId, saved.getId(), null);
}
return saved;
}
// 0. Zuletzt aktive Dokumente (sortiert nach updatedAt DESC) // 0. Zuletzt aktive Dokumente (sortiert nach updatedAt DESC)
public List<Document> getRecentActivity(int size) { public List<Document> getRecentActivity(int size) {
return documentRepository.findAll( return documentRepository.findAll(
@@ -531,30 +290,33 @@ public class DocumentService {
} }
// 1. Allgemeine Suche (für das Suchfeld im Frontend) // 1. Allgemeine Suche (für das Suchfeld im Frontend)
public DocumentSearchResult searchDocuments(String text, LocalDate from, LocalDate to, UUID sender, UUID receiver, List<String> tags, String tagQ, DocumentStatus status, DocumentSort sort, String dir, TagOperator tagOperator, Pageable pageable) { public List<Document> searchDocuments(String text, LocalDate from, LocalDate to, UUID sender, UUID receiver, List<String> tags, String tagQ, DocumentStatus status, DocumentSort sort, String dir) {
boolean hasText = StringUtils.hasText(text); boolean hasText = StringUtils.hasText(text);
List<UUID> rankedIds = null; List<UUID> rankedIds = null;
if (hasText) { if (hasText) {
rankedIds = documentRepository.findRankedIdsByFts(text); rankedIds = documentRepository.findRankedIdsByFts(text);
if (rankedIds.isEmpty()) return DocumentSearchResult.of(List.of()); if (rankedIds.isEmpty()) return List.of();
} }
Specification<Document> spec = buildSearchSpec( Specification<Document> textSpec = hasText ? hasIds(rankedIds) : (root, query, cb) -> null;
hasText, rankedIds, from, to, sender, receiver, tags, tagQ, status, tagOperator); Specification<Document> spec = Specification.where(textSpec)
.and(isBetween(from, to))
.and(hasSender(sender))
.and(hasReceiver(receiver))
.and(hasTags(tags))
.and(hasTagPartial(tagQ))
.and(hasStatus(status));
// SENDER, RECEIVER and RELEVANCE sorts load the full match set and slice in memory. // SENDER and RECEIVER are sorted in-memory because JPA's Sort.by("sender.lastName")
// JPA's Sort.by("sender.lastName") generates an INNER JOIN that silently drops // generates an INNER JOIN that silently drops documents with null sender/receivers.
// documents with null sender/receivers; RELEVANCE maps a DB order to an external
// rank list. Cost scales linearly with match count — acceptable while documents
// stays under ~10k rows. Past that, replace with SQL-level LEFT JOIN sort.
if (sort == DocumentSort.RECEIVER) { if (sort == DocumentSort.RECEIVER) {
List<Document> sorted = sortByFirstReceiver(documentRepository.findAll(spec), dir); List<Document> results = documentRepository.findAll(spec);
return buildResultPaged(pageSlice(sorted, pageable), text, pageable, sorted.size()); return sortByFirstReceiver(results, dir);
} }
if (sort == DocumentSort.SENDER) { if (sort == DocumentSort.SENDER) {
List<Document> sorted = sortBySender(documentRepository.findAll(spec), dir); List<Document> results = documentRepository.findAll(spec);
return buildResultPaged(pageSlice(sorted, pageable), text, pageable, sorted.size()); return sortBySender(results, dir);
} }
// RELEVANCE: default when text present and no explicit sort given // RELEVANCE: default when text present and no explicit sort given
@@ -563,47 +325,14 @@ public class DocumentService {
List<Document> results = documentRepository.findAll(spec); List<Document> results = documentRepository.findAll(spec);
Map<UUID, Integer> rankMap = new HashMap<>(); Map<UUID, Integer> rankMap = new HashMap<>();
for (int i = 0; i < rankedIds.size(); i++) rankMap.put(rankedIds.get(i), i); for (int i = 0; i < rankedIds.size(); i++) rankMap.put(rankedIds.get(i), i);
List<Document> sorted = results.stream() return results.stream()
.sorted(Comparator.comparingInt( .sorted(Comparator.comparingInt(
doc -> rankMap.getOrDefault(doc.getId(), Integer.MAX_VALUE))) doc -> rankMap.getOrDefault(doc.getId(), Integer.MAX_VALUE)))
.toList(); .toList();
return buildResultPaged(pageSlice(sorted, pageable), text, pageable, sorted.size());
} }
// Fast path — push sort + paging into the DB and enrich only the returned slice. Sort springSort = resolveSort(sort, dir);
PageRequest pageRequest = PageRequest.of(pageable.getPageNumber(), pageable.getPageSize(), resolveSort(sort, dir)); return documentRepository.findAll(spec, springSort);
Page<Document> page = documentRepository.findAll(spec, pageRequest);
return buildResultPaged(page.getContent(), text, pageable, page.getTotalElements());
}
/**
 * Returns the sub-list of {@code sorted} addressed by the given page request,
 * clamped to the list bounds so out-of-range pages yield an empty slice
 * instead of throwing.
 */
private static <T> List<T> pageSlice(List<T> sorted, Pageable pageable) {
    int total = sorted.size();
    int start = Math.min((int) pageable.getOffset(), total);
    int end = Math.min(start + pageable.getPageSize(), total);
    return sorted.subList(start, end);
}
/**
 * Wraps an already-sliced page of documents into a paged {@link DocumentSearchResult},
 * enriching each entry (match highlights, completion stats, contributors) via
 * {@link #enrichItems} before packaging.
 */
private DocumentSearchResult buildResultPaged(List<Document> slice, String text, Pageable pageable, long totalElements) {
return DocumentSearchResult.paged(enrichItems(slice, text), pageable, totalElements);
}
/**
 * Converts raw documents into search items carrying per-document enrichment:
 * resolved tag colors, text-match highlight data, transcription completion
 * percentage, and recent contributors. Missing enrichment entries fall back to
 * neutral defaults (empty match data, 0 %, no contributors).
 */
private List<DocumentSearchItem> enrichItems(List<Document> documents, String text) {
    List<Document> docsWithColors = resolveDocumentTagColors(documents);
    List<UUID> docIds = docsWithColors.stream().map(Document::getId).toList();
    Map<UUID, SearchMatchData> matchByDoc = enrichWithMatchData(docsWithColors, text);
    Map<UUID, Integer> completionByDoc = fetchCompletionPercentages(docIds);
    Map<UUID, List<ActivityActorDTO>> contributorsByDoc = auditLogQueryService.findRecentContributorsPerDocument(docIds);
    List<DocumentSearchItem> items = new ArrayList<>(docsWithColors.size());
    for (Document doc : docsWithColors) {
        UUID docId = doc.getId();
        items.add(new DocumentSearchItem(
                doc,
                matchByDoc.getOrDefault(docId, SearchMatchData.empty()),
                completionByDoc.getOrDefault(docId, 0),
                contributorsByDoc.getOrDefault(docId, List.of())));
    }
    return List.copyOf(items);
}
/**
 * Returns the transcription-completion percentage per document ID, delegating
 * to {@link TranscriptionBlockQueryService#getCompletionStats}.
 */
private Map<UUID, Integer> fetchCompletionPercentages(List<UUID> docIds) {
return transcriptionBlockQueryService.getCompletionStats(docIds);
}
private Sort resolveSort(DocumentSort sort, String dir) { private Sort resolveSort(DocumentSort sort, String dir) {
@@ -694,14 +423,8 @@ public class DocumentService {
} }
public Document getDocumentById(UUID id) { public Document getDocumentById(UUID id) {
Document doc = documentRepository.findById(id) return documentRepository.findById(id)
.orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Document not found: " + id)); .orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Document not found: " + id));
tagService.resolveEffectiveColors(doc.getTags());
return doc;
}
public List<Document> getDocumentsByIds(List<UUID> ids) {
return documentRepository.findAllById(ids);
} }
public List<Document> getDocumentsWithoutVersions() { public List<Document> getDocumentsWithoutVersions() {
@@ -733,7 +456,7 @@ public class DocumentService {
PageRequest pageable = PageRequest.of(0, size, Sort.by(Sort.Direction.DESC, "createdAt")); PageRequest pageable = PageRequest.of(0, size, Sort.by(Sort.Direction.DESC, "createdAt"));
return documentRepository.findByMetadataCompleteFalse(pageable) return documentRepository.findByMetadataCompleteFalse(pageable)
.stream() .stream()
.map(doc -> new IncompleteDocumentDTO(doc.getId(), doc.getTitle(), doc.getCreatedAt())) .map(doc -> new IncompleteDocumentDTO(doc.getId(), doc.getTitle()))
.toList(); .toList();
} }
@@ -780,12 +503,6 @@ public class DocumentService {
// ─── private helpers ────────────────────────────────────────────────────── // ─── private helpers ──────────────────────────────────────────────────────
private List<Document> resolveDocumentTagColors(List<Document> docs) {
List<Tag> allTags = docs.stream().flatMap(d -> d.getTags().stream()).toList();
tagService.resolveEffectiveColors(allTags);
return docs;
}
private static String stripExtension(String filename) { private static String stripExtension(String filename) {
if (filename == null) return null; if (filename == null) return null;
int dot = filename.lastIndexOf('.'); int dot = filename.lastIndexOf('.');
@@ -867,93 +584,6 @@ public class DocumentService {
return null; return null;
} }
/**
 * Calls {@code findEnrichmentData} and converts the raw {@code Object[]} rows
 * into one {@link SearchMatchData} per document. Row layout (by index):
 * 0=docId, 1=title headline, 2=snippet headline, 3=sender matched,
 * 4=receiver IDs CSV, 5=tag IDs CSV, 6=summary headline. Short-circuits to an
 * empty map when the list is empty or the query is blank (no text search
 * active).
 */
private Map<UUID, SearchMatchData> enrichWithMatchData(List<Document> docs, String query) {
    if (docs.isEmpty() || !StringUtils.hasText(query)) {
        return Map.of();
    }
    List<UUID> docIds = docs.stream().map(Document::getId).toList();
    Map<UUID, SearchMatchData> matchByDoc = new HashMap<>();
    for (Object[] row : documentRepository.findEnrichmentData(docIds, query)) {
        UUID docId = (UUID) row[0];
        ParsedHighlight snippet = parseHighlight((String) row[2]);
        ParsedHighlight summary = parseHighlight((String) row[6]);
        Boolean senderMatched = (Boolean) row[3];
        matchByDoc.put(docId, new SearchMatchData(
                snippet == null ? null : snippet.cleanText(),
                parseTitleOffsets((String) row[1]),
                Boolean.TRUE.equals(senderMatched),
                parseUUIDs((String) row[4]),
                parseUUIDs((String) row[5]),
                snippet == null ? List.of() : snippet.offsets(),
                summary == null ? null : summary.cleanText(),
                summary == null ? List.of() : summary.offsets()
        ));
    }
    return matchByDoc;
}
/**
 * Clean text + highlight offsets parsed from a {@code ts_headline}
 * sentinel-delimited string. {@code cleanText} has the delimiters stripped;
 * {@code offsets} holds the character positions of each highlighted span
 * within that clean text.
 */
public record ParsedHighlight(String cleanText, List<MatchOffset> offsets) {}
/**
* Parses a {@code ts_headline} result that uses {@code chr(1)}/{@code chr(2)} as
* start/stop delimiters. Returns the clean text (delimiters stripped) together with
* the character offsets of each highlighted span. Returns {@code null} when
* {@code headline} is {@code null}.
*/
public static ParsedHighlight parseHighlight(String headline) {
if (headline == null) return null;
StringBuilder clean = new StringBuilder(headline.length());
List<MatchOffset> offsets = new ArrayList<>();
// i walks the raw headline; pos tracks the corresponding index in the clean
// (delimiter-free) output, so offsets refer to positions in cleanText.
int i = 0;
int pos = 0; // position in the clean string (no delimiters)
while (i < headline.length()) {
char c = headline.charAt(i);
if (c == '\u0001') {
// Start of a highlighted span: copy chars verbatim until the stop
// sentinel (or end of input), then record [start, length) in clean-text
// coordinates. An unterminated span still yields an offset.
int start = pos;
i++;
while (i < headline.length() && headline.charAt(i) != '\u0002') {
clean.append(headline.charAt(i));
i++;
pos++;
}
offsets.add(new MatchOffset(start, pos - start));
i++; // skip \u0002
} else {
// Ordinary character outside any span — copied through unchanged.
clean.append(c);
i++;
pos++;
}
}
return new ParsedHighlight(clean.toString(), offsets);
}
/**
 * Extracts only the {@link MatchOffset} list from a title headline; the clean
 * title text itself comes from the {@link Document} entity. Returns an empty
 * list for a {@code null} headline.
 */
private static List<MatchOffset> parseTitleOffsets(String headline) {
    ParsedHighlight parsed = parseHighlight(headline);
    if (parsed == null) {
        return List.of();
    }
    return parsed.offsets();
}
/**
 * Parses a comma-separated UUID string into a list, trimming entries and
 * skipping empty ones. Returns an empty list for {@code null}/blank input.
 */
private static List<UUID> parseUUIDs(String csv) {
    if (csv == null || csv.isBlank()) {
        return List.of();
    }
    List<UUID> ids = new ArrayList<>();
    for (String part : csv.split(",")) {
        String trimmed = part.trim();
        if (!trimmed.isEmpty()) {
            ids.add(UUID.fromString(trimmed));
        }
    }
    return List.copyOf(ids);
}
private static String sha256Hex(byte[] bytes) { private static String sha256Hex(byte[] bytes) {
try { try {
MessageDigest digest = MessageDigest.getInstance("SHA-256"); MessageDigest digest = MessageDigest.getInstance("SHA-256");
@@ -967,5 +597,4 @@ public class DocumentService {
throw new IllegalStateException("SHA-256 not available", e); throw new IllegalStateException("SHA-256 not available", e);
} }
} }
} }

View File

@@ -100,7 +100,7 @@ public class DocumentVersionService {
return null; return null;
} }
try { try {
return userService.findByEmail(auth.getName()); return userService.findByUsername(auth.getName());
} catch (Exception e) { } catch (Exception e) {
log.warn("Could not resolve editor for version snapshot: {}", e.getMessage()); log.warn("Could not resolve editor for version snapshot: {}", e.getMessage());
return null; return null;
@@ -114,7 +114,7 @@ public class DocumentVersionService {
if (first != null && !first.isBlank() && last != null && !last.isBlank()) { if (first != null && !first.isBlank() && last != null && !last.isBlank()) {
return first + " " + last; return first + " " + last;
} }
return user.getEmail(); return user.getUsername();
} }
private String serializeSnapshot(Document doc) { private String serializeSnapshot(Document doc) {

View File

@@ -112,27 +112,6 @@ public class FileService {
} }
} }
/**
 * Opens a streaming download from S3/MinIO. The caller is responsible for
 * closing the returned stream — typically via try-with-resources. Preferred
 * over {@link #downloadFileBytes(String)} for large files (multi-MB PDFs
 * during thumbnail generation) because it avoids loading the entire file into
 * heap memory.
 *
 * @throws StorageFileNotFoundException when no object exists under {@code s3Key}
 * @throws IOException                  on any other storage-layer failure
 */
public InputStream downloadFileStream(String s3Key) throws IOException {
    GetObjectRequest request = GetObjectRequest.builder()
            .bucket(bucketName)
            .key(s3Key)
            .build();
    try {
        return s3Client.getObject(request);
    } catch (NoSuchKeyException e) {
        throw new StorageFileNotFoundException("File not found in storage: " + s3Key);
    } catch (S3Exception e) {
        throw new IOException("Failed to open stream from storage: " + e.getMessage(), e);
    }
}
/** /**
* Generates a presigned URL for downloading an object from S3/MinIO. * Generates a presigned URL for downloading an object from S3/MinIO.
* Valid for 1 hour — covers multi-page documents on CPU-only OCR hardware * Valid for 1 hour — covers multi-page documents on CPU-only OCR hardware

View File

@@ -1,165 +0,0 @@
package org.raddatz.familienarchiv.service;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.raddatz.familienarchiv.dto.CreateInviteRequest;
import org.raddatz.familienarchiv.dto.InviteListItemDTO;
import org.raddatz.familienarchiv.dto.RegisterRequest;
import org.raddatz.familienarchiv.exception.DomainException;
import org.raddatz.familienarchiv.exception.ErrorCode;
import org.raddatz.familienarchiv.model.AppUser;
import org.raddatz.familienarchiv.model.InviteToken;
import org.raddatz.familienarchiv.model.UserGroup;
import org.raddatz.familienarchiv.repository.InviteTokenRepository;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.security.SecureRandom;
import java.util.*;
@Service
@RequiredArgsConstructor
@Slf4j
public class InviteService {

    /** Minimum accepted password length for invite-based registration. */
    static final int MIN_PASSWORD_LENGTH = 8;

    // Unambiguous upper-case alphanumerics; codes are generated from this set only.
    private static final String CODE_ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
    private static final int CODE_LENGTH = 10;
    private static final int MAX_CODE_ATTEMPTS = 10;
    private static final SecureRandom SECURE_RANDOM = new SecureRandom();

    private final InviteTokenRepository inviteTokenRepository;
    private final UserService userService;

    /**
     * Produces a random invite code that is not already stored.
     * Collisions are resolved by retrying up to {@code MAX_CODE_ATTEMPTS} times.
     *
     * @throws DomainException if every attempt collided with an existing code
     */
    public String generateCode() {
        int attempt = 0;
        while (attempt < MAX_CODE_ATTEMPTS) {
            String candidate = buildRandomCode();
            if (inviteTokenRepository.findByCode(candidate).isEmpty()) {
                return candidate;
            }
            attempt++;
        }
        throw DomainException.internal(ErrorCode.INTERNAL_ERROR, "Failed to generate unique invite code after " + MAX_CODE_ATTEMPTS + " attempts");
    }

    /**
     * Resolves an invite by code and verifies it is still redeemable.
     *
     * @throws DomainException when the code is unknown, revoked, expired, or exhausted
     */
    public InviteToken validateCode(String code) {
        Optional<InviteToken> found = inviteTokenRepository.findByCode(code);
        InviteToken token = found.orElseThrow(
                () -> DomainException.notFound(ErrorCode.INVITE_NOT_FOUND, "Invite not found: " + code));
        checkTokenState(token);
        return token;
    }

    /**
     * Creates and persists a new invite token from the admin-supplied request.
     * Requested group ids are resolved through {@link UserService} so that only
     * ids of existing groups end up on the token.
     */
    @Transactional
    public InviteToken createInvite(CreateInviteRequest dto, AppUser creator) {
        Set<UUID> resolvedGroupIds = new HashSet<>();
        if (dto.getGroupIds() != null && !dto.getGroupIds().isEmpty()) {
            for (UserGroup group : userService.findGroupsByIds(dto.getGroupIds())) {
                resolvedGroupIds.add(group.getId());
            }
        }
        InviteToken token = InviteToken.builder()
                .code(generateCode())
                .label(dto.getLabel())
                .maxUses(dto.getMaxUses())
                .prefillFirstName(dto.getPrefillFirstName())
                .prefillLastName(dto.getPrefillLastName())
                .prefillEmail(dto.getPrefillEmail())
                .groupIds(resolvedGroupIds)
                .expiresAt(dto.getExpiresAt())
                .createdBy(creator)
                .build();
        return inviteTokenRepository.save(token);
    }

    /**
     * Redeems an invite: validates the token (fetched with a pessimistic lock so
     * concurrent redemptions cannot exceed the use limit), enforces the password
     * policy, creates the user with the token's groups, and bumps the use counter.
     *
     * @return the newly created user
     * @throws DomainException on unknown/revoked/expired/exhausted codes or a too-short password
     */
    @Transactional
    public AppUser redeemInvite(RegisterRequest dto) {
        InviteToken token = inviteTokenRepository.findByCodeForUpdate(dto.getCode())
                .orElseThrow(() -> DomainException.notFound(ErrorCode.INVITE_NOT_FOUND, "Invite not found: " + dto.getCode()));
        checkTokenState(token);

        String password = dto.getPassword();
        if (password == null || password.length() < MIN_PASSWORD_LENGTH) {
            throw DomainException.badRequest(ErrorCode.VALIDATION_ERROR,
                    "Password must be at least " + MIN_PASSWORD_LENGTH + " characters");
        }

        AppUser user = userService.createUser(
                dto.getEmail(),
                password,
                dto.getFirstName(),
                dto.getLastName(),
                token.getGroupIds()
        );
        // NOTE(review): the same flag is passed for both preference arguments here;
        // confirm whether the second argument should come from a different
        // RegisterRequest field, or whether both preferences intentionally share it.
        userService.updateNotificationPreferences(user.getId(), dto.isNotifyOnMention(), dto.isNotifyOnMention());

        token.setUseCount(token.getUseCount() + 1);
        inviteTokenRepository.save(token);
        log.info("User {} registered via invite code {}", dto.getEmail(), dto.getCode());
        return user;
    }

    /**
     * Marks the given invite as revoked; redeeming it afterwards fails with INVITE_REVOKED.
     */
    @Transactional
    public void revokeInvite(UUID id) {
        InviteToken token = inviteTokenRepository.findById(id)
                .orElseThrow(() -> DomainException.notFound(ErrorCode.INVITE_NOT_FOUND, "Invite not found: " + id));
        token.setRevoked(true);
        inviteTokenRepository.save(token);
    }

    /**
     * Lists invites as display DTOs, optionally restricted to active ones.
     *
     * @param appBaseUrl base URL used to build each invite's shareable registration link
     */
    public List<InviteListItemDTO> listInvites(boolean activeOnly, String appBaseUrl) {
        List<InviteToken> tokens;
        if (activeOnly) {
            tokens = inviteTokenRepository.findActive();
        } else {
            tokens = inviteTokenRepository.findAllOrderedByCreatedAt();
        }
        return tokens.stream().map(t -> toListItemDTO(t, appBaseUrl)).toList();
    }

    /**
     * Maps a token to its list DTO, deriving a single display status with the
     * precedence revoked &gt; expired &gt; exhausted &gt; active.
     */
    public InviteListItemDTO toListItemDTO(InviteToken token, String appBaseUrl) {
        final String status;
        if (token.isRevoked()) {
            status = "revoked";
        } else if (token.isExpired()) {
            status = "expired";
        } else if (token.isExhausted()) {
            status = "exhausted";
        } else {
            status = "active";
        }
        return InviteListItemDTO.builder()
                .id(token.getId())
                .code(token.getCode())
                .displayCode(formatDisplayCode(token.getCode()))
                .label(token.getLabel())
                .useCount(token.getUseCount())
                .maxUses(token.getMaxUses())
                .expiresAt(token.getExpiresAt())
                .revoked(token.isRevoked())
                .status(status)
                .createdAt(token.getCreatedAt())
                .shareableUrl(appBaseUrl + "/register?code=" + token.getCode())
                .build();
    }

    /**
     * Rejects tokens that are no longer redeemable, in the same precedence order
     * used for the display status: revoked, then expired (HTTP 410), then exhausted.
     */
    private void checkTokenState(InviteToken token) {
        if (token.isRevoked()) {
            throw DomainException.conflict(ErrorCode.INVITE_REVOKED, "Invite has been revoked");
        }
        if (token.isExpired()) {
            throw new DomainException(ErrorCode.INVITE_EXPIRED, org.springframework.http.HttpStatus.GONE,
                    "Invite has expired");
        }
        if (token.isExhausted()) {
            throw DomainException.conflict(ErrorCode.INVITE_EXHAUSTED, "Invite use limit reached");
        }
    }

    // Draws CODE_LENGTH characters uniformly from CODE_ALPHABET using a CSPRNG.
    private String buildRandomCode() {
        char[] chars = new char[CODE_LENGTH];
        for (int i = 0; i < chars.length; i++) {
            chars[i] = CODE_ALPHABET.charAt(SECURE_RANDOM.nextInt(CODE_ALPHABET.length()));
        }
        return new String(chars);
    }

    /**
     * Formats a raw 10-character code as {@code XXXXX-XXXXX} for display.
     * Inputs of any other length (or null) are returned unchanged.
     */
    public static String formatDisplayCode(String code) {
        if (code == null || code.length() != CODE_LENGTH) {
            return code;
        }
        return code.substring(0, 5) + "-" + code.substring(5);
    }
}

View File

@@ -59,7 +59,6 @@ public class MassImportService {
private final PersonService personService; private final PersonService personService;
private final TagService tagService; private final TagService tagService;
private final S3Client s3Client; private final S3Client s3Client;
private final ThumbnailAsyncRunner thumbnailAsyncRunner;
@Value("${app.s3.bucket}") @Value("${app.s3.bucket}")
private String bucketName; private String bucketName;
@@ -333,10 +332,7 @@ public class MassImportService {
if (tag != null) doc.getTags().add(tag); if (tag != null) doc.getTags().add(tag);
doc.setMetadataComplete(metadataComplete); doc.setMetadataComplete(metadataComplete);
Document saved = documentRepository.save(doc); documentRepository.save(doc);
if (file.isPresent()) {
thumbnailAsyncRunner.dispatchAfterCommit(saved.getId());
}
log.info("Importiert{}: {}", file.isEmpty() ? " (nur Metadaten)" : "", originalFilename); log.info("Importiert{}: {}", file.isEmpty() ? " (nur Metadaten)" : "", originalFilename);
} }

View File

@@ -9,12 +9,10 @@ import org.raddatz.familienarchiv.repository.OcrJobRepository;
import org.springframework.scheduling.annotation.Async; import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.UUID; import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
@Component @Component
@RequiredArgsConstructor @RequiredArgsConstructor
@@ -31,7 +29,6 @@ public class OcrAsyncRunner {
private final OcrJobRepository ocrJobRepository; private final OcrJobRepository ocrJobRepository;
private final OcrJobDocumentRepository ocrJobDocumentRepository; private final OcrJobDocumentRepository ocrJobDocumentRepository;
private final OcrProgressService ocrProgressService; private final OcrProgressService ocrProgressService;
private final SenderModelService senderModelService;
@Async @Async
public void runSingleDocument(UUID jobId, UUID documentId, UUID userId) { public void runSingleDocument(UUID jobId, UUID documentId, UUID userId) {
@@ -71,18 +68,12 @@ public class OcrAsyncRunner {
String pdfUrl = fileService.generatePresignedUrl(doc.getFilePath()); String pdfUrl = fileService.generatePresignedUrl(doc.getFilePath());
String senderModelPath = null;
if (doc.getSender() != null && doc.getScriptType() == ScriptType.HANDWRITING_KURRENT) {
senderModelPath = senderModelService.maybeGetModelPath(doc.getSender().getId()).orElse(null);
}
AtomicInteger blockCounter = new AtomicInteger(0); AtomicInteger blockCounter = new AtomicInteger(0);
AtomicInteger currentPage = new AtomicInteger(0); AtomicInteger currentPage = new AtomicInteger(0);
AtomicInteger skippedPages = new AtomicInteger(0); AtomicInteger skippedPages = new AtomicInteger(0);
AtomicInteger totalPages = new AtomicInteger(0); AtomicInteger totalPages = new AtomicInteger(0);
final String finalSenderModelPath = senderModelPath; ocrClient.streamBlocks(pdfUrl, doc.getScriptType(), regions, event -> {
ocrClient.streamBlocks(pdfUrl, doc.getScriptType(), regions, finalSenderModelPath, event -> {
switch (event) { switch (event) {
case OcrStreamEvent.Start start -> { case OcrStreamEvent.Start start -> {
totalPages.set(start.totalPages()); totalPages.set(start.totalPages());
@@ -91,10 +82,6 @@ public class OcrAsyncRunner {
ocrJobDocumentRepository.save(jobDoc); ocrJobDocumentRepository.save(jobDoc);
} }
} }
case OcrStreamEvent.Preprocessing preprocessing -> {
updateProgress(job, "PREPROCESSING_PAGE:" + preprocessing.pageNumber()
+ ":" + totalPages.get());
}
case OcrStreamEvent.Page page -> { case OcrStreamEvent.Page page -> {
for (OcrBlockResult block : page.blocks()) { for (OcrBlockResult block : page.blocks()) {
createSingleBlock(documentId, block, userId, createSingleBlock(documentId, block, userId,
@@ -216,25 +203,7 @@ public class OcrAsyncRunner {
clearExistingBlocks(documentId); clearExistingBlocks(documentId);
String pdfUrl = fileService.generatePresignedUrl(doc.getFilePath()); String pdfUrl = fileService.generatePresignedUrl(doc.getFilePath());
List<OcrBlockResult> blocks = ocrClient.extractBlocks(pdfUrl, doc.getScriptType());
String senderModelPath = null;
if (doc.getSender() != null && doc.getScriptType() == ScriptType.HANDWRITING_KURRENT) {
senderModelPath = senderModelService.maybeGetModelPath(doc.getSender().getId()).orElse(null);
}
final AtomicReference<List<OcrBlockResult>> blocksRef = new AtomicReference<>();
final String finalSenderModelPath = senderModelPath;
ocrClient.streamBlocks(pdfUrl, doc.getScriptType(), null, finalSenderModelPath, event -> {
switch (event) {
case OcrStreamEvent.Page page -> {
blocksRef.compareAndSet(null, new ArrayList<>());
blocksRef.get().addAll(page.blocks());
}
default -> {}
}
});
List<OcrBlockResult> blocks = blocksRef.get() != null ? blocksRef.get() : List.of();
createTranscriptionBlocks(documentId, blocks, userId, doc.getFileHash()); createTranscriptionBlocks(documentId, blocks, userId, doc.getFileHash());
} }

View File

@@ -1,7 +1,6 @@
package org.raddatz.familienarchiv.service; package org.raddatz.familienarchiv.service;
import org.raddatz.familienarchiv.model.ScriptType; import org.raddatz.familienarchiv.model.ScriptType;
import org.springframework.lang.Nullable;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
@@ -38,27 +37,15 @@ public interface OcrClient {
TrainingResult segtrainModel(byte[] trainingDataZip); TrainingResult segtrainModel(byte[] trainingDataZip);
/** /**
* Fine-tune the Kurrent model for a specific sender. * Stream OCR results page-by-page via NDJSON. Implementations should override
* this method. The default exists only for backward compatibility during migration
* — it calls extractBlocks() and synthesizes events from the collected result.
* *
* @param trainingDataZip raw ZIP bytes produced by TrainingDataExportService.exportForSender() * @param regions optional list of pre-drawn annotation regions; when non-null,
* @param outputModelPath where to save the trained model (e.g. /app/models/sender_{uuid}.mlmodel) * the OCR service runs in guided mode (crop + recognize per region)
* @return training result metrics
*/
TrainingResult trainSenderModel(byte[] trainingDataZip, String outputModelPath);
/**
* Stream OCR results page-by-page via NDJSON, optionally using a sender-specific model.
* The default implementation synthesizes events from extractBlocks() for backward compatibility.
* Implementations that support real streaming (e.g. RestClientOcrClient) override this.
*
* @param regions optional list of pre-drawn annotation regions; when non-null,
* the OCR service runs in guided mode (crop + recognize per region)
* @param senderModelPath optional path to a per-sender model file; null means use base model
*/ */
default void streamBlocks(String pdfUrl, ScriptType scriptType, default void streamBlocks(String pdfUrl, ScriptType scriptType,
List<OcrRegion> regions, List<OcrRegion> regions, Consumer<OcrStreamEvent> handler) {
@Nullable String senderModelPath,
Consumer<OcrStreamEvent> handler) {
List<OcrBlockResult> allBlocks = extractBlocks(pdfUrl, scriptType); List<OcrBlockResult> allBlocks = extractBlocks(pdfUrl, scriptType);
LinkedHashMap<Integer, List<OcrBlockResult>> byPage = new LinkedHashMap<>(); LinkedHashMap<Integer, List<OcrBlockResult>> byPage = new LinkedHashMap<>();
@@ -75,9 +62,4 @@ public interface OcrClient {
handler.accept(new OcrStreamEvent.Done(allBlocks.size(), 0)); handler.accept(new OcrStreamEvent.Done(allBlocks.size(), 0));
} }
default void streamBlocks(String pdfUrl, ScriptType scriptType,
List<OcrRegion> regions, Consumer<OcrStreamEvent> handler) {
streamBlocks(pdfUrl, scriptType, regions, null, handler);
}
} }

View File

@@ -6,8 +6,6 @@ public sealed interface OcrStreamEvent {
record Start(int totalPages) implements OcrStreamEvent {} record Start(int totalPages) implements OcrStreamEvent {}
record Preprocessing(int pageNumber) implements OcrStreamEvent {}
record Page(int pageNumber, List<OcrBlockResult> blocks) implements OcrStreamEvent {} record Page(int pageNumber, List<OcrBlockResult> blocks) implements OcrStreamEvent {}
record Error(int pageNumber, String message) implements OcrStreamEvent {} record Error(int pageNumber, String message) implements OcrStreamEvent {}

View File

@@ -2,12 +2,9 @@ package org.raddatz.familienarchiv.service;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.raddatz.familienarchiv.dto.TrainingHistoryResponse;
import org.raddatz.familienarchiv.dto.TrainingInfoResponse;
import org.raddatz.familienarchiv.exception.DomainException; import org.raddatz.familienarchiv.exception.DomainException;
import org.raddatz.familienarchiv.exception.ErrorCode; import org.raddatz.familienarchiv.exception.ErrorCode;
import org.raddatz.familienarchiv.model.OcrTrainingRun; import org.raddatz.familienarchiv.model.OcrTrainingRun;
import org.raddatz.familienarchiv.model.SenderModel;
import org.raddatz.familienarchiv.model.TrainingStatus; import org.raddatz.familienarchiv.model.TrainingStatus;
import org.raddatz.familienarchiv.repository.OcrTrainingRunRepository; import org.raddatz.familienarchiv.repository.OcrTrainingRunRepository;
import org.raddatz.familienarchiv.repository.TranscriptionBlockRepository; import org.raddatz.familienarchiv.repository.TranscriptionBlockRepository;
@@ -20,11 +17,9 @@ import org.springframework.transaction.support.TransactionTemplate;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
import java.time.Instant; import java.time.Instant;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.Optional;
import java.util.UUID; import java.util.UUID;
@Service @Service
@@ -39,8 +34,16 @@ public class OcrTrainingService {
private final OcrHealthClient ocrHealthClient; private final OcrHealthClient ocrHealthClient;
private final TranscriptionBlockRepository blockRepository; private final TranscriptionBlockRepository blockRepository;
private final TransactionTemplate txTemplate; private final TransactionTemplate txTemplate;
private final PersonService personService;
private final SenderModelService senderModelService; public record TrainingInfoResponse(
int availableBlocks,
int totalOcrBlocks,
int availableDocuments,
int availableSegBlocks,
boolean ocrServiceAvailable,
OcrTrainingRun lastRun,
List<OcrTrainingRun> runs
) {}
private void assertNoRunningTraining() { private void assertNoRunningTraining() {
if (trainingRunRepository.findFirstByStatus(TrainingStatus.RUNNING).isPresent()) { if (trainingRunRepository.findFirstByStatus(TrainingStatus.RUNNING).isPresent()) {
@@ -192,21 +195,9 @@ public class OcrTrainingService {
int totalOcrBlocks = (int) blockRepository.count(); int totalOcrBlocks = (int) blockRepository.count();
int availableSegBlocks = segmentationTrainingExportService.querySegmentationBlocks().size(); int availableSegBlocks = segmentationTrainingExportService.querySegmentationBlocks().size();
List<OcrTrainingRun> recentRuns = trainingRunRepository.findTop20ByOrderByCreatedAtDesc(); List<OcrTrainingRun> recentRuns = trainingRunRepository.findTop10ByOrderByCreatedAtDesc();
OcrTrainingRun lastRun = recentRuns.isEmpty() ? null : recentRuns.get(0); OcrTrainingRun lastRun = recentRuns.isEmpty() ? null : recentRuns.get(0);
List<SenderModel> senderModels = senderModelService.getAllSenderModels();
List<UUID> allPersonIds = senderModels.stream()
.map(SenderModel::getPersonId)
.distinct()
.toList();
Map<String, String> personNames = new HashMap<>();
if (!allPersonIds.isEmpty()) {
personService.getAllById(allPersonIds)
.forEach(p -> personNames.put(p.getId().toString(), p.getDisplayName()));
}
return new TrainingInfoResponse( return new TrainingInfoResponse(
eligibleBlocks.size(), eligibleBlocks.size(),
totalOcrBlocks, totalOcrBlocks,
@@ -214,23 +205,10 @@ public class OcrTrainingService {
availableSegBlocks, availableSegBlocks,
ocrHealthClient.isHealthy(), ocrHealthClient.isHealthy(),
lastRun, lastRun,
recentRuns, recentRuns
personNames,
senderModels
); );
} }
public TrainingHistoryResponse getGlobalTrainingHistory() {
List<OcrTrainingRun> runs = trainingRunRepository.findByPersonIdIsNullOrderByCreatedAtDesc();
return new TrainingHistoryResponse(runs, Map.of());
}
public TrainingHistoryResponse getSenderTrainingHistory(UUID personId) {
String personName = personService.getById(personId).getDisplayName();
List<OcrTrainingRun> runs = trainingRunRepository.findByPersonIdOrderByCreatedAtDesc(personId);
return new TrainingHistoryResponse(runs, Map.of(personId.toString(), personName));
}
@EventListener(ApplicationReadyEvent.class) @EventListener(ApplicationReadyEvent.class)
@Transactional @Transactional
public void recoverOrphanedRuns() { public void recoverOrphanedRuns() {
@@ -246,4 +224,15 @@ public class OcrTrainingService {
}); });
} }
public Map<String, Object> buildTrainingInfoMap(TrainingInfoResponse info) {
return Map.of(
"availableBlocks", info.availableBlocks(),
"totalOcrBlocks", info.totalOcrBlocks(),
"availableDocuments", info.availableDocuments(),
"availableSegBlocks", info.availableSegBlocks(),
"ocrServiceAvailable", info.ocrServiceAvailable(),
"lastRun", info.lastRun() != null ? info.lastRun() : Map.of(),
"runs", info.runs()
);
}
} }

View File

@@ -109,12 +109,8 @@ public class PersonService {
@Transactional @Transactional
public Person createPerson(PersonUpdateDTO dto) { public Person createPerson(PersonUpdateDTO dto) {
if (dto.getPersonType() == PersonType.SKIP) {
throw DomainException.badRequest(ErrorCode.INVALID_PERSON_TYPE, "SKIP is not a valid person type for manual creation");
}
validateYears(dto.getBirthYear(), dto.getDeathYear()); validateYears(dto.getBirthYear(), dto.getDeathYear());
Person person = Person.builder() Person person = Person.builder()
.personType(dto.getPersonType())
.title(dto.getTitle() == null || dto.getTitle().isBlank() ? null : dto.getTitle().trim()) .title(dto.getTitle() == null || dto.getTitle().isBlank() ? null : dto.getTitle().trim())
.firstName(dto.getFirstName()) .firstName(dto.getFirstName())
.lastName(dto.getLastName()) .lastName(dto.getLastName())
@@ -140,13 +136,9 @@ public class PersonService {
@Transactional @Transactional
public Person updatePerson(UUID id, PersonUpdateDTO dto) { public Person updatePerson(UUID id, PersonUpdateDTO dto) {
if (dto.getPersonType() == PersonType.SKIP) {
throw DomainException.badRequest(ErrorCode.INVALID_PERSON_TYPE, "SKIP is not a valid person type for manual editing");
}
validateYears(dto.getBirthYear(), dto.getDeathYear()); validateYears(dto.getBirthYear(), dto.getDeathYear());
Person person = personRepository.findById(id) Person person = personRepository.findById(id)
.orElseThrow(() -> DomainException.notFound(ErrorCode.PERSON_NOT_FOUND, "Person not found: " + id)); .orElseThrow(() -> DomainException.notFound(ErrorCode.PERSON_NOT_FOUND, "Person not found: " + id));
person.setPersonType(dto.getPersonType());
person.setTitle(dto.getTitle() == null || dto.getTitle().isBlank() ? null : dto.getTitle().trim()); person.setTitle(dto.getTitle() == null || dto.getTitle().isBlank() ? null : dto.getTitle().trim());
person.setFirstName(dto.getFirstName()); person.setFirstName(dto.getFirstName());
person.setLastName(dto.getLastName()); person.setLastName(dto.getLastName());

Some files were not shown because too many files have changed in this diff Show More