Compare commits
1 Commits
feat/issue
...
feat/issue
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d5e0d2226a |
12
CLAUDE.md
12
CLAUDE.md
@@ -311,15 +311,13 @@ Save bar pattern — use **sticky full-bleed** for long forms (edit document), *
|
||||
<div class="mt-4 flex items-center justify-between rounded-sm border border-brand-sand bg-white px-6 py-4 shadow-sm">
|
||||
```
|
||||
|
||||
Back button pattern — use the shared `<BackButton>` component from `$lib/components/BackButton.svelte`:
|
||||
Back link pattern:
|
||||
```svelte
|
||||
<script lang="ts">
|
||||
import BackButton from '$lib/components/BackButton.svelte';
|
||||
</script>
|
||||
|
||||
<BackButton />
|
||||
<a href="/persons" class="inline-flex items-center text-xs font-bold uppercase tracking-widest text-gray-500 hover:text-brand-navy transition-colors group mb-4">
|
||||
<svg class="w-4 h-4 mr-2 transform group-hover:-translate-x-1 transition-transform" .../>
|
||||
Zurück zur Übersicht
|
||||
</a>
|
||||
```
|
||||
The component calls `history.back()` so the user returns to wherever they came from. Label is always "Zurück" (no contextual suffix — destination is unknown). Touch target ≥ 44px and focus ring are built in. Do not use a static `<a href>` for back navigation.
|
||||
|
||||
Subtle action link (e.g. "new document/person"):
|
||||
```svelte
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
### Mark all blocks as reviewed
|
||||
PUT http://localhost:8080/api/documents/{{documentId}}/transcription-blocks/review-all
|
||||
Authorization: Basic admin admin123
|
||||
@@ -164,19 +164,12 @@
|
||||
<version>3.0.2</version>
|
||||
</dependency>
|
||||
|
||||
<!-- PDF rendering for training data export and thumbnail generation -->
|
||||
<!-- PDF rendering for training data export -->
|
||||
<dependency>
|
||||
<groupId>org.apache.pdfbox</groupId>
|
||||
<artifactId>pdfbox</artifactId>
|
||||
<version>3.0.4</version>
|
||||
</dependency>
|
||||
|
||||
<!-- TIFF decoding plugin for ImageIO (thumbnail generation from scanned TIFFs) -->
|
||||
<dependency>
|
||||
<groupId>com.twelvemonkeys.imageio</groupId>
|
||||
<artifactId>imageio-tiff</artifactId>
|
||||
<version>3.12.0</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
|
||||
|
||||
@@ -12,9 +12,4 @@ public interface ActivityFeedRow {
|
||||
UUID getDocumentId();
|
||||
Instant getHappenedAt();
|
||||
boolean isYouMentioned();
|
||||
boolean isYouParticipated();
|
||||
int getCount();
|
||||
Instant getHappenedAtUntil();
|
||||
/** Present only for COMMENT_ADDED and MENTION_CREATED — null otherwise. */
|
||||
UUID getCommentId();
|
||||
}
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
package org.raddatz.familienarchiv.audit;
|
||||
|
||||
import java.util.Set;
|
||||
|
||||
public enum AuditKind {
|
||||
|
||||
/** Payload: none */
|
||||
@@ -27,18 +25,4 @@ public enum AuditKind {
|
||||
|
||||
/** Payload: {@code {"commentId": "uuid", "mentionedUserId": "uuid"}} */
|
||||
MENTION_CREATED,
|
||||
|
||||
/** Payload: {@code {"userId": "uuid", "email": "addr"}} */
|
||||
USER_CREATED,
|
||||
|
||||
/** Payload: {@code {"userId": "uuid", "email": "addr"}} */
|
||||
USER_DELETED,
|
||||
|
||||
/** Payload: {@code {"userId": "uuid", "email": "addr", "addedGroups": ["Admin"], "removedGroups": []}} */
|
||||
GROUP_MEMBERSHIP_CHANGED;
|
||||
|
||||
public static final Set<AuditKind> ROLLUP_ELIGIBLE = Set.of(
|
||||
TEXT_SAVED, FILE_UPLOADED, ANNOTATION_CREATED,
|
||||
BLOCK_REVIEWED, COMMENT_ADDED, MENTION_CREATED
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,13 +1,10 @@
|
||||
package org.raddatz.familienarchiv.audit;
|
||||
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import org.springframework.data.jpa.repository.Query;
|
||||
import org.springframework.data.repository.query.Param;
|
||||
|
||||
import java.time.OffsetDateTime;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
@@ -26,92 +23,36 @@ public interface AuditLogQueryRepository extends JpaRepository<AuditLog, UUID> {
|
||||
Optional<UUID> findMostRecentDocumentIdByActor(@Param("userId") UUID userId);
|
||||
|
||||
@Query(value = """
|
||||
WITH events AS (
|
||||
SELECT
|
||||
a.kind,
|
||||
a.actor_id,
|
||||
a.document_id,
|
||||
a.happened_at,
|
||||
a.payload,
|
||||
LAG(a.happened_at) OVER (
|
||||
PARTITION BY a.actor_id, a.document_id, a.kind
|
||||
ORDER BY a.happened_at
|
||||
) AS prev_happened_at
|
||||
FROM audit_log a
|
||||
WHERE a.kind IN (:kinds)
|
||||
AND a.document_id IS NOT NULL
|
||||
),
|
||||
sessions_marked AS (
|
||||
SELECT
|
||||
kind, actor_id, document_id, happened_at, payload,
|
||||
SELECT * FROM (
|
||||
SELECT DISTINCT ON (a.actor_id, a.document_id, a.kind, date_trunc('hour', a.happened_at))
|
||||
a.kind AS kind,
|
||||
a.actor_id AS actorId,
|
||||
CASE
|
||||
WHEN kind IN ('COMMENT_ADDED','MENTION_CREATED') THEN 1
|
||||
WHEN prev_happened_at IS NULL THEN 1
|
||||
WHEN EXTRACT(EPOCH FROM (happened_at - prev_happened_at)) > 7200 THEN 1
|
||||
ELSE 0
|
||||
END AS is_new_session
|
||||
FROM events
|
||||
),
|
||||
sessions AS (
|
||||
SELECT
|
||||
kind, actor_id, document_id, happened_at, payload,
|
||||
SUM(is_new_session) OVER (
|
||||
PARTITION BY actor_id, document_id, kind
|
||||
ORDER BY happened_at
|
||||
ROWS UNBOUNDED PRECEDING
|
||||
) AS session_id
|
||||
FROM sessions_marked
|
||||
),
|
||||
aggregated AS (
|
||||
SELECT
|
||||
s.kind,
|
||||
s.actor_id,
|
||||
s.document_id,
|
||||
s.session_id,
|
||||
MIN(s.happened_at) AS happened_at,
|
||||
CASE WHEN COUNT(*) > 1 THEN MAX(s.happened_at) ELSE NULL END AS happened_at_until,
|
||||
COUNT(*)::int AS count,
|
||||
BOOL_OR(s.kind = 'MENTION_CREATED'
|
||||
AND s.payload->>'mentionedUserId' = :currentUserId) AS you_mentioned,
|
||||
-- COMMENT_ADDED/MENTION_CREATED always have is_new_session=1, so each group has one row and MIN collapses to that row payload
|
||||
MIN(s.payload::text)::jsonb AS payload
|
||||
FROM sessions s
|
||||
GROUP BY s.kind, s.actor_id, s.document_id, s.session_id
|
||||
)
|
||||
SELECT
|
||||
ag.kind AS kind,
|
||||
ag.actor_id AS actorId,
|
||||
CASE
|
||||
WHEN u.first_name IS NOT NULL AND u.last_name IS NOT NULL
|
||||
THEN UPPER(LEFT(u.first_name, 1)) || UPPER(LEFT(u.last_name, 1))
|
||||
WHEN u.first_name IS NOT NULL THEN UPPER(LEFT(u.first_name, 1))
|
||||
WHEN u.last_name IS NOT NULL THEN UPPER(LEFT(u.last_name, 1))
|
||||
ELSE '?'
|
||||
END AS actorInitials,
|
||||
COALESCE(u.color, '') AS actorColor,
|
||||
CONCAT_WS(' ', u.first_name, u.last_name) AS actorName,
|
||||
ag.document_id AS documentId,
|
||||
ag.happened_at AS happened_at,
|
||||
ag.you_mentioned AS youMentioned,
|
||||
-- payload->>'commentId' matches notifications.reference_id per AuditKind.COMMENT_ADDED contract
|
||||
EXISTS(
|
||||
SELECT 1 FROM notifications n
|
||||
WHERE n.type = 'REPLY'
|
||||
AND n.recipient_id = CAST(:currentUserId AS uuid)
|
||||
AND n.reference_id = (ag.payload->>'commentId')::uuid
|
||||
) AS youParticipated,
|
||||
ag.count AS count,
|
||||
ag.happened_at_until AS happenedAtUntil,
|
||||
(ag.payload->>'commentId')::uuid AS commentId
|
||||
FROM aggregated ag
|
||||
LEFT JOIN users u ON u.id = ag.actor_id
|
||||
ORDER BY ag.happened_at DESC
|
||||
WHEN u.first_name IS NOT NULL AND u.last_name IS NOT NULL
|
||||
THEN UPPER(LEFT(u.first_name, 1)) || UPPER(LEFT(u.last_name, 1))
|
||||
WHEN u.first_name IS NOT NULL THEN UPPER(LEFT(u.first_name, 1))
|
||||
WHEN u.last_name IS NOT NULL THEN UPPER(LEFT(u.last_name, 1))
|
||||
ELSE '?'
|
||||
END AS actorInitials,
|
||||
COALESCE(u.color, '') AS actorColor,
|
||||
CONCAT_WS(' ', u.first_name, u.last_name) AS actorName,
|
||||
a.document_id AS documentId,
|
||||
a.happened_at AS happened_at,
|
||||
(a.kind = 'MENTION_CREATED'
|
||||
AND a.payload->>'mentionedUserId' = :currentUserId) AS youMentioned
|
||||
FROM audit_log a
|
||||
LEFT JOIN users u ON u.id = a.actor_id
|
||||
WHERE a.kind IN ('TEXT_SAVED','FILE_UPLOADED','ANNOTATION_CREATED','COMMENT_ADDED','MENTION_CREATED')
|
||||
AND a.document_id IS NOT NULL
|
||||
ORDER BY a.actor_id, a.document_id, a.kind,
|
||||
date_trunc('hour', a.happened_at), a.happened_at DESC
|
||||
) deduped
|
||||
ORDER BY happened_at DESC
|
||||
LIMIT :limit
|
||||
""", nativeQuery = true)
|
||||
List<ActivityFeedRow> findRolledUpActivityFeed(
|
||||
List<ActivityFeedRow> findDedupedActivityFeed(
|
||||
@Param("currentUserId") String currentUserId,
|
||||
@Param("limit") int limit,
|
||||
@Param("kinds") Collection<String> kinds);
|
||||
@Param("limit") int limit);
|
||||
|
||||
@Query(value = """
|
||||
SELECT
|
||||
@@ -165,40 +106,4 @@ public interface AuditLogQueryRepository extends JpaRepository<AuditLog, UUID> {
|
||||
ORDER BY a.document_id, MIN(a.happened_at)
|
||||
""", nativeQuery = true)
|
||||
List<ContributorRow> findContributorsPerDocument(@Param("documentIds") List<UUID> documentIds);
|
||||
|
||||
@Query(value = """
|
||||
SELECT
|
||||
ranked.document_id AS documentId,
|
||||
ranked.actorInitials AS actorInitials,
|
||||
ranked.actorColor AS actorColor,
|
||||
ranked.actorName AS actorName
|
||||
FROM (
|
||||
SELECT
|
||||
a.document_id,
|
||||
CASE
|
||||
WHEN u.first_name IS NOT NULL AND u.last_name IS NOT NULL
|
||||
THEN UPPER(LEFT(u.first_name, 1)) || UPPER(LEFT(u.last_name, 1))
|
||||
WHEN u.first_name IS NOT NULL THEN UPPER(LEFT(u.first_name, 1))
|
||||
WHEN u.last_name IS NOT NULL THEN UPPER(LEFT(u.last_name, 1))
|
||||
ELSE '?'
|
||||
END AS actorInitials,
|
||||
COALESCE(u.color, '') AS actorColor,
|
||||
NULLIF(CONCAT_WS(' ', u.first_name, u.last_name), '') AS actorName,
|
||||
ROW_NUMBER() OVER (
|
||||
PARTITION BY a.document_id
|
||||
ORDER BY MAX(a.happened_at) DESC
|
||||
) AS rn
|
||||
FROM audit_log a
|
||||
LEFT JOIN users u ON u.id = a.actor_id
|
||||
WHERE a.kind IN ('ANNOTATION_CREATED', 'TEXT_SAVED', 'BLOCK_REVIEWED')
|
||||
AND a.document_id IN :documentIds
|
||||
AND a.actor_id IS NOT NULL
|
||||
GROUP BY a.document_id, a.actor_id, u.first_name, u.last_name, u.color
|
||||
) ranked
|
||||
WHERE ranked.rn <= 4
|
||||
ORDER BY ranked.document_id, ranked.rn
|
||||
""", nativeQuery = true)
|
||||
List<ContributorRow> findRecentContributorsForDocuments(@Param("documentIds") List<UUID> documentIds);
|
||||
|
||||
Page<AuditLog> findByKindIn(Collection<AuditKind> kinds, Pageable pageable);
|
||||
}
|
||||
|
||||
@@ -1,17 +1,11 @@
|
||||
package org.raddatz.familienarchiv.audit;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.time.OffsetDateTime;
|
||||
import java.util.*;
|
||||
|
||||
import static org.raddatz.familienarchiv.audit.AuditKind.GROUP_MEMBERSHIP_CHANGED;
|
||||
import static org.raddatz.familienarchiv.audit.AuditKind.USER_CREATED;
|
||||
import static org.raddatz.familienarchiv.audit.AuditKind.USER_DELETED;
|
||||
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
public class AuditLogQueryService {
|
||||
@@ -23,12 +17,7 @@ public class AuditLogQueryService {
|
||||
}
|
||||
|
||||
public List<ActivityFeedRow> findActivityFeed(UUID currentUserId, int limit) {
|
||||
return findActivityFeed(currentUserId, limit, AuditKind.ROLLUP_ELIGIBLE);
|
||||
}
|
||||
|
||||
public List<ActivityFeedRow> findActivityFeed(UUID currentUserId, int limit, Set<AuditKind> kinds) {
|
||||
List<String> kindNames = kinds.stream().map(Enum::name).toList();
|
||||
return queryRepository.findRolledUpActivityFeed(currentUserId.toString(), limit, kindNames);
|
||||
return queryRepository.findDedupedActivityFeed(currentUserId.toString(), limit);
|
||||
}
|
||||
|
||||
public PulseStatsRow getPulseStats(OffsetDateTime weekStart, UUID userId) {
|
||||
@@ -49,20 +38,7 @@ public class AuditLogQueryService {
|
||||
|
||||
public Map<UUID, List<ActivityActorDTO>> findContributorsPerDocument(List<UUID> documentIds) {
|
||||
if (documentIds.isEmpty()) return Map.of();
|
||||
return toContributorMap(queryRepository.findContributorsPerDocument(documentIds));
|
||||
}
|
||||
|
||||
public Map<UUID, List<ActivityActorDTO>> findRecentContributorsPerDocument(List<UUID> documentIds) {
|
||||
if (documentIds.isEmpty()) return Map.of();
|
||||
return toContributorMap(queryRepository.findRecentContributorsForDocuments(documentIds));
|
||||
}
|
||||
|
||||
public List<AuditLog> findRecentUserManagementEvents(int limit) {
|
||||
PageRequest page = PageRequest.of(0, limit, Sort.by("happenedAt").descending());
|
||||
return queryRepository.findByKindIn(Set.of(USER_CREATED, USER_DELETED, GROUP_MEMBERSHIP_CHANGED), page).getContent();
|
||||
}
|
||||
|
||||
private Map<UUID, List<ActivityActorDTO>> toContributorMap(List<ContributorRow> rows) {
|
||||
List<ContributorRow> rows = queryRepository.findContributorsPerDocument(documentIds);
|
||||
Map<UUID, List<ActivityActorDTO>> result = new LinkedHashMap<>();
|
||||
for (ContributorRow row : rows) {
|
||||
result.computeIfAbsent(row.getDocumentId(), k -> new ArrayList<>())
|
||||
|
||||
@@ -5,5 +5,4 @@ import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import java.util.UUID;
|
||||
|
||||
public interface AuditLogRepository extends JpaRepository<AuditLog, UUID> {
|
||||
boolean existsByKind(AuditKind kind);
|
||||
}
|
||||
|
||||
@@ -37,19 +37,4 @@ public class AsyncConfig {
|
||||
executor.setRejectedExecutionHandler(new ThreadPoolExecutor.AbortPolicy());
|
||||
return executor;
|
||||
}
|
||||
|
||||
@Bean("thumbnailExecutor")
|
||||
public Executor thumbnailExecutor() {
|
||||
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
|
||||
executor.setCorePoolSize(1);
|
||||
executor.setMaxPoolSize(2);
|
||||
executor.setQueueCapacity(200);
|
||||
executor.setThreadNamePrefix("Thumbnail-");
|
||||
// CallerRunsPolicy applies back-pressure to quick-upload batches and admin backfill
|
||||
// instead of dropping work (shared taskExecutor uses AbortPolicy). Safe because the
|
||||
// task is dispatched via TransactionSynchronization.afterCommit, which runs on a
|
||||
// post-commit callback thread without active transaction synchronization.
|
||||
executor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
|
||||
return executor;
|
||||
}
|
||||
}
|
||||
@@ -6,7 +6,6 @@ import org.raddatz.familienarchiv.security.RequirePermission;
|
||||
import org.raddatz.familienarchiv.service.DocumentService;
|
||||
import org.raddatz.familienarchiv.service.DocumentVersionService;
|
||||
import org.raddatz.familienarchiv.service.MassImportService;
|
||||
import org.raddatz.familienarchiv.service.ThumbnailBackfillService;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
import org.springframework.web.bind.annotation.PostMapping;
|
||||
@@ -24,7 +23,6 @@ public class AdminController {
|
||||
private final MassImportService massImportService;
|
||||
private final DocumentService documentService;
|
||||
private final DocumentVersionService documentVersionService;
|
||||
private final ThumbnailBackfillService thumbnailBackfillService;
|
||||
|
||||
@PostMapping("/trigger-import")
|
||||
public ResponseEntity<MassImportService.ImportStatus> triggerMassImport() {
|
||||
@@ -49,15 +47,4 @@ public class AdminController {
|
||||
int count = documentService.backfillFileHashes();
|
||||
return ResponseEntity.ok(new BackfillResult(count));
|
||||
}
|
||||
|
||||
@PostMapping("/generate-thumbnails")
|
||||
public ResponseEntity<ThumbnailBackfillService.BackfillStatus> generateThumbnails() {
|
||||
thumbnailBackfillService.runBackfillAsync();
|
||||
return ResponseEntity.accepted().body(thumbnailBackfillService.getStatus());
|
||||
}
|
||||
|
||||
@GetMapping("/thumbnail-status")
|
||||
public ResponseEntity<ThumbnailBackfillService.BackfillStatus> thumbnailStatus() {
|
||||
return ResponseEntity.ok(thumbnailBackfillService.getStatus());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -24,6 +24,67 @@ public class CommentController {
|
||||
private final CommentService commentService;
|
||||
private final UserService userService;
|
||||
|
||||
// ─── General document comments ────────────────────────────────────────────
|
||||
|
||||
@GetMapping("/api/documents/{documentId}/comments")
|
||||
public List<DocumentComment> getDocumentComments(@PathVariable UUID documentId) {
|
||||
return commentService.getCommentsForDocument(documentId);
|
||||
}
|
||||
|
||||
@PostMapping("/api/documents/{documentId}/comments")
|
||||
@ResponseStatus(HttpStatus.CREATED)
|
||||
@RequirePermission({Permission.ANNOTATE_ALL, Permission.WRITE_ALL})
|
||||
public DocumentComment postDocumentComment(
|
||||
@PathVariable UUID documentId,
|
||||
@RequestBody CreateCommentDTO dto,
|
||||
Authentication authentication) {
|
||||
AppUser author = resolveUser(authentication);
|
||||
return commentService.postComment(documentId, null, dto.getContent(), dto.getMentionedUserIds(), author);
|
||||
}
|
||||
|
||||
@PostMapping("/api/documents/{documentId}/comments/{commentId}/replies")
|
||||
@ResponseStatus(HttpStatus.CREATED)
|
||||
@RequirePermission({Permission.ANNOTATE_ALL, Permission.WRITE_ALL})
|
||||
public DocumentComment replyToDocumentComment(
|
||||
@PathVariable UUID documentId,
|
||||
@PathVariable UUID commentId,
|
||||
@RequestBody CreateCommentDTO dto,
|
||||
Authentication authentication) {
|
||||
AppUser author = resolveUser(authentication);
|
||||
return commentService.replyToComment(documentId, commentId, dto.getContent(), dto.getMentionedUserIds(), author);
|
||||
}
|
||||
|
||||
// ─── Annotation comments ──────────────────────────────────────────────────
|
||||
|
||||
@GetMapping("/api/documents/{documentId}/annotations/{annotationId}/comments")
|
||||
public List<DocumentComment> getAnnotationComments(@PathVariable UUID annotationId) {
|
||||
return commentService.getCommentsForAnnotation(annotationId);
|
||||
}
|
||||
|
||||
@PostMapping("/api/documents/{documentId}/annotations/{annotationId}/comments")
|
||||
@ResponseStatus(HttpStatus.CREATED)
|
||||
@RequirePermission({Permission.ANNOTATE_ALL, Permission.WRITE_ALL})
|
||||
public DocumentComment postAnnotationComment(
|
||||
@PathVariable UUID documentId,
|
||||
@PathVariable UUID annotationId,
|
||||
@RequestBody CreateCommentDTO dto,
|
||||
Authentication authentication) {
|
||||
AppUser author = resolveUser(authentication);
|
||||
return commentService.postComment(documentId, annotationId, dto.getContent(), dto.getMentionedUserIds(), author);
|
||||
}
|
||||
|
||||
@PostMapping("/api/documents/{documentId}/annotations/{annotationId}/comments/{commentId}/replies")
|
||||
@ResponseStatus(HttpStatus.CREATED)
|
||||
@RequirePermission({Permission.ANNOTATE_ALL, Permission.WRITE_ALL})
|
||||
public DocumentComment replyToAnnotationComment(
|
||||
@PathVariable UUID documentId,
|
||||
@PathVariable UUID commentId,
|
||||
@RequestBody CreateCommentDTO dto,
|
||||
Authentication authentication) {
|
||||
AppUser author = resolveUser(authentication);
|
||||
return commentService.replyToComment(documentId, commentId, dto.getContent(), dto.getMentionedUserIds(), author);
|
||||
}
|
||||
|
||||
// ─── Block (transcription) comments ────────────────────────────────────────
|
||||
|
||||
@GetMapping("/api/documents/{documentId}/transcription-blocks/{blockId}/comments")
|
||||
|
||||
@@ -3,7 +3,6 @@ package org.raddatz.familienarchiv.controller;
|
||||
import java.io.IOException;
|
||||
import java.time.LocalDate;
|
||||
import java.util.ArrayList;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
@@ -14,23 +13,10 @@ import java.util.UUID;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Parameter;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import jakarta.validation.Valid;
|
||||
import jakarta.validation.constraints.Max;
|
||||
import jakarta.validation.constraints.Min;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.validation.annotation.Validated;
|
||||
import org.raddatz.familienarchiv.dto.BatchMetadataRequest;
|
||||
import org.raddatz.familienarchiv.dto.BulkEditError;
|
||||
import org.raddatz.familienarchiv.dto.BulkEditResult;
|
||||
import org.raddatz.familienarchiv.dto.DocumentBatchMetadataDTO;
|
||||
import org.raddatz.familienarchiv.dto.DocumentBatchSummary;
|
||||
import org.raddatz.familienarchiv.dto.DocumentBulkEditDTO;
|
||||
import org.raddatz.familienarchiv.dto.DocumentSearchResult;
|
||||
import org.raddatz.familienarchiv.dto.DocumentUpdateDTO;
|
||||
import org.raddatz.familienarchiv.dto.TagOperator;
|
||||
import org.raddatz.familienarchiv.dto.DocumentVersionSummary;
|
||||
import org.raddatz.familienarchiv.dto.IncompleteDocumentDTO;
|
||||
import org.raddatz.familienarchiv.exception.DomainException;
|
||||
import org.raddatz.familienarchiv.exception.ErrorCode;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
@@ -75,7 +61,6 @@ import lombok.extern.slf4j.Slf4j;
|
||||
@RequestMapping("/api/documents")
|
||||
@RequiredArgsConstructor
|
||||
@Slf4j
|
||||
@Validated
|
||||
public class DocumentController {
|
||||
|
||||
private final DocumentService documentService;
|
||||
@@ -108,31 +93,6 @@ public class DocumentController {
|
||||
}
|
||||
}
|
||||
|
||||
// --- THUMBNAIL ---
|
||||
@GetMapping("/{id}/thumbnail")
|
||||
public ResponseEntity<InputStreamResource> getDocumentThumbnail(@PathVariable UUID id) {
|
||||
Document doc = documentService.getDocumentById(id);
|
||||
|
||||
if (doc.getThumbnailKey() == null) {
|
||||
throw DomainException.notFound(ErrorCode.FILE_NOT_FOUND, "No thumbnail for document: " + id);
|
||||
}
|
||||
|
||||
try {
|
||||
FileService.S3FileDownload download = fileService.downloadFile(doc.getThumbnailKey());
|
||||
return ResponseEntity.ok()
|
||||
.contentType(MediaType.IMAGE_JPEG)
|
||||
// `private` (not `public`) prevents shared caches from serving one user's
|
||||
// thumbnail to another (CWE-525). `immutable` is safe because the URL
|
||||
// carries a ?v=<thumbnailGeneratedAt> cache-buster that changes whenever
|
||||
// the underlying file is replaced.
|
||||
.header(HttpHeaders.CACHE_CONTROL, "private, max-age=31536000, immutable")
|
||||
.body(download.resource());
|
||||
} catch (FileService.StorageFileNotFoundException e) {
|
||||
throw DomainException.notFound(ErrorCode.FILE_NOT_FOUND,
|
||||
"Thumbnail missing in storage: " + doc.getThumbnailKey());
|
||||
}
|
||||
}
|
||||
|
||||
// --- METADATA ---
|
||||
@GetMapping("/{id}")
|
||||
public Document getDocument(@PathVariable UUID id) {
|
||||
@@ -201,7 +161,6 @@ public class DocumentController {
|
||||
@RequirePermission(Permission.WRITE_ALL)
|
||||
public QuickUploadResult quickUpload(
|
||||
@RequestPart(value = "files", required = false) List<MultipartFile> files,
|
||||
@RequestPart(value = "metadata", required = false) DocumentBatchMetadataDTO metadata,
|
||||
Authentication authentication) {
|
||||
List<Document> created = new ArrayList<>();
|
||||
List<Document> updated = new ArrayList<>();
|
||||
@@ -211,21 +170,14 @@ public class DocumentController {
|
||||
return new QuickUploadResult(created, updated, errors);
|
||||
}
|
||||
|
||||
documentService.validateBatch(files.size(), metadata);
|
||||
|
||||
UUID actorId = requireUserId(authentication);
|
||||
long totalBytes = files.stream().mapToLong(MultipartFile::getSize).sum();
|
||||
|
||||
for (int i = 0; i < files.size(); i++) {
|
||||
MultipartFile file = files.get(i);
|
||||
for (MultipartFile file : files) {
|
||||
if (!ALLOWED_CONTENT_TYPES.contains(file.getContentType())) {
|
||||
errors.add(new UploadError(file.getOriginalFilename(), "UNSUPPORTED_FILE_TYPE"));
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
DocumentService.StoreResult result = metadata != null
|
||||
? documentService.storeDocumentWithBatchMetadata(file, metadata, i, actorId)
|
||||
: documentService.storeDocument(file, actorId);
|
||||
DocumentService.StoreResult result = documentService.storeDocument(file, actorId);
|
||||
if (result.isNew()) {
|
||||
created.add(result.document());
|
||||
} else {
|
||||
@@ -237,123 +189,15 @@ public class DocumentController {
|
||||
}
|
||||
}
|
||||
|
||||
log.info("quickUpload actor={} files={} totalBytes={} withMetadata={} created={} updated={} errors={}",
|
||||
actorId, files.size(), totalBytes, metadata != null,
|
||||
created.size(), updated.size(), errors.size());
|
||||
|
||||
return new QuickUploadResult(created, updated, errors);
|
||||
}
|
||||
|
||||
// --- BULK EDIT ---
|
||||
|
||||
private static final int BULK_EDIT_MAX_IDS = 500;
|
||||
/** Hard cap for {@code GET /api/documents/ids}: prevents an unfiltered
|
||||
* call from materialising the entire {@code documents} table into JSON.
|
||||
* Generous enough for real-world "Alle X editieren" against the family
|
||||
* archive's bounded scale (~1500 docs today, expected growth to ~5k). */
|
||||
private static final int BULK_EDIT_FILTER_MAX_IDS = 5000;
|
||||
|
||||
@PatchMapping("/bulk")
|
||||
@RequirePermission(Permission.WRITE_ALL)
|
||||
public BulkEditResult patchBulk(
|
||||
@RequestBody @Valid DocumentBulkEditDTO dto,
|
||||
Authentication authentication) {
|
||||
if (dto.getDocumentIds() == null || dto.getDocumentIds().isEmpty()) {
|
||||
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "documentIds is required");
|
||||
}
|
||||
if (dto.getDocumentIds().size() > BULK_EDIT_MAX_IDS) {
|
||||
throw DomainException.badRequest(ErrorCode.BULK_EDIT_TOO_MANY_IDS,
|
||||
"Maximum " + BULK_EDIT_MAX_IDS + " documents per request, got: " + dto.getDocumentIds().size());
|
||||
}
|
||||
|
||||
UUID actorId = requireUserId(authentication);
|
||||
int updated = 0;
|
||||
List<BulkEditError> errors = new ArrayList<>();
|
||||
|
||||
// Dedupe duplicate document IDs while preserving submission order. A
|
||||
// double-click on "Alle X editieren" would otherwise hit each document
|
||||
// twice and inflate the `updated` count returned to the user.
|
||||
LinkedHashSet<UUID> uniqueIds = new LinkedHashSet<>(dto.getDocumentIds());
|
||||
|
||||
for (UUID id : uniqueIds) {
|
||||
try {
|
||||
documentService.applyBulkEditToDocument(id, dto, actorId);
|
||||
updated++;
|
||||
} catch (DomainException e) {
|
||||
errors.add(new BulkEditError(id, sanitizeForLog(e.getMessage())));
|
||||
} catch (Exception e) {
|
||||
errors.add(new BulkEditError(id, "Internal error"));
|
||||
log.warn("Bulk edit failed for document {}: {}", id, sanitizeForLog(e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
log.info("bulkEdit actor={} documentIds={} unique={} updated={} errors={}",
|
||||
actorId, dto.getDocumentIds().size(), uniqueIds.size(), updated, errors.size());
|
||||
|
||||
return new BulkEditResult(updated, errors);
|
||||
}
|
||||
|
||||
/** CRLF strip for any log line interpolating a free-form string (e.g.
|
||||
* {@link Throwable#getMessage()}). Defends against CWE-117 log injection. */
|
||||
private static String sanitizeForLog(String s) {
|
||||
return s == null ? null : s.replaceAll("[\\r\\n]", "_");
|
||||
}
|
||||
|
||||
@GetMapping("/ids")
|
||||
@RequirePermission(Permission.WRITE_ALL)
|
||||
public List<UUID> getDocumentIds(
|
||||
@RequestParam(required = false) String q,
|
||||
@RequestParam(required = false) LocalDate from,
|
||||
@RequestParam(required = false) LocalDate to,
|
||||
@RequestParam(required = false) UUID senderId,
|
||||
@RequestParam(required = false) UUID receiverId,
|
||||
@RequestParam(required = false, name = "tag") List<String> tags,
|
||||
@RequestParam(required = false) String tagQ,
|
||||
@RequestParam(required = false) DocumentStatus status,
|
||||
@RequestParam(required = false) String tagOp,
|
||||
Authentication authentication) {
|
||||
TagOperator operator = "OR".equalsIgnoreCase(tagOp) ? TagOperator.OR : TagOperator.AND;
|
||||
List<UUID> ids = documentService.findIdsForFilter(q, from, to, senderId, receiverId, tags, tagQ, status, operator);
|
||||
if (ids.size() > BULK_EDIT_FILTER_MAX_IDS) {
|
||||
throw DomainException.badRequest(ErrorCode.BULK_EDIT_TOO_MANY_IDS,
|
||||
"Filter matches " + ids.size() + " documents — refine filter (max " + BULK_EDIT_FILTER_MAX_IDS + ")");
|
||||
}
|
||||
UUID actorId = requireUserId(authentication);
|
||||
log.info("documentIds actor={} matched={}", actorId, ids.size());
|
||||
return ids;
|
||||
}
|
||||
|
||||
@PostMapping(value = "/batch-metadata", consumes = MediaType.APPLICATION_JSON_VALUE)
|
||||
@RequirePermission(Permission.READ_ALL)
|
||||
public List<DocumentBatchSummary> batchMetadata(@RequestBody @Valid BatchMetadataRequest request, Authentication authentication) {
|
||||
if (request == null || request.ids() == null || request.ids().isEmpty()) {
|
||||
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "ids is required");
|
||||
}
|
||||
if (request.ids().size() > BULK_EDIT_MAX_IDS) {
|
||||
throw DomainException.badRequest(ErrorCode.BULK_EDIT_TOO_MANY_IDS,
|
||||
"Maximum " + BULK_EDIT_MAX_IDS + " ids per request, got: " + request.ids().size());
|
||||
}
|
||||
UUID actorId = requireUserId(authentication);
|
||||
log.info("batchMetadata actor={} ids={}", actorId, request.ids().size());
|
||||
return documentService.batchMetadata(request.ids());
|
||||
}
|
||||
|
||||
@GetMapping("/incomplete-count")
|
||||
@RequirePermission(Permission.WRITE_ALL)
|
||||
public Map<String, Long> getIncompleteCount() {
|
||||
return Map.of("count", documentService.getIncompleteCount());
|
||||
}
|
||||
|
||||
@GetMapping("/incomplete")
|
||||
@RequirePermission(Permission.WRITE_ALL)
|
||||
public List<IncompleteDocumentDTO> getIncomplete(
|
||||
@Parameter(description = "Maximum number of results (server caps at 200)")
|
||||
@RequestParam(defaultValue = "50") int size) {
|
||||
return documentService.findIncompleteDocuments(Math.min(size, 200));
|
||||
}
|
||||
|
||||
@GetMapping("/incomplete/next")
|
||||
@RequirePermission(Permission.WRITE_ALL)
|
||||
public ResponseEntity<Document> getNextIncomplete(@RequestParam UUID excludeId) {
|
||||
return documentService.findNextIncompleteDocument(excludeId)
|
||||
.map(ResponseEntity::ok)
|
||||
@@ -372,20 +216,14 @@ public class DocumentController {
|
||||
@Parameter(description = "Filter by document status") @RequestParam(required = false) DocumentStatus status,
|
||||
@Parameter(description = "Sort field") @RequestParam(required = false) DocumentSort sort,
|
||||
@Parameter(description = "Sort direction: ASC or DESC") @RequestParam(required = false, defaultValue = "DESC") String dir,
|
||||
@Parameter(description = "Tag operator: AND (default) or OR") @RequestParam(required = false) String tagOp,
|
||||
// @Max on page guards against overflow when pageable.getOffset() is computed
|
||||
// as page * size — Integer.MAX_VALUE * 50 would wrap to a negative long, which
|
||||
// Hibernate cheerfully turns into an invalid SQL OFFSET.
|
||||
@Parameter(description = "Page number (0-indexed)") @RequestParam(defaultValue = "0") @Min(0) @Max(100_000) int page,
|
||||
@Parameter(description = "Page size (max 100)") @RequestParam(defaultValue = "50") @Min(1) @Max(100) int size) {
|
||||
@Parameter(description = "Tag operator: AND (default) or OR") @RequestParam(required = false) String tagOp) {
|
||||
if (!"ASC".equalsIgnoreCase(dir) && !"DESC".equalsIgnoreCase(dir)) {
|
||||
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "dir must be ASC or DESC");
|
||||
}
|
||||
// tagOp is a raw String at the HTTP boundary; any value other than "OR" (case-insensitive)
|
||||
// defaults to AND, which matches the frontend default and keeps old clients working.
|
||||
TagOperator operator = "OR".equalsIgnoreCase(tagOp) ? TagOperator.OR : TagOperator.AND;
|
||||
Pageable pageable = PageRequest.of(page, size);
|
||||
return ResponseEntity.ok(documentService.searchDocuments(q, from, to, senderId, receiverId, tags, tagQ, status, sort, dir, operator, pageable));
|
||||
return ResponseEntity.ok(documentService.searchDocuments(q, from, to, senderId, receiverId, tags, tagQ, status, sort, dir, operator));
|
||||
}
|
||||
|
||||
// --- TRAINING LABELS ---
|
||||
|
||||
@@ -63,33 +63,27 @@ public class PersonController {
|
||||
@PostMapping
|
||||
@RequirePermission(Permission.WRITE_ALL)
|
||||
public ResponseEntity<Person> createPerson(@Valid @RequestBody PersonUpdateDTO dto) {
|
||||
validatePersonNames(dto);
|
||||
if (dto.getFirstName() != null) dto.setFirstName(dto.getFirstName().trim());
|
||||
if (dto.getFirstName() == null || dto.getFirstName().isBlank()
|
||||
|| dto.getLastName() == null || dto.getLastName().isBlank()) {
|
||||
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "Vor- und Nachname sind Pflichtfelder");
|
||||
}
|
||||
dto.setFirstName(dto.getFirstName().trim());
|
||||
dto.setLastName(dto.getLastName().trim());
|
||||
if (dto.getTitle() != null) dto.setTitle(dto.getTitle().trim());
|
||||
return ResponseEntity.ok(personService.createPerson(dto));
|
||||
}
|
||||
|
||||
@PutMapping("/{id}")
|
||||
@RequirePermission(Permission.WRITE_ALL)
|
||||
public ResponseEntity<Person> updatePerson(@PathVariable UUID id, @Valid @RequestBody PersonUpdateDTO dto) {
|
||||
validatePersonNames(dto);
|
||||
if (dto.getFirstName() != null) dto.setFirstName(dto.getFirstName().trim());
|
||||
if (dto.getFirstName() == null || dto.getFirstName().isBlank()
|
||||
|| dto.getLastName() == null || dto.getLastName().isBlank()) {
|
||||
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "Vor- und Nachname sind Pflichtfelder");
|
||||
}
|
||||
dto.setFirstName(dto.getFirstName().trim());
|
||||
dto.setLastName(dto.getLastName().trim());
|
||||
if (dto.getTitle() != null) dto.setTitle(dto.getTitle().trim());
|
||||
return ResponseEntity.ok(personService.updatePerson(id, dto));
|
||||
}
|
||||
|
||||
private void validatePersonNames(PersonUpdateDTO dto) {
|
||||
if (dto.getLastName() == null || dto.getLastName().isBlank()) {
|
||||
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "Nachname ist Pflichtfeld");
|
||||
}
|
||||
if (dto.getPersonType() == org.raddatz.familienarchiv.model.PersonType.PERSON
|
||||
&& (dto.getFirstName() == null || dto.getFirstName().isBlank())) {
|
||||
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "Vorname ist Pflichtfeld");
|
||||
}
|
||||
}
|
||||
|
||||
@PostMapping("/{id}/merge")
|
||||
@ResponseStatus(HttpStatus.NO_CONTENT)
|
||||
@RequirePermission(Permission.WRITE_ALL)
|
||||
|
||||
@@ -90,15 +90,6 @@ public class TranscriptionBlockController {
|
||||
return transcriptionService.reviewBlock(documentId, blockId, userId);
|
||||
}
|
||||
|
||||
@PutMapping("/review-all")
|
||||
@RequirePermission(Permission.WRITE_ALL)
|
||||
public List<TranscriptionBlock> markAllBlocksReviewed(
|
||||
@PathVariable UUID documentId,
|
||||
Authentication authentication) {
|
||||
UUID userId = requireUserId(authentication);
|
||||
return transcriptionService.markAllBlocksReviewed(documentId, userId);
|
||||
}
|
||||
|
||||
@GetMapping("/{blockId}/history")
|
||||
@RequirePermission(Permission.READ_ALL)
|
||||
public List<TranscriptionBlockVersion> getBlockHistory(
|
||||
|
||||
@@ -78,31 +78,24 @@ public class UserController {
|
||||
|
||||
@PostMapping("/users")
|
||||
@RequirePermission(Permission.ADMIN_USER)
|
||||
public ResponseEntity<AppUser> createUser(Authentication authentication,
|
||||
@Valid @RequestBody CreateUserRequest request) {
|
||||
return ResponseEntity.ok(userService.createUserOrUpdate(actorId(authentication), request));
|
||||
public ResponseEntity<AppUser> createUser(@Valid @RequestBody CreateUserRequest request) {
|
||||
return ResponseEntity.ok(userService.createUserOrUpdate(request));
|
||||
}
|
||||
|
||||
@PutMapping("/users/{id}")
|
||||
@RequirePermission(Permission.ADMIN_USER)
|
||||
public ResponseEntity<AppUser> adminUpdateUser(Authentication authentication,
|
||||
@PathVariable UUID id,
|
||||
public ResponseEntity<AppUser> adminUpdateUser(@PathVariable UUID id,
|
||||
@RequestBody AdminUpdateUserRequest dto) {
|
||||
AppUser updated = userService.adminUpdateUser(actorId(authentication), id, dto);
|
||||
AppUser updated = userService.adminUpdateUser(id, dto);
|
||||
updated.setPassword(null);
|
||||
return ResponseEntity.ok(updated);
|
||||
}
|
||||
|
||||
@DeleteMapping("/users/{id}")
|
||||
@RequirePermission(Permission.ADMIN_USER)
|
||||
public ResponseEntity<Void> deleteUser(Authentication authentication,
|
||||
@PathVariable UUID id) {
|
||||
userService.deleteUser(actorId(authentication), id);
|
||||
public ResponseEntity<Void> deleteUser(@PathVariable UUID id) {
|
||||
userService.deleteUser(id);
|
||||
return ResponseEntity.ok().build();
|
||||
}
|
||||
|
||||
private UUID actorId(Authentication auth) {
|
||||
return userService.findByEmail(auth.getName()).getId();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -14,20 +14,5 @@ public record ActivityFeedItemDTO(
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID documentId,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String documentTitle,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) OffsetDateTime happenedAt,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) boolean youMentioned,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) boolean youParticipated,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int count,
|
||||
@Nullable OffsetDateTime happenedAtUntil,
|
||||
@Nullable
|
||||
@Schema(
|
||||
requiredMode = Schema.RequiredMode.NOT_REQUIRED,
|
||||
description = "Deep-link target comment; populated only for COMMENT_ADDED and MENTION_CREATED kinds."
|
||||
)
|
||||
UUID commentId,
|
||||
@Nullable
|
||||
@Schema(
|
||||
requiredMode = Schema.RequiredMode.NOT_REQUIRED,
|
||||
description = "Annotation associated with the comment; populated only for COMMENT_ADDED and MENTION_CREATED kinds."
|
||||
)
|
||||
UUID annotationId
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) boolean youMentioned
|
||||
) {}
|
||||
|
||||
@@ -1,10 +1,6 @@
|
||||
package org.raddatz.familienarchiv.dashboard;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Parameter;
|
||||
import io.swagger.v3.oas.annotations.media.ArraySchema;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import org.raddatz.familienarchiv.audit.AuditKind;
|
||||
import org.raddatz.familienarchiv.security.Permission;
|
||||
import org.raddatz.familienarchiv.security.RequirePermission;
|
||||
import org.raddatz.familienarchiv.security.SecurityUtils;
|
||||
@@ -13,7 +9,6 @@ import org.springframework.security.core.Authentication;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
|
||||
@RestController
|
||||
@@ -40,12 +35,8 @@ public class DashboardController {
|
||||
@GetMapping("/activity")
|
||||
public List<ActivityFeedItemDTO> getActivity(
|
||||
Authentication authentication,
|
||||
@RequestParam(defaultValue = "7") int limit,
|
||||
@Parameter(description = "Filter by audit kinds; omit for all rollup-eligible kinds",
|
||||
array = @ArraySchema(schema = @Schema(implementation = AuditKind.class)))
|
||||
@RequestParam(required = false) Set<AuditKind> kinds) {
|
||||
@RequestParam(defaultValue = "7") int limit) {
|
||||
UUID userId = SecurityUtils.requireUserId(authentication, userService);
|
||||
Set<AuditKind> effectiveKinds = (kinds == null || kinds.isEmpty()) ? AuditKind.ROLLUP_ELIGIBLE : kinds;
|
||||
return dashboardService.getActivity(userId, Math.min(limit, 40), effectiveKinds);
|
||||
return dashboardService.getActivity(userId, Math.min(limit, 20));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,14 +4,12 @@ import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
|
||||
import org.raddatz.familienarchiv.audit.ActivityFeedRow;
|
||||
import org.raddatz.familienarchiv.audit.AuditKind;
|
||||
import org.raddatz.familienarchiv.audit.AuditLogQueryService;
|
||||
import org.raddatz.familienarchiv.audit.PulseStatsRow;
|
||||
import org.raddatz.familienarchiv.model.AppUser;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.model.Person;
|
||||
import org.raddatz.familienarchiv.model.TranscriptionBlock;
|
||||
import org.raddatz.familienarchiv.service.CommentService;
|
||||
import org.raddatz.familienarchiv.service.DocumentService;
|
||||
import org.raddatz.familienarchiv.service.TranscriptionService;
|
||||
import org.raddatz.familienarchiv.service.UserService;
|
||||
@@ -34,7 +32,6 @@ public class DashboardService {
|
||||
private final DocumentService documentService;
|
||||
private final TranscriptionService transcriptionService;
|
||||
private final UserService userService;
|
||||
private final CommentService commentService;
|
||||
|
||||
public DashboardResumeDTO getResume(UUID userId) {
|
||||
Optional<UUID> docIdOpt = auditLogQueryService.findMostRecentDocumentForUser(userId);
|
||||
@@ -82,7 +79,7 @@ public class DashboardService {
|
||||
.toList();
|
||||
|
||||
return new DashboardResumeDTO(docId, doc.getTitle(), caption, excerpt,
|
||||
totalBlocks, pct, doc.getThumbnailUrl(), collaborators);
|
||||
totalBlocks, pct, null, collaborators);
|
||||
}
|
||||
|
||||
public DashboardPulseDTO getPulse(UUID userId) {
|
||||
@@ -111,8 +108,8 @@ public class DashboardService {
|
||||
);
|
||||
}
|
||||
|
||||
public List<ActivityFeedItemDTO> getActivity(UUID currentUserId, int limit, Set<AuditKind> kinds) {
|
||||
List<ActivityFeedRow> rows = auditLogQueryService.findActivityFeed(currentUserId, limit, kinds);
|
||||
public List<ActivityFeedItemDTO> getActivity(UUID currentUserId, int limit) {
|
||||
List<ActivityFeedRow> rows = auditLogQueryService.findActivityFeed(currentUserId, limit);
|
||||
|
||||
List<UUID> docIds = rows.stream()
|
||||
.map(ActivityFeedRow::getDocumentId)
|
||||
@@ -128,37 +125,18 @@ public class DashboardService {
|
||||
log.warn("Activity: failed to bulk-load document titles", e);
|
||||
}
|
||||
|
||||
List<UUID> commentIds = rows.stream()
|
||||
.map(ActivityFeedRow::getCommentId)
|
||||
.filter(Objects::nonNull)
|
||||
.distinct()
|
||||
.toList();
|
||||
Map<UUID, UUID> annotationByComment = commentIds.isEmpty()
|
||||
? Map.of()
|
||||
: commentService.findAnnotationIdsByIds(commentIds);
|
||||
|
||||
return rows.stream().map(row -> {
|
||||
ActivityActorDTO actor = row.getActorId() != null
|
||||
? new ActivityActorDTO(row.getActorInitials(), row.getActorColor(), row.getActorName())
|
||||
: null;
|
||||
String docTitle = titleCache.getOrDefault(row.getDocumentId(), "");
|
||||
OffsetDateTime happenedAtUntil = row.getHappenedAtUntil() != null
|
||||
? row.getHappenedAtUntil().atOffset(ZoneOffset.UTC)
|
||||
: null;
|
||||
UUID commentId = row.getCommentId();
|
||||
UUID annotationId = commentId != null ? annotationByComment.get(commentId) : null;
|
||||
return new ActivityFeedItemDTO(
|
||||
org.raddatz.familienarchiv.audit.AuditKind.valueOf(row.getKind()),
|
||||
actor,
|
||||
row.getDocumentId(),
|
||||
docTitle,
|
||||
row.getHappenedAt().atOffset(ZoneOffset.UTC),
|
||||
row.isYouMentioned(),
|
||||
row.isYouParticipated(),
|
||||
row.getCount(),
|
||||
happenedAtUntil,
|
||||
commentId,
|
||||
annotationId
|
||||
row.isYouMentioned()
|
||||
);
|
||||
}).toList();
|
||||
}
|
||||
|
||||
@@ -1,9 +0,0 @@
|
||||
package org.raddatz.familienarchiv.dto;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
|
||||
public record BatchMetadataRequest(
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) List<UUID> ids) {}
|
||||
@@ -1,9 +0,0 @@
|
||||
package org.raddatz.familienarchiv.dto;
|
||||
|
||||
import java.util.UUID;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
|
||||
public record BulkEditError(
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID id,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String message) {}
|
||||
@@ -1,9 +0,0 @@
|
||||
package org.raddatz.familienarchiv.dto;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
|
||||
public record BulkEditResult(
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int updated,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) List<BulkEditError> errors) {}
|
||||
@@ -1,18 +0,0 @@
|
||||
package org.raddatz.familienarchiv.dto;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import java.time.LocalDate;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
@Data
|
||||
public class DocumentBatchMetadataDTO {
|
||||
private List<String> titles;
|
||||
private UUID senderId;
|
||||
private List<UUID> receiverIds;
|
||||
private LocalDate documentDate;
|
||||
private String location;
|
||||
private List<String> tagNames;
|
||||
private Boolean metadataComplete;
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
package org.raddatz.familienarchiv.dto;
|
||||
|
||||
import java.util.UUID;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
|
||||
public record DocumentBatchSummary(
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID id,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String title,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String pdfUrl) {}
|
||||
@@ -1,60 +0,0 @@
|
||||
package org.raddatz.familienarchiv.dto;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import jakarta.validation.constraints.Size;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
/**
|
||||
* Request body for {@code PATCH /api/documents/bulk}. Field semantics:
|
||||
* <ul>
|
||||
* <li>{@code tagNames} and {@code receiverIds} are <b>additive</b> —
|
||||
* merged into each document's existing set, never replacing it.</li>
|
||||
* <li>{@code senderId}, {@code documentLocation}, {@code archiveBox},
|
||||
* {@code archiveFolder} are <b>replace-on-non-blank</b> — null/blank
|
||||
* fields are skipped, anything else overwrites.</li>
|
||||
* </ul>
|
||||
*
|
||||
* <p>Kept as a Lombok {@code @Data} POJO (not a record) for symmetry with
|
||||
* the existing {@code DocumentUpdateDTO} and to keep test setup terse —
|
||||
* the per-feature DTOs introduced alongside this one ({@link BulkEditError},
|
||||
* {@link BulkEditResult}, {@link BatchMetadataRequest},
|
||||
* {@link DocumentBatchSummary}) <i>are</i> records because they have no
|
||||
* test-side mutation. Tracked in the cycle-1 review for follow-up.
|
||||
*
|
||||
* <p>Bean-validation caps below defend against payload-amplification: the
|
||||
* 1 MiB SvelteKit proxy cap allows ~26k UUIDs through to the backend, and
|
||||
* Jetty's default body limit is 8 MB. {@code @Size} guards catch malformed
|
||||
* clients without depending on those outer bounds.
|
||||
*/
|
||||
@Data
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class DocumentBulkEditDTO {
|
||||
|
||||
// No @Size cap here on purpose: the controller's BULK_EDIT_MAX_IDS check
|
||||
// returns the typed BULK_EDIT_TOO_MANY_IDS error code, which the frontend
|
||||
// maps to a localised "Maximal 500 …" message via Paraglide. A bean-
|
||||
// validation @Size would short-circuit that with a generic VALIDATION_ERROR.
|
||||
private List<UUID> documentIds;
|
||||
|
||||
@Size(max = 200, message = "tagNames must not exceed 200 entries")
|
||||
private List<@Size(max = 200, message = "tagName must not exceed 200 chars") String> tagNames;
|
||||
|
||||
private UUID senderId;
|
||||
|
||||
@Size(max = 200, message = "receiverIds must not exceed 200 entries")
|
||||
private List<UUID> receiverIds;
|
||||
|
||||
@Size(max = 255, message = "documentLocation must not exceed 255 chars")
|
||||
private String documentLocation;
|
||||
|
||||
@Size(max = 255, message = "archiveBox must not exceed 255 chars")
|
||||
private String archiveBox;
|
||||
|
||||
@Size(max = 255, message = "archiveFolder must not exceed 255 chars")
|
||||
private String archiveFolder;
|
||||
}
|
||||
@@ -1,18 +0,0 @@
|
||||
package org.raddatz.familienarchiv.dto;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
public record DocumentSearchItem(
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||
Document document,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||
SearchMatchData matchData,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||
int completionPercentage,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||
List<ActivityActorDTO> contributors
|
||||
) {}
|
||||
@@ -1,38 +1,35 @@
|
||||
package org.raddatz.familienarchiv.dto;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
|
||||
public record DocumentSearchResult(
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||
List<DocumentSearchItem> items,
|
||||
List<Document> documents,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||
long totalElements,
|
||||
long total,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||
int pageNumber,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||
int pageSize,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||
int totalPages
|
||||
Map<UUID, SearchMatchData> matchData
|
||||
) {
|
||||
/**
|
||||
* Single-page convenience factory used by empty-result shortcuts and by tests that
|
||||
* don't care about paging. Treats the whole list as page 0 of itself.
|
||||
* Creates a fully-enriched result from documents and their match overlay data.
|
||||
* Absent map entries (e.g. document deleted between FTS and enrichment) are safe —
|
||||
* the frontend treats a missing entry as "no match data".
|
||||
*/
|
||||
public static DocumentSearchResult of(List<DocumentSearchItem> items) {
|
||||
int size = items.size();
|
||||
return new DocumentSearchResult(items, size, 0, size, size == 0 ? 0 : 1);
|
||||
public static DocumentSearchResult withMatchData(List<Document> documents, Map<UUID, SearchMatchData> matchData) {
|
||||
return new DocumentSearchResult(documents, documents.size(), matchData);
|
||||
}
|
||||
|
||||
/**
|
||||
* Paged factory used by the service when it has a real Pageable + full match count
|
||||
* (e.g. from Spring's Page<T> or from an in-memory sort-then-slice).
|
||||
* Creates a result without match data — used for filter-only searches (no text query).
|
||||
* No pagination yet — the full matched set is always returned.
|
||||
* When pagination is added, total must come from a DB COUNT query, not list.size().
|
||||
*/
|
||||
public static DocumentSearchResult paged(List<DocumentSearchItem> slice, Pageable pageable, long totalElements) {
|
||||
int pageSize = pageable.getPageSize();
|
||||
int totalPages = pageSize == 0 ? 0 : (int) ((totalElements + pageSize - 1) / pageSize);
|
||||
return new DocumentSearchResult(slice, totalElements, pageable.getPageNumber(), pageSize, totalPages);
|
||||
public static DocumentSearchResult of(List<Document> documents) {
|
||||
return withMatchData(documents, Map.of());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,8 +13,6 @@ public class DocumentUpdateDTO {
|
||||
private LocalDate documentDate;
|
||||
private String location;
|
||||
private String documentLocation;
|
||||
private String archiveBox;
|
||||
private String archiveFolder;
|
||||
private String transcription;
|
||||
private String summary;
|
||||
private UUID senderId;
|
||||
|
||||
@@ -2,11 +2,9 @@ package org.raddatz.familienarchiv.dto;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.UUID;
|
||||
|
||||
public record IncompleteDocumentDTO(
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID id,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String title,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) LocalDateTime uploadedAt
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String title
|
||||
) {}
|
||||
|
||||
@@ -1,14 +1,10 @@
|
||||
package org.raddatz.familienarchiv.dto;
|
||||
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import jakarta.validation.constraints.Size;
|
||||
import lombok.Data;
|
||||
import org.raddatz.familienarchiv.model.PersonType;
|
||||
|
||||
@Data
|
||||
public class PersonUpdateDTO {
|
||||
@NotNull
|
||||
private PersonType personType;
|
||||
@Size(max = 50)
|
||||
private String title;
|
||||
@Size(max = 100)
|
||||
|
||||
@@ -13,8 +13,6 @@ public enum ErrorCode {
|
||||
PERSON_NOT_FOUND,
|
||||
/** A person name alias with the given ID does not exist. 404 */
|
||||
ALIAS_NOT_FOUND,
|
||||
/** The submitted personType value is not allowed (e.g. SKIP is import-only). 400 */
|
||||
INVALID_PERSON_TYPE,
|
||||
|
||||
// --- Documents ---
|
||||
/** A document with the given ID does not exist. 404 */
|
||||
@@ -40,10 +38,6 @@ public enum ErrorCode {
|
||||
/** A mass import is already in progress; only one can run at a time. 409 */
|
||||
IMPORT_ALREADY_RUNNING,
|
||||
|
||||
// --- Thumbnails ---
|
||||
/** A thumbnail backfill is already in progress; only one can run at a time. 409 */
|
||||
THUMBNAIL_BACKFILL_ALREADY_RUNNING,
|
||||
|
||||
// --- Invites ---
|
||||
/** The invite code does not exist. 404 */
|
||||
INVITE_NOT_FOUND,
|
||||
@@ -111,10 +105,6 @@ public enum ErrorCode {
|
||||
// --- Generic ---
|
||||
/** Request validation failed (missing or malformed fields). 400 */
|
||||
VALIDATION_ERROR,
|
||||
/** Batch upload exceeds the maximum allowed file count per request. 400 */
|
||||
BATCH_TOO_LARGE,
|
||||
/** Bulk edit request exceeds the per-request document ID cap. 400 */
|
||||
BULK_EDIT_TOO_MANY_IDS,
|
||||
/** An unexpected server-side error occurred. 500 */
|
||||
INTERNAL_ERROR,
|
||||
}
|
||||
|
||||
@@ -6,11 +6,8 @@ import org.hibernate.annotations.CreationTimestamp;
|
||||
import org.hibernate.annotations.UpdateTimestamp;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
|
||||
import java.net.URLEncoder;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.time.LocalDate;
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.HashSet;
|
||||
@@ -46,20 +43,6 @@ public class Document {
|
||||
@Column(name = "file_hash", length = 64)
|
||||
private String fileHash;
|
||||
|
||||
// S3 key of the generated thumbnail (e.g. "thumbnails/{docId}.jpg"); null until generated
|
||||
@Column(name = "thumbnail_key")
|
||||
private String thumbnailKey;
|
||||
|
||||
@Column(name = "thumbnail_generated_at")
|
||||
private LocalDateTime thumbnailGeneratedAt;
|
||||
|
||||
@Enumerated(EnumType.STRING)
|
||||
@Column(name = "thumbnail_aspect", length = 16)
|
||||
private ThumbnailAspect thumbnailAspect;
|
||||
|
||||
@Column(name = "page_count")
|
||||
private Integer pageCount;
|
||||
|
||||
// Originaler Dateiname beim Upload (z.B. "Brief_Oma_1940.pdf")
|
||||
@Column(name = "original_filename", nullable = false)
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||
@@ -134,19 +117,4 @@ public class Document {
|
||||
@Enumerated(EnumType.STRING)
|
||||
@Builder.Default
|
||||
private Set<TrainingLabel> trainingLabels = new HashSet<>();
|
||||
|
||||
// The `?v={thumbnailGeneratedAt}` cache-buster is load-bearing: the thumbnail
|
||||
// endpoint sends `Cache-Control: private, max-age=31536000, immutable`
|
||||
// (DocumentController.getDocumentThumbnail). `immutable` is only safe because
|
||||
// this URL changes whenever the underlying file does. Dropping the query param
|
||||
// would let browsers serve a stale thumbnail for a year after the file is
|
||||
// replaced, and shared caches could leak one user's thumbnail to another
|
||||
// (CWE-525).
|
||||
@JsonProperty("thumbnailUrl")
|
||||
public String getThumbnailUrl() {
|
||||
if (thumbnailKey == null) return null;
|
||||
String base = "/api/documents/" + id + "/thumbnail";
|
||||
if (thumbnailGeneratedAt == null) return base;
|
||||
return base + "?v=" + URLEncoder.encode(thumbnailGeneratedAt.toString(), StandardCharsets.UTF_8);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
package org.raddatz.familienarchiv.model;
|
||||
|
||||
public enum ThumbnailAspect {
|
||||
PORTRAIT,
|
||||
LANDSCAPE
|
||||
}
|
||||
@@ -8,6 +8,10 @@ import java.util.UUID;
|
||||
|
||||
public interface CommentRepository extends JpaRepository<DocumentComment, UUID> {
|
||||
|
||||
List<DocumentComment> findByDocumentIdAndAnnotationIdIsNullAndParentIdIsNull(UUID documentId);
|
||||
|
||||
List<DocumentComment> findByAnnotationIdAndParentIdIsNull(UUID annotationId);
|
||||
|
||||
List<DocumentComment> findByParentId(UUID parentId);
|
||||
|
||||
List<DocumentComment> findByBlockIdAndParentIdIsNull(UUID blockId);
|
||||
|
||||
@@ -1,8 +0,0 @@
|
||||
package org.raddatz.familienarchiv.repository;
|
||||
|
||||
import java.util.UUID;
|
||||
|
||||
public interface CompletionStatsRow {
|
||||
UUID getDocumentId();
|
||||
int getCompletionPercentage();
|
||||
}
|
||||
@@ -46,8 +46,6 @@ public interface DocumentRepository extends JpaRepository<Document, UUID>, JpaSp
|
||||
|
||||
List<Document> findByFileHashIsNullAndFilePathIsNotNull();
|
||||
|
||||
List<Document> findByFilePathIsNotNullAndThumbnailKeyIsNull();
|
||||
|
||||
@Query("SELECT d.id, d.title FROM Document d WHERE d.id IN :ids")
|
||||
List<Object[]> findIdAndTitleByIdIn(@Param("ids") Collection<UUID> ids);
|
||||
|
||||
@@ -87,7 +85,7 @@ public interface DocumentRepository extends JpaRepository<Document, UUID>, JpaSp
|
||||
SELECT d.id FROM documents d
|
||||
CROSS JOIN LATERAL (
|
||||
SELECT CASE WHEN websearch_to_tsquery('german', :query)::text <> ''
|
||||
THEN to_tsquery('simple', regexp_replace(
|
||||
THEN to_tsquery('german', regexp_replace(
|
||||
websearch_to_tsquery('german', :query)::text,
|
||||
'''([^'']+)''',
|
||||
'''\\1'':*',
|
||||
@@ -149,7 +147,7 @@ public interface DocumentRepository extends JpaRepository<Document, UUID>, JpaSp
|
||||
FROM documents d
|
||||
CROSS JOIN LATERAL (
|
||||
SELECT CASE WHEN websearch_to_tsquery('german', :query)::text <> ''
|
||||
THEN to_tsquery('simple', regexp_replace(
|
||||
THEN to_tsquery('german', regexp_replace(
|
||||
websearch_to_tsquery('german', :query)::text,
|
||||
'''([^'']+)''',
|
||||
'''\\1'':*',
|
||||
|
||||
@@ -5,24 +5,12 @@ import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import org.springframework.data.jpa.repository.Query;
|
||||
import org.springframework.data.repository.query.Param;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
|
||||
public interface TranscriptionBlockRepository extends JpaRepository<TranscriptionBlock, UUID> {
|
||||
|
||||
@Query(value = """
|
||||
SELECT
|
||||
b.document_id AS documentId,
|
||||
ROUND(COUNT(*) FILTER (WHERE b.reviewed = true) * 100.0 / COUNT(*))::int AS completionPercentage
|
||||
FROM transcription_blocks b
|
||||
WHERE b.document_id IN :documentIds
|
||||
GROUP BY b.document_id
|
||||
""", nativeQuery = true)
|
||||
List<CompletionStatsRow> findCompletionStatsForDocuments(
|
||||
@Param("documentIds") Collection<UUID> documentIds);
|
||||
|
||||
List<TranscriptionBlock> findByDocumentIdOrderBySortOrderAsc(UUID documentId);
|
||||
|
||||
Optional<TranscriptionBlock> findByIdAndDocumentId(UUID id, UUID documentId);
|
||||
|
||||
@@ -8,13 +8,10 @@ import org.raddatz.familienarchiv.exception.DomainException;
|
||||
import org.raddatz.familienarchiv.exception.ErrorCode;
|
||||
import org.raddatz.familienarchiv.model.AppUser;
|
||||
import org.raddatz.familienarchiv.model.DocumentComment;
|
||||
import org.raddatz.familienarchiv.model.TranscriptionBlock;
|
||||
import org.raddatz.familienarchiv.repository.CommentRepository;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
@@ -29,15 +26,16 @@ public class CommentService {
|
||||
private final UserService userService;
|
||||
private final NotificationService notificationService;
|
||||
private final AuditService auditService;
|
||||
private final TranscriptionService transcriptionService;
|
||||
|
||||
public Map<UUID, UUID> findAnnotationIdsByIds(Collection<UUID> commentIds) {
|
||||
if (commentIds == null || commentIds.isEmpty()) return Map.of();
|
||||
Map<UUID, UUID> result = new HashMap<>();
|
||||
for (DocumentComment c : commentRepository.findAllById(commentIds)) {
|
||||
if (c.getAnnotationId() != null) result.put(c.getId(), c.getAnnotationId());
|
||||
}
|
||||
return result;
|
||||
public List<DocumentComment> getCommentsForDocument(UUID documentId) {
|
||||
List<DocumentComment> roots =
|
||||
commentRepository.findByDocumentIdAndAnnotationIdIsNullAndParentIdIsNull(documentId);
|
||||
return withRepliesAndMentions(roots);
|
||||
}
|
||||
|
||||
public List<DocumentComment> getCommentsForAnnotation(UUID annotationId) {
|
||||
List<DocumentComment> roots = commentRepository.findByAnnotationIdAndParentIdIsNull(annotationId);
|
||||
return withRepliesAndMentions(roots);
|
||||
}
|
||||
|
||||
public List<DocumentComment> getCommentsForBlock(UUID blockId) {
|
||||
@@ -48,11 +46,27 @@ public class CommentService {
|
||||
@Transactional
|
||||
public DocumentComment postBlockComment(UUID documentId, UUID blockId, String content,
|
||||
List<UUID> mentionedUserIds, AppUser author) {
|
||||
TranscriptionBlock block = transcriptionService.getBlock(documentId, blockId);
|
||||
DocumentComment comment = DocumentComment.builder()
|
||||
.documentId(documentId)
|
||||
.blockId(blockId)
|
||||
.annotationId(block.getAnnotationId())
|
||||
.content(content)
|
||||
.authorId(author.getId())
|
||||
.authorName(resolveAuthorName(author))
|
||||
.build();
|
||||
saveMentions(comment, mentionedUserIds);
|
||||
DocumentComment saved = commentRepository.save(comment);
|
||||
withMentionDTOs(saved);
|
||||
notificationService.notifyMentions(mentionedUserIds, saved);
|
||||
logCommentPosted(author, documentId, saved, mentionedUserIds);
|
||||
return saved;
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public DocumentComment postComment(UUID documentId, UUID annotationId, String content,
|
||||
List<UUID> mentionedUserIds, AppUser author) {
|
||||
DocumentComment comment = DocumentComment.builder()
|
||||
.documentId(documentId)
|
||||
.annotationId(annotationId)
|
||||
.content(content)
|
||||
.authorId(author.getId())
|
||||
.authorName(resolveAuthorName(author))
|
||||
|
||||
@@ -3,14 +3,8 @@ package org.raddatz.familienarchiv.service;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
|
||||
import org.raddatz.familienarchiv.audit.AuditKind;
|
||||
import org.raddatz.familienarchiv.audit.AuditLogQueryService;
|
||||
import org.raddatz.familienarchiv.audit.AuditService;
|
||||
import org.raddatz.familienarchiv.dto.DocumentBatchMetadataDTO;
|
||||
import org.raddatz.familienarchiv.dto.DocumentBatchSummary;
|
||||
import org.raddatz.familienarchiv.dto.DocumentBulkEditDTO;
|
||||
import org.raddatz.familienarchiv.dto.DocumentSearchItem;
|
||||
import org.raddatz.familienarchiv.dto.DocumentSearchResult;
|
||||
import org.raddatz.familienarchiv.dto.DocumentSort;
|
||||
import org.raddatz.familienarchiv.dto.DocumentUpdateDTO;
|
||||
@@ -25,9 +19,7 @@ import org.raddatz.familienarchiv.model.TrainingLabel;
|
||||
import org.raddatz.familienarchiv.model.Person;
|
||||
import org.raddatz.familienarchiv.model.Tag;
|
||||
import org.raddatz.familienarchiv.repository.DocumentRepository;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.jpa.domain.Specification;
|
||||
import org.raddatz.familienarchiv.exception.DomainException;
|
||||
@@ -67,9 +59,6 @@ public class DocumentService {
|
||||
private final DocumentVersionService documentVersionService;
|
||||
private final AnnotationService annotationService;
|
||||
private final AuditService auditService;
|
||||
private final TranscriptionBlockQueryService transcriptionBlockQueryService;
|
||||
private final AuditLogQueryService auditLogQueryService;
|
||||
private final ThumbnailAsyncRunner thumbnailAsyncRunner;
|
||||
|
||||
public record StoreResult(Document document, boolean isNew) {}
|
||||
|
||||
@@ -131,56 +120,9 @@ public class DocumentService {
|
||||
if (wasPlaceholder) {
|
||||
auditService.logAfterCommit(AuditKind.FILE_UPLOADED, actorId, saved.getId(), null);
|
||||
}
|
||||
thumbnailAsyncRunner.dispatchAfterCommit(saved.getId());
|
||||
return new StoreResult(saved, isNew);
|
||||
}
|
||||
|
||||
public void validateBatch(int fileCount, DocumentBatchMetadataDTO metadata) {
|
||||
// 50-file hard cap keeps FormData requests at a manageable size and protects against runaway bulk uploads.
|
||||
if (fileCount > 50) {
|
||||
throw DomainException.badRequest(ErrorCode.BATCH_TOO_LARGE, "Batch exceeds maximum of 50 files per request");
|
||||
}
|
||||
if (metadata != null && metadata.getTitles() != null && metadata.getTitles().size() > fileCount) {
|
||||
throw DomainException.badRequest(ErrorCode.VALIDATION_ERROR, "titles count must not exceed files count");
|
||||
}
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public StoreResult storeDocumentWithBatchMetadata(
|
||||
MultipartFile file, DocumentBatchMetadataDTO metadata, int fileIndex, UUID actorId) throws IOException {
|
||||
StoreResult base = storeDocument(file, actorId);
|
||||
Document doc = applyBatchMetadata(base.document(), metadata, fileIndex);
|
||||
return new StoreResult(documentRepository.save(doc), base.isNew());
|
||||
}
|
||||
|
||||
private Document applyBatchMetadata(Document doc, DocumentBatchMetadataDTO metadata, int fileIndex) {
|
||||
if (metadata.getTitles() != null && fileIndex < metadata.getTitles().size()) {
|
||||
doc.setTitle(metadata.getTitles().get(fileIndex));
|
||||
}
|
||||
if (metadata.getSenderId() != null) {
|
||||
doc.setSender(personService.getById(metadata.getSenderId()));
|
||||
}
|
||||
if (metadata.getReceiverIds() != null && !metadata.getReceiverIds().isEmpty()) {
|
||||
doc.setReceivers(new HashSet<>(personService.getAllById(metadata.getReceiverIds())));
|
||||
}
|
||||
if (metadata.getDocumentDate() != null) {
|
||||
doc.setDocumentDate(metadata.getDocumentDate());
|
||||
}
|
||||
if (metadata.getLocation() != null) {
|
||||
doc.setLocation(metadata.getLocation());
|
||||
}
|
||||
if (metadata.getMetadataComplete() != null) {
|
||||
doc.setMetadataComplete(metadata.getMetadataComplete());
|
||||
}
|
||||
if (metadata.getTagNames() != null && !metadata.getTagNames().isEmpty()) {
|
||||
UUID docId = doc.getId();
|
||||
updateDocumentTags(docId, metadata.getTagNames());
|
||||
doc = documentRepository.findById(docId)
|
||||
.orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Not found after batch metadata: " + docId));
|
||||
}
|
||||
return doc;
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public Document createDocument(DocumentUpdateDTO dto, MultipartFile file) throws IOException {
|
||||
String filename = (file != null && !file.isEmpty())
|
||||
@@ -240,8 +182,7 @@ public class DocumentService {
|
||||
}
|
||||
|
||||
// Datei
|
||||
boolean fileUploaded = file != null && !file.isEmpty();
|
||||
if (fileUploaded) {
|
||||
if (file != null && !file.isEmpty()) {
|
||||
FileService.UploadResult upload = fileService.uploadFile(file, file.getOriginalFilename());
|
||||
doc.setFilePath(upload.s3Key());
|
||||
doc.setFileHash(upload.fileHash());
|
||||
@@ -251,9 +192,6 @@ public class DocumentService {
|
||||
|
||||
Document finalDoc = documentRepository.save(doc);
|
||||
documentVersionService.recordVersion(finalDoc);
|
||||
if (fileUploaded) {
|
||||
thumbnailAsyncRunner.dispatchAfterCommit(finalDoc.getId());
|
||||
}
|
||||
return finalDoc;
|
||||
}
|
||||
|
||||
@@ -271,8 +209,6 @@ public class DocumentService {
|
||||
doc.setTranscription(dto.getTranscription());
|
||||
doc.setSummary(dto.getSummary());
|
||||
doc.setDocumentLocation(dto.getDocumentLocation());
|
||||
doc.setArchiveBox(dto.getArchiveBox());
|
||||
doc.setArchiveFolder(dto.getArchiveFolder());
|
||||
|
||||
List<String> tags = new ArrayList<>();
|
||||
if (dto.getTags() != null && !dto.getTags().isBlank()) {
|
||||
@@ -308,8 +244,7 @@ public class DocumentService {
|
||||
}
|
||||
|
||||
// 4. Datei austauschen (nur wenn eine neue ausgewählt wurde)
|
||||
boolean fileReplaced = newFile != null && !newFile.isEmpty();
|
||||
if (fileReplaced) {
|
||||
if (newFile != null && !newFile.isEmpty()) {
|
||||
FileService.UploadResult upload = fileService.uploadFile(newFile, newFile.getOriginalFilename());
|
||||
doc.setFilePath(upload.s3Key());
|
||||
doc.setFileHash(upload.fileHash());
|
||||
@@ -328,153 +263,26 @@ public class DocumentService {
|
||||
auditService.logAfterCommit(AuditKind.METADATA_UPDATED, actorId, saved.getId(), null);
|
||||
}
|
||||
|
||||
if (fileReplaced) {
|
||||
thumbnailAsyncRunner.dispatchAfterCommit(saved.getId());
|
||||
}
|
||||
|
||||
return saved;
|
||||
}
|
||||
|
||||
public Document updateDocumentTags(UUID docId, List<String> tagNames) {
|
||||
Document doc = documentRepository.findById(docId)
|
||||
.orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Document not found: " + docId));
|
||||
doc.setTags(resolveTags(tagNames));
|
||||
return documentRepository.save(doc);
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves a list of tag-name strings to {@link Tag} entities, trimming
|
||||
* whitespace and skipping blank entries. Single source of truth for
|
||||
* "name string → Tag" so the find-or-create policy stays consistent
|
||||
* across single-doc updates ({@link #updateDocumentTags}), bulk edits
|
||||
* ({@link #applyBulkEditToDocument}), and the upload-batch path
|
||||
* ({@code applyBatchMetadata}).
|
||||
*/
|
||||
private Set<Tag> resolveTags(List<String> tagNames) {
|
||||
if (tagNames == null || tagNames.isEmpty()) return new HashSet<>();
|
||||
Set<Tag> resolved = new HashSet<>();
|
||||
Set<Tag> newTags = new HashSet<>();
|
||||
|
||||
for (String name : tagNames) {
|
||||
// Clean the string
|
||||
String cleanName = name.trim();
|
||||
if (cleanName.isEmpty()) continue;
|
||||
resolved.add(tagService.findOrCreate(cleanName));
|
||||
}
|
||||
return resolved;
|
||||
}
|
||||
if (cleanName.isEmpty())
|
||||
continue;
|
||||
|
||||
/**
|
||||
* Returns all document IDs matching the given filter parameters, ignoring
|
||||
* pagination. Used by the bulk-edit "Alle X editieren" fast path so the
|
||||
* frontend can replace the selection with every match across pages in one
|
||||
* round-trip.
|
||||
*/
|
||||
@Transactional(readOnly = true)
|
||||
public List<UUID> findIdsForFilter(String text, LocalDate from, LocalDate to, UUID sender, UUID receiver,
|
||||
List<String> tags, String tagQ, DocumentStatus status, TagOperator tagOperator) {
|
||||
boolean hasText = StringUtils.hasText(text);
|
||||
List<UUID> rankedIds = null;
|
||||
if (hasText) {
|
||||
rankedIds = documentRepository.findRankedIdsByFts(text);
|
||||
if (rankedIds.isEmpty()) return List.of();
|
||||
newTags.add(tagService.findOrCreate(cleanName));
|
||||
}
|
||||
|
||||
Specification<Document> spec = buildSearchSpec(
|
||||
hasText, rankedIds, from, to, sender, receiver, tags, tagQ, status, tagOperator);
|
||||
return documentRepository.findAll(spec).stream().map(Document::getId).toList();
|
||||
}
|
||||
|
||||
/**
|
||||
* Single source of truth for the search Specification chain. Shared by
|
||||
* {@link #searchDocuments} (paged + sorted) and {@link #findIdsForFilter}
|
||||
* (uncapped, ID-only). Caller does its own FTS short-circuit when the
|
||||
* full-text query returned no rows.
|
||||
*/
|
||||
private Specification<Document> buildSearchSpec(boolean hasText, List<UUID> ftsIds,
|
||||
LocalDate from, LocalDate to,
|
||||
UUID sender, UUID receiver,
|
||||
List<String> tags, String tagQ,
|
||||
DocumentStatus status, TagOperator tagOperator) {
|
||||
boolean useOrLogic = tagOperator == TagOperator.OR;
|
||||
List<Set<UUID>> expandedTagSets = tagService.expandTagNamesToDescendantIdSets(tags);
|
||||
Specification<Document> textSpec = hasText ? hasIds(ftsIds) : (root, query, cb) -> null;
|
||||
return Specification.where(textSpec)
|
||||
.and(isBetween(from, to))
|
||||
.and(hasSender(sender))
|
||||
.and(hasReceiver(receiver))
|
||||
.and(hasTags(expandedTagSets, useOrLogic))
|
||||
.and(hasTagPartial(tagQ))
|
||||
.and(hasStatus(status));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns lightweight summaries (id, title, server PDF URL) for the given
|
||||
* document IDs. Unknown IDs are silently dropped — the consumer is the
|
||||
* bulk-edit page's left strip, where missing previews would already be
|
||||
* obvious; surfacing them as errors here adds no value.
|
||||
*/
|
||||
@Transactional(readOnly = true)
|
||||
public List<DocumentBatchSummary> batchMetadata(List<UUID> ids) {
|
||||
if (ids == null || ids.isEmpty()) return List.of();
|
||||
return documentRepository.findAllById(ids).stream()
|
||||
.map(d -> new DocumentBatchSummary(
|
||||
d.getId(),
|
||||
d.getTitle() != null ? d.getTitle() : d.getOriginalFilename(),
|
||||
"/api/documents/" + d.getId() + "/file"))
|
||||
.toList();
|
||||
}
|
||||
|
||||
/**
|
||||
* Applies a bulk-edit DTO to a single document atomically.
|
||||
* Tags and receivers are additive (merged into existing sets); sender and the
|
||||
* three location fields are replace-on-non-blank (null/blank means "no change").
|
||||
* Wrapped in its own transaction so a failure on one document never partially
|
||||
* mutates another in the controller's batch loop.
|
||||
*
|
||||
* Each successful update emits a {@link AuditKind#METADATA_UPDATED} audit
|
||||
* event tagged {@code source=BULK_EDIT} and writes a row to
|
||||
* {@code document_versions} so the family archive's "who changed what"
|
||||
* trail stays complete across both single- and bulk-doc edit paths.
|
||||
*
|
||||
* NOTE on N+1: tag and person resolution happens per-document. With 500
|
||||
* documents × 10 tags this fans out to ~5000 tag-resolve queries per
|
||||
* request. Acceptable today because the family archive is bounded at
|
||||
* ~1500 documents total. Tracked as a perf follow-up.
|
||||
*/
|
||||
@Transactional
|
||||
public Document applyBulkEditToDocument(UUID id, DocumentBulkEditDTO dto, UUID actorId) {
|
||||
Document doc = documentRepository.findById(id)
|
||||
.orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Document not found: " + id));
|
||||
|
||||
if (dto.getTagNames() != null && !dto.getTagNames().isEmpty()) {
|
||||
Set<Tag> merged = new HashSet<>(doc.getTags());
|
||||
merged.addAll(resolveTags(dto.getTagNames()));
|
||||
doc.setTags(merged);
|
||||
}
|
||||
|
||||
if (dto.getSenderId() != null) {
|
||||
doc.setSender(personService.getById(dto.getSenderId()));
|
||||
}
|
||||
|
||||
if (dto.getReceiverIds() != null && !dto.getReceiverIds().isEmpty()) {
|
||||
Set<Person> merged = new HashSet<>(doc.getReceivers());
|
||||
merged.addAll(personService.getAllById(dto.getReceiverIds()));
|
||||
doc.setReceivers(merged);
|
||||
}
|
||||
|
||||
if (StringUtils.hasText(dto.getDocumentLocation())) {
|
||||
doc.setDocumentLocation(dto.getDocumentLocation());
|
||||
}
|
||||
if (StringUtils.hasText(dto.getArchiveBox())) {
|
||||
doc.setArchiveBox(dto.getArchiveBox());
|
||||
}
|
||||
if (StringUtils.hasText(dto.getArchiveFolder())) {
|
||||
doc.setArchiveFolder(dto.getArchiveFolder());
|
||||
}
|
||||
|
||||
Document saved = documentRepository.save(doc);
|
||||
documentVersionService.recordVersion(saved);
|
||||
auditService.logAfterCommit(AuditKind.METADATA_UPDATED, actorId, saved.getId(),
|
||||
Map.of("source", "BULK_EDIT"));
|
||||
return saved;
|
||||
doc.setTags(newTags);
|
||||
return documentRepository.save(doc);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -516,7 +324,6 @@ public class DocumentService {
|
||||
}
|
||||
Document saved = documentRepository.save(doc);
|
||||
documentVersionService.recordVersion(saved);
|
||||
thumbnailAsyncRunner.dispatchAfterCommit(saved.getId());
|
||||
if (wasPlaceholder) {
|
||||
auditService.logAfterCommit(AuditKind.FILE_UPLOADED, actorId, saved.getId(), null);
|
||||
}
|
||||
@@ -531,30 +338,38 @@ public class DocumentService {
|
||||
}
|
||||
|
||||
// 1. Allgemeine Suche (für das Suchfeld im Frontend)
|
||||
public DocumentSearchResult searchDocuments(String text, LocalDate from, LocalDate to, UUID sender, UUID receiver, List<String> tags, String tagQ, DocumentStatus status, DocumentSort sort, String dir, TagOperator tagOperator, Pageable pageable) {
|
||||
public DocumentSearchResult searchDocuments(String text, LocalDate from, LocalDate to, UUID sender, UUID receiver, List<String> tags, String tagQ, DocumentStatus status, DocumentSort sort, String dir, TagOperator tagOperator) {
|
||||
boolean hasText = StringUtils.hasText(text);
|
||||
List<UUID> rankedIds = null;
|
||||
|
||||
if (hasText) {
|
||||
rankedIds = documentRepository.findRankedIdsByFts(text);
|
||||
if (rankedIds.isEmpty()) return DocumentSearchResult.of(List.of());
|
||||
if (rankedIds.isEmpty()) return DocumentSearchResult.withMatchData(List.of(), Map.of());
|
||||
}
|
||||
|
||||
Specification<Document> spec = buildSearchSpec(
|
||||
hasText, rankedIds, from, to, sender, receiver, tags, tagQ, status, tagOperator);
|
||||
boolean useOrLogic = tagOperator == TagOperator.OR;
|
||||
List<Set<UUID>> expandedTagSets = tagService.expandTagNamesToDescendantIdSets(tags);
|
||||
|
||||
// SENDER, RECEIVER and RELEVANCE sorts load the full match set and slice in memory.
|
||||
// JPA's Sort.by("sender.lastName") generates an INNER JOIN that silently drops
|
||||
// documents with null sender/receivers; RELEVANCE maps a DB order to an external
|
||||
// rank list. Cost scales linearly with match count — acceptable while documents
|
||||
// stays under ~10k rows. Past that, replace with SQL-level LEFT JOIN sort.
|
||||
Specification<Document> textSpec = hasText ? hasIds(rankedIds) : (root, query, cb) -> null;
|
||||
Specification<Document> spec = Specification.where(textSpec)
|
||||
.and(isBetween(from, to))
|
||||
.and(hasSender(sender))
|
||||
.and(hasReceiver(receiver))
|
||||
.and(hasTags(expandedTagSets, useOrLogic))
|
||||
.and(hasTagPartial(tagQ))
|
||||
.and(hasStatus(status));
|
||||
|
||||
// SENDER and RECEIVER are sorted in-memory because JPA's Sort.by("sender.lastName")
|
||||
// generates an INNER JOIN that silently drops documents with null sender/receivers.
|
||||
if (sort == DocumentSort.RECEIVER) {
|
||||
List<Document> sorted = sortByFirstReceiver(documentRepository.findAll(spec), dir);
|
||||
return buildResultPaged(pageSlice(sorted, pageable), text, pageable, sorted.size());
|
||||
List<Document> results = documentRepository.findAll(spec);
|
||||
List<Document> sorted = sortByFirstReceiver(results, dir);
|
||||
return DocumentSearchResult.withMatchData(resolveDocumentTagColors(sorted), enrichWithMatchData(sorted, text));
|
||||
}
|
||||
if (sort == DocumentSort.SENDER) {
|
||||
List<Document> sorted = sortBySender(documentRepository.findAll(spec), dir);
|
||||
return buildResultPaged(pageSlice(sorted, pageable), text, pageable, sorted.size());
|
||||
List<Document> results = documentRepository.findAll(spec);
|
||||
List<Document> sorted = sortBySender(results, dir);
|
||||
return DocumentSearchResult.withMatchData(resolveDocumentTagColors(sorted), enrichWithMatchData(sorted, text));
|
||||
}
|
||||
|
||||
// RELEVANCE: default when text present and no explicit sort given
|
||||
@@ -567,43 +382,12 @@ public class DocumentService {
|
||||
.sorted(Comparator.comparingInt(
|
||||
doc -> rankMap.getOrDefault(doc.getId(), Integer.MAX_VALUE)))
|
||||
.toList();
|
||||
return buildResultPaged(pageSlice(sorted, pageable), text, pageable, sorted.size());
|
||||
return DocumentSearchResult.withMatchData(resolveDocumentTagColors(sorted), enrichWithMatchData(sorted, text));
|
||||
}
|
||||
|
||||
// Fast path — push sort + paging into the DB and enrich only the returned slice.
|
||||
PageRequest pageRequest = PageRequest.of(pageable.getPageNumber(), pageable.getPageSize(), resolveSort(sort, dir));
|
||||
Page<Document> page = documentRepository.findAll(spec, pageRequest);
|
||||
return buildResultPaged(page.getContent(), text, pageable, page.getTotalElements());
|
||||
}
|
||||
|
||||
private static <T> List<T> pageSlice(List<T> sorted, Pageable pageable) {
|
||||
int from = Math.min((int) pageable.getOffset(), sorted.size());
|
||||
int to = Math.min(from + pageable.getPageSize(), sorted.size());
|
||||
return sorted.subList(from, to);
|
||||
}
|
||||
|
||||
private DocumentSearchResult buildResultPaged(List<Document> slice, String text, Pageable pageable, long totalElements) {
|
||||
return DocumentSearchResult.paged(enrichItems(slice, text), pageable, totalElements);
|
||||
}
|
||||
|
||||
private List<DocumentSearchItem> enrichItems(List<Document> documents, String text) {
|
||||
List<Document> colorResolved = resolveDocumentTagColors(documents);
|
||||
Map<UUID, SearchMatchData> matchData = enrichWithMatchData(colorResolved, text);
|
||||
|
||||
List<UUID> docIds = colorResolved.stream().map(Document::getId).toList();
|
||||
Map<UUID, Integer> completionByDoc = fetchCompletionPercentages(docIds);
|
||||
Map<UUID, List<ActivityActorDTO>> contributorsByDoc = auditLogQueryService.findRecentContributorsPerDocument(docIds);
|
||||
|
||||
return colorResolved.stream().map(doc -> new DocumentSearchItem(
|
||||
doc,
|
||||
matchData.getOrDefault(doc.getId(), SearchMatchData.empty()),
|
||||
completionByDoc.getOrDefault(doc.getId(), 0),
|
||||
contributorsByDoc.getOrDefault(doc.getId(), List.of())
|
||||
)).toList();
|
||||
}
|
||||
|
||||
private Map<UUID, Integer> fetchCompletionPercentages(List<UUID> docIds) {
|
||||
return transcriptionBlockQueryService.getCompletionStats(docIds);
|
||||
Sort springSort = resolveSort(sort, dir);
|
||||
List<Document> results = documentRepository.findAll(spec, springSort);
|
||||
return DocumentSearchResult.withMatchData(resolveDocumentTagColors(results), enrichWithMatchData(results, text));
|
||||
}
|
||||
|
||||
private Sort resolveSort(DocumentSort sort, String dir) {
|
||||
@@ -733,7 +517,7 @@ public class DocumentService {
|
||||
PageRequest pageable = PageRequest.of(0, size, Sort.by(Sort.Direction.DESC, "createdAt"));
|
||||
return documentRepository.findByMetadataCompleteFalse(pageable)
|
||||
.stream()
|
||||
.map(doc -> new IncompleteDocumentDTO(doc.getId(), doc.getTitle(), doc.getCreatedAt()))
|
||||
.map(doc -> new IncompleteDocumentDTO(doc.getId(), doc.getTitle()))
|
||||
.toList();
|
||||
}
|
||||
|
||||
|
||||
@@ -112,27 +112,6 @@ public class FileService {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens a streaming download from S3/MinIO. The caller is responsible for
|
||||
* closing the returned stream — typically via try-with-resources. Preferred
|
||||
* over {@link #downloadFileBytes(String)} for large files (multi-MB PDFs
|
||||
* during thumbnail generation) because it avoids loading the entire file
|
||||
* into heap memory.
|
||||
*/
|
||||
public InputStream downloadFileStream(String s3Key) throws IOException {
|
||||
try {
|
||||
GetObjectRequest getObjectRequest = GetObjectRequest.builder()
|
||||
.bucket(bucketName)
|
||||
.key(s3Key)
|
||||
.build();
|
||||
return s3Client.getObject(getObjectRequest);
|
||||
} catch (NoSuchKeyException e) {
|
||||
throw new StorageFileNotFoundException("File not found in storage: " + s3Key);
|
||||
} catch (S3Exception e) {
|
||||
throw new IOException("Failed to open stream from storage: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a presigned URL for downloading an object from S3/MinIO.
|
||||
* Valid for 1 hour — covers multi-page documents on CPU-only OCR hardware
|
||||
|
||||
@@ -59,7 +59,6 @@ public class MassImportService {
|
||||
private final PersonService personService;
|
||||
private final TagService tagService;
|
||||
private final S3Client s3Client;
|
||||
private final ThumbnailAsyncRunner thumbnailAsyncRunner;
|
||||
|
||||
@Value("${app.s3.bucket}")
|
||||
private String bucketName;
|
||||
@@ -333,10 +332,7 @@ public class MassImportService {
|
||||
if (tag != null) doc.getTags().add(tag);
|
||||
doc.setMetadataComplete(metadataComplete);
|
||||
|
||||
Document saved = documentRepository.save(doc);
|
||||
if (file.isPresent()) {
|
||||
thumbnailAsyncRunner.dispatchAfterCommit(saved.getId());
|
||||
}
|
||||
documentRepository.save(doc);
|
||||
log.info("Importiert{}: {}", file.isEmpty() ? " (nur Metadaten)" : "", originalFilename);
|
||||
}
|
||||
|
||||
|
||||
@@ -109,12 +109,8 @@ public class PersonService {
|
||||
|
||||
@Transactional
|
||||
public Person createPerson(PersonUpdateDTO dto) {
|
||||
if (dto.getPersonType() == PersonType.SKIP) {
|
||||
throw DomainException.badRequest(ErrorCode.INVALID_PERSON_TYPE, "SKIP is not a valid person type for manual creation");
|
||||
}
|
||||
validateYears(dto.getBirthYear(), dto.getDeathYear());
|
||||
Person person = Person.builder()
|
||||
.personType(dto.getPersonType())
|
||||
.title(dto.getTitle() == null || dto.getTitle().isBlank() ? null : dto.getTitle().trim())
|
||||
.firstName(dto.getFirstName())
|
||||
.lastName(dto.getLastName())
|
||||
@@ -140,13 +136,9 @@ public class PersonService {
|
||||
|
||||
@Transactional
|
||||
public Person updatePerson(UUID id, PersonUpdateDTO dto) {
|
||||
if (dto.getPersonType() == PersonType.SKIP) {
|
||||
throw DomainException.badRequest(ErrorCode.INVALID_PERSON_TYPE, "SKIP is not a valid person type for manual editing");
|
||||
}
|
||||
validateYears(dto.getBirthYear(), dto.getDeathYear());
|
||||
Person person = personRepository.findById(id)
|
||||
.orElseThrow(() -> DomainException.notFound(ErrorCode.PERSON_NOT_FOUND, "Person not found: " + id));
|
||||
person.setPersonType(dto.getPersonType());
|
||||
person.setTitle(dto.getTitle() == null || dto.getTitle().isBlank() ? null : dto.getTitle().trim());
|
||||
person.setFirstName(dto.getFirstName());
|
||||
person.setLastName(dto.getLastName());
|
||||
|
||||
@@ -1,92 +0,0 @@
|
||||
package org.raddatz.familienarchiv.service;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.repository.DocumentRepository;
|
||||
import org.springframework.scheduling.annotation.Async;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.support.TransactionSynchronization;
|
||||
import org.springframework.transaction.support.TransactionSynchronizationManager;
|
||||
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
|
||||
/**
|
||||
* Bridges document upload paths to asynchronous thumbnail generation. Use
|
||||
* {@link #dispatchAfterCommit(UUID)} from inside {@code @Transactional} service methods —
|
||||
* it registers a post-commit hook so the async task only fires when the surrounding
|
||||
* transaction actually commits, and is silently skipped on rollback. Mirrors
|
||||
* {@link org.raddatz.familienarchiv.audit.AuditService#logAfterCommit}.
|
||||
*/
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@Slf4j
|
||||
public class ThumbnailAsyncRunner {
|
||||
|
||||
private final DocumentRepository documentRepository;
|
||||
private final ThumbnailService thumbnailService;
|
||||
|
||||
/** Per-document timeout for the whole generate() call — defense against corrupt PDFs. */
|
||||
private long generateTimeoutSeconds = 30L;
|
||||
|
||||
/**
|
||||
* Registers a post-commit hook that triggers asynchronous thumbnail generation for the
|
||||
* given document. When no transaction is active the task is dispatched immediately.
|
||||
* Safe to call from inside {@code @Transactional} service methods.
|
||||
*/
|
||||
public void dispatchAfterCommit(UUID documentId) {
|
||||
if (TransactionSynchronizationManager.isSynchronizationActive()) {
|
||||
TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronization() {
|
||||
@Override
|
||||
public void afterCommit() {
|
||||
generateAsync(documentId);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
generateAsync(documentId);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs thumbnail generation on the {@code thumbnailExecutor} pool, wrapped in a watchdog
|
||||
* timeout so a hung PDFBox render cannot occupy a pool thread indefinitely. Never throws:
|
||||
* all errors and timeouts are logged and swallowed so upload paths are not affected.
|
||||
*/
|
||||
@Async("thumbnailExecutor")
|
||||
public void generateAsync(UUID documentId) {
|
||||
Optional<Document> docOpt = documentRepository.findById(documentId);
|
||||
if (docOpt.isEmpty()) {
|
||||
log.warn("Thumbnail generation skipped: document not found id={}", documentId);
|
||||
return;
|
||||
}
|
||||
Document doc = docOpt.get();
|
||||
|
||||
ExecutorService timeoutWorker = Executors.newSingleThreadExecutor(r -> {
|
||||
Thread t = new Thread(r, "Thumbnail-Render-" + documentId);
|
||||
t.setDaemon(true);
|
||||
return t;
|
||||
});
|
||||
try {
|
||||
Future<ThumbnailService.Outcome> future = timeoutWorker.submit(
|
||||
() -> thumbnailService.generate(doc));
|
||||
try {
|
||||
future.get(generateTimeoutSeconds, TimeUnit.SECONDS);
|
||||
} catch (TimeoutException e) {
|
||||
future.cancel(true);
|
||||
log.warn("Thumbnail generation timed out after {}s for doc={}",
|
||||
generateTimeoutSeconds, documentId);
|
||||
} catch (Exception e) {
|
||||
log.warn("Thumbnail generation errored for doc={} reason={}",
|
||||
documentId, e.getMessage());
|
||||
}
|
||||
} finally {
|
||||
timeoutWorker.shutdownNow();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,104 +0,0 @@
|
||||
package org.raddatz.familienarchiv.service;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.raddatz.familienarchiv.exception.DomainException;
|
||||
import org.raddatz.familienarchiv.exception.ErrorCode;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.repository.DocumentRepository;
|
||||
import org.springframework.scheduling.annotation.Async;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Sequentially regenerates thumbnails for documents that have a file attached but no
|
||||
* thumbnail yet. Runs on the {@code thumbnailExecutor} pool — single-threaded iteration
|
||||
* is intentional: PDFBox + ImageIO are memory-heavy and we cap peak usage by processing
|
||||
* documents one at a time. Only one backfill can run at a time; concurrent starts are
|
||||
* rejected with {@link ErrorCode#THUMBNAIL_BACKFILL_ALREADY_RUNNING}.
|
||||
*/
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@Slf4j
|
||||
public class ThumbnailBackfillService {
|
||||
|
||||
public enum State { IDLE, RUNNING, DONE, FAILED }
|
||||
|
||||
public record BackfillStatus(
|
||||
State state,
|
||||
String message,
|
||||
int total,
|
||||
int processed,
|
||||
int skipped,
|
||||
int failed,
|
||||
LocalDateTime startedAt
|
||||
) {}
|
||||
|
||||
private final DocumentRepository documentRepository;
|
||||
private final ThumbnailService thumbnailService;
|
||||
|
||||
private volatile BackfillStatus currentStatus = new BackfillStatus(
|
||||
State.IDLE, "Kein Backfill gestartet.", 0, 0, 0, 0, null);
|
||||
|
||||
public BackfillStatus getStatus() {
|
||||
return currentStatus;
|
||||
}
|
||||
|
||||
@Async("thumbnailExecutor")
|
||||
public void runBackfillAsync() {
|
||||
if (currentStatus.state() == State.RUNNING) {
|
||||
throw DomainException.conflict(ErrorCode.THUMBNAIL_BACKFILL_ALREADY_RUNNING,
|
||||
"Thumbnail-Backfill läuft bereits");
|
||||
}
|
||||
|
||||
LocalDateTime startedAt = LocalDateTime.now();
|
||||
List<Document> docs;
|
||||
try {
|
||||
docs = documentRepository.findByFilePathIsNotNullAndThumbnailKeyIsNull();
|
||||
} catch (Exception e) {
|
||||
currentStatus = new BackfillStatus(State.FAILED,
|
||||
"Backfill fehlgeschlagen: " + e.getMessage(),
|
||||
0, 0, 0, 0, startedAt);
|
||||
log.warn("Thumbnail backfill aborted before starting: {}", e.getMessage());
|
||||
return;
|
||||
}
|
||||
|
||||
int total = docs.size();
|
||||
currentStatus = new BackfillStatus(State.RUNNING,
|
||||
"Backfill läuft…", total, 0, 0, 0, startedAt);
|
||||
log.info("Thumbnail backfill started: total={}", total);
|
||||
|
||||
int processed = 0;
|
||||
int skipped = 0;
|
||||
int failed = 0;
|
||||
for (Document doc : docs) {
|
||||
ThumbnailService.Outcome outcome;
|
||||
try {
|
||||
outcome = thumbnailService.generate(doc);
|
||||
} catch (Exception e) {
|
||||
log.warn("Thumbnail generation failed for doc={} reason={}",
|
||||
doc.getId(), e.getMessage());
|
||||
outcome = ThumbnailService.Outcome.FAILED;
|
||||
}
|
||||
switch (outcome) {
|
||||
case SUCCESS -> processed++;
|
||||
case SKIPPED -> skipped++;
|
||||
case FAILED -> failed++;
|
||||
}
|
||||
currentStatus = new BackfillStatus(State.RUNNING,
|
||||
"Backfill läuft…", total, processed, skipped, failed, startedAt);
|
||||
}
|
||||
|
||||
long durationMs = Duration.between(startedAt, LocalDateTime.now()).toMillis();
|
||||
log.info("Thumbnail backfill complete: total={} processed={} skipped={} failed={} durationMs={}",
|
||||
total, processed, skipped, failed, durationMs);
|
||||
|
||||
currentStatus = new BackfillStatus(State.DONE,
|
||||
String.format("Fertig: %d erzeugt, %d übersprungen, %d fehlgeschlagen.",
|
||||
processed, skipped, failed),
|
||||
total, processed, skipped, failed, startedAt);
|
||||
}
|
||||
}
|
||||
@@ -1,233 +0,0 @@
|
||||
package org.raddatz.familienarchiv.service;
|
||||
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.pdfbox.Loader;
|
||||
import org.apache.pdfbox.io.RandomAccessReadBuffer;
|
||||
import org.apache.pdfbox.pdmodel.PDDocument;
|
||||
import org.apache.pdfbox.rendering.ImageType;
|
||||
import org.apache.pdfbox.rendering.PDFRenderer;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.model.ThumbnailAspect;
|
||||
import org.raddatz.familienarchiv.repository.DocumentRepository;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.stereotype.Service;
|
||||
import software.amazon.awssdk.core.sync.RequestBody;
|
||||
import software.amazon.awssdk.services.s3.S3Client;
|
||||
import software.amazon.awssdk.services.s3.model.PutObjectRequest;
|
||||
|
||||
import javax.imageio.IIOImage;
|
||||
import javax.imageio.ImageIO;
|
||||
import javax.imageio.ImageWriteParam;
|
||||
import javax.imageio.ImageWriter;
|
||||
import javax.imageio.stream.ImageOutputStream;
|
||||
import java.awt.Graphics2D;
|
||||
import java.awt.RenderingHints;
|
||||
import java.awt.image.BufferedImage;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
|
||||
/**
 * Generates JPEG thumbnail previews for documents (PDF first-page or scaled-down image)
 * and uploads them to the S3 thumbnails/ prefix. Fire-and-forget from upload paths via
 * {@link ThumbnailAsyncRunner}; also invoked by {@link ThumbnailBackfillService} for
 * historical documents. Explicitly does not throw — failures are returned as
 * {@link Outcome#FAILED} so the backfill can account for them without aborting the run.
 */
@Service
@Slf4j
public class ThumbnailService {

    /** Per-document result of one generation attempt; the backfill tallies these counts. */
    public enum Outcome { SUCCESS, SKIPPED, FAILED }

    // Fixed output width in pixels; the height follows the source aspect ratio.
    private static final int THUMBNAIL_WIDTH = 240;
    // JPEG compression quality (0..1) applied when encoding the scaled image.
    private static final float JPEG_QUALITY = 0.85f;
    // Rasterization DPI for the first PDF page before downscaling.
    private static final int PDF_RENDER_DPI = 100;
    // Anything below this w/h ratio stays PORTRAIT — near-square A4 scans should
    // render in the portrait tile rather than flipping to landscape at 1.01.
    private static final float LANDSCAPE_THRESHOLD = 1.1f;
    private static final String PDF_CONTENT_TYPE = "application/pdf";
    // Image types this service accepts. NOTE(review): TIFF decoding relies on an
    // ImageIO plugin being present on the classpath — readImage() throws when no
    // registered reader can handle the bytes; confirm the plugin dependency.
    private static final Set<String> IMAGE_CONTENT_TYPES =
            Set.of("image/jpeg", "image/png", "image/tiff");

    // Deterministic S3 key — `thumbnails/{docId}.jpg`. When a document's file is replaced
    // the regenerated thumbnail overwrites this same key via PutObject, so we never
    // orphan old thumbnails. The URL-level cache buster is the `thumbnail_generated_at`
    // timestamp (see /api/documents/{id}/thumbnail ?v= param).
    private static final String THUMBNAIL_KEY_PREFIX = "thumbnails/";
    private static final String THUMBNAIL_KEY_SUFFIX = ".jpg";

    private final FileService fileService;
    private final S3Client s3Client;
    private final DocumentRepository documentRepository;

    // Target bucket for thumbnail uploads, injected from configuration.
    @Value("${app.s3.bucket}")
    private String bucketName;

    public ThumbnailService(FileService fileService, S3Client s3Client,
            DocumentRepository documentRepository) {
        this.fileService = fileService;
        this.s3Client = s3Client;
        this.documentRepository = documentRepository;
    }

    /**
     * Generates a thumbnail for the document, uploads it to storage, and persists
     * the thumbnail metadata on the entity.
     * Never throws: every failure along the pipeline (source read, encode, upload,
     * DB save) is logged and surfaced as {@link Outcome#FAILED}; documents without
     * a file or with an unsupported content type are {@link Outcome#SKIPPED}.
     *
     * @param doc document whose source file is read from storage via its filePath
     * @return the outcome of this attempt
     */
    public Outcome generate(Document doc) {
        if (doc.getFilePath() == null) {
            log.debug("Document {} has no filePath, skipping thumbnail", doc.getId());
            return Outcome.SKIPPED;
        }
        String contentType = doc.getContentType();
        if (contentType == null || !isSupported(contentType)) {
            log.warn("Document {} has unsupported contentType {}, skipping thumbnail",
                    doc.getId(), contentType);
            return Outcome.SKIPPED;
        }

        // Null preview or degenerate dimensions both count as a read failure.
        SourcePreview preview = readSourcePreview(doc, contentType);
        if (preview == null
                || preview.image().getWidth() <= 0 || preview.image().getHeight() <= 0) {
            log.warn("Thumbnail source has invalid dimensions for doc={}", doc.getId());
            return Outcome.FAILED;
        }

        byte[] jpeg = encodeThumbnail(preview.image(), doc.getId());
        if (jpeg == null) return Outcome.FAILED;

        String thumbnailKey = thumbnailKeyFor(doc.getId());
        if (!uploadToStorage(thumbnailKey, jpeg, doc.getId())) return Outcome.FAILED;

        ThumbnailResult result = new ThumbnailResult(
                thumbnailKey, aspectOf(preview.image()), preview.pageCount());
        return persistThumbnailMetadata(doc, result);
    }

    /** Classifies the source image via LANDSCAPE_THRESHOLD; near-square stays PORTRAIT. */
    private static ThumbnailAspect aspectOf(BufferedImage source) {
        float ratio = (float) source.getWidth() / source.getHeight();
        return ratio > LANDSCAPE_THRESHOLD ? ThumbnailAspect.LANDSCAPE : ThumbnailAspect.PORTRAIT;
    }

    // First-page image + total page count for the source file. Page count is always
    // 1 for image uploads; for PDFs it comes straight from PDDocument.
    private record SourcePreview(BufferedImage image, int pageCount) {}

    // Everything the generate pipeline has already committed to storage and
    // now wants stamped onto the Document entity in a single save call.
    private record ThumbnailResult(String key, ThumbnailAspect aspect, int pageCount) {}

    /** Deterministic storage key for a document's thumbnail: thumbnails/{docId}.jpg. */
    private static String thumbnailKeyFor(UUID documentId) {
        return THUMBNAIL_KEY_PREFIX + documentId + THUMBNAIL_KEY_SUFFIX;
    }

    /**
     * Loads the first-page preview (image + page count) for the document's source
     * file. Returns null on any download/decode failure — the caller maps null to
     * {@link Outcome#FAILED}.
     */
    private SourcePreview readSourcePreview(Document doc, String contentType) {
        try {
            return PDF_CONTENT_TYPE.equals(contentType)
                    ? renderPdfFirstPage(doc.getFilePath())
                    : new SourcePreview(readImage(doc.getFilePath()), 1);
        } catch (Exception e) {
            log.warn("Thumbnail source read failed for doc={} reason={}",
                    doc.getId(), e.getMessage());
            return null;
        }
    }

    /** Scales to THUMBNAIL_WIDTH and JPEG-encodes; returns null (never throws) on failure. */
    private byte[] encodeThumbnail(BufferedImage source, UUID documentId) {
        try {
            BufferedImage scaled = scaleToWidth(source, THUMBNAIL_WIDTH);
            return encodeJpeg(scaled, JPEG_QUALITY);
        } catch (Exception e) {
            log.warn("Thumbnail JPEG encoding failed for doc={} reason={}",
                    documentId, e.getMessage());
            return null;
        }
    }

    /** Puts the encoded JPEG into the configured bucket under the given key; false on failure. */
    private boolean uploadToStorage(String thumbnailKey, byte[] jpeg, UUID documentId) {
        try {
            s3Client.putObject(
                    PutObjectRequest.builder()
                            .bucket(bucketName)
                            .key(thumbnailKey)
                            .contentType("image/jpeg")
                            .build(),
                    RequestBody.fromBytes(jpeg));
            return true;
        } catch (Exception e) {
            log.warn("Thumbnail upload failed for doc={} key={} reason={}",
                    documentId, thumbnailKey, e.getMessage());
            return false;
        }
    }

    /**
     * Stamps key, generation timestamp, aspect and page count onto the Document
     * entity in a single save. Returns FAILED (without rethrowing) when the save fails.
     */
    private Outcome persistThumbnailMetadata(Document doc, ThumbnailResult result) {
        try {
            doc.setThumbnailKey(result.key());
            doc.setThumbnailGeneratedAt(LocalDateTime.now());
            doc.setThumbnailAspect(result.aspect());
            doc.setPageCount(result.pageCount());
            documentRepository.save(doc);
            return Outcome.SUCCESS;
        } catch (Exception e) {
            // Thumbnail is already in S3 but the entity update failed. Because the S3
            // key is deterministic (thumbnails/{docId}.jpg), the next successful run
            // — either a re-upload of this document or the admin backfill — will
            // overwrite it cleanly. Logging distinctly so an operator tracking
            // backfill totals can spot the database-side issue.
            log.warn("Thumbnail persist failed for doc={} (orphaned in storage as {}): {}",
                    doc.getId(), result.key(), e.getMessage());
            return Outcome.FAILED;
        }
    }

    /** True when the content type is PDF or one of the accepted image types. */
    private boolean isSupported(String contentType) {
        return PDF_CONTENT_TYPE.equals(contentType) || IMAGE_CONTENT_TYPES.contains(contentType);
    }

    /**
     * Renders page 0 of the PDF at PDF_RENDER_DPI into an RGB image and reads the
     * total page count from the loaded PDDocument. Both the download stream and
     * the PDDocument are closed via try-with-resources.
     */
    private SourcePreview renderPdfFirstPage(String s3Key) throws IOException {
        try (InputStream in = fileService.downloadFileStream(s3Key);
                PDDocument pdf = Loader.loadPDF(new RandomAccessReadBuffer(in))) {
            PDFRenderer renderer = new PDFRenderer(pdf);
            BufferedImage image = renderer.renderImageWithDPI(0, PDF_RENDER_DPI, ImageType.RGB);
            return new SourcePreview(image, pdf.getNumberOfPages());
        }
    }

    /**
     * Decodes the stored image via ImageIO. ImageIO.read returns null (rather than
     * throwing) when no registered reader handles the format, so that case is
     * converted into an explicit IOException here.
     */
    private BufferedImage readImage(String s3Key) throws IOException {
        try (InputStream in = fileService.downloadFileStream(s3Key)) {
            BufferedImage img = ImageIO.read(in);
            if (img == null) {
                throw new IOException("No ImageIO reader available for " + s3Key);
            }
            return img;
        }
    }

    /** Bilinear scale to targetWidth, preserving aspect ratio; height is clamped to >= 1. */
    private BufferedImage scaleToWidth(BufferedImage source, int targetWidth) {
        int sourceWidth = source.getWidth();
        int sourceHeight = source.getHeight();
        int targetHeight = Math.max(1, Math.round((float) targetWidth * sourceHeight / sourceWidth));
        BufferedImage scaled = new BufferedImage(targetWidth, targetHeight, BufferedImage.TYPE_INT_RGB);
        Graphics2D g = scaled.createGraphics();
        g.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BILINEAR);
        g.drawImage(source, 0, 0, targetWidth, targetHeight, null);
        g.dispose();
        return scaled;
    }

    /**
     * Encodes the image as JPEG with an explicit compression quality. The writer is
     * disposed in finally so it is released even when writing fails.
     */
    private byte[] encodeJpeg(BufferedImage image, float quality) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        ImageWriter writer = ImageIO.getImageWritersByFormatName("jpg").next();
        ImageWriteParam param = writer.getDefaultWriteParam();
        param.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
        param.setCompressionQuality(quality);
        try (ImageOutputStream out = ImageIO.createImageOutputStream(bos)) {
            writer.setOutput(out);
            writer.write(null, new IIOImage(image, null, null), param);
        } finally {
            writer.dispose();
        }
        return bos.toByteArray();
    }
}
|
||||
@@ -1,27 +0,0 @@
|
||||
package org.raddatz.familienarchiv.service;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import org.raddatz.familienarchiv.repository.CompletionStatsRow;
|
||||
import org.raddatz.familienarchiv.repository.TranscriptionBlockRepository;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
public class TranscriptionBlockQueryService {
|
||||
|
||||
private final TranscriptionBlockRepository blockRepository;
|
||||
|
||||
public Map<UUID, Integer> getCompletionStats(List<UUID> documentIds) {
|
||||
if (documentIds.isEmpty()) return Map.of();
|
||||
Map<UUID, Integer> result = new HashMap<>();
|
||||
for (CompletionStatsRow row : blockRepository.findCompletionStatsForDocuments(documentIds)) {
|
||||
result.put(row.getDocumentId(), row.getCompletionPercentage());
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
@@ -205,18 +205,6 @@ public class TranscriptionService {
|
||||
return saved;
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public List<TranscriptionBlock> markAllBlocksReviewed(UUID documentId, UUID userId) {
|
||||
List<TranscriptionBlock> blocks = blockRepository.findByDocumentIdOrderBySortOrderAsc(documentId);
|
||||
for (TranscriptionBlock block : blocks) {
|
||||
if (!block.isReviewed()) {
|
||||
block.setReviewed(true);
|
||||
auditService.logAfterCommit(AuditKind.BLOCK_REVIEWED, userId, documentId, null);
|
||||
}
|
||||
}
|
||||
return blockRepository.saveAll(blocks);
|
||||
}
|
||||
|
||||
public List<TranscriptionBlockVersion> getBlockHistory(UUID documentId, UUID blockId) {
|
||||
getBlock(documentId, blockId);
|
||||
return versionRepository.findByBlockIdOrderByChangedAtDesc(blockId);
|
||||
|
||||
@@ -3,8 +3,6 @@ package org.raddatz.familienarchiv.service;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
import org.raddatz.familienarchiv.audit.AuditKind;
|
||||
import org.raddatz.familienarchiv.audit.AuditService;
|
||||
import org.raddatz.familienarchiv.dto.AdminUpdateUserRequest;
|
||||
import org.raddatz.familienarchiv.dto.ChangePasswordDTO;
|
||||
import org.raddatz.familienarchiv.dto.CreateUserRequest;
|
||||
@@ -23,13 +21,10 @@ import org.springframework.transaction.annotation.Transactional;
|
||||
import java.util.Collection;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
|
||||
import static java.util.stream.Collectors.toSet;
|
||||
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@Slf4j
|
||||
@@ -38,10 +33,9 @@ public class UserService {
|
||||
private final AppUserRepository userRepository;
|
||||
private final UserGroupRepository groupRepository;
|
||||
private final PasswordEncoder passwordEncoder;
|
||||
private final AuditService auditService;
|
||||
|
||||
@Transactional
|
||||
public AppUser createUserOrUpdate(UUID actorId, CreateUserRequest request) {
|
||||
public AppUser createUserOrUpdate(CreateUserRequest request) {
|
||||
log.info("Creating or updating user: {}", request.getEmail());
|
||||
|
||||
Set<UserGroup> groups = new HashSet<>();
|
||||
@@ -51,12 +45,10 @@ public class UserService {
|
||||
|
||||
Optional<AppUser> existingUser = userRepository.findByEmail(request.getEmail());
|
||||
AppUser user;
|
||||
boolean isNew;
|
||||
|
||||
if (existingUser.isPresent()) {
|
||||
log.info("User exists, updating: {}", request.getEmail());
|
||||
user = existingUser.get().updateFromRequest(request, passwordEncoder, groups);
|
||||
isNew = false;
|
||||
} else {
|
||||
log.info("Creating new user: {}", request.getEmail());
|
||||
user = AppUser.builder()
|
||||
@@ -69,42 +61,8 @@ public class UserService {
|
||||
.contact(request.getContact())
|
||||
.enabled(true)
|
||||
.build();
|
||||
isNew = true;
|
||||
}
|
||||
|
||||
AppUser saved = userRepository.save(user);
|
||||
if (isNew) {
|
||||
auditService.logAfterCommit(AuditKind.USER_CREATED, actorId, null,
|
||||
Map.of("userId", saved.getId().toString(), "email", saved.getEmail()));
|
||||
}
|
||||
return saved;
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public AppUser createUserForBootstrap(CreateUserRequest request) {
|
||||
log.info("Bootstrap user creation (no audit): {}", request.getEmail());
|
||||
|
||||
Set<UserGroup> groups = new HashSet<>();
|
||||
if (request.getGroupIds() != null && !request.getGroupIds().isEmpty()) {
|
||||
groups.addAll(groupRepository.findAllById(request.getGroupIds()));
|
||||
}
|
||||
|
||||
Optional<AppUser> existingUser = userRepository.findByEmail(request.getEmail());
|
||||
if (existingUser.isPresent()) {
|
||||
AppUser updated = existingUser.get().updateFromRequest(request, passwordEncoder, groups);
|
||||
return userRepository.save(updated);
|
||||
}
|
||||
|
||||
AppUser user = AppUser.builder()
|
||||
.email(request.getEmail())
|
||||
.password(passwordEncoder.encode(request.getInitialPassword()))
|
||||
.groups(groups)
|
||||
.firstName(request.getFirstName())
|
||||
.lastName(request.getLastName())
|
||||
.birthDate(request.getBirthDate())
|
||||
.contact(request.getContact())
|
||||
.enabled(true)
|
||||
.build();
|
||||
return userRepository.save(user);
|
||||
}
|
||||
|
||||
@@ -136,13 +94,10 @@ public class UserService {
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public void deleteUser(UUID actorId, UUID userId) {
|
||||
public void deleteUser(UUID userId) {
|
||||
AppUser user = userRepository.findById(userId)
|
||||
.orElseThrow(() -> DomainException.notFound(ErrorCode.USER_NOT_FOUND, "No user found for id: " + userId));
|
||||
String email = user.getEmail();
|
||||
userRepository.delete(user);
|
||||
auditService.logAfterCommit(AuditKind.USER_DELETED, actorId, null,
|
||||
Map.of("userId", userId.toString(), "email", email));
|
||||
}
|
||||
|
||||
public AppUser getById(UUID id) {
|
||||
@@ -186,7 +141,7 @@ public class UserService {
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public AppUser adminUpdateUser(UUID actorId, UUID id, AdminUpdateUserRequest dto) {
|
||||
public AppUser adminUpdateUser(UUID id, AdminUpdateUserRequest dto) {
|
||||
AppUser user = getById(id);
|
||||
|
||||
if (dto.getEmail() != null && !dto.getEmail().isBlank()) {
|
||||
@@ -211,27 +166,13 @@ public class UserService {
|
||||
}
|
||||
|
||||
if (dto.getGroupIds() != null) {
|
||||
Set<UserGroup> before = new HashSet<>(user.getGroups());
|
||||
Set<UserGroup> after = new HashSet<>(groupRepository.findAllById(dto.getGroupIds()));
|
||||
user.setGroups(after);
|
||||
groupChangePayload(before, after, id, user.getEmail())
|
||||
.ifPresent(payload -> auditService.logAfterCommit(AuditKind.GROUP_MEMBERSHIP_CHANGED, actorId, null, payload));
|
||||
Set<UserGroup> groups = new HashSet<>(groupRepository.findAllById(dto.getGroupIds()));
|
||||
user.setGroups(groups);
|
||||
}
|
||||
|
||||
return userRepository.save(user);
|
||||
}
|
||||
|
||||
private Optional<Map<String, Object>> groupChangePayload(
|
||||
Set<UserGroup> before, Set<UserGroup> after, UUID userId, String email) {
|
||||
Set<UUID> beforeIds = before.stream().map(UserGroup::getId).collect(toSet());
|
||||
Set<UUID> afterIds = after.stream().map(UserGroup::getId).collect(toSet());
|
||||
if (beforeIds.equals(afterIds)) return Optional.empty();
|
||||
List<String> added = after.stream().filter(g -> !beforeIds.contains(g.getId())).map(UserGroup::getName).toList();
|
||||
List<String> removed = before.stream().filter(g -> !afterIds.contains(g.getId())).map(UserGroup::getName).toList();
|
||||
return Optional.of(Map.of("userId", userId.toString(), "email", email,
|
||||
"addedGroups", added, "removedGroups", removed));
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public void changePassword(UUID userId, ChangePasswordDTO dto) {
|
||||
AppUser user = getById(userId);
|
||||
|
||||
@@ -23,8 +23,7 @@ spring:
|
||||
servlet:
|
||||
multipart:
|
||||
max-file-size: 50MB
|
||||
max-request-size: 500MB # supports 10-file chunk at max per-file size; see #317
|
||||
file-size-threshold: 2KB
|
||||
max-request-size: 50MB
|
||||
|
||||
mail:
|
||||
host: ${MAIL_HOST:}
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
-- Composite index on (document_id, reviewed): serves queries that fetch a
-- document's blocks filtered by review state — presumably the per-document
-- completion-stats aggregation; NOTE(review): confirm against
-- TranscriptionBlockRepository.findCompletionStatsForDocuments.
CREATE INDEX IF NOT EXISTS idx_transcription_blocks_document_reviewed
ON transcription_blocks (document_id, reviewed);
|
||||
@@ -1,7 +0,0 @@
|
||||
-- Partial covering index for the session-style activity feed rollup (#285).
-- Matches the WHERE clause of AuditLogQueryRepository.findRolledUpActivityFeed
-- exactly. DESC on happened_at supports the outer ORDER BY without a sort step.
-- NOTE(review): partial-index (WHERE) syntax is PostgreSQL-specific. Keep this
-- kind list in sync with AuditKind.ROLLUP_ELIGIBLE when new rollup kinds are
-- added, otherwise new-kind queries fall off the index.
CREATE INDEX idx_audit_log_rollup
ON audit_log (actor_id, document_id, kind, happened_at DESC)
WHERE kind IN ('TEXT_SAVED','FILE_UPLOADED','ANNOTATION_CREATED',
'BLOCK_REVIEWED','COMMENT_ADDED','MENTION_CREATED');
|
||||
@@ -1,39 +0,0 @@
|
||||
-- Backfill COMMENT_ADDED and MENTION_CREATED audit events for comments
-- created before audit logging was added in commit 428c63a2.
-- Without these rows the Chronik activity feed (which reads exclusively from
-- audit_log) cannot surface pre-existing comments in "Für dich" or "Alle".

-- Step 1: one COMMENT_ADDED row per comment that has none yet. Idempotent via
-- the NOT EXISTS guard keyed on the commentId stored in the payload, so the
-- migration can be re-run safely.
INSERT INTO audit_log (id, happened_at, actor_id, kind, document_id, payload)
SELECT
gen_random_uuid(),
c.created_at,
c.author_id,
'COMMENT_ADDED',
c.document_id,
jsonb_build_object('commentId', c.id::text)
FROM document_comments c
WHERE NOT EXISTS (
SELECT 1 FROM audit_log a
WHERE a.kind = 'COMMENT_ADDED'
AND a.payload->>'commentId' = c.id::text
);

-- Step 2: one MENTION_CREATED row per (comment, mentioned user) pair. The
-- event is timestamped and attributed like its parent comment; the dedup
-- guard checks both commentId and mentionedUserId so multiple mentions on
-- the same comment each get their own row.
INSERT INTO audit_log (id, happened_at, actor_id, kind, document_id, payload)
SELECT
gen_random_uuid(),
c.created_at,
c.author_id,
'MENTION_CREATED',
c.document_id,
jsonb_build_object(
'commentId', c.id::text,
'mentionedUserId', m.user_id::text
)
FROM comment_mentions m
JOIN document_comments c ON c.id = m.comment_id
WHERE NOT EXISTS (
SELECT 1 FROM audit_log a
WHERE a.kind = 'MENTION_CREATED'
AND a.payload->>'commentId' = c.id::text
AND a.payload->>'mentionedUserId' = m.user_id::text
);
|
||||
@@ -1,24 +0,0 @@
|
||||
-- Backfill annotation_id on block comments and their notifications.
--
-- Before the upstream fix, CommentService.postBlockComment did not set
-- DocumentComment.annotationId, so block comments were stored with
-- annotation_id = NULL and every notification built from them inherited
-- that NULL (see NotificationService.notifyMentions/notifyReply).
--
-- The frontend deep-link flow needs annotationId in the URL query string
-- to open the correct annotation panel and scroll to the comment.
-- Without this backfill, previously issued notifications would still
-- carry annotation_id = NULL even after the code fix lands.

-- Step 1: copy each block comment's annotation id from its transcription
-- block; only rows still NULL are touched, so re-running is harmless.
UPDATE document_comments dc
SET annotation_id = tb.annotation_id
FROM transcription_blocks tb
WHERE dc.block_id = tb.id
AND dc.annotation_id IS NULL;

-- Step 2: propagate the now-populated annotation id onto notifications that
-- reference those comments (notifications.reference_id -> document_comments.id).
-- The IS NOT NULL guard avoids overwriting with NULL for non-block comments.
UPDATE notifications n
SET annotation_id = dc.annotation_id
FROM document_comments dc
WHERE n.reference_id = dc.id
AND n.annotation_id IS NULL
AND dc.annotation_id IS NOT NULL;
|
||||
@@ -1,3 +0,0 @@
|
||||
-- Thumbnail storage metadata written by ThumbnailService: thumbnail_key is the
-- deterministic S3 object key (thumbnails/{docId}.jpg) and
-- thumbnail_generated_at the generation timestamp, which also serves as the
-- thumbnail URL's ?v= cache-buster. Both columns are nullable and stay NULL
-- until a thumbnail has been generated for the document.
ALTER TABLE documents
ADD COLUMN thumbnail_key VARCHAR(255),
ADD COLUMN thumbnail_generated_at TIMESTAMP;
|
||||
@@ -1,8 +0,0 @@
|
||||
-- Adds two nullable metadata columns populated by ThumbnailService when it
-- generates the JPEG preview: thumbnail_aspect (PORTRAIT | LANDSCAPE, from the
-- source image w/h ratio with threshold 1.1) and page_count (from PDDocument
-- for PDFs, 1 for image uploads). Both are null until the existing admin
-- backfill endpoint (/api/admin/generate-thumbnails) reruns the service.
-- thumbnail_aspect stores the ThumbnailAspect enum name as text; VARCHAR(16)
-- comfortably fits both current values.
ALTER TABLE documents
ADD COLUMN thumbnail_aspect VARCHAR(16),
ADD COLUMN page_count INTEGER;
|
||||
@@ -1,74 +0,0 @@
|
||||
package org.raddatz.familienarchiv.audit;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
|
||||
import org.raddatz.familienarchiv.model.AppUser;
|
||||
import org.springframework.data.domain.PageImpl;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.anyCollection;
|
||||
import static org.mockito.ArgumentMatchers.argThat;
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
/**
 * Unit tests for AuditLogQueryService. Pure Mockito, no Spring context: each
 * test verifies how service-level arguments (user id, limit, kind filters) are
 * translated into the exact repository call.
 */
@ExtendWith(MockitoExtension.class)
class AuditLogQueryServiceTest {

    @Mock AuditLogQueryRepository queryRepository;
    @InjectMocks AuditLogQueryService auditLogQueryService;

    /** Explicit kind filters must be passed to the repository as enum NAMES. */
    @Test
    void findActivityFeed_withKinds_passesKindNamesToRepository() {
        UUID userId = UUID.randomUUID();
        Set<AuditKind> kinds = Set.of(AuditKind.FILE_UPLOADED);
        when(queryRepository.findRolledUpActivityFeed(eq(userId.toString()), eq(10), anyCollection()))
                .thenReturn(List.of());

        List<ActivityFeedRow> result = auditLogQueryService.findActivityFeed(userId, 10, kinds);

        assertThat(result).isEmpty();
        // UUID is stringified and the single kind arrives as its name.
        verify(queryRepository).findRolledUpActivityFeed(eq(userId.toString()), eq(10),
                eq(List.of("FILE_UPLOADED")));
    }

    /** The two-arg overload must default the filter to every rollup-eligible kind. */
    @Test
    void findActivityFeed_twoArg_defaultsToAllRollupEligibleKinds() {
        UUID userId = UUID.randomUUID();
        when(queryRepository.findRolledUpActivityFeed(eq(userId.toString()), eq(10), anyCollection()))
                .thenReturn(List.of());

        auditLogQueryService.findActivityFeed(userId, 10);

        // Expected default derived from the same constant the service uses, so the
        // test stays in sync if ROLLUP_ELIGIBLE grows.
        verify(queryRepository).findRolledUpActivityFeed(eq(userId.toString()), eq(10),
                eq(AuditKind.ROLLUP_ELIGIBLE.stream().map(Enum::name).toList()));
    }

    /** User-management feed must query all three admin-relevant kinds. */
    @Test
    void findRecentUserManagementEvents_delegatesToRepositoryWithAllThreeKinds() {
        AuditLog entry = AuditLog.builder().id(UUID.randomUUID()).kind(AuditKind.USER_CREATED).build();
        when(queryRepository.findByKindIn(anyCollection(), any(Pageable.class)))
                .thenReturn(new PageImpl<>(List.of(entry)));

        List<AuditLog> result = auditLogQueryService.findRecentUserManagementEvents(5);

        assertThat(result).containsExactly(entry);
        // argThat instead of eq: only membership of the three kinds matters,
        // not the collection type or iteration order.
        verify(queryRepository).findByKindIn(
                argThat((Collection<AuditKind> kinds) ->
                        kinds.contains(AuditKind.USER_CREATED) &&
                        kinds.contains(AuditKind.USER_DELETED) &&
                        kinds.contains(AuditKind.GROUP_MEMBERSHIP_CHANGED)),
                any(Pageable.class));
    }
}
|
||||
@@ -1,122 +0,0 @@
|
||||
package org.raddatz.familienarchiv.audit;
|
||||
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.raddatz.familienarchiv.PostgresContainerConfig;
|
||||
import org.raddatz.familienarchiv.dto.AdminUpdateUserRequest;
|
||||
import org.raddatz.familienarchiv.dto.CreateUserRequest;
|
||||
import org.raddatz.familienarchiv.dto.GroupDTO;
|
||||
import org.raddatz.familienarchiv.model.AppUser;
|
||||
import org.raddatz.familienarchiv.model.UserGroup;
|
||||
import org.raddatz.familienarchiv.repository.AppUserRepository;
|
||||
import org.raddatz.familienarchiv.service.UserService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
import org.springframework.context.annotation.Import;
|
||||
import org.springframework.test.context.ActiveProfiles;
|
||||
import org.springframework.test.context.bean.override.mockito.MockitoBean;
|
||||
import org.springframework.transaction.support.TransactionTemplate;
|
||||
import software.amazon.awssdk.services.s3.S3Client;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
|
||||
import static java.util.concurrent.TimeUnit.SECONDS;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.awaitility.Awaitility.await;
|
||||
|
||||
/**
 * Integration tests (real Postgres via the imported Testcontainers config;
 * S3 mocked out) proving that user-management operations emit the expected
 * audit_log rows. Audit events are written after transaction commit, so every
 * assertion that depends on them polls with Awaitility instead of reading
 * synchronously.
 */
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE)
@ActiveProfiles("test")
@Import(PostgresContainerConfig.class)
class UserManagementAuditIntegrationTest {

    @MockitoBean S3Client s3Client;
    @Autowired UserService userService;
    @Autowired AppUserRepository userRepository;
    @Autowired AuditLogRepository auditLogRepository;
    @Autowired AuditLogQueryService auditLogQueryService;
    @Autowired TransactionTemplate transactionTemplate;

    // Each test starts from an empty audit log so size-based assertions hold.
    @BeforeEach
    void clearAuditLog() {
        transactionTemplate.execute(status -> { auditLogRepository.deleteAll(); return null; });
    }

    @Test
    void createAndDeleteUser_producesOrderedAuditEntries() {
        // Bootstrap actor with no audit event — clean slate guaranteed by @BeforeEach
        CreateUserRequest adminReq = new CreateUserRequest();
        adminReq.setEmail("admin@test.example.com");
        adminReq.setInitialPassword("admin-secret");
        AppUser actor = transactionTemplate.execute(status -> userService.createUserForBootstrap(adminReq));
        UUID actorId = actor.getId();

        // Create the target user — should emit USER_CREATED
        CreateUserRequest req = new CreateUserRequest();
        req.setEmail("audit-test@example.com");
        req.setInitialPassword("secret");
        transactionTemplate.execute(status -> {
            userService.createUserOrUpdate(actorId, req);
            return null;
        });
        // Audit rows appear asynchronously after commit — poll, don't read directly.
        await().atMost(10, SECONDS).until(() -> auditLogRepository.existsByKind(AuditKind.USER_CREATED));

        // Delete the target user — should emit USER_DELETED
        AppUser created = userRepository.findByEmail("audit-test@example.com").orElseThrow();
        transactionTemplate.execute(status -> {
            userService.deleteUser(actorId, created.getId());
            return null;
        });
        await().atMost(10, SECONDS).until(() -> auditLogRepository.existsByKind(AuditKind.USER_DELETED));

        // Feed is newest-first: DELETE at index 0, CREATE at index 1.
        List<AuditLog> events = auditLogQueryService.findRecentUserManagementEvents(10);
        assertThat(events).hasSize(2);
        assertThat(events.get(0).getKind()).isEqualTo(AuditKind.USER_DELETED);
        assertThat(events.get(1).getKind()).isEqualTo(AuditKind.USER_CREATED);
    }

    @Test
    void updateUserGroups_producesGroupMembershipChangedEvent() {
        GroupDTO groupADto = new GroupDTO(); groupADto.setName("Viewers"); groupADto.setPermissions(Set.of("READ_ALL"));
        GroupDTO groupBDto = new GroupDTO(); groupBDto.setName("Editors"); groupBDto.setPermissions(Set.of("WRITE_ALL"));
        UserGroup gA = transactionTemplate.execute(status -> userService.createGroup(groupADto));
        UserGroup gB = transactionTemplate.execute(status -> userService.createGroup(groupBDto));

        // Bootstrap actor with no audit event — clean slate guaranteed by @BeforeEach
        CreateUserRequest actorReq = new CreateUserRequest();
        actorReq.setEmail("actor-group-test@test.example.com");
        actorReq.setInitialPassword("secret");
        AppUser actor = transactionTemplate.execute(status -> userService.createUserForBootstrap(actorReq));

        // Create target user pre-assigned to gA — emits USER_CREATED
        CreateUserRequest targetReq = new CreateUserRequest();
        targetReq.setEmail("target-group-test@test.example.com");
        targetReq.setInitialPassword("secret");
        targetReq.setGroupIds(List.of(gA.getId()));
        transactionTemplate.execute(status -> userService.createUserOrUpdate(actor.getId(), targetReq));
        await().atMost(10, SECONDS).until(() -> auditLogRepository.existsByKind(AuditKind.USER_CREATED));
        // Wait for USER_CREATED to land, then clear it so the hasSize(1) assertion
        // below sees only the group-change event.
        transactionTemplate.execute(status -> { auditLogRepository.deleteAll(); return null; });

        AppUser target = userRepository.findByEmail("target-group-test@test.example.com").orElseThrow();

        // Change groups: Viewers → Editors
        AdminUpdateUserRequest dto = new AdminUpdateUserRequest();
        dto.setGroupIds(List.of(gB.getId()));
        transactionTemplate.execute(status -> userService.adminUpdateUser(actor.getId(), target.getId(), dto));

        await().atMost(10, SECONDS).until(() -> auditLogRepository.existsByKind(AuditKind.GROUP_MEMBERSHIP_CHANGED));

        // Payload must carry the affected user's email plus added/removed group names.
        List<AuditLog> events = auditLogQueryService.findRecentUserManagementEvents(10);
        assertThat(events).hasSize(1);
        AuditLog event = events.get(0);
        assertThat(event.getKind()).isEqualTo(AuditKind.GROUP_MEMBERSHIP_CHANGED);
        assertThat(event.getPayload()).containsEntry("email", "target-group-test@test.example.com");
        @SuppressWarnings("unchecked")
        List<String> added = (List<String>) event.getPayload().get("addedGroups");
        @SuppressWarnings("unchecked")
        List<String> removed = (List<String>) event.getPayload().get("removedGroups");
        assertThat(added).containsExactlyInAnyOrder("Editors");
        assertThat(removed).containsExactlyInAnyOrder("Viewers");
    }
}
|
||||
@@ -8,7 +8,6 @@ import org.raddatz.familienarchiv.service.CustomUserDetailsService;
|
||||
import org.raddatz.familienarchiv.service.DocumentService;
|
||||
import org.raddatz.familienarchiv.service.DocumentVersionService;
|
||||
import org.raddatz.familienarchiv.service.MassImportService;
|
||||
import org.raddatz.familienarchiv.service.ThumbnailBackfillService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.autoconfigure.aop.AopAutoConfiguration;
|
||||
import org.springframework.boot.webmvc.test.autoconfigure.WebMvcTest;
|
||||
@@ -17,13 +16,10 @@ import org.springframework.security.test.context.support.WithMockUser;
|
||||
import org.springframework.test.context.bean.override.mockito.MockitoBean;
|
||||
import org.springframework.test.web.servlet.MockMvc;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
|
||||
import static org.mockito.ArgumentMatchers.anyList;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
|
||||
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
|
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
|
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
|
||||
@@ -37,7 +33,6 @@ class AdminControllerTest {
|
||||
@MockitoBean MassImportService massImportService;
|
||||
@MockitoBean DocumentService documentService;
|
||||
@MockitoBean DocumentVersionService documentVersionService;
|
||||
@MockitoBean ThumbnailBackfillService thumbnailBackfillService;
|
||||
@MockitoBean CustomUserDetailsService customUserDetailsService;
|
||||
|
||||
@Test
|
||||
@@ -88,57 +83,4 @@ class AdminControllerTest {
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.count").value(3));
|
||||
}
|
||||
|
||||
// ─── POST /api/admin/generate-thumbnails ───────────────────────────────────
|
||||
|
||||
@Test
|
||||
void generateThumbnails_returns401_whenUnauthenticated() throws Exception {
|
||||
mockMvc.perform(post("/api/admin/generate-thumbnails"))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(roles = "USER")
|
||||
void generateThumbnails_returns403_whenNotAdmin() throws Exception {
|
||||
mockMvc.perform(post("/api/admin/generate-thumbnails"))
|
||||
.andExpect(status().isForbidden());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "ADMIN")
|
||||
void generateThumbnails_returns202_withStatus_whenAdmin() throws Exception {
|
||||
ThumbnailBackfillService.BackfillStatus status = new ThumbnailBackfillService.BackfillStatus(
|
||||
ThumbnailBackfillService.State.RUNNING, "running…", 10, 0, 0, 0, LocalDateTime.now());
|
||||
when(thumbnailBackfillService.getStatus()).thenReturn(status);
|
||||
|
||||
mockMvc.perform(post("/api/admin/generate-thumbnails"))
|
||||
.andExpect(status().isAccepted())
|
||||
.andExpect(jsonPath("$.state").value("RUNNING"))
|
||||
.andExpect(jsonPath("$.total").value(10));
|
||||
|
||||
verify(thumbnailBackfillService).runBackfillAsync();
|
||||
}
|
||||
|
||||
// ─── GET /api/admin/thumbnail-status ───────────────────────────────────────
|
||||
|
||||
@Test
|
||||
void thumbnailStatus_returns401_whenUnauthenticated() throws Exception {
|
||||
mockMvc.perform(get("/api/admin/thumbnail-status"))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "ADMIN")
|
||||
void thumbnailStatus_returns200_withCurrentStatus_whenAdmin() throws Exception {
|
||||
ThumbnailBackfillService.BackfillStatus status = new ThumbnailBackfillService.BackfillStatus(
|
||||
ThumbnailBackfillService.State.DONE, "Fertig: 5 erzeugt, 0 übersprungen, 0 fehlgeschlagen.",
|
||||
5, 5, 0, 0, LocalDateTime.now());
|
||||
when(thumbnailBackfillService.getStatus()).thenReturn(status);
|
||||
|
||||
mockMvc.perform(get("/api/admin/thumbnail-status"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.state").value("DONE"))
|
||||
.andExpect(jsonPath("$.processed").value(5))
|
||||
.andExpect(jsonPath("$.total").value(5));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -154,13 +154,6 @@ class AnnotationControllerTest {
|
||||
.andExpect(status().isForbidden());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "READ_ALL")
|
||||
void deleteAnnotation_returns403_whenUserHasOnlyReadAllPermission() throws Exception {
|
||||
mockMvc.perform(delete("/api/documents/" + UUID.randomUUID() + "/annotations/" + UUID.randomUUID()))
|
||||
.andExpect(status().isForbidden());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "ANNOTATE_ALL")
|
||||
void deleteAnnotation_returns204_whenHasAnnotatePermission() throws Exception {
|
||||
|
||||
@@ -40,8 +40,246 @@ class CommentControllerTest {
|
||||
|
||||
private static final String COMMENT_JSON = "{\"content\":\"Test comment\"}";
|
||||
private static final UUID DOC_ID = UUID.randomUUID();
|
||||
private static final UUID ANN_ID = UUID.randomUUID();
|
||||
private static final UUID COMMENT_ID = UUID.randomUUID();
|
||||
|
||||
// ─── GET /api/documents/{documentId}/comments ─────────────────────────────
|
||||
|
||||
@Test
|
||||
void getDocumentComments_returns401_whenUnauthenticated() throws Exception {
|
||||
mockMvc.perform(get("/api/documents/" + DOC_ID + "/comments"))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void getDocumentComments_returns200_whenAuthenticated() throws Exception {
|
||||
when(commentService.getCommentsForDocument(any())).thenReturn(List.of());
|
||||
mockMvc.perform(get("/api/documents/" + DOC_ID + "/comments"))
|
||||
.andExpect(status().isOk());
|
||||
}
|
||||
|
||||
// ─── POST /api/documents/{documentId}/comments ────────────────────────────
|
||||
|
||||
@Test
|
||||
void postDocumentComment_returns401_whenUnauthenticated() throws Exception {
|
||||
mockMvc.perform(post("/api/documents/" + DOC_ID + "/comments")
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void postDocumentComment_returns403_whenMissingPermission() throws Exception {
|
||||
mockMvc.perform(post("/api/documents/" + DOC_ID + "/comments")
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isForbidden());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "ANNOTATE_ALL")
|
||||
void postDocumentComment_returns201_whenHasPermission() throws Exception {
|
||||
DocumentComment saved = DocumentComment.builder()
|
||||
.id(COMMENT_ID).documentId(DOC_ID).authorName("Hans").content("Test comment").build();
|
||||
when(commentService.postComment(any(), any(), any(), any(), any())).thenReturn(saved);
|
||||
|
||||
mockMvc.perform(post("/api/documents/" + DOC_ID + "/comments")
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isCreated())
|
||||
.andExpect(jsonPath("$.content").value("Test comment"));
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void postDocumentComment_returns201_whenHasWriteAllPermission() throws Exception {
|
||||
DocumentComment saved = DocumentComment.builder()
|
||||
.id(COMMENT_ID).documentId(DOC_ID).authorName("Hans").content("Test comment").build();
|
||||
when(commentService.postComment(any(), any(), any(), any(), any())).thenReturn(saved);
|
||||
|
||||
mockMvc.perform(post("/api/documents/" + DOC_ID + "/comments")
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isCreated());
|
||||
}
|
||||
|
||||
// ─── POST /api/documents/{documentId}/comments/{commentId}/replies ────────
|
||||
|
||||
@Test
|
||||
void replyToComment_returns401_whenUnauthenticated() throws Exception {
|
||||
mockMvc.perform(post("/api/documents/" + DOC_ID + "/comments/" + COMMENT_ID + "/replies")
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "ANNOTATE_ALL")
|
||||
void replyToComment_returns201_whenHasPermission() throws Exception {
|
||||
DocumentComment saved = DocumentComment.builder()
|
||||
.id(UUID.randomUUID()).documentId(DOC_ID).parentId(COMMENT_ID)
|
||||
.authorName("Anna").content("Test comment").build();
|
||||
when(commentService.replyToComment(any(), any(), any(), any(), any())).thenReturn(saved);
|
||||
|
||||
mockMvc.perform(post("/api/documents/" + DOC_ID + "/comments/" + COMMENT_ID + "/replies")
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isCreated());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void replyToComment_returns201_whenHasWriteAllPermission() throws Exception {
|
||||
DocumentComment saved = DocumentComment.builder()
|
||||
.id(UUID.randomUUID()).documentId(DOC_ID).parentId(COMMENT_ID)
|
||||
.authorName("Anna").content("Test comment").build();
|
||||
when(commentService.replyToComment(any(), any(), any(), any(), any())).thenReturn(saved);
|
||||
|
||||
mockMvc.perform(post("/api/documents/" + DOC_ID + "/comments/" + COMMENT_ID + "/replies")
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isCreated());
|
||||
}
|
||||
|
||||
// ─── PATCH /api/documents/{documentId}/comments/{commentId} ──────────────
|
||||
|
||||
@Test
|
||||
void editComment_returns401_whenUnauthenticated() throws Exception {
|
||||
mockMvc.perform(patch("/api/documents/" + DOC_ID + "/comments/" + COMMENT_ID)
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "ANNOTATE_ALL")
|
||||
void editComment_returns200_whenHasPermission() throws Exception {
|
||||
DocumentComment updated = DocumentComment.builder()
|
||||
.id(COMMENT_ID).documentId(DOC_ID).authorName("Hans").content("Test comment").build();
|
||||
when(commentService.editComment(any(), any(), any(), any())).thenReturn(updated);
|
||||
|
||||
mockMvc.perform(patch("/api/documents/" + DOC_ID + "/comments/" + COMMENT_ID)
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isOk());
|
||||
}
|
||||
|
||||
// ─── DELETE /api/documents/{documentId}/comments/{commentId} ─────────────
|
||||
|
||||
@Test
|
||||
void deleteComment_returns401_whenUnauthenticated() throws Exception {
|
||||
mockMvc.perform(delete("/api/documents/" + DOC_ID + "/comments/" + COMMENT_ID))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void deleteComment_returns204_whenAuthenticated() throws Exception {
|
||||
mockMvc.perform(delete("/api/documents/" + DOC_ID + "/comments/" + COMMENT_ID))
|
||||
.andExpect(status().isNoContent());
|
||||
}
|
||||
|
||||
// ─── GET /api/documents/{documentId}/annotations/{annId}/comments ─────────
|
||||
|
||||
@Test
|
||||
void getAnnotationComments_returns401_whenUnauthenticated() throws Exception {
|
||||
mockMvc.perform(get("/api/documents/" + DOC_ID + "/annotations/" + ANN_ID + "/comments"))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void getAnnotationComments_returns200_whenAuthenticated() throws Exception {
|
||||
when(commentService.getCommentsForAnnotation(any())).thenReturn(List.of());
|
||||
mockMvc.perform(get("/api/documents/" + DOC_ID + "/annotations/" + ANN_ID + "/comments"))
|
||||
.andExpect(status().isOk());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void editComment_returns200_whenHasWriteAllPermission() throws Exception {
|
||||
DocumentComment updated = DocumentComment.builder()
|
||||
.id(COMMENT_ID).documentId(DOC_ID).authorName("Hans").content("Test comment").build();
|
||||
when(commentService.editComment(any(), any(), any(), any())).thenReturn(updated);
|
||||
|
||||
mockMvc.perform(patch("/api/documents/" + DOC_ID + "/comments/" + COMMENT_ID)
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isOk());
|
||||
}
|
||||
|
||||
// ─── POST /api/documents/{documentId}/annotations/{annId}/comments ────────
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void postAnnotationComment_returns403_whenMissingPermission() throws Exception {
|
||||
mockMvc.perform(post("/api/documents/" + DOC_ID + "/annotations/" + ANN_ID + "/comments")
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isForbidden());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "ANNOTATE_ALL")
|
||||
void postAnnotationComment_returns201_whenHasPermission() throws Exception {
|
||||
DocumentComment saved = DocumentComment.builder()
|
||||
.id(UUID.randomUUID()).documentId(DOC_ID).annotationId(ANN_ID)
|
||||
.authorName("Hans").content("Test comment").build();
|
||||
when(commentService.postComment(any(), any(), any(), any(), any())).thenReturn(saved);
|
||||
|
||||
mockMvc.perform(post("/api/documents/" + DOC_ID + "/annotations/" + ANN_ID + "/comments")
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isCreated());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void postAnnotationComment_returns201_whenHasWriteAllPermission() throws Exception {
|
||||
DocumentComment saved = DocumentComment.builder()
|
||||
.id(UUID.randomUUID()).documentId(DOC_ID).annotationId(ANN_ID)
|
||||
.authorName("Hans").content("Test comment").build();
|
||||
when(commentService.postComment(any(), any(), any(), any(), any())).thenReturn(saved);
|
||||
|
||||
mockMvc.perform(post("/api/documents/" + DOC_ID + "/annotations/" + ANN_ID + "/comments")
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isCreated());
|
||||
}
|
||||
|
||||
// ─── POST /api/documents/{documentId}/annotations/{annId}/comments/{commentId}/replies ─
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "ANNOTATE_ALL")
|
||||
void replyToAnnotationComment_returns201_whenHasPermission() throws Exception {
|
||||
DocumentComment saved = DocumentComment.builder()
|
||||
.id(UUID.randomUUID()).documentId(DOC_ID).annotationId(ANN_ID)
|
||||
.parentId(COMMENT_ID).authorName("Anna").content("Test comment").build();
|
||||
when(commentService.replyToComment(any(), any(), any(), any(), any())).thenReturn(saved);
|
||||
|
||||
mockMvc.perform(post("/api/documents/" + DOC_ID + "/annotations/" + ANN_ID + "/comments/" + COMMENT_ID + "/replies")
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isCreated());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void replyToAnnotationComment_returns201_whenHasWriteAllPermission() throws Exception {
|
||||
DocumentComment saved = DocumentComment.builder()
|
||||
.id(UUID.randomUUID()).documentId(DOC_ID).annotationId(ANN_ID)
|
||||
.parentId(COMMENT_ID).authorName("Anna").content("Test comment").build();
|
||||
when(commentService.replyToComment(any(), any(), any(), any(), any())).thenReturn(saved);
|
||||
|
||||
mockMvc.perform(post("/api/documents/" + DOC_ID + "/annotations/" + ANN_ID + "/comments/" + COMMENT_ID + "/replies")
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isCreated());
|
||||
}
|
||||
|
||||
// ─── resolveUser — exception branch ──────────────────────────────────────
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void postDocumentComment_stillSucceeds_whenUserServiceThrows() throws Exception {
|
||||
// findByEmail throws → catch block in resolveUser → author null, saves anyway
|
||||
when(userService.findByEmail(any())).thenThrow(new RuntimeException("DB error"));
|
||||
DocumentComment saved = DocumentComment.builder()
|
||||
.id(UUID.randomUUID()).documentId(DOC_ID).content("Test comment").build();
|
||||
when(commentService.postComment(any(), any(), any(), any(), any())).thenReturn(saved);
|
||||
|
||||
mockMvc.perform(post("/api/documents/" + DOC_ID + "/comments")
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isCreated());
|
||||
}
|
||||
|
||||
// ─── Block comment endpoints ─────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
@@ -67,138 +305,4 @@ class CommentControllerTest {
|
||||
.andExpect(status().isCreated())
|
||||
.andExpect(jsonPath("$.blockId").value(blockId.toString()));
|
||||
}
|
||||
|
||||
@Test
|
||||
void postBlockComment_returns401_whenUnauthenticated() throws Exception {
|
||||
UUID blockId = UUID.randomUUID();
|
||||
mockMvc.perform(post("/api/documents/" + DOC_ID + "/transcription-blocks/" + blockId + "/comments")
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void postBlockComment_returns403_whenMissingPermission() throws Exception {
|
||||
UUID blockId = UUID.randomUUID();
|
||||
mockMvc.perform(post("/api/documents/" + DOC_ID + "/transcription-blocks/" + blockId + "/comments")
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isForbidden());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "ANNOTATE_ALL")
|
||||
void postBlockComment_returns201_whenHasAnnotatePermission() throws Exception {
|
||||
UUID blockId = UUID.randomUUID();
|
||||
DocumentComment saved = DocumentComment.builder()
|
||||
.id(UUID.randomUUID()).documentId(DOC_ID).blockId(blockId).content("Nice").build();
|
||||
when(commentService.postBlockComment(any(), any(), any(), any(), any())).thenReturn(saved);
|
||||
|
||||
mockMvc.perform(post("/api/documents/" + DOC_ID + "/transcription-blocks/" + blockId + "/comments")
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isCreated());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void postBlockComment_stillSucceeds_whenUserServiceThrows() throws Exception {
|
||||
// findByEmail throws → catch block in resolveUser → author null, saves anyway
|
||||
UUID blockId = UUID.randomUUID();
|
||||
when(userService.findByEmail(any())).thenThrow(new RuntimeException("DB error"));
|
||||
DocumentComment saved = DocumentComment.builder()
|
||||
.id(UUID.randomUUID()).documentId(DOC_ID).blockId(blockId).content("Test comment").build();
|
||||
when(commentService.postBlockComment(any(), any(), any(), any(), any())).thenReturn(saved);
|
||||
|
||||
mockMvc.perform(post("/api/documents/" + DOC_ID + "/transcription-blocks/" + blockId + "/comments")
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isCreated());
|
||||
}
|
||||
|
||||
// ─── Block reply endpoints ───────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
void replyToBlockComment_returns401_whenUnauthenticated() throws Exception {
|
||||
UUID blockId = UUID.randomUUID();
|
||||
mockMvc.perform(post("/api/documents/" + DOC_ID + "/transcription-blocks/" + blockId
|
||||
+ "/comments/" + COMMENT_ID + "/replies")
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "ANNOTATE_ALL")
|
||||
void replyToBlockComment_returns201_whenHasPermission() throws Exception {
|
||||
UUID blockId = UUID.randomUUID();
|
||||
DocumentComment saved = DocumentComment.builder()
|
||||
.id(UUID.randomUUID()).documentId(DOC_ID).blockId(blockId).parentId(COMMENT_ID)
|
||||
.authorName("Anna").content("Reply").build();
|
||||
when(commentService.replyToComment(any(), any(), any(), any(), any())).thenReturn(saved);
|
||||
|
||||
mockMvc.perform(post("/api/documents/" + DOC_ID + "/transcription-blocks/" + blockId
|
||||
+ "/comments/" + COMMENT_ID + "/replies")
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isCreated());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void replyToBlockComment_returns201_whenHasWriteAllPermission() throws Exception {
|
||||
UUID blockId = UUID.randomUUID();
|
||||
DocumentComment saved = DocumentComment.builder()
|
||||
.id(UUID.randomUUID()).documentId(DOC_ID).blockId(blockId).parentId(COMMENT_ID)
|
||||
.authorName("Anna").content("Reply").build();
|
||||
when(commentService.replyToComment(any(), any(), any(), any(), any())).thenReturn(saved);
|
||||
|
||||
mockMvc.perform(post("/api/documents/" + DOC_ID + "/transcription-blocks/" + blockId
|
||||
+ "/comments/" + COMMENT_ID + "/replies")
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isCreated());
|
||||
}
|
||||
|
||||
// ─── PATCH /api/documents/{documentId}/comments/{commentId} (shared edit) ──
|
||||
|
||||
@Test
|
||||
void editComment_returns401_whenUnauthenticated() throws Exception {
|
||||
mockMvc.perform(patch("/api/documents/" + DOC_ID + "/comments/" + COMMENT_ID)
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "ANNOTATE_ALL")
|
||||
void editComment_returns200_whenHasPermission() throws Exception {
|
||||
DocumentComment updated = DocumentComment.builder()
|
||||
.id(COMMENT_ID).documentId(DOC_ID).authorName("Hans").content("Test comment").build();
|
||||
when(commentService.editComment(any(), any(), any(), any())).thenReturn(updated);
|
||||
|
||||
mockMvc.perform(patch("/api/documents/" + DOC_ID + "/comments/" + COMMENT_ID)
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isOk());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void editComment_returns200_whenHasWriteAllPermission() throws Exception {
|
||||
DocumentComment updated = DocumentComment.builder()
|
||||
.id(COMMENT_ID).documentId(DOC_ID).authorName("Hans").content("Test comment").build();
|
||||
when(commentService.editComment(any(), any(), any(), any())).thenReturn(updated);
|
||||
|
||||
mockMvc.perform(patch("/api/documents/" + DOC_ID + "/comments/" + COMMENT_ID)
|
||||
.contentType(MediaType.APPLICATION_JSON).content(COMMENT_JSON))
|
||||
.andExpect(status().isOk());
|
||||
}
|
||||
|
||||
// ─── DELETE /api/documents/{documentId}/comments/{commentId} (shared) ────
|
||||
|
||||
@Test
|
||||
void deleteComment_returns401_whenUnauthenticated() throws Exception {
|
||||
mockMvc.perform(delete("/api/documents/" + DOC_ID + "/comments/" + COMMENT_ID))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void deleteComment_returns204_whenAuthenticated() throws Exception {
|
||||
mockMvc.perform(delete("/api/documents/" + DOC_ID + "/comments/" + COMMENT_ID))
|
||||
.andExpect(status().isNoContent());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,17 +1,15 @@
|
||||
package org.raddatz.familienarchiv.controller;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.raddatz.familienarchiv.dto.DocumentBatchMetadataDTO;
|
||||
import org.raddatz.familienarchiv.dto.DocumentSearchResult;
|
||||
import org.raddatz.familienarchiv.dto.DocumentVersionSummary;
|
||||
import org.raddatz.familienarchiv.exception.DomainException;
|
||||
import org.raddatz.familienarchiv.exception.ErrorCode;
|
||||
import org.raddatz.familienarchiv.model.AppUser;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.model.DocumentStatus;
|
||||
import org.raddatz.familienarchiv.model.DocumentVersion;
|
||||
import org.raddatz.familienarchiv.model.Person;
|
||||
import org.raddatz.familienarchiv.security.PermissionAspect;
|
||||
import org.raddatz.familienarchiv.model.AppUser;
|
||||
import org.raddatz.familienarchiv.service.CustomUserDetailsService;
|
||||
import org.raddatz.familienarchiv.service.DocumentService;
|
||||
import org.raddatz.familienarchiv.service.DocumentVersionService;
|
||||
@@ -27,12 +25,10 @@ import org.springframework.security.test.context.support.WithMockUser;
|
||||
import org.springframework.test.context.bean.override.mockito.MockitoBean;
|
||||
import org.springframework.test.web.servlet.MockMvc;
|
||||
|
||||
import org.raddatz.familienarchiv.dto.DocumentSearchItem;
|
||||
import org.raddatz.familienarchiv.dto.SearchMatchData;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
|
||||
@@ -44,7 +40,6 @@ import static org.mockito.Mockito.when;
|
||||
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
|
||||
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart;
|
||||
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch;
|
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.header;
|
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
|
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
|
||||
|
||||
@@ -71,7 +66,7 @@ class DocumentControllerTest {
|
||||
@Test
|
||||
@WithMockUser
|
||||
void search_returns200_whenAuthenticated() throws Exception {
|
||||
when(documentService.searchDocuments(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any()))
|
||||
when(documentService.searchDocuments(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any()))
|
||||
.thenReturn(DocumentSearchResult.of(List.of()));
|
||||
|
||||
mockMvc.perform(get("/api/documents/search"))
|
||||
@@ -81,13 +76,13 @@ class DocumentControllerTest {
|
||||
@Test
|
||||
@WithMockUser
|
||||
void search_withStatusParam_passesItToService() throws Exception {
|
||||
when(documentService.searchDocuments(any(), any(), any(), any(), any(), any(), any(), eq(DocumentStatus.REVIEWED), any(), any(), any(), any()))
|
||||
when(documentService.searchDocuments(any(), any(), any(), any(), any(), any(), any(), eq(DocumentStatus.REVIEWED), any(), any(), any()))
|
||||
.thenReturn(DocumentSearchResult.of(List.of()));
|
||||
|
||||
mockMvc.perform(get("/api/documents/search").param("status", "REVIEWED"))
|
||||
.andExpect(status().isOk());
|
||||
|
||||
verify(documentService).searchDocuments(any(), any(), any(), any(), any(), any(), any(), eq(DocumentStatus.REVIEWED), any(), any(), any(), any());
|
||||
verify(documentService).searchDocuments(any(), any(), any(), any(), any(), any(), any(), eq(DocumentStatus.REVIEWED), any(), any(), any());
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -114,18 +109,18 @@ class DocumentControllerTest {
|
||||
@Test
|
||||
@WithMockUser
|
||||
void search_responseContainsTotalCount() throws Exception {
|
||||
when(documentService.searchDocuments(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any()))
|
||||
when(documentService.searchDocuments(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any()))
|
||||
.thenReturn(DocumentSearchResult.of(List.of()));
|
||||
|
||||
mockMvc.perform(get("/api/documents/search"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.totalElements").value(0))
|
||||
.andExpect(jsonPath("$.items").isArray());
|
||||
.andExpect(jsonPath("$.total").value(0))
|
||||
.andExpect(jsonPath("$.documents").isArray());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void search_responseBodyItemsContainMatchData() throws Exception {
|
||||
void search_responseBodyContainsMatchDataKey() throws Exception {
|
||||
UUID docId = UUID.randomUUID();
|
||||
Document doc = Document.builder()
|
||||
.id(docId)
|
||||
@@ -133,82 +128,18 @@ class DocumentControllerTest {
|
||||
.originalFilename("brief.pdf")
|
||||
.status(DocumentStatus.UPLOADED)
|
||||
.build();
|
||||
var matchData = new SearchMatchData(
|
||||
var matchData = new org.raddatz.familienarchiv.dto.SearchMatchData(
|
||||
"Er schrieb einen langen Brief", List.of(), false, List.of(), List.of(), List.of(), null, List.of());
|
||||
when(documentService.searchDocuments(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any()))
|
||||
.thenReturn(DocumentSearchResult.of(List.of(new DocumentSearchItem(doc, matchData, 0, List.of()))));
|
||||
when(documentService.searchDocuments(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any()))
|
||||
.thenReturn(DocumentSearchResult.withMatchData(List.of(doc), Map.of(docId, matchData)));
|
||||
|
||||
mockMvc.perform(get("/api/documents/search").param("q", "Brief"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.items").isArray())
|
||||
.andExpect(jsonPath("$.items[0].matchData.transcriptionSnippet")
|
||||
.andExpect(jsonPath("$.matchData").isMap())
|
||||
.andExpect(jsonPath("$.matchData." + docId + ".transcriptionSnippet")
|
||||
.value("Er schrieb einen langen Brief"));
|
||||
}
|
||||
|
||||
// ─── /api/documents/search pagination ─────────────────────────────────────
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void search_responseExposesPagingFields() throws Exception {
|
||||
when(documentService.searchDocuments(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any()))
|
||||
.thenReturn(DocumentSearchResult.of(List.of()));
|
||||
|
||||
mockMvc.perform(get("/api/documents/search"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.pageNumber").exists())
|
||||
.andExpect(jsonPath("$.pageSize").exists())
|
||||
.andExpect(jsonPath("$.totalPages").exists())
|
||||
.andExpect(jsonPath("$.totalElements").exists());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void search_returns400_whenSizeExceedsMax() throws Exception {
|
||||
// Locks @Validated on the controller — removing it silently reopens the
|
||||
// DoS window where a client could request all 1500 docs + enrichment.
|
||||
mockMvc.perform(get("/api/documents/search").param("size", "101"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void search_returns400_whenSizeBelowMin() throws Exception {
|
||||
mockMvc.perform(get("/api/documents/search").param("size", "0"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void search_returns400_whenPageNegative() throws Exception {
|
||||
mockMvc.perform(get("/api/documents/search").param("page", "-1"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void search_returns400_whenPageAboveMax() throws Exception {
|
||||
// Guards against page * size overflow into negative SQL OFFSET
|
||||
mockMvc.perform(get("/api/documents/search").param("page", "200000"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void search_passesPageRequestToService() throws Exception {
|
||||
when(documentService.searchDocuments(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any()))
|
||||
.thenReturn(DocumentSearchResult.of(List.of()));
|
||||
|
||||
mockMvc.perform(get("/api/documents/search").param("page", "2").param("size", "25"))
|
||||
.andExpect(status().isOk());
|
||||
|
||||
org.mockito.ArgumentCaptor<org.springframework.data.domain.Pageable> captor =
|
||||
org.mockito.ArgumentCaptor.forClass(org.springframework.data.domain.Pageable.class);
|
||||
verify(documentService).searchDocuments(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), captor.capture());
|
||||
org.springframework.data.domain.Pageable pageable = captor.getValue();
|
||||
org.assertj.core.api.Assertions.assertThat(pageable.getPageNumber()).isEqualTo(2);
|
||||
org.assertj.core.api.Assertions.assertThat(pageable.getPageSize()).isEqualTo(25);
|
||||
}
|
||||
|
||||
// ─── POST /api/documents ─────────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
@@ -428,62 +359,6 @@ class DocumentControllerTest {
|
||||
.andExpect(status().isNotFound());
|
||||
}
|
||||
|
||||
// ─── GET /api/documents/{id}/thumbnail ───────────────────────────────────
|
||||
|
||||
@Test
|
||||
void getDocumentThumbnail_returns401_whenUnauthenticated() throws Exception {
|
||||
mockMvc.perform(get("/api/documents/" + UUID.randomUUID() + "/thumbnail"))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void getDocumentThumbnail_returns404_whenDocHasNoThumbnail() throws Exception {
|
||||
UUID id = UUID.randomUUID();
|
||||
Document doc = Document.builder().id(id).title("t").originalFilename("f.pdf").build();
|
||||
when(documentService.getDocumentById(id)).thenReturn(doc);
|
||||
|
||||
mockMvc.perform(get("/api/documents/" + id + "/thumbnail"))
|
||||
.andExpect(status().isNotFound());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void getDocumentThumbnail_returns200_withPrivateCacheHeader() throws Exception {
|
||||
UUID id = UUID.randomUUID();
|
||||
Document doc = Document.builder().id(id).title("t").originalFilename("f.pdf")
|
||||
.thumbnailKey("thumbnails/" + id + ".jpg").build();
|
||||
when(documentService.getDocumentById(id)).thenReturn(doc);
|
||||
java.io.InputStream stream = new java.io.ByteArrayInputStream(new byte[]{(byte) 0xFF, (byte) 0xD8, (byte) 0xFF});
|
||||
when(fileService.downloadFile("thumbnails/" + id + ".jpg"))
|
||||
.thenReturn(new FileService.S3FileDownload(
|
||||
new org.springframework.core.io.InputStreamResource(stream), "image/jpeg"));
|
||||
|
||||
mockMvc.perform(get("/api/documents/" + id + "/thumbnail"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(header().string("Content-Type", "image/jpeg"))
|
||||
.andExpect(header().string("Cache-Control",
|
||||
org.hamcrest.Matchers.containsString("private")))
|
||||
.andExpect(header().string("Cache-Control",
|
||||
org.hamcrest.Matchers.not(org.hamcrest.Matchers.containsString("public"))))
|
||||
.andExpect(header().string("Cache-Control",
|
||||
org.hamcrest.Matchers.containsString("immutable")));
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void getDocumentThumbnail_returns404_whenStorageObjectMissing() throws Exception {
|
||||
UUID id = UUID.randomUUID();
|
||||
Document doc = Document.builder().id(id).title("t").originalFilename("f.pdf")
|
||||
.thumbnailKey("thumbnails/" + id + ".jpg").build();
|
||||
when(documentService.getDocumentById(id)).thenReturn(doc);
|
||||
when(fileService.downloadFile("thumbnails/" + id + ".jpg"))
|
||||
.thenThrow(new FileService.StorageFileNotFoundException("not found"));
|
||||
|
||||
mockMvc.perform(get("/api/documents/" + id + "/thumbnail"))
|
||||
.andExpect(status().isNotFound());
|
||||
}
|
||||
|
||||
// ─── POST /api/documents/quick-upload — null/empty files ─────────────────
|
||||
|
||||
@Test
|
||||
@@ -505,7 +380,7 @@ class DocumentControllerTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
@WithMockUser
|
||||
void getIncompleteCount_returns200_withCount() throws Exception {
|
||||
when(documentService.getIncompleteCount()).thenReturn(3L);
|
||||
|
||||
@@ -514,52 +389,14 @@ class DocumentControllerTest {
|
||||
.andExpect(jsonPath("$.count").value(3));
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "READ_ALL")
|
||||
void getIncompleteCount_returns403_forReaderOnly() throws Exception {
|
||||
mockMvc.perform(get("/api/documents/incomplete-count"))
|
||||
.andExpect(status().isForbidden());
|
||||
}
|
||||
|
||||
// ─── GET /api/documents/incomplete ───────────────────────────────────────
|
||||
// ─── GET /api/documents/incomplete (removed — superseded by dashboard) ────
|
||||
|
||||
@Test
|
||||
void getIncomplete_returns401_whenUnauthenticated() throws Exception {
|
||||
@WithMockUser
|
||||
void getIncomplete_endpointRemoved() throws Exception {
|
||||
// The path hits /{id} and fails UUID conversion — not a 200 anymore
|
||||
mockMvc.perform(get("/api/documents/incomplete"))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = {"WRITE_ALL"})
|
||||
void getIncomplete_returns200_forWriter_withDTOList() throws Exception {
|
||||
UUID id = UUID.randomUUID();
|
||||
java.time.LocalDateTime uploadedAt = java.time.LocalDateTime.of(2026, 4, 20, 12, 0);
|
||||
var dto = new org.raddatz.familienarchiv.dto.IncompleteDocumentDTO(id, "Unvollständig", uploadedAt);
|
||||
when(documentService.findIncompleteDocuments(anyInt())).thenReturn(List.of(dto));
|
||||
|
||||
mockMvc.perform(get("/api/documents/incomplete"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$[0].id").value(id.toString()))
|
||||
.andExpect(jsonPath("$[0].title").value("Unvollständig"))
|
||||
.andExpect(jsonPath("$[0].uploadedAt").exists());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "READ_ALL")
|
||||
void getIncomplete_returns403_forReaderOnly() throws Exception {
|
||||
mockMvc.perform(get("/api/documents/incomplete"))
|
||||
.andExpect(status().isForbidden());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void getIncomplete_capsSizeAt200() throws Exception {
|
||||
when(documentService.findIncompleteDocuments(anyInt())).thenReturn(List.of());
|
||||
|
||||
mockMvc.perform(get("/api/documents/incomplete").param("size", "9999"))
|
||||
.andExpect(status().isOk());
|
||||
|
||||
verify(documentService).findIncompleteDocuments(200);
|
||||
.andExpect(status().is4xxClientError());
|
||||
}
|
||||
|
||||
// ─── GET /api/documents/incomplete/next ──────────────────────────────────
|
||||
@@ -572,7 +409,7 @@ class DocumentControllerTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
@WithMockUser
|
||||
void getNextIncomplete_returns200_whenNextExists() throws Exception {
|
||||
UUID excludeId = UUID.randomUUID();
|
||||
Document next = Document.builder()
|
||||
@@ -586,15 +423,7 @@ class DocumentControllerTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "READ_ALL")
|
||||
void getNextIncomplete_returns403_forReaderOnly() throws Exception {
|
||||
mockMvc.perform(get("/api/documents/incomplete/next")
|
||||
.param("excludeId", UUID.randomUUID().toString()))
|
||||
.andExpect(status().isForbidden());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
@WithMockUser
|
||||
void getNextIncomplete_returns204_whenNoneRemain() throws Exception {
|
||||
UUID excludeId = UUID.randomUUID();
|
||||
when(documentService.findNextIncompleteDocument(excludeId)).thenReturn(Optional.empty());
|
||||
@@ -768,476 +597,4 @@ class DocumentControllerTest {
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.editorName").value("Otto"));
|
||||
}
|
||||
|
||||
// ─── POST /api/documents/quick-upload — metadata part ────────────────────
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void quickUpload_withMetadata_appliesSharedFieldsToAllCreatedDocuments() throws Exception {
|
||||
UUID senderId = UUID.randomUUID();
|
||||
Person sender = Person.builder().id(senderId).lastName("Müller").build();
|
||||
|
||||
Document doc1 = Document.builder().id(UUID.randomUUID()).title("Brief 1").originalFilename("a.pdf").sender(sender).build();
|
||||
Document doc2 = Document.builder().id(UUID.randomUUID()).title("Brief 2").originalFilename("b.pdf").sender(sender).build();
|
||||
Document doc3 = Document.builder().id(UUID.randomUUID()).title("Brief 3").originalFilename("c.pdf").sender(sender).build();
|
||||
|
||||
when(userService.findByEmail(any())).thenReturn(AppUser.builder().id(UUID.randomUUID()).build());
|
||||
when(documentService.storeDocumentWithBatchMetadata(any(), any(), eq(0), any()))
|
||||
.thenReturn(new DocumentService.StoreResult(doc1, true));
|
||||
when(documentService.storeDocumentWithBatchMetadata(any(), any(), eq(1), any()))
|
||||
.thenReturn(new DocumentService.StoreResult(doc2, true));
|
||||
when(documentService.storeDocumentWithBatchMetadata(any(), any(), eq(2), any()))
|
||||
.thenReturn(new DocumentService.StoreResult(doc3, true));
|
||||
|
||||
org.springframework.mock.web.MockMultipartFile f1 =
|
||||
new org.springframework.mock.web.MockMultipartFile("files", "a.pdf", "application/pdf", new byte[]{1});
|
||||
org.springframework.mock.web.MockMultipartFile f2 =
|
||||
new org.springframework.mock.web.MockMultipartFile("files", "b.pdf", "application/pdf", new byte[]{1});
|
||||
org.springframework.mock.web.MockMultipartFile f3 =
|
||||
new org.springframework.mock.web.MockMultipartFile("files", "c.pdf", "application/pdf", new byte[]{1});
|
||||
org.springframework.mock.web.MockMultipartFile metadata =
|
||||
new org.springframework.mock.web.MockMultipartFile("metadata", "metadata", "application/json",
|
||||
("{\"senderId\":\"" + senderId + "\"}").getBytes());
|
||||
|
||||
mockMvc.perform(multipart("/api/documents/quick-upload").file(f1).file(f2).file(f3).file(metadata))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.created.length()").value(3))
|
||||
.andExpect(jsonPath("$.created[0].sender.id").value(senderId.toString()))
|
||||
.andExpect(jsonPath("$.created[1].sender.id").value(senderId.toString()))
|
||||
.andExpect(jsonPath("$.created[2].sender.id").value(senderId.toString()))
|
||||
.andExpect(jsonPath("$.updated").isEmpty())
|
||||
.andExpect(jsonPath("$.errors").isEmpty());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void quickUpload_withMetadata_appliesSharedFieldsToUpdatedDocuments() throws Exception {
|
||||
UUID senderId = UUID.randomUUID();
|
||||
Person sender = Person.builder().id(senderId).lastName("Müller").build();
|
||||
Document existing = Document.builder().id(UUID.randomUUID()).title("Alt").originalFilename("alt.pdf").sender(sender).build();
|
||||
|
||||
when(userService.findByEmail(any())).thenReturn(AppUser.builder().id(UUID.randomUUID()).build());
|
||||
when(documentService.storeDocumentWithBatchMetadata(any(), any(), eq(0), any()))
|
||||
.thenReturn(new DocumentService.StoreResult(existing, false));
|
||||
|
||||
org.springframework.mock.web.MockMultipartFile file =
|
||||
new org.springframework.mock.web.MockMultipartFile("files", "alt.pdf", "application/pdf", new byte[]{1});
|
||||
org.springframework.mock.web.MockMultipartFile metadata =
|
||||
new org.springframework.mock.web.MockMultipartFile("metadata", "metadata", "application/json",
|
||||
("{\"senderId\":\"" + senderId + "\"}").getBytes());
|
||||
|
||||
mockMvc.perform(multipart("/api/documents/quick-upload").file(file).file(metadata))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.created").isEmpty())
|
||||
.andExpect(jsonPath("$.updated[0].sender.id").value(senderId.toString()))
|
||||
.andExpect(jsonPath("$.errors").isEmpty());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void quickUpload_withMetadata_mapsTitlesByIndex() throws Exception {
|
||||
Document docA = Document.builder().id(UUID.randomUUID()).title("Alpha").originalFilename("a.pdf").build();
|
||||
Document docB = Document.builder().id(UUID.randomUUID()).title("Beta").originalFilename("b.pdf").build();
|
||||
Document docC = Document.builder().id(UUID.randomUUID()).title("Gamma").originalFilename("c.pdf").build();
|
||||
|
||||
when(userService.findByEmail(any())).thenReturn(AppUser.builder().id(UUID.randomUUID()).build());
|
||||
when(documentService.storeDocumentWithBatchMetadata(any(), any(), eq(0), any()))
|
||||
.thenReturn(new DocumentService.StoreResult(docA, true));
|
||||
when(documentService.storeDocumentWithBatchMetadata(any(), any(), eq(1), any()))
|
||||
.thenReturn(new DocumentService.StoreResult(docB, true));
|
||||
when(documentService.storeDocumentWithBatchMetadata(any(), any(), eq(2), any()))
|
||||
.thenReturn(new DocumentService.StoreResult(docC, true));
|
||||
|
||||
org.springframework.mock.web.MockMultipartFile f1 =
|
||||
new org.springframework.mock.web.MockMultipartFile("files", "a.pdf", "application/pdf", new byte[]{1});
|
||||
org.springframework.mock.web.MockMultipartFile f2 =
|
||||
new org.springframework.mock.web.MockMultipartFile("files", "b.pdf", "application/pdf", new byte[]{1});
|
||||
org.springframework.mock.web.MockMultipartFile f3 =
|
||||
new org.springframework.mock.web.MockMultipartFile("files", "c.pdf", "application/pdf", new byte[]{1});
|
||||
org.springframework.mock.web.MockMultipartFile metadata =
|
||||
new org.springframework.mock.web.MockMultipartFile("metadata", "metadata", "application/json",
|
||||
"{\"titles\":[\"Alpha\",\"Beta\",\"Gamma\"]}".getBytes());
|
||||
|
||||
mockMvc.perform(multipart("/api/documents/quick-upload").file(f1).file(f2).file(f3).file(metadata))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.created[0].title").value("Alpha"))
|
||||
.andExpect(jsonPath("$.created[1].title").value("Beta"))
|
||||
.andExpect(jsonPath("$.created[2].title").value("Gamma"));
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void quickUpload_withMetadata_rejects400_whenTitlesSizeExceedsFilesSize() throws Exception {
|
||||
when(userService.findByEmail(any())).thenReturn(AppUser.builder().id(UUID.randomUUID()).build());
|
||||
org.mockito.Mockito.doThrow(
|
||||
org.raddatz.familienarchiv.exception.DomainException.badRequest(
|
||||
org.raddatz.familienarchiv.exception.ErrorCode.VALIDATION_ERROR, "titles count must not exceed files count"))
|
||||
.when(documentService).validateBatch(eq(2), any());
|
||||
|
||||
org.springframework.mock.web.MockMultipartFile f1 =
|
||||
new org.springframework.mock.web.MockMultipartFile("files", "a.pdf", "application/pdf", new byte[]{1});
|
||||
org.springframework.mock.web.MockMultipartFile f2 =
|
||||
new org.springframework.mock.web.MockMultipartFile("files", "b.pdf", "application/pdf", new byte[]{1});
|
||||
org.springframework.mock.web.MockMultipartFile metadata =
|
||||
new org.springframework.mock.web.MockMultipartFile("metadata", "metadata", "application/json",
|
||||
"{\"titles\":[\"A\",\"B\",\"C\"]}".getBytes());
|
||||
|
||||
mockMvc.perform(multipart("/api/documents/quick-upload").file(f1).file(f2).file(metadata))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void quickUpload_withMetadata_tagNamesJsonArray_parsedCorrectly() throws Exception {
|
||||
Document doc = Document.builder().id(UUID.randomUUID()).title("brief").originalFilename("brief.pdf").build();
|
||||
when(userService.findByEmail(any())).thenReturn(AppUser.builder().id(UUID.randomUUID()).build());
|
||||
|
||||
org.mockito.ArgumentCaptor<DocumentBatchMetadataDTO> captor =
|
||||
org.mockito.ArgumentCaptor.forClass(DocumentBatchMetadataDTO.class);
|
||||
when(documentService.storeDocumentWithBatchMetadata(any(), captor.capture(), anyInt(), any()))
|
||||
.thenReturn(new DocumentService.StoreResult(doc, true));
|
||||
|
||||
org.springframework.mock.web.MockMultipartFile file =
|
||||
new org.springframework.mock.web.MockMultipartFile("files", "brief.pdf", "application/pdf", new byte[]{1});
|
||||
org.springframework.mock.web.MockMultipartFile metadata =
|
||||
new org.springframework.mock.web.MockMultipartFile("metadata", "metadata", "application/json",
|
||||
"{\"tagNames\":[\"Briefwechsel\",\"Krieg\"]}".getBytes());
|
||||
|
||||
mockMvc.perform(multipart("/api/documents/quick-upload").file(file).file(metadata))
|
||||
.andExpect(status().isOk());
|
||||
|
||||
org.assertj.core.api.Assertions.assertThat(captor.getValue().getTagNames())
|
||||
.containsExactly("Briefwechsel", "Krieg");
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void quickUpload_returns400_whenBatchExceedsCap() throws Exception {
|
||||
when(userService.findByEmail(any())).thenReturn(AppUser.builder().id(UUID.randomUUID()).build());
|
||||
org.mockito.Mockito.doThrow(
|
||||
org.raddatz.familienarchiv.exception.DomainException.badRequest(
|
||||
org.raddatz.familienarchiv.exception.ErrorCode.BATCH_TOO_LARGE, "Batch exceeds maximum of 50 files per request"))
|
||||
.when(documentService).validateBatch(eq(51), any());
|
||||
|
||||
var builder = multipart("/api/documents/quick-upload");
|
||||
for (int i = 0; i < 51; i++) {
|
||||
builder.file(new org.springframework.mock.web.MockMultipartFile(
|
||||
"files", "f" + i + ".pdf", "application/pdf", new byte[]{1}));
|
||||
}
|
||||
|
||||
mockMvc.perform(builder)
|
||||
.andExpect(status().isBadRequest())
|
||||
.andExpect(jsonPath("$.code").value("BATCH_TOO_LARGE"));
|
||||
}
|
||||
|
||||
// ─── PATCH /api/documents/bulk ───────────────────────────────────────────
|
||||
|
||||
private static String bulkBody(String... uuids) {
|
||||
StringBuilder sb = new StringBuilder("{\"documentIds\":[");
|
||||
for (int i = 0; i < uuids.length; i++) {
|
||||
if (i > 0) sb.append(",");
|
||||
sb.append("\"").append(uuids[i]).append("\"");
|
||||
}
|
||||
sb.append("]}");
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
@Test
|
||||
void patchBulk_returns401_whenUnauthenticated() throws Exception {
|
||||
mockMvc.perform(patch("/api/documents/bulk")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content(bulkBody(UUID.randomUUID().toString())))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void patchBulk_returns403_forReadAllUser() throws Exception {
|
||||
mockMvc.perform(patch("/api/documents/bulk")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content(bulkBody(UUID.randomUUID().toString())))
|
||||
.andExpect(status().isForbidden());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void patchBulk_returns400_whenDocumentIdsIsEmpty() throws Exception {
|
||||
when(userService.findByEmail(any())).thenReturn(AppUser.builder().id(UUID.randomUUID()).build());
|
||||
|
||||
mockMvc.perform(patch("/api/documents/bulk")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"documentIds\":[]}"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void patchBulk_returns400_whenDocumentIdsIsMissing() throws Exception {
|
||||
when(userService.findByEmail(any())).thenReturn(AppUser.builder().id(UUID.randomUUID()).build());
|
||||
|
||||
mockMvc.perform(patch("/api/documents/bulk")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{}"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void patchBulk_returns400_whenDocumentIdsExceedsCap() throws Exception {
|
||||
when(userService.findByEmail(any())).thenReturn(AppUser.builder().id(UUID.randomUUID()).build());
|
||||
|
||||
String[] ids = new String[501];
|
||||
for (int i = 0; i < 501; i++) ids[i] = UUID.randomUUID().toString();
|
||||
|
||||
mockMvc.perform(patch("/api/documents/bulk")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content(bulkBody(ids)))
|
||||
.andExpect(status().isBadRequest())
|
||||
.andExpect(jsonPath("$.code").value("BULK_EDIT_TOO_MANY_IDS"));
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void patchBulk_returns400_whenArchiveBoxExceeds255Chars() throws Exception {
|
||||
// Tobias C2 — DocumentBulkEditDTO.archiveBox carries @Size(max=255).
|
||||
// Without @Valid on @RequestBody this would silently land an
|
||||
// arbitrarily long string; the test pins both the annotation and
|
||||
// the controller-level @Valid wiring.
|
||||
when(userService.findByEmail(any())).thenReturn(AppUser.builder().id(UUID.randomUUID()).build());
|
||||
UUID id = UUID.randomUUID();
|
||||
String tooLong = "x".repeat(256);
|
||||
|
||||
String body = "{\"documentIds\":[\"" + id + "\"],\"archiveBox\":\"" + tooLong + "\"}";
|
||||
mockMvc.perform(patch("/api/documents/bulk")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content(body))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void patchBulk_acceptsExactly500Ids_atTheCap() throws Exception {
|
||||
when(userService.findByEmail(any())).thenReturn(AppUser.builder().id(UUID.randomUUID()).build());
|
||||
when(documentService.applyBulkEditToDocument(any(), any(), any()))
|
||||
.thenAnswer(inv -> Document.builder().id(inv.getArgument(0)).build());
|
||||
|
||||
String[] ids = new String[500];
|
||||
for (int i = 0; i < 500; i++) ids[i] = UUID.randomUUID().toString();
|
||||
|
||||
mockMvc.perform(patch("/api/documents/bulk")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content(bulkBody(ids)))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.updated").value(500));
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void patchBulk_dedupesDuplicateDocumentIds_doesNotInflateUpdatedCount() throws Exception {
|
||||
when(userService.findByEmail(any())).thenReturn(AppUser.builder().id(UUID.randomUUID()).build());
|
||||
UUID id = UUID.randomUUID();
|
||||
when(documentService.applyBulkEditToDocument(eq(id), any(), any()))
|
||||
.thenAnswer(inv -> Document.builder().id(id).build());
|
||||
|
||||
// Same id sent three times — controller should dedupe and call the
|
||||
// service exactly once, returning updated=1, not 3.
|
||||
mockMvc.perform(patch("/api/documents/bulk")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content(bulkBody(id.toString(), id.toString(), id.toString())))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.updated").value(1));
|
||||
|
||||
verify(documentService, org.mockito.Mockito.times(1))
|
||||
.applyBulkEditToDocument(eq(id), any(), any());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void patchBulk_returns200_andCallsServiceForEachId() throws Exception {
|
||||
when(userService.findByEmail(any())).thenReturn(AppUser.builder().id(UUID.randomUUID()).build());
|
||||
UUID id1 = UUID.randomUUID();
|
||||
UUID id2 = UUID.randomUUID();
|
||||
when(documentService.applyBulkEditToDocument(any(), any(), any()))
|
||||
.thenAnswer(inv -> Document.builder().id(inv.getArgument(0)).build());
|
||||
|
||||
mockMvc.perform(patch("/api/documents/bulk")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content(bulkBody(id1.toString(), id2.toString())))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.updated").value(2))
|
||||
.andExpect(jsonPath("$.errors").isEmpty());
|
||||
|
||||
verify(documentService).applyBulkEditToDocument(eq(id1), any(), any());
|
||||
verify(documentService).applyBulkEditToDocument(eq(id2), any(), any());
|
||||
}
|
||||
|
||||
// ─── GET /api/documents/ids ──────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
void getDocumentIds_returns401_whenUnauthenticated() throws Exception {
|
||||
mockMvc.perform(get("/api/documents/ids"))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void getDocumentIds_returns403_forUserWithoutWriteAll() throws Exception {
|
||||
// /ids is gated WRITE_ALL because it powers the bulk-edit "Alle X
|
||||
// editieren" fast path; no other consumer needs it.
|
||||
mockMvc.perform(get("/api/documents/ids"))
|
||||
.andExpect(status().isForbidden());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void getDocumentIds_returns200_andDelegatesToService() throws Exception {
|
||||
when(userService.findByEmail(any())).thenReturn(AppUser.builder().id(UUID.randomUUID()).build());
|
||||
UUID id = UUID.randomUUID();
|
||||
when(documentService.findIdsForFilter(any(), any(), any(), any(), any(), any(), any(), any(), any()))
|
||||
.thenReturn(List.of(id));
|
||||
|
||||
mockMvc.perform(get("/api/documents/ids"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$[0]").value(id.toString()));
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void getDocumentIds_passesSenderIdParamToService() throws Exception {
|
||||
when(userService.findByEmail(any())).thenReturn(AppUser.builder().id(UUID.randomUUID()).build());
|
||||
UUID senderId = UUID.randomUUID();
|
||||
when(documentService.findIdsForFilter(any(), any(), any(), eq(senderId), any(), any(), any(), any(), any()))
|
||||
.thenReturn(List.of());
|
||||
|
||||
mockMvc.perform(get("/api/documents/ids").param("senderId", senderId.toString()))
|
||||
.andExpect(status().isOk());
|
||||
|
||||
verify(documentService).findIdsForFilter(any(), any(), any(), eq(senderId), any(), any(), any(), any(), any());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void getDocumentIds_returns400_whenResultExceedsFilterCap() throws Exception {
|
||||
when(userService.findByEmail(any())).thenReturn(AppUser.builder().id(UUID.randomUUID()).build());
|
||||
// Service returns 5001 IDs — one over BULK_EDIT_FILTER_MAX_IDS (5000).
|
||||
java.util.List<UUID> tooMany = new java.util.ArrayList<>(5001);
|
||||
for (int i = 0; i < 5001; i++) tooMany.add(UUID.randomUUID());
|
||||
when(documentService.findIdsForFilter(any(), any(), any(), any(), any(), any(), any(), any(), any()))
|
||||
.thenReturn(tooMany);
|
||||
|
||||
mockMvc.perform(get("/api/documents/ids"))
|
||||
.andExpect(status().isBadRequest())
|
||||
.andExpect(jsonPath("$.code").value("BULK_EDIT_TOO_MANY_IDS"));
|
||||
}
|
||||
|
||||
// ─── POST /api/documents/batch-metadata ──────────────────────────────────
|
||||
|
||||
@Test
|
||||
void batchMetadata_returns401_whenUnauthenticated() throws Exception {
|
||||
mockMvc.perform(org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post("/api/documents/batch-metadata")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"ids\":[\"" + UUID.randomUUID() + "\"]}"))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void batchMetadata_returns403_forUserWithoutReadAll() throws Exception {
|
||||
mockMvc.perform(org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post("/api/documents/batch-metadata")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"ids\":[\"" + UUID.randomUUID() + "\"]}"))
|
||||
.andExpect(status().isForbidden());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "READ_ALL")
|
||||
void batchMetadata_returns400_whenIdsEmpty() throws Exception {
|
||||
mockMvc.perform(org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post("/api/documents/batch-metadata")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"ids\":[]}"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "READ_ALL")
|
||||
void batchMetadata_returns400_whenIdsExceedsCap() throws Exception {
|
||||
when(userService.findByEmail(any())).thenReturn(AppUser.builder().id(UUID.randomUUID()).build());
|
||||
StringBuilder sb = new StringBuilder("{\"ids\":[");
|
||||
for (int i = 0; i < 501; i++) {
|
||||
if (i > 0) sb.append(",");
|
||||
sb.append("\"").append(UUID.randomUUID()).append("\"");
|
||||
}
|
||||
sb.append("]}");
|
||||
|
||||
mockMvc.perform(org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post("/api/documents/batch-metadata")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content(sb.toString()))
|
||||
.andExpect(status().isBadRequest())
|
||||
.andExpect(jsonPath("$.code").value("BULK_EDIT_TOO_MANY_IDS"));
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "READ_ALL")
|
||||
void batchMetadata_returnsSummaries_forExistingIds() throws Exception {
|
||||
when(userService.findByEmail(any())).thenReturn(AppUser.builder().id(UUID.randomUUID()).build());
|
||||
UUID id = UUID.randomUUID();
|
||||
when(documentService.batchMetadata(any())).thenReturn(List.of(
|
||||
new org.raddatz.familienarchiv.dto.DocumentBatchSummary(id, "Brief", "/api/documents/" + id + "/file")));
|
||||
|
||||
mockMvc.perform(org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post("/api/documents/batch-metadata")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"ids\":[\"" + id + "\"]}"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$[0].id").value(id.toString()))
|
||||
.andExpect(jsonPath("$[0].title").value("Brief"))
|
||||
.andExpect(jsonPath("$[0].pdfUrl").value("/api/documents/" + id + "/file"));
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void patchBulk_stripsCarriageReturnsAndNewlinesFromErrorMessages() throws Exception {
|
||||
// Nora C4 — DocumentController.sanitizeForLog defends against
|
||||
// CWE-117 (log injection) by replacing CR/LF in any free-form string
|
||||
// it interpolates. Same helper now sanitises BulkEditError.message
|
||||
// before it round-trips to the frontend.
|
||||
when(userService.findByEmail(any())).thenReturn(AppUser.builder().id(UUID.randomUUID()).build());
|
||||
UUID badId = UUID.randomUUID();
|
||||
when(documentService.applyBulkEditToDocument(eq(badId), any(), any()))
|
||||
.thenThrow(org.raddatz.familienarchiv.exception.DomainException.notFound(
|
||||
org.raddatz.familienarchiv.exception.ErrorCode.DOCUMENT_NOT_FOUND,
|
||||
"evil\r\nFAKE LOG ENTRY: admin logged in"));
|
||||
|
||||
mockMvc.perform(patch("/api/documents/bulk")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content(bulkBody(badId.toString())))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.errors[0].message",
|
||||
org.hamcrest.Matchers.not(org.hamcrest.Matchers.containsString("\n"))))
|
||||
.andExpect(jsonPath("$.errors[0].message",
|
||||
org.hamcrest.Matchers.not(org.hamcrest.Matchers.containsString("\r"))))
|
||||
.andExpect(jsonPath("$.errors[0].message",
|
||||
org.hamcrest.Matchers.containsString("evil_")));
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void patchBulk_returnsPartialFailureShape_whenServiceThrowsForOneDocument() throws Exception {
|
||||
when(userService.findByEmail(any())).thenReturn(AppUser.builder().id(UUID.randomUUID()).build());
|
||||
UUID okId = UUID.randomUUID();
|
||||
UUID badId = UUID.randomUUID();
|
||||
when(documentService.applyBulkEditToDocument(eq(okId), any(), any()))
|
||||
.thenAnswer(inv -> Document.builder().id(okId).build());
|
||||
when(documentService.applyBulkEditToDocument(eq(badId), any(), any()))
|
||||
.thenThrow(org.raddatz.familienarchiv.exception.DomainException.notFound(
|
||||
org.raddatz.familienarchiv.exception.ErrorCode.DOCUMENT_NOT_FOUND, "Document not found: " + badId));
|
||||
|
||||
mockMvc.perform(patch("/api/documents/bulk")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content(bulkBody(okId.toString(), badId.toString())))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.updated").value(1))
|
||||
.andExpect(jsonPath("$.errors[0].id").value(badId.toString()))
|
||||
.andExpect(jsonPath("$.errors[0].message").value(
|
||||
org.hamcrest.Matchers.containsString("not found")));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
package org.raddatz.familienarchiv.controller;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.raddatz.familienarchiv.exception.DomainException;
|
||||
import org.raddatz.familienarchiv.exception.ErrorCode;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.model.Person;
|
||||
import org.raddatz.familienarchiv.model.PersonNameAlias;
|
||||
@@ -28,7 +25,6 @@ import java.util.UUID;
|
||||
|
||||
import org.raddatz.familienarchiv.dto.PersonSummaryDTO;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.Mockito.verify;
|
||||
@@ -187,19 +183,19 @@ class PersonControllerTest {
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void createPerson_returns400_whenPersonTypeIsPerson_andFirstNameIsMissing() throws Exception {
|
||||
void createPerson_returns400_whenFirstNameIsMissing() throws Exception {
|
||||
mockMvc.perform(post("/api/persons")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"lastName\":\"Müller\",\"personType\":\"PERSON\"}"))
|
||||
.content("{\"lastName\":\"Müller\"}"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void createPerson_returns400_whenPersonTypeIsPerson_andFirstNameIsBlank() throws Exception {
|
||||
void createPerson_returns400_whenFirstNameIsBlank() throws Exception {
|
||||
mockMvc.perform(post("/api/persons")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"firstName\":\" \",\"lastName\":\"Müller\",\"personType\":\"PERSON\"}"))
|
||||
.content("{\"firstName\":\" \",\"lastName\":\"Müller\"}"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@@ -208,7 +204,7 @@ class PersonControllerTest {
|
||||
void createPerson_returns400_whenLastNameIsMissing() throws Exception {
|
||||
mockMvc.perform(post("/api/persons")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"firstName\":\"Hans\",\"personType\":\"PERSON\"}"))
|
||||
.content("{\"firstName\":\"Hans\"}"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@@ -217,7 +213,7 @@ class PersonControllerTest {
|
||||
void createPerson_returns400_whenLastNameIsBlank() throws Exception {
|
||||
mockMvc.perform(post("/api/persons")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"firstName\":\"Hans\",\"lastName\":\" \",\"personType\":\"PERSON\"}"))
|
||||
.content("{\"firstName\":\"Hans\",\"lastName\":\" \"}"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@@ -229,53 +225,11 @@ class PersonControllerTest {
|
||||
|
||||
mockMvc.perform(post("/api/persons")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"firstName\":\"Hans\",\"lastName\":\"Müller\",\"personType\":\"PERSON\"}"))
|
||||
.content("{\"firstName\":\"Hans\",\"lastName\":\"Müller\"}"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.firstName").value("Hans"));
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void createPerson_returns200_forInstitution_withoutFirstName() throws Exception {
|
||||
Person saved = Person.builder().id(UUID.randomUUID()).lastName("Verlag GmbH").build();
|
||||
when(personService.createPerson(any(org.raddatz.familienarchiv.dto.PersonUpdateDTO.class))).thenReturn(saved);
|
||||
|
||||
mockMvc.perform(post("/api/persons")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"lastName\":\"Verlag GmbH\",\"personType\":\"INSTITUTION\"}"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.lastName").value("Verlag GmbH"));
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void createPerson_trimsTitle_beforePersisting() throws Exception {
|
||||
ArgumentCaptor<org.raddatz.familienarchiv.dto.PersonUpdateDTO> captor =
|
||||
ArgumentCaptor.forClass(org.raddatz.familienarchiv.dto.PersonUpdateDTO.class);
|
||||
Person saved = Person.builder().id(UUID.randomUUID()).firstName("Hans").lastName("Müller").build();
|
||||
when(personService.createPerson(captor.capture())).thenReturn(saved);
|
||||
|
||||
mockMvc.perform(post("/api/persons")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"firstName\":\"Hans\",\"lastName\":\"Müller\",\"title\":\" Prof. \",\"personType\":\"PERSON\"}"))
|
||||
.andExpect(status().isOk());
|
||||
|
||||
assertThat(captor.getValue().getTitle()).isEqualTo("Prof.");
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void createPerson_returns400_whenPersonTypeIsSkip() throws Exception {
|
||||
when(personService.createPerson(any())).thenThrow(
|
||||
DomainException.badRequest(ErrorCode.INVALID_PERSON_TYPE, "SKIP is not a valid person type"));
|
||||
|
||||
mockMvc.perform(post("/api/persons")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"lastName\":\"Müller\",\"personType\":\"SKIP\"}"))
|
||||
.andExpect(status().isBadRequest())
|
||||
.andExpect(jsonPath("$.code").value("INVALID_PERSON_TYPE"));
|
||||
}
|
||||
|
||||
// ─── PUT /api/persons/{id} ────────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
@@ -288,10 +242,10 @@ class PersonControllerTest {
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void updatePerson_returns400_whenPersonTypeIsPerson_andFirstNameIsBlank() throws Exception {
|
||||
void updatePerson_returns400_whenFirstNameIsBlank() throws Exception {
|
||||
mockMvc.perform(put("/api/persons/{id}", UUID.randomUUID())
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"firstName\":\"\",\"lastName\":\"Müller\",\"personType\":\"PERSON\"}"))
|
||||
.content("{\"firstName\":\"\",\"lastName\":\"Müller\"}"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@@ -300,7 +254,7 @@ class PersonControllerTest {
|
||||
void updatePerson_returns400_whenLastNameIsNull() throws Exception {
|
||||
mockMvc.perform(put("/api/persons/{id}", UUID.randomUUID())
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"firstName\":\"Hans\",\"personType\":\"PERSON\"}"))
|
||||
.content("{\"firstName\":\"Hans\"}"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@@ -313,7 +267,7 @@ class PersonControllerTest {
|
||||
|
||||
mockMvc.perform(put("/api/persons/{id}", id)
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"firstName\":\"Hans\",\"lastName\":\"Müller\",\"personType\":\"PERSON\"}"))
|
||||
.content("{\"firstName\":\"Hans\",\"lastName\":\"Müller\"}"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.lastName").value("Müller"));
|
||||
}
|
||||
@@ -363,10 +317,11 @@ class PersonControllerTest {
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void updatePerson_returns400_whenLastNameIsBlank() throws Exception {
|
||||
// firstName valid, lastName blank → second || operand = true → 400
|
||||
UUID id = UUID.randomUUID();
|
||||
mockMvc.perform(put("/api/persons/{id}", id)
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"firstName\":\"Hans\",\"lastName\":\" \",\"personType\":\"PERSON\"}"))
|
||||
.content("{\"firstName\":\"Hans\",\"lastName\":\" \"}"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@@ -384,7 +339,7 @@ class PersonControllerTest {
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"firstName\":\"Maria\",\"lastName\":\"Raddatz\"," +
|
||||
"\"alias\":\"Oma Maria\",\"birthYear\":1901,\"deathYear\":1975," +
|
||||
"\"notes\":\"Some notes\",\"personType\":\"PERSON\"}"))
|
||||
"\"notes\":\"Some notes\"}"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.firstName").value("Maria"))
|
||||
.andExpect(jsonPath("$.alias").value("Oma Maria"))
|
||||
@@ -400,7 +355,7 @@ class PersonControllerTest {
|
||||
UUID id = UUID.randomUUID();
|
||||
mockMvc.perform(put("/api/persons/{id}", id)
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"firstName\":\"Hans\",\"lastName\":\"Müller\",\"notes\":\"" + oversizedNotes + "\",\"personType\":\"PERSON\"}"))
|
||||
.content("{\"firstName\":\"Hans\",\"lastName\":\"Müller\",\"notes\":\"" + oversizedNotes + "\"}"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@@ -411,7 +366,7 @@ class PersonControllerTest {
|
||||
UUID id = UUID.randomUUID();
|
||||
mockMvc.perform(put("/api/persons/{id}", id)
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"firstName\":\"" + oversizedFirstName + "\",\"lastName\":\"Müller\",\"personType\":\"PERSON\"}"))
|
||||
.content("{\"firstName\":\"" + oversizedFirstName + "\",\"lastName\":\"Müller\"}"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@@ -422,7 +377,7 @@ class PersonControllerTest {
|
||||
void createPerson_returns403_whenUserHasOnlyReadPermission() throws Exception {
|
||||
mockMvc.perform(post("/api/persons")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"firstName\":\"Hans\",\"lastName\":\"Müller\",\"personType\":\"PERSON\"}"))
|
||||
.content("{\"firstName\":\"Hans\",\"lastName\":\"Müller\"}"))
|
||||
.andExpect(status().isForbidden());
|
||||
}
|
||||
|
||||
@@ -431,7 +386,7 @@ class PersonControllerTest {
|
||||
void updatePerson_returns403_whenUserHasOnlyReadPermission() throws Exception {
|
||||
mockMvc.perform(put("/api/persons/{id}", UUID.randomUUID())
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content("{\"firstName\":\"Hans\",\"lastName\":\"Müller\",\"personType\":\"PERSON\"}"))
|
||||
.content("{\"firstName\":\"Hans\",\"lastName\":\"Müller\"}"))
|
||||
.andExpect(status().isForbidden());
|
||||
}
|
||||
|
||||
|
||||
@@ -260,13 +260,6 @@ class TranscriptionBlockControllerTest {
|
||||
.andExpect(status().isForbidden());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "READ_ALL")
|
||||
void deleteBlock_returns403_whenUserHasOnlyReadAllPermission() throws Exception {
|
||||
mockMvc.perform(delete(URL_BLOCK))
|
||||
.andExpect(status().isForbidden());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void deleteBlock_returns204_whenAuthorised() throws Exception {
|
||||
@@ -380,63 +373,4 @@ class TranscriptionBlockControllerTest {
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.reviewed").value(true));
|
||||
}
|
||||
|
||||
// ─── PUT .../review-all ───────────────────────────────────────────────────
|
||||
|
||||
private static final String URL_REVIEW_ALL = URL_BASE + "/review-all";
|
||||
|
||||
@Test
|
||||
void markAllBlocksReviewed_returns401_whenUnauthenticated() throws Exception {
|
||||
mockMvc.perform(put(URL_REVIEW_ALL))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "READ_ALL")
|
||||
void markAllBlocksReviewed_returns403_whenMissingWriteAllPermission() throws Exception {
|
||||
mockMvc.perform(put(URL_REVIEW_ALL))
|
||||
.andExpect(status().isForbidden());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void markAllBlocksReviewed_returns200_withAllReviewedBlocks_whenAuthorised() throws Exception {
|
||||
when(userService.findByEmail(any())).thenReturn(mockUser());
|
||||
TranscriptionBlock b1 = TranscriptionBlock.builder()
|
||||
.id(UUID.randomUUID()).documentId(DOC_ID).annotationId(UUID.randomUUID())
|
||||
.text("Block 1").sortOrder(0).reviewed(true).build();
|
||||
TranscriptionBlock b2 = TranscriptionBlock.builder()
|
||||
.id(UUID.randomUUID()).documentId(DOC_ID).annotationId(UUID.randomUUID())
|
||||
.text("Block 2").sortOrder(1).reviewed(true).build();
|
||||
when(transcriptionService.markAllBlocksReviewed(eq(DOC_ID), any()))
|
||||
.thenReturn(List.of(b1, b2));
|
||||
|
||||
mockMvc.perform(put(URL_REVIEW_ALL))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$").isArray())
|
||||
.andExpect(jsonPath("$[0].reviewed").value(true))
|
||||
.andExpect(jsonPath("$[1].reviewed").value(true));
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void markAllBlocksReviewed_returns200_withEmptyList_whenNoBlocksExist() throws Exception {
|
||||
when(userService.findByEmail(any())).thenReturn(mockUser());
|
||||
when(transcriptionService.markAllBlocksReviewed(eq(DOC_ID), any()))
|
||||
.thenReturn(List.of());
|
||||
|
||||
mockMvc.perform(put(URL_REVIEW_ALL))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$").isArray())
|
||||
.andExpect(jsonPath("$").isEmpty());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "WRITE_ALL")
|
||||
void markAllBlocksReviewed_returns401_whenUserNotFoundInDatabase() throws Exception {
|
||||
when(userService.findByEmail(any())).thenReturn(null);
|
||||
|
||||
mockMvc.perform(put(URL_REVIEW_ALL))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,10 +18,8 @@ import java.util.UUID;
|
||||
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.Mockito.when;
|
||||
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
|
||||
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
|
||||
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
|
||||
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put;
|
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
|
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
|
||||
|
||||
@@ -106,55 +104,4 @@ class UserControllerTest {
|
||||
.content("{\"email\":\"\",\"initialPassword\":\"secret123\"}"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
// ─── permission enforcement ───────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
@WithMockUser(username = "reader@example.com")
|
||||
void createUser_returns403_whenCallerLacksAdminUserPermission() throws Exception {
|
||||
mockMvc.perform(post("/api/users")
|
||||
.contentType(org.springframework.http.MediaType.APPLICATION_JSON)
|
||||
.content("{\"email\":\"x@x.com\",\"initialPassword\":\"secret123\"}"))
|
||||
.andExpect(status().isForbidden());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(username = "reader@example.com")
|
||||
void adminUpdateUser_returns403_whenCallerLacksAdminUserPermission() throws Exception {
|
||||
mockMvc.perform(put("/api/users/" + UUID.randomUUID())
|
||||
.contentType(org.springframework.http.MediaType.APPLICATION_JSON)
|
||||
.content("{}"))
|
||||
.andExpect(status().isForbidden());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(username = "reader@example.com")
|
||||
void deleteUser_returns403_whenCallerLacksAdminUserPermission() throws Exception {
|
||||
mockMvc.perform(delete("/api/users/" + UUID.randomUUID()))
|
||||
.andExpect(status().isForbidden());
|
||||
}
|
||||
|
||||
// ─── unauthenticated access ───────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
void createUser_returns401_whenUnauthenticated() throws Exception {
|
||||
mockMvc.perform(post("/api/users")
|
||||
.contentType(org.springframework.http.MediaType.APPLICATION_JSON)
|
||||
.content("{\"email\":\"x@x.com\",\"initialPassword\":\"secret123\"}"))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
void adminUpdateUser_returns401_whenUnauthenticated() throws Exception {
|
||||
mockMvc.perform(put("/api/users/" + UUID.randomUUID())
|
||||
.contentType(org.springframework.http.MediaType.APPLICATION_JSON)
|
||||
.content("{}"))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
void deleteUser_returns401_whenUnauthenticated() throws Exception {
|
||||
mockMvc.perform(delete("/api/users/" + UUID.randomUUID()))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,94 +0,0 @@
|
||||
package org.raddatz.familienarchiv.dashboard;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.raddatz.familienarchiv.PostgresContainerConfig;
|
||||
import org.raddatz.familienarchiv.audit.AuditLogQueryRepository;
|
||||
import org.raddatz.familienarchiv.audit.ContributorRow;
|
||||
import org.raddatz.familienarchiv.config.FlywayConfig;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.data.jpa.test.autoconfigure.DataJpaTest;
|
||||
import org.springframework.boot.jdbc.test.autoconfigure.AutoConfigureTestDatabase;
|
||||
import org.springframework.context.annotation.Import;
|
||||
import org.springframework.test.context.jdbc.Sql;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
@DataJpaTest
|
||||
@AutoConfigureTestDatabase(replace = AutoConfigureTestDatabase.Replace.NONE)
|
||||
@Import({PostgresContainerConfig.class, FlywayConfig.class})
|
||||
class AuditLogQueryRepositoryContributorsTest {
|
||||
|
||||
static final UUID DOC_ID = UUID.fromString("bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb");
|
||||
static final UUID USER_A = UUID.fromString("aaaaaaaa-aaaa-aaaa-aaaa-000000000001");
|
||||
static final UUID USER_B = UUID.fromString("aaaaaaaa-aaaa-aaaa-aaaa-000000000002");
|
||||
static final UUID USER_C = UUID.fromString("aaaaaaaa-aaaa-aaaa-aaaa-000000000003");
|
||||
static final UUID USER_D = UUID.fromString("aaaaaaaa-aaaa-aaaa-aaaa-000000000004");
|
||||
static final UUID USER_E = UUID.fromString("aaaaaaaa-aaaa-aaaa-aaaa-000000000005");
|
||||
|
||||
@Autowired AuditLogQueryRepository auditLogQueryRepository;
|
||||
|
||||
@Test
|
||||
@Sql(statements = {
|
||||
"INSERT INTO users (id, enabled, email, password, first_name, last_name, color) VALUES ('aaaaaaaa-aaaa-aaaa-aaaa-000000000001', true, 'a@test.com', 'pw', 'Anna', 'Meier', '#f00')",
|
||||
"INSERT INTO documents (id, title, original_filename, status) VALUES ('bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', 'Test', 'test.pdf', 'PLACEHOLDER')",
|
||||
"INSERT INTO audit_log (kind, actor_id, document_id) VALUES ('ANNOTATION_CREATED', 'aaaaaaaa-aaaa-aaaa-aaaa-000000000001', 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb')"
|
||||
})
|
||||
void findRecentContributors_returns_contributor_with_initials_and_color() {
|
||||
List<ContributorRow> rows = auditLogQueryRepository.findRecentContributorsForDocuments(List.of(DOC_ID));
|
||||
|
||||
assertThat(rows).hasSize(1);
|
||||
assertThat(rows.get(0).getDocumentId()).isEqualTo(DOC_ID);
|
||||
assertThat(rows.get(0).getActorInitials()).isEqualTo("AM");
|
||||
assertThat(rows.get(0).getActorColor()).isEqualTo("#f00");
|
||||
}
|
||||
|
||||
@Test
|
||||
@Sql(statements = {
|
||||
"INSERT INTO users (id, enabled, email, password, first_name, last_name, color) VALUES ('aaaaaaaa-aaaa-aaaa-aaaa-000000000001', true, 'a@test.com', 'pw', 'Anna', 'Meier', '#aaa')",
|
||||
"INSERT INTO users (id, enabled, email, password, first_name, last_name, color) VALUES ('aaaaaaaa-aaaa-aaaa-aaaa-000000000002', true, 'b@test.com', 'pw', 'Ben', 'Wolf', '#bbb')",
|
||||
"INSERT INTO users (id, enabled, email, password, first_name, last_name, color) VALUES ('aaaaaaaa-aaaa-aaaa-aaaa-000000000003', true, 'c@test.com', 'pw', 'Clara', 'Zorn', '#ccc')",
|
||||
"INSERT INTO users (id, enabled, email, password, first_name, last_name, color) VALUES ('aaaaaaaa-aaaa-aaaa-aaaa-000000000004', true, 'd@test.com', 'pw', 'Dirk', 'Ott', '#ddd')",
|
||||
"INSERT INTO users (id, enabled, email, password, first_name, last_name, color) VALUES ('aaaaaaaa-aaaa-aaaa-aaaa-000000000005', true, 'e@test.com', 'pw', 'Eva', 'Kern', '#eee')",
|
||||
"INSERT INTO documents (id, title, original_filename, status) VALUES ('bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', 'Test', 'test.pdf', 'PLACEHOLDER')",
|
||||
"INSERT INTO audit_log (kind, actor_id, document_id, happened_at) VALUES ('ANNOTATION_CREATED', 'aaaaaaaa-aaaa-aaaa-aaaa-000000000001', 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', now() - interval '5 hours')",
|
||||
"INSERT INTO audit_log (kind, actor_id, document_id, happened_at) VALUES ('TEXT_SAVED', 'aaaaaaaa-aaaa-aaaa-aaaa-000000000002', 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', now() - interval '4 hours')",
|
||||
"INSERT INTO audit_log (kind, actor_id, document_id, happened_at) VALUES ('TEXT_SAVED', 'aaaaaaaa-aaaa-aaaa-aaaa-000000000003', 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', now() - interval '3 hours')",
|
||||
"INSERT INTO audit_log (kind, actor_id, document_id, happened_at) VALUES ('BLOCK_REVIEWED', 'aaaaaaaa-aaaa-aaaa-aaaa-000000000004', 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', now() - interval '2 hours')",
|
||||
"INSERT INTO audit_log (kind, actor_id, document_id, happened_at) VALUES ('BLOCK_REVIEWED', 'aaaaaaaa-aaaa-aaaa-aaaa-000000000005', 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', now() - interval '1 hour')"
|
||||
})
|
||||
void findRecentContributors_limits_to_4_most_recent() {
|
||||
List<ContributorRow> rows = auditLogQueryRepository.findRecentContributorsForDocuments(List.of(DOC_ID));
|
||||
|
||||
assertThat(rows).hasSize(4);
|
||||
// Most recent first: E, D, C, B (A is 5th, excluded)
|
||||
assertThat(rows.get(0).getActorInitials()).isEqualTo("EK");
|
||||
assertThat(rows.get(1).getActorInitials()).isEqualTo("DO");
|
||||
assertThat(rows.get(2).getActorInitials()).isEqualTo("CZ");
|
||||
assertThat(rows.get(3).getActorInitials()).isEqualTo("BW");
|
||||
}
|
||||
|
||||
@Test
|
||||
@Sql(statements = {
|
||||
"INSERT INTO documents (id, title, original_filename, status) VALUES ('bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', 'Test', 'test.pdf', 'PLACEHOLDER')"
|
||||
})
|
||||
void findRecentContributors_returns_empty_when_no_audit_entries() {
|
||||
List<ContributorRow> rows = auditLogQueryRepository.findRecentContributorsForDocuments(List.of(DOC_ID));
|
||||
|
||||
assertThat(rows).isEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
@Sql(statements = {
|
||||
"INSERT INTO documents (id, title, original_filename, status) VALUES ('bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', 'Test', 'test.pdf', 'PLACEHOLDER')",
|
||||
// Deleted user: ON DELETE SET NULL makes actor_id NULL — query excludes these rows
|
||||
"INSERT INTO audit_log (kind, actor_id, document_id) VALUES ('TEXT_SAVED', NULL, 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb')"
|
||||
})
|
||||
void findRecentContributors_excludes_entries_from_deleted_users() {
|
||||
List<ContributorRow> rows = auditLogQueryRepository.findRecentContributorsForDocuments(List.of(DOC_ID));
|
||||
|
||||
assertThat(rows).isEmpty();
|
||||
}
|
||||
}
|
||||
@@ -49,16 +49,13 @@ class AuditLogQueryRepositoryIntegrationTest {
|
||||
"INSERT INTO documents (id, title, original_filename, status) VALUES ('bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', 'Test Doc', 'test.pdf', 'PLACEHOLDER')",
|
||||
"INSERT INTO audit_log (kind, actor_id, document_id, payload) VALUES ('ANNOTATION_CREATED', 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', '{\"pageNumber\":1}')"
|
||||
})
|
||||
void findRolledUpActivityFeed_returnsAnnotationEntry() {
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 10,
|
||||
List.of("TEXT_SAVED","FILE_UPLOADED","ANNOTATION_CREATED","BLOCK_REVIEWED","COMMENT_ADDED","MENTION_CREATED"));
|
||||
void findDedupedActivityFeed_returnsAnnotationEntry() {
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findDedupedActivityFeed(USER_ID.toString(), 10);
|
||||
|
||||
assertThat(rows).hasSize(1);
|
||||
assertThat(rows.get(0).getKind()).isEqualTo("ANNOTATION_CREATED");
|
||||
assertThat(rows.get(0).getDocumentId()).isEqualTo(DOC_ID);
|
||||
assertThat(rows.get(0).getHappenedAt()).isNotNull();
|
||||
assertThat(rows.get(0).getCount()).isEqualTo(1);
|
||||
assertThat(rows.get(0).getHappenedAtUntil()).isNull();
|
||||
}
|
||||
|
||||
@Test
|
||||
|
||||
@@ -1,411 +0,0 @@
|
||||
package org.raddatz.familienarchiv.dashboard;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.raddatz.familienarchiv.PostgresContainerConfig;
|
||||
import org.raddatz.familienarchiv.audit.ActivityFeedRow;
|
||||
import org.raddatz.familienarchiv.audit.AuditKind;
|
||||
import org.raddatz.familienarchiv.audit.AuditLogQueryRepository;
|
||||
import org.raddatz.familienarchiv.config.FlywayConfig;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.data.jpa.test.autoconfigure.DataJpaTest;
|
||||
import org.springframework.boot.jdbc.test.autoconfigure.AutoConfigureTestDatabase;
|
||||
import org.springframework.context.annotation.Import;
|
||||
import org.springframework.jdbc.core.JdbcTemplate;
|
||||
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
|
||||
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.time.OffsetDateTime;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
@DataJpaTest
|
||||
@AutoConfigureTestDatabase(replace = AutoConfigureTestDatabase.Replace.NONE)
|
||||
@Import({PostgresContainerConfig.class, FlywayConfig.class})
|
||||
@Transactional
|
||||
class AuditLogQueryRepositoryRolledUpTest {
|
||||
|
||||
static final UUID USER_ID = UUID.fromString("dddddddd-dddd-dddd-dddd-dddddddddddd");
|
||||
static final UUID OTHER_USER_ID = UUID.fromString("cccccccc-cccc-cccc-cccc-cccccccccccc");
|
||||
static final UUID DOC_ID = UUID.fromString("eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee");
|
||||
static final UUID OTHER_DOC_ID = UUID.fromString("ffffffff-ffff-ffff-ffff-ffffffffffff");
|
||||
|
||||
static final List<String> ALL_ELIGIBLE_KINDS =
|
||||
AuditKind.ROLLUP_ELIGIBLE.stream().map(Enum::name).toList();
|
||||
|
||||
private static final ObjectMapper MAPPER = new ObjectMapper();
|
||||
|
||||
@Autowired AuditLogQueryRepository auditLogQueryRepository;
|
||||
@Autowired JdbcTemplate jdbcTemplate;
|
||||
|
||||
private NamedParameterJdbcTemplate named() {
|
||||
return new NamedParameterJdbcTemplate(jdbcTemplate);
|
||||
}
|
||||
|
||||
private void insertUserAndDocs() {
|
||||
jdbcTemplate.update(
|
||||
"INSERT INTO users (id, enabled, email, password) VALUES (?, true, ?, 'pw')",
|
||||
USER_ID, "rollup-" + USER_ID + "@test.com");
|
||||
jdbcTemplate.update(
|
||||
"INSERT INTO users (id, enabled, email, password) VALUES (?, true, ?, 'pw')",
|
||||
OTHER_USER_ID, "rollup-" + OTHER_USER_ID + "@test.com");
|
||||
jdbcTemplate.update(
|
||||
"INSERT INTO documents (id, title, original_filename, status) VALUES (?, 'Brief A', 'a.pdf', 'PLACEHOLDER')",
|
||||
DOC_ID);
|
||||
jdbcTemplate.update(
|
||||
"INSERT INTO documents (id, title, original_filename, status) VALUES (?, 'Brief B', 'b.pdf', 'PLACEHOLDER')",
|
||||
OTHER_DOC_ID);
|
||||
}
|
||||
|
||||
private void insertAuditEvent(UUID actorId, UUID docId, String kind, Instant happenedAt) {
|
||||
insertAuditEvent(actorId, docId, kind, happenedAt, Map.of());
|
||||
}
|
||||
|
||||
private void insertAuditEvent(UUID actorId, UUID docId, String kind, Instant happenedAt, Map<String, String> payload) {
|
||||
String payloadJson;
|
||||
try {
|
||||
payloadJson = payload.isEmpty() ? null : MAPPER.writeValueAsString(payload);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
MapSqlParameterSource params = new MapSqlParameterSource()
|
||||
.addValue("kind", kind)
|
||||
.addValue("actor", actorId)
|
||||
.addValue("doc", docId)
|
||||
.addValue("t", OffsetDateTime.ofInstant(happenedAt, java.time.ZoneOffset.UTC))
|
||||
.addValue("payload", payloadJson, java.sql.Types.OTHER);
|
||||
named().update(
|
||||
"INSERT INTO audit_log (kind, actor_id, document_id, happened_at, payload) "
|
||||
+ "VALUES (:kind, :actor, :doc, :t, :payload::jsonb)",
|
||||
params);
|
||||
}
|
||||
|
||||
private void insertReplyNotification(UUID recipientId, UUID docId, UUID commentId) {
|
||||
jdbcTemplate.update(
|
||||
"INSERT INTO notifications (recipient_id, type, document_id, reference_id) VALUES (?, 'REPLY', ?, ?)",
|
||||
recipientId, docId, commentId);
|
||||
}
|
||||
|
||||
@Test
|
||||
void rolledUpFeed_combines_same_actor_same_doc_within_2h() {
|
||||
insertUserAndDocs();
|
||||
Instant base = Instant.parse("2026-04-20T09:00:00Z");
|
||||
for (int i = 0; i < 20; i++) {
|
||||
insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", base.plusSeconds(i * 480L));
|
||||
}
|
||||
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40, ALL_ELIGIBLE_KINDS);
|
||||
|
||||
assertThat(rows).hasSize(1);
|
||||
ActivityFeedRow row = rows.get(0);
|
||||
assertThat(row.getKind()).isEqualTo("TEXT_SAVED");
|
||||
assertThat(row.getDocumentId()).isEqualTo(DOC_ID);
|
||||
assertThat(row.getCount()).isEqualTo(20);
|
||||
assertThat(row.getHappenedAt()).isEqualTo(base);
|
||||
assertThat(row.getHappenedAtUntil()).isEqualTo(base.plusSeconds(19 * 480L));
|
||||
}
|
||||
|
||||
@Test
|
||||
void rolledUpFeed_splits_at_2h_boundary() {
|
||||
insertUserAndDocs();
|
||||
Instant sessionOneStart = Instant.parse("2026-04-20T08:00:00Z");
|
||||
Instant sessionOneLast = sessionOneStart.plusSeconds(600);
|
||||
insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", sessionOneStart);
|
||||
insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", sessionOneLast);
|
||||
Instant sessionTwoStart = sessionOneLast.plusSeconds(2L * 60L * 60L + 60L);
|
||||
insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", sessionTwoStart);
|
||||
insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", sessionTwoStart.plusSeconds(300));
|
||||
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40, ALL_ELIGIBLE_KINDS);
|
||||
|
||||
assertThat(rows).hasSize(2);
|
||||
assertThat(rows.get(0).getCount()).isEqualTo(2);
|
||||
assertThat(rows.get(0).getHappenedAt()).isEqualTo(sessionTwoStart);
|
||||
assertThat(rows.get(1).getCount()).isEqualTo(2);
|
||||
assertThat(rows.get(1).getHappenedAt()).isEqualTo(sessionOneStart);
|
||||
}
|
||||
|
||||
@Test
|
||||
void rolledUpFeed_has_no_hard_cap_on_long_session() {
|
||||
insertUserAndDocs();
|
||||
Instant base = Instant.parse("2026-04-20T06:00:00Z");
|
||||
for (int i = 0; i < 30; i++) {
|
||||
insertAuditEvent(USER_ID, DOC_ID, "ANNOTATION_CREATED", base.plusSeconds(i * 60L * 30L));
|
||||
}
|
||||
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40, ALL_ELIGIBLE_KINDS);
|
||||
|
||||
assertThat(rows).hasSize(1);
|
||||
assertThat(rows.get(0).getCount()).isEqualTo(30);
|
||||
assertThat(rows.get(0).getHappenedAt()).isEqualTo(base);
|
||||
assertThat(rows.get(0).getHappenedAtUntil()).isEqualTo(base.plusSeconds(29 * 60L * 30L));
|
||||
}
|
||||
|
||||
@Test
|
||||
void rolledUpFeed_never_rolls_up_COMMENT_ADDED_or_MENTION_CREATED() {
|
||||
insertUserAndDocs();
|
||||
Instant base = Instant.parse("2026-04-20T10:00:00Z");
|
||||
insertAuditEvent(USER_ID, DOC_ID, "COMMENT_ADDED", base);
|
||||
insertAuditEvent(USER_ID, DOC_ID, "COMMENT_ADDED", base.plusSeconds(60));
|
||||
insertAuditEvent(USER_ID, DOC_ID, "COMMENT_ADDED", base.plusSeconds(120));
|
||||
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40, ALL_ELIGIBLE_KINDS);
|
||||
|
||||
assertThat(rows).hasSize(3);
|
||||
assertThat(rows).allSatisfy(r -> {
|
||||
assertThat(r.getKind()).isEqualTo("COMMENT_ADDED");
|
||||
assertThat(r.getCount()).isEqualTo(1);
|
||||
assertThat(r.getHappenedAtUntil()).isNull();
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
void rolledUpFeed_excludes_non_eligible_kinds() {
|
||||
insertUserAndDocs();
|
||||
Instant base = Instant.parse("2026-04-20T12:00:00Z");
|
||||
insertAuditEvent(USER_ID, DOC_ID, "STATUS_CHANGED", base);
|
||||
insertAuditEvent(USER_ID, DOC_ID, "METADATA_UPDATED", base.plusSeconds(60));
|
||||
insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", base.plusSeconds(120));
|
||||
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40, ALL_ELIGIBLE_KINDS);
|
||||
|
||||
assertThat(rows).hasSize(1);
|
||||
assertThat(rows.get(0).getKind()).isEqualTo("TEXT_SAVED");
|
||||
}
|
||||
|
||||
@Test
|
||||
void rolledUpFeed_exposes_count_and_happenedAtUntil_on_singletons_and_rollups() {
|
||||
insertUserAndDocs();
|
||||
Instant rollupStart = Instant.parse("2026-04-20T11:00:00Z");
|
||||
insertAuditEvent(USER_ID, DOC_ID, "FILE_UPLOADED", rollupStart);
|
||||
insertAuditEvent(USER_ID, DOC_ID, "FILE_UPLOADED", rollupStart.plusSeconds(300));
|
||||
insertAuditEvent(USER_ID, OTHER_DOC_ID, "FILE_UPLOADED", rollupStart.plusSeconds(900));
|
||||
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40, ALL_ELIGIBLE_KINDS);
|
||||
|
||||
assertThat(rows).hasSize(2);
|
||||
assertThat(rows).anySatisfy(r -> {
|
||||
assertThat(r.getDocumentId()).isEqualTo(DOC_ID);
|
||||
assertThat(r.getCount()).isEqualTo(2);
|
||||
assertThat(r.getHappenedAt()).isEqualTo(rollupStart);
|
||||
assertThat(r.getHappenedAtUntil()).isEqualTo(rollupStart.plusSeconds(300));
|
||||
});
|
||||
assertThat(rows).anySatisfy(r -> {
|
||||
assertThat(r.getDocumentId()).isEqualTo(OTHER_DOC_ID);
|
||||
assertThat(r.getCount()).isEqualTo(1);
|
||||
assertThat(r.getHappenedAt()).isEqualTo(rollupStart.plusSeconds(900));
|
||||
assertThat(r.getHappenedAtUntil()).isNull();
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
void youParticipated_is_true_when_user_has_reply_notification_for_comment() {
|
||||
insertUserAndDocs();
|
||||
UUID commentId = UUID.randomUUID();
|
||||
insertAuditEvent(OTHER_USER_ID, DOC_ID, "COMMENT_ADDED",
|
||||
Instant.parse("2026-04-20T10:00:00Z"), Map.of("commentId", commentId.toString()));
|
||||
insertReplyNotification(USER_ID, DOC_ID, commentId);
|
||||
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40, ALL_ELIGIBLE_KINDS);
|
||||
|
||||
assertThat(rows).anySatisfy(r ->
|
||||
assertThat(r.isYouParticipated()).isTrue()
|
||||
);
|
||||
}
|
||||
|
||||
@Test
|
||||
void youParticipated_is_false_for_comment_with_no_reply_notification() {
|
||||
insertUserAndDocs();
|
||||
UUID commentId = UUID.randomUUID();
|
||||
insertAuditEvent(OTHER_USER_ID, DOC_ID, "COMMENT_ADDED",
|
||||
Instant.parse("2026-04-20T10:00:00Z"), Map.of("commentId", commentId.toString()));
|
||||
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40, ALL_ELIGIBLE_KINDS);
|
||||
|
||||
assertThat(rows).allSatisfy(r ->
|
||||
assertThat(r.isYouParticipated()).isFalse()
|
||||
);
|
||||
}
|
||||
|
||||
@Test
|
||||
void youParticipated_is_false_when_comment_added_has_no_commentId_in_payload() {
|
||||
insertUserAndDocs();
|
||||
insertAuditEvent(OTHER_USER_ID, DOC_ID, "COMMENT_ADDED",
|
||||
Instant.parse("2026-04-20T10:00:00Z"), Map.of());
|
||||
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40, ALL_ELIGIBLE_KINDS);
|
||||
|
||||
assertThat(rows).allSatisfy(r ->
|
||||
assertThat(r.isYouParticipated()).isFalse()
|
||||
);
|
||||
}
|
||||
|
||||
@Test
|
||||
void youParticipated_is_false_when_reply_notification_belongs_to_other_user() {
|
||||
insertUserAndDocs();
|
||||
UUID commentId = UUID.randomUUID();
|
||||
insertAuditEvent(OTHER_USER_ID, DOC_ID, "COMMENT_ADDED",
|
||||
Instant.parse("2026-04-20T10:00:00Z"), Map.of("commentId", commentId.toString()));
|
||||
insertReplyNotification(OTHER_USER_ID, DOC_ID, commentId);
|
||||
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40, ALL_ELIGIBLE_KINDS);
|
||||
|
||||
assertThat(rows).allSatisfy(r ->
|
||||
assertThat(r.isYouParticipated()).isFalse()
|
||||
);
|
||||
}
|
||||
|
||||
@Test
|
||||
void youMentioned_is_true_when_mention_created_payload_matches_current_user() {
|
||||
insertUserAndDocs();
|
||||
insertAuditEvent(OTHER_USER_ID, DOC_ID, "MENTION_CREATED",
|
||||
Instant.parse("2026-04-20T10:00:00Z"), Map.of("mentionedUserId", USER_ID.toString()));
|
||||
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40, ALL_ELIGIBLE_KINDS);
|
||||
|
||||
assertThat(rows).anySatisfy(r ->
|
||||
assertThat(r.isYouMentioned()).isTrue()
|
||||
);
|
||||
}
|
||||
|
||||
@Test
|
||||
void rolledUpFeed_exposes_commentId_for_COMMENT_ADDED_events() {
|
||||
insertUserAndDocs();
|
||||
UUID commentId = UUID.randomUUID();
|
||||
insertAuditEvent(USER_ID, DOC_ID, "COMMENT_ADDED",
|
||||
Instant.parse("2026-04-20T10:00:00Z"), Map.of("commentId", commentId.toString()));
|
||||
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40, ALL_ELIGIBLE_KINDS);
|
||||
|
||||
assertThat(rows).hasSize(1);
|
||||
assertThat(rows.get(0).getCommentId()).isEqualTo(commentId);
|
||||
}
|
||||
|
||||
@Test
|
||||
void rolledUpFeed_exposes_commentId_for_MENTION_CREATED_events() {
|
||||
insertUserAndDocs();
|
||||
UUID commentId = UUID.randomUUID();
|
||||
insertAuditEvent(OTHER_USER_ID, DOC_ID, "MENTION_CREATED",
|
||||
Instant.parse("2026-04-20T10:00:00Z"),
|
||||
Map.of("commentId", commentId.toString(), "mentionedUserId", USER_ID.toString()));
|
||||
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40, ALL_ELIGIBLE_KINDS);
|
||||
|
||||
assertThat(rows).hasSize(1);
|
||||
assertThat(rows.get(0).getCommentId()).isEqualTo(commentId);
|
||||
}
|
||||
|
||||
@Test
|
||||
void rolledUpFeed_commentId_is_null_for_non_comment_kinds() {
|
||||
insertUserAndDocs();
|
||||
insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED",
|
||||
Instant.parse("2026-04-20T10:00:00Z"), Map.of("blockId", "ccc", "pageNumber", "1"));
|
||||
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40, ALL_ELIGIBLE_KINDS);
|
||||
|
||||
assertThat(rows).hasSize(1);
|
||||
assertThat(rows.get(0).getCommentId()).isNull();
|
||||
}
|
||||
|
||||
@Test
|
||||
void youMentioned_is_false_when_mention_created_payload_targets_different_user() {
|
||||
insertUserAndDocs();
|
||||
insertAuditEvent(USER_ID, DOC_ID, "MENTION_CREATED",
|
||||
Instant.parse("2026-04-20T10:00:00Z"), Map.of("mentionedUserId", OTHER_USER_ID.toString()));
|
||||
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40, ALL_ELIGIBLE_KINDS);
|
||||
|
||||
assertThat(rows).allSatisfy(r ->
|
||||
assertThat(r.isYouMentioned()).isFalse()
|
||||
);
|
||||
}
|
||||
|
||||
// ─── kinds filter ─────────────────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
void rolledUpFeed_with_single_kind_returns_only_that_kind() {
|
||||
insertUserAndDocs();
|
||||
Instant base = Instant.parse("2026-04-20T10:00:00Z");
|
||||
insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", base);
|
||||
insertAuditEvent(USER_ID, DOC_ID, "FILE_UPLOADED", base.plusSeconds(60));
|
||||
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(
|
||||
USER_ID.toString(), 40, List.of("FILE_UPLOADED"));
|
||||
|
||||
assertThat(rows).hasSize(1);
|
||||
assertThat(rows.get(0).getKind()).isEqualTo("FILE_UPLOADED");
|
||||
}
|
||||
|
||||
@Test
|
||||
void rolledUpFeed_with_multiple_kinds_returns_union() {
|
||||
insertUserAndDocs();
|
||||
Instant base = Instant.parse("2026-04-20T10:00:00Z");
|
||||
insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", base);
|
||||
insertAuditEvent(USER_ID, OTHER_DOC_ID, "FILE_UPLOADED", base.plusSeconds(60));
|
||||
insertAuditEvent(USER_ID, DOC_ID, "ANNOTATION_CREATED", base.plusSeconds(120));
|
||||
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(
|
||||
USER_ID.toString(), 40, List.of("TEXT_SAVED", "FILE_UPLOADED"));
|
||||
|
||||
assertThat(rows).hasSize(2);
|
||||
assertThat(rows).extracting(ActivityFeedRow::getKind)
|
||||
.containsExactlyInAnyOrder("TEXT_SAVED", "FILE_UPLOADED");
|
||||
}
|
||||
|
||||
@Test
|
||||
void rolledUpFeed_with_default_returns_all_six_eligible_kinds() {
|
||||
insertUserAndDocs();
|
||||
Instant base = Instant.parse("2026-04-20T10:00:00Z");
|
||||
insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", base);
|
||||
insertAuditEvent(USER_ID, DOC_ID, "FILE_UPLOADED", base.plusSeconds(60));
|
||||
insertAuditEvent(USER_ID, DOC_ID, "ANNOTATION_CREATED", base.plusSeconds(120));
|
||||
insertAuditEvent(USER_ID, DOC_ID, "BLOCK_REVIEWED", base.plusSeconds(7300));
|
||||
insertAuditEvent(USER_ID, DOC_ID, "COMMENT_ADDED", base.plusSeconds(7360));
|
||||
insertAuditEvent(USER_ID, DOC_ID, "MENTION_CREATED", base.plusSeconds(7420));
|
||||
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(
|
||||
USER_ID.toString(), 40,
|
||||
List.of("TEXT_SAVED", "FILE_UPLOADED", "ANNOTATION_CREATED",
|
||||
"BLOCK_REVIEWED", "COMMENT_ADDED", "MENTION_CREATED"));
|
||||
|
||||
assertThat(rows).hasSize(6);
|
||||
}
|
||||
|
||||
@Test
|
||||
void rolledUpFeed_excludes_rows_not_in_filter_set() {
|
||||
insertUserAndDocs();
|
||||
Instant base = Instant.parse("2026-04-20T10:00:00Z");
|
||||
insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", base);
|
||||
insertAuditEvent(USER_ID, OTHER_DOC_ID, "FILE_UPLOADED", base.plusSeconds(60));
|
||||
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(
|
||||
USER_ID.toString(), 40, List.of("TEXT_SAVED"));
|
||||
|
||||
assertThat(rows).hasSize(1);
|
||||
assertThat(rows.get(0).getKind()).isEqualTo("TEXT_SAVED");
|
||||
}
|
||||
|
||||
@Test
|
||||
void rolledUpFeed_rollup_still_works_when_kind_set_is_filtered_to_single_rollable_kind() {
|
||||
insertUserAndDocs();
|
||||
Instant base = Instant.parse("2026-04-20T09:00:00Z");
|
||||
for (int i = 0; i < 10; i++) {
|
||||
insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", base.plusSeconds(i * 480L));
|
||||
}
|
||||
insertAuditEvent(USER_ID, DOC_ID, "FILE_UPLOADED", base.plusSeconds(20));
|
||||
|
||||
List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(
|
||||
USER_ID.toString(), 40, List.of("TEXT_SAVED"));
|
||||
|
||||
assertThat(rows).hasSize(1);
|
||||
assertThat(rows.get(0).getKind()).isEqualTo("TEXT_SAVED");
|
||||
assertThat(rows.get(0).getCount()).isEqualTo(10);
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,6 @@
|
||||
package org.raddatz.familienarchiv.dashboard;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.raddatz.familienarchiv.audit.AuditKind;
|
||||
import org.raddatz.familienarchiv.config.SecurityConfig;
|
||||
import org.raddatz.familienarchiv.model.AppUser;
|
||||
import org.raddatz.familienarchiv.security.PermissionAspect;
|
||||
@@ -16,12 +15,10 @@ import org.springframework.test.context.bean.override.mockito.MockitoBean;
|
||||
import org.springframework.test.web.servlet.MockMvc;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.anyInt;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
|
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
|
||||
@@ -137,79 +134,10 @@ class DashboardControllerTest {
|
||||
UUID userId = UUID.randomUUID();
|
||||
when(userService.findByEmail(any())).thenReturn(
|
||||
AppUser.builder().id(userId).email("u@test.com").password("pw").build());
|
||||
when(dashboardService.getActivity(any(UUID.class), anyInt(), any())).thenReturn(List.of());
|
||||
when(dashboardService.getActivity(any(UUID.class), anyInt())).thenReturn(List.of());
|
||||
|
||||
mockMvc.perform(get("/api/dashboard/activity"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$").isArray());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "READ_ALL")
|
||||
void activity_clamps_limit_to_40() throws Exception {
|
||||
UUID userId = UUID.randomUUID();
|
||||
when(userService.findByEmail(any())).thenReturn(
|
||||
AppUser.builder().id(userId).email("u@test.com").password("pw").build());
|
||||
when(dashboardService.getActivity(any(UUID.class), anyInt(), any())).thenReturn(List.of());
|
||||
|
||||
mockMvc.perform(get("/api/dashboard/activity").param("limit", "9999"))
|
||||
.andExpect(status().isOk());
|
||||
|
||||
verify(dashboardService).getActivity(any(UUID.class), org.mockito.ArgumentMatchers.eq(40), any());
|
||||
}
|
||||
|
||||
// ─── GET /api/dashboard/activity — kinds param ───────────────────────────
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "READ_ALL")
|
||||
void activity_parsesKinds_fromCsvQueryParam() throws Exception {
|
||||
UUID userId = UUID.randomUUID();
|
||||
when(userService.findByEmail(any())).thenReturn(
|
||||
AppUser.builder().id(userId).email("u@test.com").password("pw").build());
|
||||
when(dashboardService.getActivity(any(UUID.class), anyInt(), any())).thenReturn(List.of());
|
||||
|
||||
mockMvc.perform(get("/api/dashboard/activity")
|
||||
.param("kinds", "FILE_UPLOADED", "TEXT_SAVED"))
|
||||
.andExpect(status().isOk());
|
||||
|
||||
verify(dashboardService).getActivity(any(UUID.class), anyInt(),
|
||||
org.mockito.ArgumentMatchers.eq(Set.of(AuditKind.FILE_UPLOADED, AuditKind.TEXT_SAVED)));
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "READ_ALL")
|
||||
void activity_returns400_forUnknownKindValue() throws Exception {
|
||||
mockMvc.perform(get("/api/dashboard/activity").param("kinds", "INVALID_KIND"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "READ_ALL")
|
||||
void activity_defaults_to_rollupEligible_whenKindsAbsent() throws Exception {
|
||||
UUID userId = UUID.randomUUID();
|
||||
when(userService.findByEmail(any())).thenReturn(
|
||||
AppUser.builder().id(userId).email("u@test.com").password("pw").build());
|
||||
when(dashboardService.getActivity(any(UUID.class), anyInt(), any())).thenReturn(List.of());
|
||||
|
||||
mockMvc.perform(get("/api/dashboard/activity"))
|
||||
.andExpect(status().isOk());
|
||||
|
||||
verify(dashboardService).getActivity(any(UUID.class), anyInt(),
|
||||
org.mockito.ArgumentMatchers.eq(AuditKind.ROLLUP_ELIGIBLE));
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser(authorities = "READ_ALL")
|
||||
void activity_treats_single_valid_kind_as_filter() throws Exception {
|
||||
UUID userId = UUID.randomUUID();
|
||||
when(userService.findByEmail(any())).thenReturn(
|
||||
AppUser.builder().id(userId).email("u@test.com").password("pw").build());
|
||||
when(dashboardService.getActivity(any(UUID.class), anyInt(), any())).thenReturn(List.of());
|
||||
|
||||
mockMvc.perform(get("/api/dashboard/activity").param("kinds", "COMMENT_ADDED"))
|
||||
.andExpect(status().isOk());
|
||||
|
||||
verify(dashboardService).getActivity(any(UUID.class), anyInt(),
|
||||
org.mockito.ArgumentMatchers.eq(Set.of(AuditKind.COMMENT_ADDED)));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,30 +6,21 @@ import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.raddatz.familienarchiv.audit.ActivityFeedRow;
|
||||
import org.raddatz.familienarchiv.audit.AuditKind;
|
||||
import org.raddatz.familienarchiv.audit.AuditLogQueryService;
|
||||
import org.raddatz.familienarchiv.audit.PulseStatsRow;
|
||||
import org.raddatz.familienarchiv.model.AppUser;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.model.TranscriptionBlock;
|
||||
import org.raddatz.familienarchiv.service.CommentService;
|
||||
import org.raddatz.familienarchiv.service.DocumentService;
|
||||
import org.raddatz.familienarchiv.service.TranscriptionService;
|
||||
import org.raddatz.familienarchiv.service.UserService;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.time.LocalDateTime;
|
||||
import java.time.OffsetDateTime;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.anyInt;
|
||||
import static org.mockito.ArgumentMatchers.anyList;
|
||||
import static org.mockito.Mockito.never;
|
||||
import static org.mockito.Mockito.verify;
|
||||
@@ -42,35 +33,9 @@ class DashboardServiceTest {
|
||||
@Mock DocumentService documentService;
|
||||
@Mock TranscriptionService transcriptionService;
|
||||
@Mock UserService userService;
|
||||
@Mock CommentService commentService;
|
||||
|
||||
@InjectMocks DashboardService dashboardService;
|
||||
|
||||
// ─── getResume wires thumbnailUrl from Document ───────────────────────────
|
||||
|
||||
@Test
|
||||
void getResume_populatesThumbnailUrl_fromDocument() {
|
||||
UUID userId = UUID.randomUUID();
|
||||
UUID docId = UUID.fromString("12345678-aaaa-bbbb-cccc-1234567890ab");
|
||||
|
||||
Document doc = Document.builder()
|
||||
.id(docId).title("Brief").originalFilename("brief.pdf")
|
||||
.thumbnailKey("thumbnails/" + docId + ".jpg")
|
||||
.thumbnailGeneratedAt(LocalDateTime.of(2026, 4, 23, 9, 0, 0))
|
||||
.receivers(new HashSet<>())
|
||||
.build();
|
||||
|
||||
when(auditLogQueryService.findMostRecentDocumentForUser(userId)).thenReturn(Optional.of(docId));
|
||||
when(documentService.getDocumentById(docId)).thenReturn(doc);
|
||||
when(transcriptionService.listBlocks(docId)).thenReturn(List.of());
|
||||
|
||||
DashboardResumeDTO result = dashboardService.getResume(userId);
|
||||
|
||||
assertThat(result).isNotNull();
|
||||
assertThat(result.thumbnailUrl()).isEqualTo(doc.getThumbnailUrl());
|
||||
assertThat(result.thumbnailUrl()).startsWith("/api/documents/" + docId + "/thumbnail?v=");
|
||||
}
|
||||
|
||||
// ─── toActorDTO (via getResume collaborators) ─────────────────────────────
|
||||
|
||||
@Test
|
||||
@@ -114,7 +79,7 @@ class DashboardServiceTest {
|
||||
UUID docId = UUID.randomUUID();
|
||||
|
||||
ActivityFeedRow row = mockFeedRow(docId, "ANNOTATION_CREATED");
|
||||
when(auditLogQueryService.findActivityFeed(userId, 5, AuditKind.ROLLUP_ELIGIBLE)).thenReturn(List.of(row, row));
|
||||
when(auditLogQueryService.findActivityFeed(userId, 5)).thenReturn(List.of(row, row));
|
||||
|
||||
Document doc = Document.builder()
|
||||
.id(docId).title("Familienbrief").originalFilename("f.pdf")
|
||||
@@ -122,101 +87,14 @@ class DashboardServiceTest {
|
||||
.build();
|
||||
when(documentService.getDocumentsByIds(List.of(docId))).thenReturn(List.of(doc));
|
||||
|
||||
List<ActivityFeedItemDTO> items = dashboardService.getActivity(userId, 5, AuditKind.ROLLUP_ELIGIBLE);
|
||||
List<ActivityFeedItemDTO> items = dashboardService.getActivity(userId, 5);
|
||||
|
||||
assertThat(items).hasSize(2);
|
||||
assertThat(items.get(0).documentTitle()).isEqualTo("Familienbrief");
|
||||
verify(documentService, never()).getDocumentById(docId);
|
||||
}
|
||||
|
||||
// ─── getActivity comment/annotation enrichment ────────────────────────────
|
||||
|
||||
@Test
|
||||
void getActivity_populatesCommentId_forCommentEvents() {
|
||||
UUID userId = UUID.randomUUID();
|
||||
UUID docId = UUID.randomUUID();
|
||||
UUID commentId = UUID.randomUUID();
|
||||
|
||||
ActivityFeedRow row = mockFeedRow(docId, "COMMENT_ADDED", commentId);
|
||||
when(auditLogQueryService.findActivityFeed(userId, 5, AuditKind.ROLLUP_ELIGIBLE)).thenReturn(List.of(row));
|
||||
when(documentService.getDocumentsByIds(List.of(docId))).thenReturn(List.of(
|
||||
Document.builder().id(docId).title("B").originalFilename("b.pdf").receivers(new HashSet<>()).build()
|
||||
));
|
||||
when(commentService.findAnnotationIdsByIds(List.of(commentId))).thenReturn(Map.of());
|
||||
|
||||
List<ActivityFeedItemDTO> items = dashboardService.getActivity(userId, 5, AuditKind.ROLLUP_ELIGIBLE);
|
||||
|
||||
assertThat(items).hasSize(1);
|
||||
assertThat(items.get(0).commentId()).isEqualTo(commentId);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getActivity_populatesAnnotationId_viaCommentService() {
|
||||
UUID userId = UUID.randomUUID();
|
||||
UUID docId = UUID.randomUUID();
|
||||
UUID commentId = UUID.randomUUID();
|
||||
UUID annotationId = UUID.randomUUID();
|
||||
|
||||
ActivityFeedRow row = mockFeedRow(docId, "COMMENT_ADDED", commentId);
|
||||
when(auditLogQueryService.findActivityFeed(userId, 5, AuditKind.ROLLUP_ELIGIBLE)).thenReturn(List.of(row));
|
||||
when(documentService.getDocumentsByIds(List.of(docId))).thenReturn(List.of(
|
||||
Document.builder().id(docId).title("B").originalFilename("b.pdf").receivers(new HashSet<>()).build()
|
||||
));
|
||||
when(commentService.findAnnotationIdsByIds(List.of(commentId)))
|
||||
.thenReturn(Map.of(commentId, annotationId));
|
||||
|
||||
List<ActivityFeedItemDTO> items = dashboardService.getActivity(userId, 5, AuditKind.ROLLUP_ELIGIBLE);
|
||||
|
||||
assertThat(items).hasSize(1);
|
||||
assertThat(items.get(0).annotationId()).isEqualTo(annotationId);
|
||||
}
|
||||
|
||||
@Test
|
||||
void getActivity_leavesBothNull_forNonCommentKinds() {
|
||||
UUID userId = UUID.randomUUID();
|
||||
UUID docId = UUID.randomUUID();
|
||||
|
||||
ActivityFeedRow row = mockFeedRow(docId, "TEXT_SAVED", null);
|
||||
when(auditLogQueryService.findActivityFeed(userId, 5, AuditKind.ROLLUP_ELIGIBLE)).thenReturn(List.of(row));
|
||||
when(documentService.getDocumentsByIds(List.of(docId))).thenReturn(List.of(
|
||||
Document.builder().id(docId).title("B").originalFilename("b.pdf").receivers(new HashSet<>()).build()
|
||||
));
|
||||
|
||||
List<ActivityFeedItemDTO> items = dashboardService.getActivity(userId, 5, AuditKind.ROLLUP_ELIGIBLE);
|
||||
|
||||
assertThat(items).hasSize(1);
|
||||
assertThat(items.get(0).commentId()).isNull();
|
||||
assertThat(items.get(0).annotationId()).isNull();
|
||||
verify(commentService, never()).findAnnotationIdsByIds(anyList());
|
||||
}
|
||||
|
||||
// ─── getPulse — always uses full ROLLUP_ELIGIBLE set ─────────────────────
|
||||
|
||||
@Test
|
||||
void pulse_uses_all_rollup_eligible_kinds_never_calls_kinds_filtered_overload() {
|
||||
UUID userId = UUID.randomUUID();
|
||||
PulseStatsRow stats = new PulseStatsRow() {
|
||||
public long getPages() { return 0; }
|
||||
public long getAnnotated() { return 0; }
|
||||
public long getTranscribed() { return 0; }
|
||||
public long getUploaded() { return 0; }
|
||||
public long getYourPages() { return 0; }
|
||||
};
|
||||
when(auditLogQueryService.getPulseStats(any(OffsetDateTime.class), any(UUID.class)))
|
||||
.thenReturn(stats);
|
||||
when(auditLogQueryService.findActivityFeed(userId, 50)).thenReturn(List.of());
|
||||
|
||||
dashboardService.getPulse(userId);
|
||||
|
||||
verify(auditLogQueryService).findActivityFeed(userId, 50);
|
||||
verify(auditLogQueryService, never()).findActivityFeed(any(UUID.class), anyInt(), any(Set.class));
|
||||
}
|
||||
|
||||
private ActivityFeedRow mockFeedRow(UUID docId, String kind) {
|
||||
return mockFeedRow(docId, kind, null);
|
||||
}
|
||||
|
||||
private ActivityFeedRow mockFeedRow(UUID docId, String kind, UUID commentId) {
|
||||
return new ActivityFeedRow() {
|
||||
public String getKind() { return kind; }
|
||||
public UUID getActorId() { return null; }
|
||||
@@ -226,10 +104,6 @@ class DashboardServiceTest {
|
||||
public UUID getDocumentId() { return docId; }
|
||||
public Instant getHappenedAt() { return Instant.now(); }
|
||||
public boolean isYouMentioned() { return false; }
|
||||
public boolean isYouParticipated() { return false; }
|
||||
public int getCount() { return 1; }
|
||||
public Instant getHappenedAtUntil() { return null; }
|
||||
public UUID getCommentId() { return commentId; }
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,103 +2,67 @@ package org.raddatz.familienarchiv.dto;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.model.DocumentStatus;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class DocumentSearchResultTest {
|
||||
|
||||
private DocumentSearchItem item(UUID docId) {
|
||||
Document doc = Document.builder()
|
||||
.id(docId)
|
||||
private Document doc(UUID id) {
|
||||
return Document.builder()
|
||||
.id(id)
|
||||
.title("Test")
|
||||
.originalFilename("test.pdf")
|
||||
.status(DocumentStatus.UPLOADED)
|
||||
.build();
|
||||
return new DocumentSearchItem(doc, SearchMatchData.empty(), 0, List.of());
|
||||
}
|
||||
|
||||
@Test
|
||||
void of_totalElements_equals_list_size_for_unpaged_shortcut() {
|
||||
DocumentSearchResult result = DocumentSearchResult.of(
|
||||
List.of(item(UUID.randomUUID()), item(UUID.randomUUID())));
|
||||
|
||||
assertThat(result.totalElements()).isEqualTo(2L);
|
||||
assertThat(result.pageNumber()).isZero();
|
||||
assertThat(result.pageSize()).isEqualTo(2);
|
||||
assertThat(result.totalPages()).isEqualTo(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
void of_empty_shortcut_has_zero_totalPages() {
|
||||
DocumentSearchResult result = DocumentSearchResult.of(List.of());
|
||||
|
||||
assertThat(result.totalElements()).isZero();
|
||||
assertThat(result.totalPages()).isZero();
|
||||
}
|
||||
|
||||
@Test
|
||||
void paged_factory_populates_paging_fields_from_pageable_and_total() {
|
||||
List<DocumentSearchItem> slice = List.of(item(UUID.randomUUID()), item(UUID.randomUUID()));
|
||||
|
||||
DocumentSearchResult result = DocumentSearchResult.paged(slice, PageRequest.of(1, 50), 120L);
|
||||
|
||||
assertThat(result.items()).hasSize(2);
|
||||
assertThat(result.totalElements()).isEqualTo(120L);
|
||||
assertThat(result.pageNumber()).isEqualTo(1);
|
||||
assertThat(result.pageSize()).isEqualTo(50);
|
||||
assertThat(result.totalPages()).isEqualTo(3); // ceil(120 / 50)
|
||||
}
|
||||
|
||||
@Test
|
||||
void paged_factory_totalPages_rounds_up_on_remainder() {
|
||||
DocumentSearchResult result =
|
||||
DocumentSearchResult.paged(List.of(), PageRequest.of(0, 7), 30L);
|
||||
|
||||
assertThat(result.totalPages()).isEqualTo(5); // ceil(30 / 7)
|
||||
}
|
||||
|
||||
@Test
|
||||
void of_exposes_items_with_completion_and_contributors() {
|
||||
void withMatchData_total_equals_list_size() {
|
||||
UUID id = UUID.randomUUID();
|
||||
ActivityActorDTO actor = new ActivityActorDTO("AB", "#f00", "Anna Braun");
|
||||
Document doc = Document.builder().id(id).title("T").originalFilename("t.pdf")
|
||||
.status(DocumentStatus.UPLOADED).build();
|
||||
DocumentSearchItem item = new DocumentSearchItem(doc, SearchMatchData.empty(), 75, List.of(actor));
|
||||
List<Document> docs = List.of(doc(id));
|
||||
Map<UUID, SearchMatchData> matchData = Map.of(id, SearchMatchData.empty());
|
||||
|
||||
DocumentSearchResult result = DocumentSearchResult.of(List.of(item));
|
||||
DocumentSearchResult result = DocumentSearchResult.withMatchData(docs, matchData);
|
||||
|
||||
assertThat(result.items()).hasSize(1);
|
||||
assertThat(result.items().get(0).completionPercentage()).isEqualTo(75);
|
||||
assertThat(result.items().get(0).contributors()).containsExactly(actor);
|
||||
assertThat(result.total()).isEqualTo(1L);
|
||||
}
|
||||
|
||||
@Test
|
||||
void items_component_is_annotated_as_required_in_openapi_schema() throws NoSuchFieldException {
|
||||
Schema schema = DocumentSearchResult.class.getDeclaredField("items").getAnnotation(Schema.class);
|
||||
void withMatchData_exposes_match_data_map() {
|
||||
UUID id = UUID.randomUUID();
|
||||
SearchMatchData data = new SearchMatchData("snippet", List.of(), false, List.of(), List.of(), List.of(), null, List.of());
|
||||
DocumentSearchResult result = DocumentSearchResult.withMatchData(List.of(doc(id)), Map.of(id, data));
|
||||
|
||||
assertThat(result.matchData()).containsKey(id);
|
||||
assertThat(result.matchData().get(id).transcriptionSnippet()).isEqualTo("snippet");
|
||||
}
|
||||
|
||||
@Test
|
||||
void of_factory_returns_empty_match_data() {
|
||||
UUID id = UUID.randomUUID();
|
||||
DocumentSearchResult result = DocumentSearchResult.of(List.of(doc(id)));
|
||||
|
||||
assertThat(result.matchData()).isEmpty();
|
||||
assertThat(result.total()).isEqualTo(1L);
|
||||
}
|
||||
|
||||
@Test
|
||||
void documents_component_is_annotated_as_required_in_openapi_schema() throws NoSuchFieldException {
|
||||
Schema schema = DocumentSearchResult.class.getDeclaredField("documents").getAnnotation(Schema.class);
|
||||
assertThat(schema).isNotNull();
|
||||
assertThat(schema.requiredMode()).isEqualTo(Schema.RequiredMode.REQUIRED);
|
||||
}
|
||||
|
||||
@Test
|
||||
void totalElements_component_is_annotated_as_required_in_openapi_schema() throws NoSuchFieldException {
|
||||
Schema schema = DocumentSearchResult.class.getDeclaredField("totalElements").getAnnotation(Schema.class);
|
||||
void total_component_is_annotated_as_required_in_openapi_schema() throws NoSuchFieldException {
|
||||
Schema schema = DocumentSearchResult.class.getDeclaredField("total").getAnnotation(Schema.class);
|
||||
assertThat(schema).isNotNull();
|
||||
assertThat(schema.requiredMode()).isEqualTo(Schema.RequiredMode.REQUIRED);
|
||||
}
|
||||
|
||||
@Test
|
||||
void paging_components_are_annotated_as_required_in_openapi_schema() throws NoSuchFieldException {
|
||||
for (String name : List.of("pageNumber", "pageSize", "totalPages")) {
|
||||
Schema schema = DocumentSearchResult.class.getDeclaredField(name).getAnnotation(Schema.class);
|
||||
assertThat(schema).as(name + " must have @Schema").isNotNull();
|
||||
assertThat(schema.requiredMode()).isEqualTo(Schema.RequiredMode.REQUIRED);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,85 +0,0 @@
|
||||
package org.raddatz.familienarchiv.model;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.UUID;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
/**
 * Unit tests for {@code Document.getThumbnailUrl()}: URL shape, the
 * cache-busting query parameter, and the JSON wire contract.
 */
class DocumentTest {

    // No thumbnail stored → no URL at all (frontend renders a fallback icon).
    @Test
    void getThumbnailUrl_returnsNull_whenThumbnailKeyNull() {
        Document doc = Document.builder()
                .id(UUID.randomUUID())
                .title("Brief")
                .originalFilename("brief.pdf")
                .status(DocumentStatus.UPLOADED)
                .thumbnailKey(null)
                .build();

        assertThat(doc.getThumbnailUrl()).isNull();
    }

    // Key present but no generation timestamp → plain URL without `?v=` suffix.
    @Test
    void getThumbnailUrl_omitsCacheBuster_whenThumbnailKeyPresentButGeneratedAtNull() {
        UUID id = UUID.fromString("11111111-2222-3333-4444-555555555555");
        Document doc = Document.builder()
                .id(id)
                .title("Brief")
                .originalFilename("brief.pdf")
                .status(DocumentStatus.UPLOADED)
                .thumbnailKey("thumbnails/" + id + ".jpg")
                .thumbnailGeneratedAt(null)
                .build();

        assertThat(doc.getThumbnailUrl())
                .isEqualTo("/api/documents/" + id + "/thumbnail");
    }

    // Key and timestamp present → URL carries a percent-encoded cache buster.
    @Test
    void getThumbnailUrl_includesCacheBuster_whenBothKeyAndGeneratedAtPresent() {
        UUID id = UUID.fromString("aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee");
        LocalDateTime generatedAt = LocalDateTime.of(2026, 4, 23, 14, 30, 45);
        Document doc = Document.builder()
                .id(id)
                .title("Brief")
                .originalFilename("brief.pdf")
                .status(DocumentStatus.UPLOADED)
                .thumbnailKey("thumbnails/" + id + ".jpg")
                .thumbnailGeneratedAt(generatedAt)
                .build();

        // frontend equivalent: `?v=${encodeURIComponent(doc.thumbnailGeneratedAt)}`
        // where thumbnailGeneratedAt is the ISO-8601 string Jackson serialises.
        // LocalDateTime.toString() produces "2026-04-23T14:30:45"; encodeURIComponent
        // turns ":" into "%3A" but leaves "T" and digits alone.
        String expected = "/api/documents/" + id + "/thumbnail?v=2026-04-23T14%3A30%3A45";
        assertThat(doc.getThumbnailUrl()).isEqualTo(expected);
    }

    // JavaTimeModule is required because the entity carries LocalDateTime fields.
    @Test
    void thumbnailUrl_isSerialisedToJson_soFrontendReceivesIt() throws Exception {
        UUID id = UUID.fromString("99999999-aaaa-bbbb-cccc-111122223333");
        Document doc = Document.builder()
                .id(id)
                .title("Brief")
                .originalFilename("brief.pdf")
                .status(DocumentStatus.UPLOADED)
                .thumbnailKey("thumbnails/" + id + ".jpg")
                .thumbnailGeneratedAt(LocalDateTime.of(2026, 4, 23, 9, 0, 0))
                .build();

        ObjectMapper mapper = new ObjectMapper().registerModule(new JavaTimeModule());
        String json = mapper.writeValueAsString(doc);

        // Locks the wire contract, not just the Java API: every Document JSON must carry
        // `thumbnailUrl`. Protects against silent breakage if the getter gets renamed,
        // hidden behind @JsonIgnore, or visibility-reduced — any of which would leave the
        // frontend rendering the fallback icon on every surface.
        assertThat(json).contains("\"thumbnailUrl\":\"" + doc.getThumbnailUrl() + "\"");
    }
}
|
||||
@@ -179,22 +179,6 @@ class DocumentFtsTest {
|
||||
assertThat(ids).isEmpty();
|
||||
}
|
||||
|
||||
// Regression guard for the stemming/stop-word interaction in the FTS query
// pipeline; the document must still be findable by its literal word.
@Test
void should_find_document_whose_transcription_contains_word_that_stems_to_german_stop_word() {
    // "Wille" stems to "will" via the German Snowball stemmer.
    // "will" is also a German stop word, so to_tsquery('german','will:*') drops it.
    // The prefix-transform step must use to_tsquery('simple',...) to avoid this.
    Document doc = documentRepository.saveAndFlush(document("Foto"));
    UUID annotationId = annotation(doc.getId());
    blockRepository.saveAndFlush(block(doc.getId(), annotationId, "Der Wille des Volkes", 0));
    em.flush();
    // Clear the persistence context so the query reads committed DB state,
    // not first-level-cache entities.
    em.clear();

    List<UUID> ids = documentRepository.findRankedIdsByFts("Wille");

    assertThat(ids).contains(doc.getId());
}
|
||||
|
||||
@Test
|
||||
void should_not_throw_when_query_contains_invalid_tsquery_syntax() {
|
||||
documentRepository.saveAndFlush(document("Brief"));
|
||||
|
||||
@@ -8,7 +8,6 @@ import org.raddatz.familienarchiv.model.DocumentAnnotation;
|
||||
import org.raddatz.familienarchiv.model.DocumentStatus;
|
||||
import org.raddatz.familienarchiv.model.Person;
|
||||
import org.raddatz.familienarchiv.model.Tag;
|
||||
import org.raddatz.familienarchiv.model.ThumbnailAspect;
|
||||
import org.raddatz.familienarchiv.model.TranscriptionBlock;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.jdbc.test.autoconfigure.AutoConfigureTestDatabase;
|
||||
@@ -66,40 +65,6 @@ class DocumentRepositoryTest {
|
||||
assertThat(found.get().getStatus()).isEqualTo(DocumentStatus.PLACEHOLDER);
|
||||
}
|
||||
|
||||
// ─── thumbnailAspect + pageCount round-trip ───────────────────────────────
|
||||
|
||||
@Test
|
||||
void save_persistsThumbnailAspectAndPageCount() {
|
||||
Document document = Document.builder()
|
||||
.title("Mit Aspekt")
|
||||
.originalFilename("aspect.pdf")
|
||||
.status(DocumentStatus.UPLOADED)
|
||||
.thumbnailAspect(ThumbnailAspect.LANDSCAPE)
|
||||
.pageCount(7)
|
||||
.build();
|
||||
|
||||
Document saved = documentRepository.save(document);
|
||||
Document found = documentRepository.findById(saved.getId()).orElseThrow();
|
||||
|
||||
assertThat(found.getThumbnailAspect()).isEqualTo(ThumbnailAspect.LANDSCAPE);
|
||||
assertThat(found.getPageCount()).isEqualTo(7);
|
||||
}
|
||||
|
||||
@Test
|
||||
void save_thumbnailAspectAndPageCount_defaultToNull() {
|
||||
Document document = Document.builder()
|
||||
.title("Ohne Aspekt")
|
||||
.originalFilename("no_aspect.pdf")
|
||||
.status(DocumentStatus.PLACEHOLDER)
|
||||
.build();
|
||||
|
||||
Document saved = documentRepository.save(document);
|
||||
Document found = documentRepository.findById(saved.getId()).orElseThrow();
|
||||
|
||||
assertThat(found.getThumbnailAspect()).isNull();
|
||||
assertThat(found.getPageCount()).isNull();
|
||||
}
|
||||
|
||||
// ─── findByStatus ─────────────────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
|
||||
@@ -302,102 +302,6 @@ class MigrationIntegrationTest {
|
||||
).isInstanceOf(DataIntegrityViolationException.class);
|
||||
}
|
||||
|
||||
// ─── V53: add thumbnail_aspect + page_count columns to documents ─────────
|
||||
|
||||
// V53 adds documents.thumbnail_aspect; the column must exist and be nullable
// (a freshly created document has no thumbnail yet).
@Test
void v53_thumbnailAspectColumn_existsAndIsNullable() {
    UUID docId = createDocument();

    // Column must exist and accept NULL (freshly-created doc has no thumbnail yet)
    String aspect = jdbc.queryForObject(
            "SELECT thumbnail_aspect FROM documents WHERE id = ?", String.class, docId);
    assertThat(aspect).isNull();
}
|
||||
|
||||
// V53 adds documents.page_count; the column must exist and be nullable.
// The SELECT itself would throw if the migration had not created the column.
@Test
void v53_pageCountColumn_existsAndIsNullable() {
    UUID docId = createDocument();

    Integer pageCount = jdbc.queryForObject(
            "SELECT page_count FROM documents WHERE id = ?", Integer.class, docId);
    assertThat(pageCount).isNull();
}
|
||||
|
||||
@Test
|
||||
void v53_thumbnailAspectColumn_acceptsPortraitAndLandscape() {
|
||||
UUID docId = createDocument();
|
||||
|
||||
int portraitRows = jdbc.update(
|
||||
"UPDATE documents SET thumbnail_aspect = 'PORTRAIT' WHERE id = ?", docId);
|
||||
assertThat(portraitRows).isEqualTo(1);
|
||||
|
||||
int landscapeRows = jdbc.update(
|
||||
"UPDATE documents SET thumbnail_aspect = 'LANDSCAPE' WHERE id = ?", docId);
|
||||
assertThat(landscapeRows).isEqualTo(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
void v53_pageCountColumn_storesInteger() {
|
||||
UUID docId = createDocument();
|
||||
|
||||
jdbc.update("UPDATE documents SET page_count = 4 WHERE id = ?", docId);
|
||||
|
||||
Integer stored = jdbc.queryForObject(
|
||||
"SELECT page_count FROM documents WHERE id = ?", Integer.class, docId);
|
||||
assertThat(stored).isEqualTo(4);
|
||||
}
|
||||
|
||||
// ─── V51: backfill annotation_id on block comments and notifications ─────
|
||||
|
||||
// V51 backfill: a block comment stored before annotation_id existed (NULL)
// must inherit the annotation_id of its transcription block.
@Test
void v51_backfillsAnnotationIdOnBlockCommentsFromTheirBlocks() {
    UUID docId = createDocument();
    UUID annotationId = insertAnnotation(docId);
    UUID blockId = insertBlock(docId, annotationId);
    UUID commentId = insertBlockCommentWithNullAnnotationId(docId, blockId);

    // Run the same UPDATE the migration executes (see V51_BACKFILL_COMMENTS_SQL).
    jdbc.update(V51_BACKFILL_COMMENTS_SQL);

    UUID stored = jdbc.queryForObject(
            "SELECT annotation_id FROM document_comments WHERE id = ?",
            UUID.class, commentId);
    assertThat(stored).isEqualTo(annotationId);
}
|
||||
|
||||
// V51 backfill, second step: a notification whose referenced comment already
// carries an annotation_id must inherit that id when its own is NULL.
@Test
void v51_backfillsAnnotationIdOnNotificationsFromTheirReferencedComment() {
    UUID docId = createDocument();
    UUID userId = insertUser("recipient-" + UUID.randomUUID() + "@example.com");
    UUID annotationId = insertAnnotation(docId);
    UUID blockId = insertBlock(docId, annotationId);
    UUID commentId = insertBlockCommentWithAnnotationId(docId, blockId, annotationId);
    UUID notificationId = insertNotificationWithNullAnnotationId(docId, commentId, userId);

    jdbc.update(V51_BACKFILL_NOTIFICATIONS_SQL);

    UUID stored = jdbc.queryForObject(
            "SELECT annotation_id FROM notifications WHERE id = ?",
            UUID.class, notificationId);
    assertThat(stored).isEqualTo(annotationId);
}
|
||||
|
||||
// Copy of the V51 migration's comment-backfill UPDATE: pull annotation_id
// from each comment's transcription block where it is still NULL.
// NOTE(review): presumably mirrors the real V51 migration file — confirm they stay in sync.
private static final String V51_BACKFILL_COMMENTS_SQL = """
        UPDATE document_comments dc
        SET annotation_id = tb.annotation_id
        FROM transcription_blocks tb
        WHERE dc.block_id = tb.id
        AND dc.annotation_id IS NULL
        """;
|
||||
|
||||
// Copy of the V51 migration's notification-backfill UPDATE: pull annotation_id
// from the referenced comment where the notification's own value is still NULL.
// NOTE(review): presumably mirrors the real V51 migration file — confirm they stay in sync.
private static final String V51_BACKFILL_NOTIFICATIONS_SQL = """
        UPDATE notifications n
        SET annotation_id = dc.annotation_id
        FROM document_comments dc
        WHERE n.reference_id = dc.id
        AND n.annotation_id IS NULL
        AND dc.annotation_id IS NOT NULL
        """;
|
||||
|
||||
// ─── helpers ─────────────────────────────────────────────────────────────
|
||||
|
||||
private UUID createPerson(String firstName, String lastName) {
|
||||
@@ -422,63 +326,4 @@ class MigrationIntegrationTest {
|
||||
em.flush();
|
||||
return doc.getId();
|
||||
}
|
||||
|
||||
private UUID insertAnnotation(UUID docId) {
|
||||
UUID id = UUID.randomUUID();
|
||||
jdbc.update("""
|
||||
INSERT INTO document_annotations
|
||||
(id, document_id, page_number, x, y, width, height, color)
|
||||
VALUES (?, ?, 1, 0.1, 0.1, 0.3, 0.1, '#00C7B1')
|
||||
""", id, docId);
|
||||
return id;
|
||||
}
|
||||
|
||||
private UUID insertBlock(UUID docId, UUID annotationId) {
|
||||
UUID id = UUID.randomUUID();
|
||||
jdbc.update("""
|
||||
INSERT INTO transcription_blocks
|
||||
(id, annotation_id, document_id, text, sort_order)
|
||||
VALUES (?, ?, ?, '', 0)
|
||||
""", id, annotationId, docId);
|
||||
return id;
|
||||
}
|
||||
|
||||
private UUID insertUser(String email) {
|
||||
UUID id = UUID.randomUUID();
|
||||
jdbc.update("""
|
||||
INSERT INTO users (id, email, password, enabled, notify_on_reply, notify_on_mention)
|
||||
VALUES (?, ?, 'hash', true, false, false)
|
||||
""", id, email);
|
||||
return id;
|
||||
}
|
||||
|
||||
private UUID insertBlockCommentWithNullAnnotationId(UUID docId, UUID blockId) {
|
||||
UUID id = UUID.randomUUID();
|
||||
jdbc.update("""
|
||||
INSERT INTO document_comments
|
||||
(id, document_id, block_id, annotation_id, author_name, content)
|
||||
VALUES (?, ?, ?, NULL, 'Tester', 'Hi')
|
||||
""", id, docId, blockId);
|
||||
return id;
|
||||
}
|
||||
|
||||
private UUID insertBlockCommentWithAnnotationId(UUID docId, UUID blockId, UUID annotationId) {
|
||||
UUID id = UUID.randomUUID();
|
||||
jdbc.update("""
|
||||
INSERT INTO document_comments
|
||||
(id, document_id, block_id, annotation_id, author_name, content)
|
||||
VALUES (?, ?, ?, ?, 'Tester', 'Hi')
|
||||
""", id, docId, blockId, annotationId);
|
||||
return id;
|
||||
}
|
||||
|
||||
private UUID insertNotificationWithNullAnnotationId(UUID docId, UUID commentId, UUID recipientId) {
|
||||
UUID id = UUID.randomUUID();
|
||||
jdbc.update("""
|
||||
INSERT INTO notifications
|
||||
(id, recipient_id, type, document_id, reference_id, annotation_id, read, actor_name)
|
||||
VALUES (?, ?, 'MENTION', ?, ?, NULL, false, 'Tester')
|
||||
""", id, recipientId, docId, commentId);
|
||||
return id;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,104 +0,0 @@
|
||||
package org.raddatz.familienarchiv.repository;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.raddatz.familienarchiv.PostgresContainerConfig;
|
||||
import org.raddatz.familienarchiv.config.FlywayConfig;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.data.jpa.test.autoconfigure.DataJpaTest;
|
||||
import org.springframework.boot.jdbc.test.autoconfigure.AutoConfigureTestDatabase;
|
||||
import org.springframework.context.annotation.Import;
|
||||
import org.springframework.test.context.jdbc.Sql;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
/**
 * Integration tests for {@code findCompletionStatsForDocuments} against a real
 * Postgres (Testcontainers + Flyway). Fixtures are inserted via @Sql with
 * fixed UUIDs so assertions can reference them.
 */
@DataJpaTest
@AutoConfigureTestDatabase(replace = AutoConfigureTestDatabase.Replace.NONE)
@Import({PostgresContainerConfig.class, FlywayConfig.class})
class TranscriptionBlockRepositoryIntegrationTest {

    // Fixed ids shared between the @Sql fixtures and the assertions below.
    static final UUID DOC_A = UUID.fromString("aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa");
    static final UUID DOC_B = UUID.fromString("bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb");
    static final UUID ANN_A = UUID.fromString("cccccccc-cccc-cccc-cccc-cccccccccccc");
    static final UUID ANN_B = UUID.fromString("dddddddd-dddd-dddd-dddd-dddddddddddd");

    @Autowired TranscriptionBlockRepository repository;

    // 2 of 2 blocks reviewed → 100 %.
    @Test
    @Sql(statements = {
        "INSERT INTO documents (id, title, original_filename, status) VALUES ('aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 'Doc A', 'a.pdf', 'PLACEHOLDER')",
        "INSERT INTO document_annotations (id, document_id, page_number, x, y, width, height, color) VALUES ('cccccccc-cccc-cccc-cccc-cccccccccccc', 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 1, 0, 0, 1, 1, '#fff')",
        "INSERT INTO transcription_blocks (annotation_id, document_id, sort_order, reviewed) VALUES ('cccccccc-cccc-cccc-cccc-cccccccccccc', 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 0, true)",
        "INSERT INTO transcription_blocks (annotation_id, document_id, sort_order, reviewed) VALUES ('cccccccc-cccc-cccc-cccc-cccccccccccc', 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 1, true)"
    })
    void findCompletionStats_returns_100_when_all_blocks_reviewed() {
        List<CompletionStatsRow> rows = repository.findCompletionStatsForDocuments(List.of(DOC_A));

        assertThat(rows).hasSize(1);
        assertThat(rows.get(0).getDocumentId()).isEqualTo(DOC_A);
        assertThat(rows.get(0).getCompletionPercentage()).isEqualTo(100);
    }

    // 0 of 2 blocks reviewed → 0 %.
    @Test
    @Sql(statements = {
        "INSERT INTO documents (id, title, original_filename, status) VALUES ('aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 'Doc A', 'a.pdf', 'PLACEHOLDER')",
        "INSERT INTO document_annotations (id, document_id, page_number, x, y, width, height, color) VALUES ('cccccccc-cccc-cccc-cccc-cccccccccccc', 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 1, 0, 0, 1, 1, '#fff')",
        "INSERT INTO transcription_blocks (annotation_id, document_id, sort_order, reviewed) VALUES ('cccccccc-cccc-cccc-cccc-cccccccccccc', 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 0, false)",
        "INSERT INTO transcription_blocks (annotation_id, document_id, sort_order, reviewed) VALUES ('cccccccc-cccc-cccc-cccc-cccccccccccc', 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 1, false)"
    })
    void findCompletionStats_returns_0_when_no_blocks_reviewed() {
        List<CompletionStatsRow> rows = repository.findCompletionStatsForDocuments(List.of(DOC_A));

        assertThat(rows).hasSize(1);
        assertThat(rows.get(0).getCompletionPercentage()).isEqualTo(0);
    }

    // Document without any blocks yields no stats row at all (not a 0 % row).
    @Test
    @Sql(statements = {
        "INSERT INTO documents (id, title, original_filename, status) VALUES ('aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 'Doc A', 'a.pdf', 'PLACEHOLDER')"
    })
    void findCompletionStats_returns_empty_when_document_has_no_blocks() {
        List<CompletionStatsRow> rows = repository.findCompletionStatsForDocuments(List.of(DOC_A));

        assertThat(rows).isEmpty();
    }

    // 1 of 4 blocks reviewed → exactly 25 % (pins the rounding behaviour).
    @Test
    @Sql(statements = {
        "INSERT INTO documents (id, title, original_filename, status) VALUES ('aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 'Doc A', 'a.pdf', 'PLACEHOLDER')",
        "INSERT INTO document_annotations (id, document_id, page_number, x, y, width, height, color) VALUES ('cccccccc-cccc-cccc-cccc-cccccccccccc', 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 1, 0, 0, 1, 1, '#fff')",
        "INSERT INTO transcription_blocks (annotation_id, document_id, sort_order, reviewed) VALUES ('cccccccc-cccc-cccc-cccc-cccccccccccc', 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 0, true)",
        "INSERT INTO transcription_blocks (annotation_id, document_id, sort_order, reviewed) VALUES ('cccccccc-cccc-cccc-cccc-cccccccccccc', 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 1, false)",
        "INSERT INTO transcription_blocks (annotation_id, document_id, sort_order, reviewed) VALUES ('cccccccc-cccc-cccc-cccc-cccccccccccc', 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 2, false)",
        "INSERT INTO transcription_blocks (annotation_id, document_id, sort_order, reviewed) VALUES ('cccccccc-cccc-cccc-cccc-cccccccccccc', 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 3, false)"
    })
    void findCompletionStats_rounds_partial_completion_correctly() {
        List<CompletionStatsRow> rows = repository.findCompletionStatsForDocuments(List.of(DOC_A));

        assertThat(rows).hasSize(1);
        assertThat(rows.get(0).getCompletionPercentage()).isEqualTo(25);
    }

    // One call, two documents: stats are computed per document, not pooled.
    @Test
    @Sql(statements = {
        "INSERT INTO documents (id, title, original_filename, status) VALUES ('aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 'Doc A', 'a.pdf', 'PLACEHOLDER')",
        "INSERT INTO documents (id, title, original_filename, status) VALUES ('bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', 'Doc B', 'b.pdf', 'PLACEHOLDER')",
        "INSERT INTO document_annotations (id, document_id, page_number, x, y, width, height, color) VALUES ('cccccccc-cccc-cccc-cccc-cccccccccccc', 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 1, 0, 0, 1, 1, '#fff')",
        "INSERT INTO document_annotations (id, document_id, page_number, x, y, width, height, color) VALUES ('dddddddd-dddd-dddd-dddd-dddddddddddd', 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', 1, 0, 0, 1, 1, '#fff')",
        "INSERT INTO transcription_blocks (annotation_id, document_id, sort_order, reviewed) VALUES ('cccccccc-cccc-cccc-cccc-cccccccccccc', 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 0, true)",
        "INSERT INTO transcription_blocks (annotation_id, document_id, sort_order, reviewed) VALUES ('dddddddd-dddd-dddd-dddd-dddddddddddd', 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', 0, false)"
    })
    void findCompletionStats_handles_multiple_documents_in_one_call() {
        List<CompletionStatsRow> rows = repository.findCompletionStatsForDocuments(List.of(DOC_A, DOC_B));

        Map<UUID, Integer> byDoc = rows.stream()
                .collect(Collectors.toMap(CompletionStatsRow::getDocumentId, CompletionStatsRow::getCompletionPercentage));

        assertThat(byDoc).containsEntry(DOC_A, 100);
        assertThat(byDoc).containsEntry(DOC_B, 0);
    }
}
|
||||
@@ -10,7 +10,6 @@ import org.raddatz.familienarchiv.audit.AuditService;
|
||||
import org.raddatz.familienarchiv.exception.DomainException;
|
||||
import org.raddatz.familienarchiv.model.AppUser;
|
||||
import org.raddatz.familienarchiv.model.DocumentComment;
|
||||
import org.raddatz.familienarchiv.model.TranscriptionBlock;
|
||||
import org.raddatz.familienarchiv.model.UserGroup;
|
||||
import org.raddatz.familienarchiv.repository.CommentRepository;
|
||||
|
||||
@@ -40,9 +39,54 @@ class CommentServiceTest {
|
||||
@Mock UserService userService;
|
||||
@Mock NotificationService notificationService;
|
||||
@Mock AuditService auditService;
|
||||
@Mock TranscriptionService transcriptionService;
|
||||
@InjectMocks CommentService commentService;
|
||||
|
||||
// ─── postComment ──────────────────────────────────────────────────────────
|
||||
|
||||
// The service resolves and stores the author's display name at write time.
// NOTE(review): save() is stubbed, so this pins the pass-through of the saved
// entity's authorName rather than the resolution logic itself.
@Test
void postComment_capturesAuthorNameAtWriteTime() {
    UUID docId = UUID.randomUUID();
    AppUser author = AppUser.builder()
            .id(UUID.randomUUID()).email("hans@example.com").firstName("Hans").lastName("Müller").build();
    DocumentComment saved = DocumentComment.builder()
            .id(UUID.randomUUID()).documentId(docId).authorName("Hans Müller").content("Test").build();
    when(commentRepository.save(any())).thenReturn(saved);

    DocumentComment result = commentService.postComment(docId, null, "Test", List.of(), author);

    assertThat(result.getAuthorName()).isEqualTo("Hans Müller");
}
|
||||
|
||||
// When the author has no first/last name set, the local part of the email
// ("hans42") is used as the display name.
@Test
void postComment_fallsBackToUsername_whenNamesAreBlank() {
    UUID docId = UUID.randomUUID();
    AppUser author = AppUser.builder().id(UUID.randomUUID()).email("hans42@example.com").build();
    DocumentComment saved = DocumentComment.builder()
            .id(UUID.randomUUID()).documentId(docId).authorName("hans42").content("Test").build();
    when(commentRepository.save(any())).thenReturn(saved);

    DocumentComment result = commentService.postComment(docId, null, "Test", List.of(), author);

    assertThat(result.getAuthorName()).isEqualTo("hans42");
}
|
||||
|
||||
// Posting a comment with mentioned user ids must fan out to the
// notification service with the saved comment as payload.
@Test
void postComment_triggersNotifyMentions_whenMentionedUserIdsProvided() {
    UUID docId = UUID.randomUUID();
    UUID mentionedId = UUID.randomUUID();
    AppUser author = AppUser.builder().id(UUID.randomUUID()).email("hans@example.com").firstName("Hans").lastName("M").build();
    AppUser mentioned = AppUser.builder().id(mentionedId).email("anna@example.com").firstName("Anna").lastName("S").build();
    DocumentComment saved = DocumentComment.builder()
            .id(UUID.randomUUID()).documentId(docId).authorName("Hans M").content("Hey @Anna S").build();

    // The service looks up the mentioned users before notifying them.
    when(userService.findAllById(List.of(mentionedId))).thenReturn(List.of(mentioned));
    when(commentRepository.save(any())).thenReturn(saved);

    commentService.postComment(docId, null, "Hey @Anna S", List.of(mentionedId), author);

    verify(notificationService).notifyMentions(eq(List.of(mentionedId)), eq(saved));
}
|
||||
|
||||
// ─── replyToComment ───────────────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
@@ -178,7 +222,7 @@ class CommentServiceTest {
|
||||
.id(commentId).documentId(docId).authorId(authorId)
|
||||
.content("Original").authorName("Hans").createdAt(created).build();
|
||||
when(commentRepository.findById(commentId)).thenReturn(Optional.of(comment));
|
||||
stubSaveAssigningRandomId();
|
||||
when(commentRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));
|
||||
|
||||
DocumentComment result = commentService.editComment(docId, commentId, "Updated", author);
|
||||
|
||||
@@ -238,6 +282,28 @@ class CommentServiceTest {
|
||||
verify(commentRepository).delete(comment);
|
||||
}
|
||||
|
||||
// ─── getCommentsForDocument ───────────────────────────────────────────────
|
||||
|
||||
// Document-level comments are returned as root comments (annotationId and
// parentId both NULL) with their replies attached to each root.
@Test
void getCommentsForDocument_returnsRootsWithRepliesAttached() {
    UUID docId = UUID.randomUUID();
    UUID rootId = UUID.randomUUID();

    DocumentComment root = DocumentComment.builder()
            .id(rootId).documentId(docId).authorName("Hans").content("Root").build();
    DocumentComment reply = DocumentComment.builder()
            .id(UUID.randomUUID()).documentId(docId).parentId(rootId).authorName("Anna").content("Reply").build();

    when(commentRepository.findByDocumentIdAndAnnotationIdIsNullAndParentIdIsNull(docId))
            .thenReturn(List.of(root));
    when(commentRepository.findByParentId(rootId)).thenReturn(List.of(reply));

    List<DocumentComment> result = commentService.getCommentsForDocument(docId);

    assertThat(result).hasSize(1);
    assertThat(result.get(0).getReplies()).containsExactly(reply);
}
|
||||
|
||||
// ─── replyToComment — reply with null authorId in thread ─────────────────
|
||||
|
||||
@Test
|
||||
@@ -264,6 +330,82 @@ class CommentServiceTest {
|
||||
verify(notificationService).notifyReply(eq(saved), anySet());
|
||||
}
|
||||
|
||||
// ─── resolveAuthorName edge cases ─────────────────────────────────────────
|
||||
|
||||
// resolveAuthorName edge case: blank first name + null last name must still
// fall back to the email local part, not produce a blank display name.
@Test
void postComment_fallsBackToUsername_whenFirstNameBlankAndLastNameNull() {
    UUID docId = UUID.randomUUID();
    AppUser author = AppUser.builder().id(UUID.randomUUID()).email("user42@example.com")
            .firstName(" ").lastName(null).build();
    DocumentComment saved = DocumentComment.builder()
            .id(UUID.randomUUID()).documentId(docId).authorName("user42").content("Hi").build();
    when(commentRepository.save(any())).thenReturn(saved);

    DocumentComment result = commentService.postComment(docId, null, "Hi", List.of(), author);

    assertThat(result.getAuthorName()).isEqualTo("user42");
}
|
||||
|
||||
// Mirror case of the previous test: null first name + blank last name
// also falls back to the email local part.
@Test
void postComment_fallsBackToUsername_whenFirstNameNullAndLastNameBlank() {
    UUID docId = UUID.randomUUID();
    AppUser author = AppUser.builder().id(UUID.randomUUID()).email("user42@example.com")
            .firstName(null).lastName(" ").build();
    DocumentComment saved = DocumentComment.builder()
            .id(UUID.randomUUID()).documentId(docId).authorName("user42").content("Hi").build();
    when(commentRepository.save(any())).thenReturn(saved);

    DocumentComment result = commentService.postComment(docId, null, "Hi", List.of(), author);

    assertThat(result.getAuthorName()).isEqualTo("user42");
}
|
||||
|
||||
@Test
|
||||
void postComment_includesOnlyFirstName_whenLastNameIsNull() {
|
||||
UUID docId = UUID.randomUUID();
|
||||
AppUser author = AppUser.builder().id(UUID.randomUUID()).email("user42@example.com")
|
||||
.firstName("Hans").lastName(null).build();
|
||||
DocumentComment saved = DocumentComment.builder()
|
||||
.id(UUID.randomUUID()).documentId(docId).authorName("Hans").content("Hi").build();
|
||||
when(commentRepository.save(any())).thenReturn(saved);
|
||||
|
||||
commentService.postComment(docId, null, "Hi", List.of(), author);
|
||||
|
||||
// first != null && !blank → true; last == null → entire condition false → returns stripped first
|
||||
verify(commentRepository).save(any());
|
||||
}
|
||||
|
||||
@Test
|
||||
void postComment_includesOnlyLastName_whenFirstNameIsNull() {
|
||||
UUID docId = UUID.randomUUID();
|
||||
AppUser author = AppUser.builder().id(UUID.randomUUID()).email("user42@example.com")
|
||||
.firstName(null).lastName("Müller").build();
|
||||
DocumentComment saved = DocumentComment.builder()
|
||||
.id(UUID.randomUUID()).documentId(docId).authorName("Müller").content("Hi").build();
|
||||
when(commentRepository.save(any())).thenReturn(saved);
|
||||
|
||||
commentService.postComment(docId, null, "Hi", List.of(), author);
|
||||
|
||||
// No exception — name resolution with null first name strips cleanly
|
||||
verify(commentRepository).save(any());
|
||||
}
|
||||
|
||||
// ─── saveMentions — null/empty guard ─────────────────────────────────────
|
||||
|
||||
// saveMentions null-guard: a null mentionedUserIds list must short-circuit
// before any user lookup happens.
@Test
void postComment_doesNotCallUserService_whenMentionedUserIdsIsNull() {
    UUID docId = UUID.randomUUID();
    AppUser author = AppUser.builder().id(UUID.randomUUID()).email("hans@example.com")
            .firstName("Hans").lastName("M").build();
    DocumentComment saved = DocumentComment.builder()
            .id(UUID.randomUUID()).documentId(docId).authorName("Hans M").content("Hi").build();
    when(commentRepository.save(any())).thenReturn(saved);

    commentService.postComment(docId, null, "Hi", null, author);

    verify(userService, never()).findAllById(anyList());
}
|
||||
|
||||
// ─── collectParticipantIds — non-null authorId in reply ──────────────────
|
||||
|
||||
@Test
|
||||
@@ -317,6 +459,26 @@ class CommentServiceTest {
|
||||
verify(notificationService).notifyReply(eq(saved), anySet());
|
||||
}
|
||||
|
||||
// ─── getCommentsForAnnotation ─────────────────────────────────────────────
|
||||
|
||||
// Annotation-scoped comments: only root comments of the annotation are
// returned (replies looked up per root; empty here).
@Test
void getCommentsForAnnotation_returnsRootsForAnnotation() {
    UUID annotationId = UUID.randomUUID();
    UUID rootId = UUID.randomUUID();

    DocumentComment root = DocumentComment.builder()
            .id(rootId).annotationId(annotationId).authorName("Hans").content("Root").build();

    when(commentRepository.findByAnnotationIdAndParentIdIsNull(annotationId))
            .thenReturn(List.of(root));
    when(commentRepository.findByParentId(rootId)).thenReturn(List.of());

    List<DocumentComment> result = commentService.getCommentsForAnnotation(annotationId);

    assertThat(result).hasSize(1);
    assertThat(result.get(0).getAnnotationId()).isEqualTo(annotationId);
}
|
||||
|
||||
// ─── helpers ──────────────────────────────────────────────────────────────
|
||||
|
||||
private AppUser buildAdmin() {
|
||||
@@ -333,6 +495,65 @@ class CommentServiceTest {
|
||||
|
||||
// ─── audit: COMMENT_ADDED and MENTION_CREATED ─────────────────────────────
|
||||
|
||||
// Every posted comment must produce a COMMENT_ADDED audit entry (after
// commit) carrying the new comment's id in the payload.
@Test
void postComment_logsCommentAdded() {
    UUID docId = UUID.randomUUID();
    UUID savedId = UUID.randomUUID();
    AppUser author = AppUser.builder().id(UUID.randomUUID()).email("hans@example.com").firstName("Hans").lastName("M").build();
    DocumentComment saved = DocumentComment.builder()
            .id(savedId).documentId(docId).authorName("Hans M").content("Hello").build();
    when(commentRepository.save(any())).thenReturn(saved);

    commentService.postComment(docId, null, "Hello", List.of(), author);

    verify(auditService).logAfterCommit(
            eq(AuditKind.COMMENT_ADDED),
            eq(author.getId()),
            eq(docId),
            argThat(p -> savedId.toString().equals(p.get("commentId"))));
}
||||
|
||||
// Two mentions → two distinct MENTION_CREATED audit entries, each carrying
// the respective mentioned user's id in the payload.
@Test
void postComment_logsMentionCreated_oncePerMentionedUser() {
    UUID docId = UUID.randomUUID();
    UUID savedId = UUID.randomUUID();
    UUID mentionedId1 = UUID.randomUUID();
    UUID mentionedId2 = UUID.randomUUID();
    AppUser author = AppUser.builder().id(UUID.randomUUID()).email("hans@example.com").firstName("Hans").lastName("M").build();
    AppUser mentioned1 = AppUser.builder().id(mentionedId1).email("anna@example.com").firstName("Anna").lastName("S").build();
    AppUser mentioned2 = AppUser.builder().id(mentionedId2).email("bob@example.com").firstName("Bob").lastName("J").build();
    DocumentComment saved = DocumentComment.builder()
            .id(savedId).documentId(docId).authorName("Hans M").content("Hey @Anna @Bob").build();
    when(userService.findAllById(List.of(mentionedId1, mentionedId2))).thenReturn(List.of(mentioned1, mentioned2));
    when(commentRepository.save(any())).thenReturn(saved);

    commentService.postComment(docId, null, "Hey @Anna @Bob", List.of(mentionedId1, mentionedId2), author);

    verify(auditService).logAfterCommit(
            eq(AuditKind.MENTION_CREATED),
            eq(author.getId()),
            eq(docId),
            argThat(p -> mentionedId1.toString().equals(p.get("mentionedUserId"))));
    verify(auditService).logAfterCommit(
            eq(AuditKind.MENTION_CREATED),
            eq(author.getId()),
            eq(docId),
            argThat(p -> mentionedId2.toString().equals(p.get("mentionedUserId"))));
}
|
||||
|
||||
// No mentions in the comment → no MENTION_CREATED audit entry at all.
@Test
void postComment_doesNotLogMentionCreated_whenNoMentions() {
    UUID docId = UUID.randomUUID();
    AppUser author = AppUser.builder().id(UUID.randomUUID()).email("hans@example.com").firstName("Hans").lastName("M").build();
    DocumentComment saved = DocumentComment.builder()
            .id(UUID.randomUUID()).documentId(docId).authorName("Hans M").content("Hello").build();
    when(commentRepository.save(any())).thenReturn(saved);

    commentService.postComment(docId, null, "Hello", List.of(), author);

    verify(auditService, never()).logAfterCommit(eq(AuditKind.MENTION_CREATED), any(), any(), any());
}
|
||||
|
||||
@Test
|
||||
void replyToComment_logsCommentAdded() {
|
||||
UUID docId = UUID.randomUUID();
|
||||
@@ -390,8 +611,6 @@ class CommentServiceTest {
|
||||
AppUser author = AppUser.builder().id(UUID.randomUUID()).email("felix@example.com").firstName("Felix").lastName("B").build();
|
||||
DocumentComment saved = DocumentComment.builder()
|
||||
.id(savedId).documentId(docId).blockId(blockId).authorName("Felix B").content("Nice").build();
|
||||
when(transcriptionService.getBlock(docId, blockId))
|
||||
.thenReturn(TranscriptionBlock.builder().id(blockId).documentId(docId).annotationId(UUID.randomUUID()).sortOrder(0).build());
|
||||
when(commentRepository.save(any())).thenReturn(saved);
|
||||
|
||||
commentService.postBlockComment(docId, blockId, "Nice", List.of(), author);
|
||||
@@ -424,10 +643,7 @@ class CommentServiceTest {
|
||||
void postBlockComment_setsBlockIdOnComment() {
|
||||
UUID documentId = UUID.randomUUID();
|
||||
UUID blockId = UUID.randomUUID();
|
||||
UUID annotationId = UUID.randomUUID();
|
||||
AppUser author = AppUser.builder().id(UUID.randomUUID()).email("felix@example.com").firstName("Felix").lastName("Brandt").build();
|
||||
when(transcriptionService.getBlock(documentId, blockId))
|
||||
.thenReturn(TranscriptionBlock.builder().id(blockId).documentId(documentId).annotationId(annotationId).sortOrder(0).build());
|
||||
when(commentRepository.save(any())).thenAnswer(inv -> {
|
||||
DocumentComment c = inv.getArgument(0);
|
||||
c.setId(UUID.randomUUID());
|
||||
@@ -441,275 +657,4 @@ class CommentServiceTest {
|
||||
assertThat(result.getDocumentId()).isEqualTo(documentId);
|
||||
assertThat(result.getContent()).isEqualTo("Looks like Breslau");
|
||||
}
|
||||
|
||||
@Test
|
||||
void postBlockComment_setsAnnotationIdFromBlock() {
|
||||
UUID documentId = UUID.randomUUID();
|
||||
UUID blockId = UUID.randomUUID();
|
||||
UUID annotationId = UUID.randomUUID();
|
||||
AppUser author = AppUser.builder().id(UUID.randomUUID()).email("felix@example.com").firstName("Felix").lastName("Brandt").build();
|
||||
when(transcriptionService.getBlock(documentId, blockId))
|
||||
.thenReturn(TranscriptionBlock.builder().id(blockId).documentId(documentId).annotationId(annotationId).sortOrder(0).build());
|
||||
when(commentRepository.save(any())).thenAnswer(inv -> {
|
||||
DocumentComment c = inv.getArgument(0);
|
||||
c.setId(UUID.randomUUID());
|
||||
return c;
|
||||
});
|
||||
|
||||
DocumentComment result = commentService.postBlockComment(
|
||||
documentId, blockId, "Nice work", List.of(), author);
|
||||
|
||||
assertThat(result.getAnnotationId()).isEqualTo(annotationId);
|
||||
}
|
||||
|
||||
@Test
|
||||
void postBlockComment_propagatesNotFound_whenBlockDoesNotExist() {
|
||||
UUID documentId = UUID.randomUUID();
|
||||
UUID blockId = UUID.randomUUID();
|
||||
AppUser author = AppUser.builder().id(UUID.randomUUID()).email("felix@example.com").firstName("Felix").lastName("Brandt").build();
|
||||
when(transcriptionService.getBlock(documentId, blockId))
|
||||
.thenThrow(DomainException.notFound(
|
||||
org.raddatz.familienarchiv.exception.ErrorCode.TRANSCRIPTION_BLOCK_NOT_FOUND,
|
||||
"Transcription block not found: " + blockId));
|
||||
|
||||
assertThatThrownBy(() -> commentService.postBlockComment(documentId, blockId, "Hi", List.of(), author))
|
||||
.isInstanceOf(DomainException.class)
|
||||
.hasMessageContaining("Transcription block not found");
|
||||
}
|
||||
|
||||
// ─── postBlockComment — authorName resolution ────────────────────────────
|
||||
|
||||
@Test
|
||||
void postBlockComment_capturesAuthorNameAtWriteTime() {
|
||||
UUID docId = UUID.randomUUID();
|
||||
UUID blockId = UUID.randomUUID();
|
||||
AppUser author = AppUser.builder()
|
||||
.id(UUID.randomUUID()).email("hans@example.com").firstName("Hans").lastName("Müller").build();
|
||||
stubBlock(docId, blockId);
|
||||
stubSaveAssigningRandomId();
|
||||
|
||||
DocumentComment result = commentService.postBlockComment(docId, blockId, "Test", List.of(), author);
|
||||
|
||||
assertThat(result.getAuthorName()).isEqualTo("Hans Müller");
|
||||
}
|
||||
|
||||
@Test
|
||||
void postBlockComment_fallsBackToEmail_whenNamesAreBlank() {
|
||||
UUID docId = UUID.randomUUID();
|
||||
UUID blockId = UUID.randomUUID();
|
||||
AppUser author = AppUser.builder().id(UUID.randomUUID()).email("hans42@example.com").build();
|
||||
stubBlock(docId, blockId);
|
||||
stubSaveAssigningRandomId();
|
||||
|
||||
DocumentComment result = commentService.postBlockComment(docId, blockId, "Test", List.of(), author);
|
||||
|
||||
assertThat(result.getAuthorName()).isEqualTo("hans42@example.com");
|
||||
}
|
||||
|
||||
@Test
|
||||
void postBlockComment_fallsBackToEmail_whenFirstNameBlankAndLastNameNull() {
|
||||
UUID docId = UUID.randomUUID();
|
||||
UUID blockId = UUID.randomUUID();
|
||||
AppUser author = AppUser.builder().id(UUID.randomUUID()).email("user42@example.com")
|
||||
.firstName(" ").lastName(null).build();
|
||||
stubBlock(docId, blockId);
|
||||
stubSaveAssigningRandomId();
|
||||
|
||||
DocumentComment result = commentService.postBlockComment(docId, blockId, "Hi", List.of(), author);
|
||||
|
||||
assertThat(result.getAuthorName()).isEqualTo("user42@example.com");
|
||||
}
|
||||
|
||||
@Test
|
||||
void postBlockComment_fallsBackToEmail_whenFirstNameNullAndLastNameBlank() {
|
||||
UUID docId = UUID.randomUUID();
|
||||
UUID blockId = UUID.randomUUID();
|
||||
AppUser author = AppUser.builder().id(UUID.randomUUID()).email("user42@example.com")
|
||||
.firstName(null).lastName(" ").build();
|
||||
stubBlock(docId, blockId);
|
||||
stubSaveAssigningRandomId();
|
||||
|
||||
DocumentComment result = commentService.postBlockComment(docId, blockId, "Hi", List.of(), author);
|
||||
|
||||
assertThat(result.getAuthorName()).isEqualTo("user42@example.com");
|
||||
}
|
||||
|
||||
@Test
|
||||
void postBlockComment_usesFirstNameAlone_whenLastNameIsNull() {
|
||||
UUID docId = UUID.randomUUID();
|
||||
UUID blockId = UUID.randomUUID();
|
||||
AppUser author = AppUser.builder().id(UUID.randomUUID()).email("user42@example.com")
|
||||
.firstName("Hans").lastName(null).build();
|
||||
stubBlock(docId, blockId);
|
||||
stubSaveAssigningRandomId();
|
||||
|
||||
DocumentComment result = commentService.postBlockComment(docId, blockId, "Hi", List.of(), author);
|
||||
|
||||
assertThat(result.getAuthorName()).isEqualTo("Hans");
|
||||
}
|
||||
|
||||
@Test
|
||||
void postBlockComment_usesLastNameAlone_whenFirstNameIsNull() {
|
||||
UUID docId = UUID.randomUUID();
|
||||
UUID blockId = UUID.randomUUID();
|
||||
AppUser author = AppUser.builder().id(UUID.randomUUID()).email("user42@example.com")
|
||||
.firstName(null).lastName("Müller").build();
|
||||
stubBlock(docId, blockId);
|
||||
stubSaveAssigningRandomId();
|
||||
|
||||
DocumentComment result = commentService.postBlockComment(docId, blockId, "Hi", List.of(), author);
|
||||
|
||||
assertThat(result.getAuthorName()).isEqualTo("Müller");
|
||||
}
|
||||
|
||||
// ─── postBlockComment — mentions ─────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
void postBlockComment_triggersNotifyMentions_whenMentionedUserIdsProvided() {
|
||||
UUID docId = UUID.randomUUID();
|
||||
UUID blockId = UUID.randomUUID();
|
||||
UUID mentionedId = UUID.randomUUID();
|
||||
AppUser author = AppUser.builder().id(UUID.randomUUID()).email("hans@example.com").firstName("Hans").lastName("M").build();
|
||||
AppUser mentioned = AppUser.builder().id(mentionedId).email("anna@example.com").firstName("Anna").lastName("S").build();
|
||||
DocumentComment saved = DocumentComment.builder()
|
||||
.id(UUID.randomUUID()).documentId(docId).blockId(blockId).authorName("Hans M").content("Hey @Anna S").build();
|
||||
stubBlock(docId, blockId);
|
||||
when(userService.findAllById(List.of(mentionedId))).thenReturn(List.of(mentioned));
|
||||
when(commentRepository.save(any())).thenReturn(saved);
|
||||
|
||||
commentService.postBlockComment(docId, blockId, "Hey @Anna S", List.of(mentionedId), author);
|
||||
|
||||
verify(notificationService).notifyMentions(eq(List.of(mentionedId)), eq(saved));
|
||||
}
|
||||
|
||||
@Test
|
||||
void postBlockComment_doesNotCallUserService_whenMentionedUserIdsIsNull() {
|
||||
UUID docId = UUID.randomUUID();
|
||||
UUID blockId = UUID.randomUUID();
|
||||
AppUser author = AppUser.builder().id(UUID.randomUUID()).email("hans@example.com")
|
||||
.firstName("Hans").lastName("M").build();
|
||||
stubBlock(docId, blockId);
|
||||
stubSaveAssigningRandomId();
|
||||
|
||||
commentService.postBlockComment(docId, blockId, "Hi", null, author);
|
||||
|
||||
verify(userService, never()).findAllById(anyList());
|
||||
}
|
||||
|
||||
@Test
|
||||
void postBlockComment_logsMentionCreated_oncePerMentionedUser() {
|
||||
UUID docId = UUID.randomUUID();
|
||||
UUID blockId = UUID.randomUUID();
|
||||
UUID savedId = UUID.randomUUID();
|
||||
UUID mentionedId1 = UUID.randomUUID();
|
||||
UUID mentionedId2 = UUID.randomUUID();
|
||||
AppUser author = AppUser.builder().id(UUID.randomUUID()).email("hans@example.com").firstName("Hans").lastName("M").build();
|
||||
AppUser mentioned1 = AppUser.builder().id(mentionedId1).email("anna@example.com").firstName("Anna").lastName("S").build();
|
||||
AppUser mentioned2 = AppUser.builder().id(mentionedId2).email("bob@example.com").firstName("Bob").lastName("J").build();
|
||||
DocumentComment saved = DocumentComment.builder()
|
||||
.id(savedId).documentId(docId).blockId(blockId).authorName("Hans M").content("Hey @Anna @Bob").build();
|
||||
stubBlock(docId, blockId);
|
||||
when(userService.findAllById(List.of(mentionedId1, mentionedId2))).thenReturn(List.of(mentioned1, mentioned2));
|
||||
when(commentRepository.save(any())).thenReturn(saved);
|
||||
|
||||
commentService.postBlockComment(docId, blockId, "Hey @Anna @Bob", List.of(mentionedId1, mentionedId2), author);
|
||||
|
||||
verify(auditService).logAfterCommit(
|
||||
eq(AuditKind.MENTION_CREATED),
|
||||
eq(author.getId()),
|
||||
eq(docId),
|
||||
argThat(p -> mentionedId1.toString().equals(p.get("mentionedUserId"))));
|
||||
verify(auditService).logAfterCommit(
|
||||
eq(AuditKind.MENTION_CREATED),
|
||||
eq(author.getId()),
|
||||
eq(docId),
|
||||
argThat(p -> mentionedId2.toString().equals(p.get("mentionedUserId"))));
|
||||
}
|
||||
|
||||
@Test
|
||||
void postBlockComment_doesNotLogMentionCreated_whenNoMentions() {
|
||||
UUID docId = UUID.randomUUID();
|
||||
UUID blockId = UUID.randomUUID();
|
||||
AppUser author = AppUser.builder().id(UUID.randomUUID()).email("hans@example.com").firstName("Hans").lastName("M").build();
|
||||
DocumentComment saved = DocumentComment.builder()
|
||||
.id(UUID.randomUUID()).documentId(docId).blockId(blockId).authorName("Hans M").content("Hello").build();
|
||||
stubBlock(docId, blockId);
|
||||
when(commentRepository.save(any())).thenReturn(saved);
|
||||
|
||||
commentService.postBlockComment(docId, blockId, "Hello", List.of(), author);
|
||||
|
||||
verify(auditService, never()).logAfterCommit(eq(AuditKind.MENTION_CREATED), any(), any(), any());
|
||||
}
|
||||
|
||||
// ─── findAnnotationIdsByIds ───────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
void findAnnotationIdsByIds_returnsMap_forKnownIds() {
|
||||
UUID commentA = UUID.randomUUID();
|
||||
UUID annotationA = UUID.randomUUID();
|
||||
UUID commentB = UUID.randomUUID();
|
||||
UUID annotationB = UUID.randomUUID();
|
||||
when(commentRepository.findAllById(List.of(commentA, commentB)))
|
||||
.thenReturn(List.of(
|
||||
DocumentComment.builder().id(commentA).annotationId(annotationA).build(),
|
||||
DocumentComment.builder().id(commentB).annotationId(annotationB).build()
|
||||
));
|
||||
|
||||
assertThat(commentService.findAnnotationIdsByIds(List.of(commentA, commentB)))
|
||||
.containsOnly(
|
||||
java.util.Map.entry(commentA, annotationA),
|
||||
java.util.Map.entry(commentB, annotationB)
|
||||
);
|
||||
}
|
||||
|
||||
@Test
|
||||
void findAnnotationIdsByIds_returnsEmptyMap_forEmptyInput() {
|
||||
assertThat(commentService.findAnnotationIdsByIds(List.of())).isEmpty();
|
||||
verify(commentRepository, never()).findAllById(anyList());
|
||||
}
|
||||
|
||||
@Test
|
||||
void findAnnotationIdsByIds_omitsUnknownIds() {
|
||||
UUID known = UUID.randomUUID();
|
||||
UUID knownAnnotation = UUID.randomUUID();
|
||||
UUID missing = UUID.randomUUID();
|
||||
when(commentRepository.findAllById(List.of(known, missing)))
|
||||
.thenReturn(List.of(
|
||||
DocumentComment.builder().id(known).annotationId(knownAnnotation).build()
|
||||
));
|
||||
|
||||
assertThat(commentService.findAnnotationIdsByIds(List.of(known, missing)))
|
||||
.containsOnly(java.util.Map.entry(known, knownAnnotation))
|
||||
.doesNotContainKey(missing);
|
||||
}
|
||||
|
||||
@Test
|
||||
void findAnnotationIdsByIds_omitsCommentsWithNullAnnotationId() {
|
||||
UUID legacy = UUID.randomUUID();
|
||||
UUID block = UUID.randomUUID();
|
||||
UUID annotation = UUID.randomUUID();
|
||||
when(commentRepository.findAllById(List.of(legacy, block)))
|
||||
.thenReturn(List.of(
|
||||
DocumentComment.builder().id(legacy).annotationId(null).build(),
|
||||
DocumentComment.builder().id(block).annotationId(annotation).build()
|
||||
));
|
||||
|
||||
assertThat(commentService.findAnnotationIdsByIds(List.of(legacy, block)))
|
||||
.containsOnly(java.util.Map.entry(block, annotation))
|
||||
.doesNotContainKey(legacy);
|
||||
}
|
||||
|
||||
private void stubBlock(UUID docId, UUID blockId) {
|
||||
when(transcriptionService.getBlock(docId, blockId))
|
||||
.thenReturn(TranscriptionBlock.builder()
|
||||
.id(blockId).documentId(docId).annotationId(UUID.randomUUID()).sortOrder(0).build());
|
||||
}
|
||||
|
||||
private void stubSaveAssigningRandomId() {
|
||||
when(commentRepository.save(any())).thenAnswer(inv -> {
|
||||
DocumentComment c = inv.getArgument(0);
|
||||
if (c.getId() == null) c.setId(UUID.randomUUID());
|
||||
return c;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,137 +0,0 @@
|
||||
package org.raddatz.familienarchiv.service;
|
||||
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.raddatz.familienarchiv.PostgresContainerConfig;
|
||||
import org.raddatz.familienarchiv.dto.DocumentSearchResult;
|
||||
import org.raddatz.familienarchiv.dto.DocumentSort;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.model.DocumentStatus;
|
||||
import org.raddatz.familienarchiv.repository.DocumentRepository;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
import org.springframework.context.annotation.Import;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.test.annotation.DirtiesContext;
|
||||
import org.springframework.test.context.ActiveProfiles;
|
||||
import org.springframework.test.context.bean.override.mockito.MockitoBean;
|
||||
import software.amazon.awssdk.services.s3.S3Client;
|
||||
|
||||
import java.time.LocalDate;
|
||||
import java.util.UUID;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
/**
|
||||
* End-to-end paged search test with real PostgreSQL (Testcontainers). Covers the
|
||||
* Specification→Pageable→Page→DTO path that unit tests mock around. Seeds 120
|
||||
* UPLOADED documents and asserts the slice/total/totalPages arithmetic holds
|
||||
* against the actual JPA query.
|
||||
*
|
||||
* <p>Closes the integration-coverage gap Sara flagged on PR #316.
|
||||
*/
|
||||
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE)
|
||||
@ActiveProfiles("test")
|
||||
@Import(PostgresContainerConfig.class)
|
||||
@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_EACH_TEST_METHOD)
|
||||
class DocumentSearchPagedIntegrationTest {
|
||||
|
||||
private static final int FIXTURE_SIZE = 120;
|
||||
|
||||
@MockitoBean S3Client s3Client;
|
||||
@Autowired DocumentService documentService;
|
||||
@Autowired DocumentRepository documentRepository;
|
||||
|
||||
@BeforeEach
|
||||
void seed() {
|
||||
// Deterministic date spread so DATE-DESC order is predictable:
|
||||
// document #0 has the oldest date, document #119 has the newest.
|
||||
for (int i = 0; i < FIXTURE_SIZE; i++) {
|
||||
Document doc = Document.builder()
|
||||
.title("Dok-" + String.format("%03d", i))
|
||||
.originalFilename("dok-" + i + ".pdf")
|
||||
.status(DocumentStatus.UPLOADED)
|
||||
.documentDate(LocalDate.of(1900, 1, 1).plusDays(i))
|
||||
.build();
|
||||
documentRepository.save(doc);
|
||||
}
|
||||
assertThat(documentRepository.count()).isEqualTo(FIXTURE_SIZE);
|
||||
}
|
||||
|
||||
@Test
|
||||
void search_firstPage_returnsExactlyPageSizeItems_andCorrectTotalElements() {
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
null, null, null, null, null, null, null, null,
|
||||
DocumentSort.DATE, "DESC", null,
|
||||
PageRequest.of(0, 50));
|
||||
|
||||
assertThat(result.items()).hasSize(50);
|
||||
assertThat(result.totalElements()).isEqualTo(FIXTURE_SIZE);
|
||||
assertThat(result.pageNumber()).isZero();
|
||||
assertThat(result.pageSize()).isEqualTo(50);
|
||||
assertThat(result.totalPages()).isEqualTo(3); // ceil(120 / 50)
|
||||
}
|
||||
|
||||
@Test
|
||||
void search_lastPartialPage_returnsRemainingItems() {
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
null, null, null, null, null, null, null, null,
|
||||
DocumentSort.DATE, "DESC", null,
|
||||
PageRequest.of(2, 50));
|
||||
|
||||
// Page 2 (offset 100) of 120 docs → exactly 20 items on the tail.
|
||||
assertThat(result.items()).hasSize(20);
|
||||
assertThat(result.totalElements()).isEqualTo(FIXTURE_SIZE);
|
||||
assertThat(result.pageNumber()).isEqualTo(2);
|
||||
}
|
||||
|
||||
@Test
|
||||
void search_pageBeyondLast_returnsEmptyContent_totalElementsStillCorrect() {
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
null, null, null, null, null, null, null, null,
|
||||
DocumentSort.DATE, "DESC", null,
|
||||
PageRequest.of(99, 50));
|
||||
|
||||
assertThat(result.items()).isEmpty();
|
||||
assertThat(result.totalElements()).isEqualTo(FIXTURE_SIZE);
|
||||
}
|
||||
|
||||
@Test
|
||||
void search_senderSort_pageOne_slicesInMemory_withCorrectTotal() {
|
||||
// SENDER sort path fetches all + sorts + slices in-memory (see scaling
|
||||
// comment in DocumentService). Proves that the in-memory slice path
|
||||
// returns the correct total from a real repository fetch.
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
null, null, null, null, null, null, null, null,
|
||||
DocumentSort.SENDER, "asc", null,
|
||||
PageRequest.of(1, 50));
|
||||
|
||||
assertThat(result.items()).hasSize(50);
|
||||
assertThat(result.totalElements()).isEqualTo(FIXTURE_SIZE);
|
||||
assertThat(result.pageNumber()).isEqualTo(1);
|
||||
assertThat(result.totalPages()).isEqualTo(3);
|
||||
}
|
||||
|
||||
@Test
|
||||
void search_differentPagesReturnDisjointSlices() {
|
||||
DocumentSearchResult page0 = documentService.searchDocuments(
|
||||
null, null, null, null, null, null, null, null,
|
||||
DocumentSort.DATE, "DESC", null,
|
||||
PageRequest.of(0, 50));
|
||||
DocumentSearchResult page1 = documentService.searchDocuments(
|
||||
null, null, null, null, null, null, null, null,
|
||||
DocumentSort.DATE, "DESC", null,
|
||||
PageRequest.of(1, 50));
|
||||
|
||||
// No document id should appear on both pages — slicing must be exclusive.
|
||||
var idsOnPage0 = page0.items().stream()
|
||||
.map(item -> item.document().getId())
|
||||
.toList();
|
||||
var idsOnPage1 = page1.items().stream()
|
||||
.map(item -> item.document().getId())
|
||||
.toList();
|
||||
for (UUID id : idsOnPage0) {
|
||||
assertThat(idsOnPage1).doesNotContain(id);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -5,14 +5,12 @@ import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.raddatz.familienarchiv.audit.AuditLogQueryService;
|
||||
import org.raddatz.familienarchiv.dto.DocumentSearchResult;
|
||||
import org.raddatz.familienarchiv.dto.DocumentSort;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.model.DocumentStatus;
|
||||
import org.raddatz.familienarchiv.repository.DocumentRepository;
|
||||
import org.springframework.data.domain.PageImpl;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.jpa.domain.Specification;
|
||||
|
||||
import java.time.LocalDate;
|
||||
@@ -26,16 +24,12 @@ import static org.mockito.Mockito.when;
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class DocumentServiceSortTest {
|
||||
|
||||
private static final Pageable UNPAGED = org.springframework.data.domain.PageRequest.of(0, 10_000);
|
||||
|
||||
@Mock DocumentRepository documentRepository;
|
||||
@Mock PersonService personService;
|
||||
@Mock FileService fileService;
|
||||
@Mock TagService tagService;
|
||||
@Mock DocumentVersionService documentVersionService;
|
||||
@Mock AnnotationService annotationService;
|
||||
@Mock AuditLogQueryService auditLogQueryService;
|
||||
@Mock TranscriptionBlockQueryService transcriptionBlockQueryService;
|
||||
@InjectMocks DocumentService documentService;
|
||||
|
||||
// ─── searchDocuments — DATE sort ──────────────────────────────────────────
|
||||
@@ -54,16 +48,16 @@ class DocumentServiceSortTest {
|
||||
|
||||
// FTS returns id1 first (higher rank), id2 second
|
||||
when(documentRepository.findRankedIdsByFts("Brief")).thenReturn(List.of(id1, id2));
|
||||
// findAll(spec, pageable) — the correct date path — returns date-DESC order
|
||||
when(documentRepository.findAll(any(Specification.class), any(Pageable.class)))
|
||||
.thenReturn(new PageImpl<>(List.of(newer, older)));
|
||||
// findAll(spec, sort) — the correct date path — returns date-DESC order
|
||||
when(documentRepository.findAll(any(Specification.class), any(Sort.class)))
|
||||
.thenReturn(List.of(newer, older));
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
"Brief", null, null, null, null, null, null, null, DocumentSort.DATE, "DESC", null, UNPAGED);
|
||||
"Brief", null, null, null, null, null, null, null, DocumentSort.DATE, "DESC", null);
|
||||
|
||||
// Expect: date order (newer 1960 first), NOT rank order (older 1940 first)
|
||||
assertThat(result.items()).hasSize(2);
|
||||
assertThat(result.items().get(0).document().getId()).isEqualTo(id2); // newer doc first
|
||||
assertThat(result.documents()).hasSize(2);
|
||||
assertThat(result.documents().get(0).getId()).isEqualTo(id2); // newer doc first
|
||||
}
|
||||
|
||||
// ─── searchDocuments — RELEVANCE sort ─────────────────────────────────────
|
||||
@@ -81,10 +75,10 @@ class DocumentServiceSortTest {
|
||||
.thenReturn(List.of(doc2, doc1)); // unordered from DB
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
"Brief", null, null, null, null, null, null, null, DocumentSort.RELEVANCE, null, null, UNPAGED);
|
||||
"Brief", null, null, null, null, null, null, null, DocumentSort.RELEVANCE, null, null);
|
||||
|
||||
// Expect: rank order restored (id1 first)
|
||||
assertThat(result.items().get(0).document().getId()).isEqualTo(id1);
|
||||
assertThat(result.documents().get(0).getId()).isEqualTo(id1);
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -100,8 +94,8 @@ class DocumentServiceSortTest {
|
||||
.thenReturn(List.of(doc2, doc1));
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
"Brief", null, null, null, null, null, null, null, null, null, null, UNPAGED);
|
||||
"Brief", null, null, null, null, null, null, null, null, null, null);
|
||||
|
||||
assertThat(result.items().get(0).document().getId()).isEqualTo(id1);
|
||||
assertThat(result.documents().get(0).getId()).isEqualTo(id1);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,16 +7,13 @@ import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.raddatz.familienarchiv.audit.AuditKind;
|
||||
import org.raddatz.familienarchiv.audit.AuditLogQueryService;
|
||||
import org.raddatz.familienarchiv.audit.AuditService;
|
||||
import org.raddatz.familienarchiv.dto.DocumentSearchItem;
|
||||
import org.raddatz.familienarchiv.dto.DocumentSearchResult;
|
||||
import org.raddatz.familienarchiv.dto.DocumentSort;
|
||||
import org.raddatz.familienarchiv.dto.DocumentUpdateDTO;
|
||||
import org.raddatz.familienarchiv.dto.IncompleteDocumentDTO;
|
||||
import org.raddatz.familienarchiv.dto.MatchOffset;
|
||||
import org.raddatz.familienarchiv.dto.SearchMatchData;
|
||||
import org.raddatz.familienarchiv.dto.TagOperator;
|
||||
import org.raddatz.familienarchiv.exception.DomainException;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.model.DocumentStatus;
|
||||
@@ -25,7 +22,6 @@ import org.raddatz.familienarchiv.model.Tag;
|
||||
import org.raddatz.familienarchiv.repository.DocumentRepository;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.PageImpl;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.mock.web.MockMultipartFile;
|
||||
@@ -48,12 +44,6 @@ import static org.mockito.Mockito.*;
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class DocumentServiceTest {
|
||||
|
||||
// Used by tests that don't care about paging. 10 000 is chosen large enough
|
||||
// to hold any fixture in this file but small enough that totalPages math
|
||||
// stays in int range. Swap to `PageRequest.of(0, 10_000)` elsewhere is a
|
||||
// red flag — use this constant.
|
||||
private static final Pageable UNPAGED = PageRequest.of(0, 10_000);
|
||||
|
||||
@Mock DocumentRepository documentRepository;
|
||||
@Mock PersonService personService;
|
||||
@Mock FileService fileService;
|
||||
@@ -61,9 +51,6 @@ class DocumentServiceTest {
|
||||
@Mock DocumentVersionService documentVersionService;
|
||||
@Mock AnnotationService annotationService;
|
||||
@Mock AuditService auditService;
|
||||
@Mock AuditLogQueryService auditLogQueryService;
|
||||
@Mock TranscriptionBlockQueryService transcriptionBlockQueryService;
|
||||
@Mock ThumbnailAsyncRunner thumbnailAsyncRunner;
|
||||
@InjectMocks DocumentService documentService;
|
||||
|
||||
// ─── deleteDocument ───────────────────────────────────────────────────────
|
||||
@@ -121,23 +108,6 @@ class DocumentServiceTest {
|
||||
.isInstanceOf(DomainException.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void updateDocument_setsArchiveBoxAndFolder() throws Exception {
|
||||
UUID id = UUID.randomUUID();
|
||||
Document doc = Document.builder().id(id).receivers(new HashSet<>()).tags(new HashSet<>()).build();
|
||||
when(documentRepository.findById(id)).thenReturn(Optional.of(doc));
|
||||
when(documentRepository.save(any())).thenReturn(doc);
|
||||
|
||||
DocumentUpdateDTO dto = new DocumentUpdateDTO();
|
||||
dto.setArchiveBox("K-03");
|
||||
dto.setArchiveFolder("Mappe B");
|
||||
|
||||
documentService.updateDocument(id, dto, null, null);
|
||||
|
||||
assertThat(doc.getArchiveBox()).isEqualTo("K-03");
|
||||
assertThat(doc.getArchiveFolder()).isEqualTo("Mappe B");
|
||||
}
|
||||
|
||||
// ─── deleteTagCascading ───────────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
@@ -283,107 +253,6 @@ class DocumentServiceTest {
|
||||
verify(documentVersionService).recordVersion(any(Document.class));
|
||||
}
|
||||
|
||||
// ─── thumbnail dispatch ───────────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
void storeDocument_dispatchesThumbnailAfterSave() throws Exception {
|
||||
org.springframework.mock.web.MockMultipartFile file =
|
||||
new org.springframework.mock.web.MockMultipartFile("file", "new.pdf", "application/pdf", new byte[]{1});
|
||||
UUID savedId = UUID.randomUUID();
|
||||
Document saved = Document.builder().id(savedId).originalFilename("new.pdf").build();
|
||||
when(documentRepository.findFirstByOriginalFilename("new.pdf")).thenReturn(Optional.empty());
|
||||
when(documentRepository.save(any())).thenReturn(saved);
|
||||
when(fileService.uploadFile(any(), any())).thenReturn(new FileService.UploadResult("documents/new.pdf", "hash"));
|
||||
|
||||
documentService.storeDocument(file, null);
|
||||
|
||||
verify(thumbnailAsyncRunner, times(1)).dispatchAfterCommit(savedId);
|
||||
}
|
||||
|
||||
@Test
|
||||
void createDocument_dispatchesThumbnail_onlyWhenFileProvided() throws Exception {
|
||||
DocumentUpdateDTO dto = new DocumentUpdateDTO();
|
||||
dto.setTitle("No file");
|
||||
UUID savedId = UUID.randomUUID();
|
||||
Document saved = Document.builder().id(savedId).title("No file")
|
||||
.originalFilename("No file").status(DocumentStatus.PLACEHOLDER).build();
|
||||
when(documentRepository.save(any())).thenReturn(saved);
|
||||
when(documentRepository.findById(any())).thenReturn(Optional.of(saved));
|
||||
|
||||
documentService.createDocument(dto, null);
|
||||
|
||||
verifyNoInteractions(thumbnailAsyncRunner);
|
||||
}
|
||||
|
||||
@Test
|
||||
void createDocument_dispatchesThumbnail_whenFileProvided() throws Exception {
|
||||
DocumentUpdateDTO dto = new DocumentUpdateDTO();
|
||||
dto.setTitle("With file");
|
||||
org.springframework.mock.web.MockMultipartFile file =
|
||||
new org.springframework.mock.web.MockMultipartFile("file", "scan.pdf", "application/pdf", new byte[]{1});
|
||||
UUID savedId = UUID.randomUUID();
|
||||
Document saved = Document.builder().id(savedId).title("With file")
|
||||
.originalFilename("scan.pdf").status(DocumentStatus.PLACEHOLDER).build();
|
||||
when(documentRepository.save(any())).thenReturn(saved);
|
||||
when(documentRepository.findById(any())).thenReturn(Optional.of(saved));
|
||||
when(fileService.uploadFile(any(), any()))
|
||||
.thenReturn(new FileService.UploadResult("documents/scan.pdf", "hash"));
|
||||
|
||||
documentService.createDocument(dto, file);
|
||||
|
||||
verify(thumbnailAsyncRunner, times(1)).dispatchAfterCommit(savedId);
|
||||
}
|
||||
|
||||
@Test
|
||||
void updateDocument_dispatchesThumbnail_onlyWhenFileReplaced() throws Exception {
|
||||
UUID id = UUID.randomUUID();
|
||||
Document existing = Document.builder()
|
||||
.id(id).title("Doc").originalFilename("old.pdf")
|
||||
.status(DocumentStatus.UPLOADED).build();
|
||||
when(documentRepository.findById(id)).thenReturn(Optional.of(existing));
|
||||
when(documentRepository.save(any())).thenReturn(existing);
|
||||
|
||||
documentService.updateDocument(id, new DocumentUpdateDTO(), null, null);
|
||||
|
||||
verifyNoInteractions(thumbnailAsyncRunner);
|
||||
}
|
||||
|
||||
@Test
|
||||
void updateDocument_dispatchesThumbnail_whenNewFileProvided() throws Exception {
|
||||
UUID id = UUID.randomUUID();
|
||||
Document existing = Document.builder()
|
||||
.id(id).title("Doc").originalFilename("old.pdf")
|
||||
.status(DocumentStatus.UPLOADED).build();
|
||||
org.springframework.mock.web.MockMultipartFile newFile =
|
||||
new org.springframework.mock.web.MockMultipartFile("file", "new.pdf", "application/pdf", new byte[]{1});
|
||||
when(documentRepository.findById(id)).thenReturn(Optional.of(existing));
|
||||
when(documentRepository.save(any())).thenReturn(existing);
|
||||
when(fileService.uploadFile(any(), any()))
|
||||
.thenReturn(new FileService.UploadResult("documents/new.pdf", "hash"));
|
||||
|
||||
documentService.updateDocument(id, new DocumentUpdateDTO(), newFile, null);
|
||||
|
||||
verify(thumbnailAsyncRunner, times(1)).dispatchAfterCommit(id);
|
||||
}
|
||||
|
||||
@Test
|
||||
void attachFile_dispatchesThumbnailAfterSave() throws Exception {
|
||||
UUID id = UUID.randomUUID();
|
||||
Document existing = Document.builder()
|
||||
.id(id).title("Placeholder").originalFilename("placeholder")
|
||||
.status(DocumentStatus.PLACEHOLDER).build();
|
||||
org.springframework.mock.web.MockMultipartFile file =
|
||||
new org.springframework.mock.web.MockMultipartFile("file", "scan.pdf", "application/pdf", new byte[]{1});
|
||||
when(documentRepository.findById(id)).thenReturn(Optional.of(existing));
|
||||
when(documentRepository.save(any())).thenReturn(existing);
|
||||
when(fileService.uploadFile(any(), any()))
|
||||
.thenReturn(new FileService.UploadResult("documents/scan.pdf", "hash"));
|
||||
|
||||
documentService.attachFile(id, file, null);
|
||||
|
||||
verify(thumbnailAsyncRunner, times(1)).dispatchAfterCommit(id);
|
||||
}
|
||||
|
||||
// ─── storeDocument ───────────────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
@@ -517,22 +386,6 @@ class DocumentServiceTest {
|
||||
assertThat(result.get(0).title()).isEqualTo("Unvollständig");
|
||||
}
|
||||
|
||||
@Test
|
||||
void findIncompleteDocuments_mapsUploadedAtFromCreatedAt() {
|
||||
java.time.LocalDateTime createdAt = java.time.LocalDateTime.of(2026, 4, 20, 12, 0);
|
||||
Document doc = Document.builder()
|
||||
.id(UUID.randomUUID())
|
||||
.title("Recent")
|
||||
.createdAt(createdAt)
|
||||
.build();
|
||||
when(documentRepository.findByMetadataCompleteFalse(any(Pageable.class)))
|
||||
.thenReturn(new PageImpl<>(List.of(doc)));
|
||||
|
||||
List<IncompleteDocumentDTO> result = documentService.findIncompleteDocuments(3);
|
||||
|
||||
assertThat(result.get(0).uploadedAt()).isEqualTo(createdAt);
|
||||
}
|
||||
|
||||
@Test
|
||||
void findIncompleteDocuments_passesSizeToPageable() {
|
||||
when(documentRepository.findByMetadataCompleteFalse(any(Pageable.class)))
|
||||
@@ -1348,124 +1201,26 @@ class DocumentServiceTest {
|
||||
assertThat(result).isNull();
|
||||
}
|
||||
|
||||
// ─── searchDocuments — pagination ────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
void searchDocuments_fastPath_usesFindAllWithPageable_notWithSort() {
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Pageable.class)))
|
||||
.thenReturn(new PageImpl<>(List.of()));
|
||||
|
||||
documentService.searchDocuments(null, null, null, null, null, null, null, null,
|
||||
org.raddatz.familienarchiv.dto.DocumentSort.DATE, "DESC", null,
|
||||
org.springframework.data.domain.PageRequest.of(1, 50));
|
||||
|
||||
verify(documentRepository).findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Pageable.class));
|
||||
verify(documentRepository, never()).findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Sort.class));
|
||||
}
|
||||
|
||||
@Test
|
||||
void searchDocuments_fastPath_propagatesPageableToDatabase() {
|
||||
ArgumentCaptor<Pageable> captor = ArgumentCaptor.forClass(Pageable.class);
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Pageable.class)))
|
||||
.thenReturn(new PageImpl<>(List.of()));
|
||||
|
||||
documentService.searchDocuments(null, null, null, null, null, null, null, null,
|
||||
org.raddatz.familienarchiv.dto.DocumentSort.DATE, "DESC", null,
|
||||
org.springframework.data.domain.PageRequest.of(3, 25));
|
||||
|
||||
verify(documentRepository).findAll(any(org.springframework.data.jpa.domain.Specification.class), captor.capture());
|
||||
assertThat(captor.getValue().getPageNumber()).isEqualTo(3);
|
||||
assertThat(captor.getValue().getPageSize()).isEqualTo(25);
|
||||
}
|
||||
|
||||
@Test
|
||||
void searchDocuments_fastPath_returnsPageableTotalsOnResult() {
|
||||
// The service MUST report the full match count from Page.getTotalElements(),
|
||||
// not the slice size — otherwise the frontend's "N Briefe gefunden" label is wrong.
|
||||
Document d = Document.builder().id(UUID.randomUUID()).title("T").build();
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Pageable.class)))
|
||||
.thenReturn(new PageImpl<>(List.of(d), org.springframework.data.domain.PageRequest.of(0, 50), 120L));
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(null, null, null, null, null, null, null, null,
|
||||
org.raddatz.familienarchiv.dto.DocumentSort.DATE, "DESC", null,
|
||||
org.springframework.data.domain.PageRequest.of(0, 50));
|
||||
|
||||
assertThat(result.totalElements()).isEqualTo(120L);
|
||||
assertThat(result.pageNumber()).isZero();
|
||||
assertThat(result.pageSize()).isEqualTo(50);
|
||||
assertThat(result.totalPages()).isEqualTo(3); // ceil(120/50)
|
||||
assertThat(result.items()).hasSize(1); // only the slice is enriched
|
||||
}
|
||||
|
||||
@Test
|
||||
void searchDocuments_senderSort_slicesInMemoryAndReportsFullTotal() {
|
||||
// Fixture: 120 docs with senders; request page 1, size 50 → expect 50 items
|
||||
// back with totalElements = 120.
|
||||
List<Document> all = new java.util.ArrayList<>();
|
||||
for (int i = 0; i < 120; i++) {
|
||||
Person p = Person.builder()
|
||||
.id(UUID.randomUUID())
|
||||
.firstName("F" + i)
|
||||
.lastName(String.format("L%03d", i))
|
||||
.build();
|
||||
all.add(Document.builder().id(UUID.randomUUID()).title("D" + i).sender(p).build());
|
||||
}
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class)))
|
||||
.thenReturn(all);
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(null, null, null, null, null, null, null, null,
|
||||
org.raddatz.familienarchiv.dto.DocumentSort.SENDER, "asc", null,
|
||||
org.springframework.data.domain.PageRequest.of(1, 50));
|
||||
|
||||
assertThat(result.totalElements()).isEqualTo(120L);
|
||||
assertThat(result.pageNumber()).isEqualTo(1);
|
||||
assertThat(result.pageSize()).isEqualTo(50);
|
||||
assertThat(result.totalPages()).isEqualTo(3);
|
||||
assertThat(result.items()).hasSize(50);
|
||||
// Page 1 (offset 50) under ascending sender sort should start at L050
|
||||
assertThat(result.items().get(0).document().getSender().getLastName()).isEqualTo("L050");
|
||||
}
|
||||
|
||||
@Test
|
||||
void searchDocuments_pageBeyondLast_returnsEmptyContentAndCorrectTotal() {
|
||||
// Guards the JPA edge case where page * size > totalElements.
|
||||
// Must not throw, must return empty content + correct totalElements.
|
||||
List<Document> all = new java.util.ArrayList<>();
|
||||
for (int i = 0; i < 30; i++) {
|
||||
Person p = Person.builder().id(UUID.randomUUID()).lastName(String.format("L%02d", i)).build();
|
||||
all.add(Document.builder().id(UUID.randomUUID()).title("D" + i).sender(p).build());
|
||||
}
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class)))
|
||||
.thenReturn(all);
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(null, null, null, null, null, null, null, null,
|
||||
org.raddatz.familienarchiv.dto.DocumentSort.SENDER, "asc", null,
|
||||
org.springframework.data.domain.PageRequest.of(10, 50));
|
||||
|
||||
assertThat(result.items()).isEmpty();
|
||||
assertThat(result.totalElements()).isEqualTo(30L);
|
||||
}
|
||||
|
||||
// ─── searchDocuments — status filter ─────────────────────────────────────
|
||||
|
||||
@Test
|
||||
void searchDocuments_passesStatusSpecificationToRepository() {
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Pageable.class)))
|
||||
.thenReturn(new PageImpl<>(List.of()));
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Sort.class)))
|
||||
.thenReturn(List.of());
|
||||
|
||||
documentService.searchDocuments(null, null, null, null, null, null, null, DocumentStatus.REVIEWED, null, null, null, UNPAGED);
|
||||
documentService.searchDocuments(null, null, null, null, null, null, null, DocumentStatus.REVIEWED, null, null, null);
|
||||
|
||||
verify(documentRepository).findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Pageable.class));
|
||||
verify(documentRepository).findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Sort.class));
|
||||
}
|
||||
|
||||
@Test
|
||||
void searchDocuments_withNullStatus_doesNotFilterByStatus() {
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Pageable.class)))
|
||||
.thenReturn(new PageImpl<>(List.of()));
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Sort.class)))
|
||||
.thenReturn(List.of());
|
||||
|
||||
documentService.searchDocuments(null, null, null, null, null, null, null, null, null, null, null, UNPAGED);
|
||||
documentService.searchDocuments(null, null, null, null, null, null, null, null, null, null, null);
|
||||
|
||||
verify(documentRepository).findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Pageable.class));
|
||||
verify(documentRepository).findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Sort.class));
|
||||
}
|
||||
|
||||
// ─── getRecentActivity ────────────────────────────────────────────────────
|
||||
@@ -1541,10 +1296,10 @@ class DocumentServiceTest {
|
||||
.thenReturn(List.of(withSender, noSender));
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
null, null, null, null, null, null, null, null, DocumentSort.SENDER, "asc", null, UNPAGED);
|
||||
null, null, null, null, null, null, null, null, DocumentSort.SENDER, "asc", null);
|
||||
|
||||
assertThat(result.items()).hasSize(2);
|
||||
assertThat(result.items()).extracting(item -> item.document().getTitle()).containsExactly("Has Sender", "No Sender");
|
||||
assertThat(result.documents()).hasSize(2);
|
||||
assertThat(result.documents()).extracting(Document::getTitle).containsExactly("Has Sender", "No Sender");
|
||||
}
|
||||
|
||||
// ─── searchDocuments — RECEIVER sort, empty receivers ───────────────────────
|
||||
@@ -1561,9 +1316,9 @@ class DocumentServiceTest {
|
||||
.thenReturn(List.of(noReceivers, withReceiver));
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
null, null, null, null, null, null, null, null, DocumentSort.RECEIVER, "asc", null, UNPAGED);
|
||||
null, null, null, null, null, null, null, null, DocumentSort.RECEIVER, "asc", null);
|
||||
|
||||
assertThat(result.items()).extracting(item -> item.document().getTitle())
|
||||
assertThat(result.documents()).extracting(Document::getTitle)
|
||||
.containsExactly("Has Receiver", "No Receivers");
|
||||
}
|
||||
|
||||
@@ -1583,10 +1338,10 @@ class DocumentServiceTest {
|
||||
.thenReturn(List.of(docNullName, docSmith));
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
null, null, null, null, null, null, null, null, DocumentSort.SENDER, "asc", null, UNPAGED);
|
||||
null, null, null, null, null, null, null, null, DocumentSort.SENDER, "asc", null);
|
||||
|
||||
// null lastName should sort to end (treated as empty), not before "smith" (as "null")
|
||||
assertThat(result.items()).extracting(item -> item.document().getTitle())
|
||||
assertThat(result.documents()).extracting(Document::getTitle)
|
||||
.containsExactly("smith doc", "Null lastname doc");
|
||||
}
|
||||
|
||||
@@ -1605,24 +1360,23 @@ class DocumentServiceTest {
|
||||
when(documentRepository.findEnrichmentData(any(), eq("Brief"))).thenReturn(rows);
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
"Brief", null, null, null, null, null, null, null, DocumentSort.RELEVANCE, null, null, UNPAGED);
|
||||
"Brief", null, null, null, null, null, null, null, DocumentSort.RELEVANCE, null, null);
|
||||
|
||||
assertThat(result.items()).hasSize(1);
|
||||
SearchMatchData md = result.items().get(0).matchData();
|
||||
assertThat(result.matchData()).containsKey(docId);
|
||||
SearchMatchData md = result.matchData().get(docId);
|
||||
assertThat(md.titleOffsets()).hasSize(1);
|
||||
assertThat(md.titleOffsets().get(0)).isEqualTo(new MatchOffset(0, 5)); // "Brief" = 5 chars at pos 0
|
||||
}
|
||||
|
||||
@Test
|
||||
void searchDocuments_withoutTextQuery_returnsEmptyMatchData() {
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Pageable.class)))
|
||||
.thenReturn(new PageImpl<>(List.of()));
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Sort.class)))
|
||||
.thenReturn(List.of());
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
null, null, null, null, null, null, null, null, null, null, null,
|
||||
UNPAGED);
|
||||
null, null, null, null, null, null, null, null, null, null, null);
|
||||
|
||||
assertThat(result.items()).isEmpty();
|
||||
assertThat(result.matchData()).isEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -1639,9 +1393,9 @@ class DocumentServiceTest {
|
||||
when(documentRepository.findEnrichmentData(any(), eq("Brief"))).thenReturn(rows);
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
"Brief", null, null, null, null, null, null, null, DocumentSort.RELEVANCE, null, null, UNPAGED);
|
||||
"Brief", null, null, null, null, null, null, null, DocumentSort.RELEVANCE, null, null);
|
||||
|
||||
SearchMatchData md = result.items().get(0).matchData();
|
||||
SearchMatchData md = result.matchData().get(docId);
|
||||
assertThat(md.transcriptionSnippet()).isEqualTo("Hier ist der Brief aus Berlin");
|
||||
assertThat(md.snippetOffsets()).containsExactly(new MatchOffset(13, 5)); // "Brief" at pos 13
|
||||
}
|
||||
@@ -1831,437 +1585,4 @@ class DocumentServiceTest {
|
||||
|
||||
verify(auditService).logAfterCommit(eq(AuditKind.FILE_UPLOADED), isNull(), eq(id), isNull());
|
||||
}
|
||||
|
||||
// ─── storeDocumentWithBatchMetadata ──────────────────────────────────────
|
||||
|
||||
private MockMultipartFile pdfFile(String name) {
|
||||
return new MockMultipartFile("file", name, "application/pdf", new byte[]{1});
|
||||
}
|
||||
|
||||
private void stubStoreDocument(String filename) throws Exception {
|
||||
when(documentRepository.findFirstByOriginalFilename(filename)).thenReturn(Optional.empty());
|
||||
when(fileService.uploadFile(any(), any())).thenReturn(new FileService.UploadResult("key", "hash"));
|
||||
}
|
||||
|
||||
@Test
|
||||
void storeDocumentWithBatchMetadata_appliesTitleByIndex() throws Exception {
|
||||
stubStoreDocument("scan01.pdf");
|
||||
when(documentRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));
|
||||
|
||||
org.raddatz.familienarchiv.dto.DocumentBatchMetadataDTO meta = new org.raddatz.familienarchiv.dto.DocumentBatchMetadataDTO();
|
||||
meta.setTitles(List.of("Erster Brief", "Zweiter Brief"));
|
||||
|
||||
DocumentService.StoreResult result = documentService.storeDocumentWithBatchMetadata(pdfFile("scan01.pdf"), meta, 0, null);
|
||||
|
||||
assertThat(result.document().getTitle()).isEqualTo("Erster Brief");
|
||||
}
|
||||
|
||||
@Test
|
||||
void storeDocumentWithBatchMetadata_resolvesSenderViaPersonService() throws Exception {
|
||||
UUID senderId = UUID.randomUUID();
|
||||
stubStoreDocument("scan02.pdf");
|
||||
when(documentRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));
|
||||
Person sender = Person.builder().id(senderId).firstName("Anna").build();
|
||||
when(personService.getById(senderId)).thenReturn(sender);
|
||||
|
||||
org.raddatz.familienarchiv.dto.DocumentBatchMetadataDTO meta = new org.raddatz.familienarchiv.dto.DocumentBatchMetadataDTO();
|
||||
meta.setSenderId(senderId);
|
||||
|
||||
DocumentService.StoreResult result = documentService.storeDocumentWithBatchMetadata(pdfFile("scan02.pdf"), meta, 0, null);
|
||||
|
||||
assertThat(result.document().getSender()).isEqualTo(sender);
|
||||
}
|
||||
|
||||
@Test
|
||||
void storeDocumentWithBatchMetadata_appliesTagsViaUpdateDocumentTags() throws Exception {
|
||||
UUID docId = UUID.randomUUID();
|
||||
when(documentRepository.findFirstByOriginalFilename("scan03.pdf")).thenReturn(Optional.empty());
|
||||
when(fileService.uploadFile(any(), any())).thenReturn(new FileService.UploadResult("key", "hash"));
|
||||
when(documentRepository.save(any())).thenAnswer(inv -> {
|
||||
Document d = inv.getArgument(0);
|
||||
if (d.getId() == null) d.setId(docId);
|
||||
return d;
|
||||
});
|
||||
when(documentRepository.findById(docId)).thenAnswer(inv -> {
|
||||
Document d = new Document();
|
||||
d.setId(docId);
|
||||
return Optional.of(d);
|
||||
});
|
||||
Tag tag = Tag.builder().id(UUID.randomUUID()).name("Familie").build();
|
||||
when(tagService.findOrCreate("Familie")).thenReturn(tag);
|
||||
|
||||
org.raddatz.familienarchiv.dto.DocumentBatchMetadataDTO meta = new org.raddatz.familienarchiv.dto.DocumentBatchMetadataDTO();
|
||||
meta.setTagNames(List.of("Familie"));
|
||||
|
||||
documentService.storeDocumentWithBatchMetadata(pdfFile("scan03.pdf"), meta, 0, null);
|
||||
|
||||
verify(tagService).findOrCreate("Familie");
|
||||
}
|
||||
|
||||
@Test
|
||||
void storeDocumentWithBatchMetadata_leavesTitle_whenIndexExceedsTitlesList() throws Exception {
|
||||
stubStoreDocument("scan04.pdf");
|
||||
when(documentRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));
|
||||
|
||||
org.raddatz.familienarchiv.dto.DocumentBatchMetadataDTO meta = new org.raddatz.familienarchiv.dto.DocumentBatchMetadataDTO();
|
||||
meta.setTitles(List.of("Only One Title"));
|
||||
|
||||
DocumentService.StoreResult result = documentService.storeDocumentWithBatchMetadata(pdfFile("scan04.pdf"), meta, 5, null);
|
||||
|
||||
assertThat(result.document().getTitle()).isEqualTo("scan04");
|
||||
}
|
||||
|
||||
// ─── validateBatch ───────────────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
void validateBatch_throwsBatchTooLarge_whenFileCountExceedsCap() {
|
||||
assertThatThrownBy(() -> documentService.validateBatch(51, null))
|
||||
.isInstanceOf(DomainException.class)
|
||||
.hasMessageContaining("50");
|
||||
}
|
||||
|
||||
@Test
|
||||
void validateBatch_doesNotThrow_whenFileCountEqualsCapExactly() {
|
||||
documentService.validateBatch(50, null);
|
||||
}
|
||||
|
||||
@Test
|
||||
void validateBatch_throwsValidationError_whenTitlesSizeExceedsFileCount() {
|
||||
org.raddatz.familienarchiv.dto.DocumentBatchMetadataDTO metadata =
|
||||
new org.raddatz.familienarchiv.dto.DocumentBatchMetadataDTO();
|
||||
metadata.setTitles(java.util.List.of("A", "B", "C"));
|
||||
|
||||
assertThatThrownBy(() -> documentService.validateBatch(2, metadata))
|
||||
.isInstanceOf(DomainException.class)
|
||||
.hasMessageContaining("titles");
|
||||
}
|
||||
|
||||
// ─── applyBulkEditToDocument ─────────────────────────────────────────────
|
||||
|
||||
private static org.raddatz.familienarchiv.dto.DocumentBulkEditDTO bulkDto() {
|
||||
return new org.raddatz.familienarchiv.dto.DocumentBulkEditDTO();
|
||||
}
|
||||
|
||||
@Test
|
||||
void applyBulkEditToDocument_throwsNotFound_whenDocumentMissing() {
|
||||
UUID id = UUID.randomUUID();
|
||||
when(documentRepository.findById(id)).thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> documentService.applyBulkEditToDocument(id, bulkDto(), null))
|
||||
.isInstanceOf(DomainException.class)
|
||||
.hasMessageContaining(id.toString());
|
||||
}
|
||||
|
||||
@Test
|
||||
void applyBulkEditToDocument_appliesTagsAdditively_preservesExistingTags() {
|
||||
UUID id = UUID.randomUUID();
|
||||
Tag existing = Tag.builder().id(UUID.randomUUID()).name("Brief").build();
|
||||
Tag added = Tag.builder().id(UUID.randomUUID()).name("Kurrent").build();
|
||||
Document doc = Document.builder().id(id).title("T")
|
||||
.tags(new HashSet<>(Set.of(existing)))
|
||||
.receivers(new HashSet<>())
|
||||
.build();
|
||||
when(documentRepository.findById(id)).thenReturn(Optional.of(doc));
|
||||
when(documentRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));
|
||||
when(tagService.findOrCreate("Kurrent")).thenReturn(added);
|
||||
|
||||
var dto = bulkDto();
|
||||
dto.setTagNames(List.of("Kurrent"));
|
||||
documentService.applyBulkEditToDocument(id, dto, null);
|
||||
|
||||
assertThat(doc.getTags()).containsExactlyInAnyOrder(existing, added);
|
||||
}
|
||||
|
||||
@Test
|
||||
void applyBulkEditToDocument_skipsTags_whenTagNamesIsNull() {
|
||||
UUID id = UUID.randomUUID();
|
||||
Tag existing = Tag.builder().id(UUID.randomUUID()).name("Brief").build();
|
||||
Document doc = Document.builder().id(id).title("T")
|
||||
.tags(new HashSet<>(Set.of(existing)))
|
||||
.receivers(new HashSet<>())
|
||||
.build();
|
||||
when(documentRepository.findById(id)).thenReturn(Optional.of(doc));
|
||||
when(documentRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));
|
||||
|
||||
documentService.applyBulkEditToDocument(id, bulkDto(), null);
|
||||
|
||||
assertThat(doc.getTags()).containsExactly(existing);
|
||||
verify(tagService, never()).findOrCreate(any());
|
||||
}
|
||||
|
||||
@Test
|
||||
void applyBulkEditToDocument_skipsTags_whenTagNamesIsEmpty() {
|
||||
UUID id = UUID.randomUUID();
|
||||
Tag existing = Tag.builder().id(UUID.randomUUID()).name("Brief").build();
|
||||
Document doc = Document.builder().id(id).title("T")
|
||||
.tags(new HashSet<>(Set.of(existing)))
|
||||
.receivers(new HashSet<>())
|
||||
.build();
|
||||
when(documentRepository.findById(id)).thenReturn(Optional.of(doc));
|
||||
when(documentRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));
|
||||
|
||||
var dto = bulkDto();
|
||||
dto.setTagNames(List.of());
|
||||
documentService.applyBulkEditToDocument(id, dto, null);
|
||||
|
||||
assertThat(doc.getTags()).containsExactly(existing);
|
||||
verify(tagService, never()).findOrCreate(any());
|
||||
}
|
||||
|
||||
@Test
|
||||
void applyBulkEditToDocument_replacesSender_whenSenderIdProvided() {
|
||||
UUID id = UUID.randomUUID();
|
||||
UUID senderId = UUID.randomUUID();
|
||||
Person oldSender = Person.builder().id(UUID.randomUUID()).firstName("Old").build();
|
||||
Person newSender = Person.builder().id(senderId).firstName("New").build();
|
||||
Document doc = Document.builder().id(id).title("T")
|
||||
.sender(oldSender)
|
||||
.receivers(new HashSet<>())
|
||||
.build();
|
||||
when(documentRepository.findById(id)).thenReturn(Optional.of(doc));
|
||||
when(documentRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));
|
||||
when(personService.getById(senderId)).thenReturn(newSender);
|
||||
|
||||
var dto = bulkDto();
|
||||
dto.setSenderId(senderId);
|
||||
documentService.applyBulkEditToDocument(id, dto, null);
|
||||
|
||||
assertThat(doc.getSender()).isEqualTo(newSender);
|
||||
}
|
||||
|
||||
@Test
|
||||
void applyBulkEditToDocument_skipsSender_whenSenderIdIsNull() {
|
||||
UUID id = UUID.randomUUID();
|
||||
Person existing = Person.builder().id(UUID.randomUUID()).firstName("X").build();
|
||||
Document doc = Document.builder().id(id).title("T")
|
||||
.sender(existing)
|
||||
.receivers(new HashSet<>())
|
||||
.build();
|
||||
when(documentRepository.findById(id)).thenReturn(Optional.of(doc));
|
||||
when(documentRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));
|
||||
|
||||
documentService.applyBulkEditToDocument(id, bulkDto(), null);
|
||||
|
||||
assertThat(doc.getSender()).isEqualTo(existing);
|
||||
verify(personService, never()).getById(any());
|
||||
}
|
||||
|
||||
@Test
|
||||
void applyBulkEditToDocument_addsReceiversAdditively_preservesExistingReceivers() {
|
||||
UUID id = UUID.randomUUID();
|
||||
UUID newReceiverId = UUID.randomUUID();
|
||||
Person existing = Person.builder().id(UUID.randomUUID()).firstName("Old").build();
|
||||
Person added = Person.builder().id(newReceiverId).firstName("New").build();
|
||||
Document doc = Document.builder().id(id).title("T")
|
||||
.receivers(new HashSet<>(Set.of(existing)))
|
||||
.build();
|
||||
when(documentRepository.findById(id)).thenReturn(Optional.of(doc));
|
||||
when(documentRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));
|
||||
when(personService.getAllById(List.of(newReceiverId))).thenReturn(List.of(added));
|
||||
|
||||
var dto = bulkDto();
|
||||
dto.setReceiverIds(List.of(newReceiverId));
|
||||
documentService.applyBulkEditToDocument(id, dto, null);
|
||||
|
||||
assertThat(doc.getReceivers()).containsExactlyInAnyOrder(existing, added);
|
||||
}
|
||||
|
||||
@Test
|
||||
void applyBulkEditToDocument_skipsReceivers_whenReceiverIdsIsNullOrEmpty() {
|
||||
UUID id = UUID.randomUUID();
|
||||
Person existing = Person.builder().id(UUID.randomUUID()).firstName("Old").build();
|
||||
Document doc = Document.builder().id(id).title("T")
|
||||
.receivers(new HashSet<>(Set.of(existing)))
|
||||
.build();
|
||||
when(documentRepository.findById(id)).thenReturn(Optional.of(doc));
|
||||
when(documentRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));
|
||||
|
||||
var dto = bulkDto();
|
||||
dto.setReceiverIds(List.of());
|
||||
documentService.applyBulkEditToDocument(id, dto, null);
|
||||
|
||||
assertThat(doc.getReceivers()).containsExactly(existing);
|
||||
verify(personService, never()).getAllById(any());
|
||||
}
|
||||
|
||||
@Test
|
||||
void applyBulkEditToDocument_recordsVersion_andLogsAuditEvent_taggedSourceBulkEdit() {
|
||||
UUID id = UUID.randomUUID();
|
||||
UUID actorId = UUID.randomUUID();
|
||||
Document doc = Document.builder().id(id).title("T").receivers(new HashSet<>()).build();
|
||||
when(documentRepository.findById(id)).thenReturn(Optional.of(doc));
|
||||
when(documentRepository.save(any())).thenReturn(doc);
|
||||
|
||||
documentService.applyBulkEditToDocument(id, bulkDto(), actorId);
|
||||
|
||||
verify(documentVersionService).recordVersion(doc);
|
||||
verify(auditService).logAfterCommit(
|
||||
eq(AuditKind.METADATA_UPDATED),
|
||||
eq(actorId),
|
||||
eq(id),
|
||||
eq(java.util.Map.of("source", "BULK_EDIT")));
|
||||
}
|
||||
|
||||
@Test
|
||||
void applyBulkEditToDocument_replacesArchiveBoxAndFolderAndDocumentLocation_whenProvided() {
|
||||
UUID id = UUID.randomUUID();
|
||||
Document doc = Document.builder().id(id).title("T")
|
||||
.archiveBox("OldBox")
|
||||
.archiveFolder("OldFolder")
|
||||
.documentLocation("OldLocation")
|
||||
.receivers(new HashSet<>())
|
||||
.build();
|
||||
when(documentRepository.findById(id)).thenReturn(Optional.of(doc));
|
||||
when(documentRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));
|
||||
|
||||
var dto = bulkDto();
|
||||
dto.setArchiveBox("NewBox");
|
||||
dto.setArchiveFolder("NewFolder");
|
||||
dto.setDocumentLocation("NewLocation");
|
||||
documentService.applyBulkEditToDocument(id, dto, null);
|
||||
|
||||
assertThat(doc.getArchiveBox()).isEqualTo("NewBox");
|
||||
assertThat(doc.getArchiveFolder()).isEqualTo("NewFolder");
|
||||
assertThat(doc.getDocumentLocation()).isEqualTo("NewLocation");
|
||||
}
|
||||
|
||||
@Test
|
||||
void applyBulkEditToDocument_propagatesDomainException_whenSenderIdUnresolvable() {
|
||||
// Sara C1 — unresolvable sender flows up as a per-document error chip
|
||||
// rather than aborting the controller's batch loop.
|
||||
UUID id = UUID.randomUUID();
|
||||
UUID unknownSender = UUID.randomUUID();
|
||||
Document doc = Document.builder().id(id).title("T").receivers(new HashSet<>()).build();
|
||||
when(documentRepository.findById(id)).thenReturn(Optional.of(doc));
|
||||
when(personService.getById(unknownSender))
|
||||
.thenThrow(DomainException.notFound(
|
||||
org.raddatz.familienarchiv.exception.ErrorCode.PERSON_NOT_FOUND,
|
||||
"Person not found: " + unknownSender));
|
||||
|
||||
var dto = bulkDto();
|
||||
dto.setSenderId(unknownSender);
|
||||
|
||||
assertThatThrownBy(() -> documentService.applyBulkEditToDocument(id, dto, null))
|
||||
.isInstanceOf(DomainException.class)
|
||||
.hasMessageContaining(unknownSender.toString());
|
||||
}
|
||||
|
||||
// ─── findIdsForFilter ────────────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
void findIdsForFilter_returnsAllMatchingIds_uncapped() {
|
||||
Document d1 = Document.builder().id(UUID.randomUUID()).title("A").build();
|
||||
Document d2 = Document.builder().id(UUID.randomUUID()).title("B").build();
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class)))
|
||||
.thenReturn(List.of(d1, d2));
|
||||
|
||||
List<UUID> result = documentService.findIdsForFilter(
|
||||
null, null, null, null, null, null, null, null, null);
|
||||
|
||||
assertThat(result).containsExactly(d1.getId(), d2.getId());
|
||||
}
|
||||
|
||||
@Test
|
||||
void findIdsForFilter_passesTagOperatorOR_throughBuildSearchSpec() {
|
||||
// Sara C3 — tagOp=OR flips useOrLogic at the spec layer; without a
|
||||
// test pinning this, a refactor that wired OR to AND (or vice versa)
|
||||
// would slip through.
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class)))
|
||||
.thenReturn(List.of());
|
||||
when(tagService.expandTagNamesToDescendantIdSets(any())).thenReturn(List.of());
|
||||
|
||||
documentService.findIdsForFilter(
|
||||
null, null, null, null, null, List.of("Brief"), null, null, TagOperator.OR);
|
||||
|
||||
// Spec built without throwing → OR branch was exercised. Coverage gain
|
||||
// is in not-throwing on the OR-specific code path; the actual SQL is
|
||||
// covered by JPA itself.
|
||||
verify(documentRepository).findAll(any(org.springframework.data.jpa.domain.Specification.class));
|
||||
}
|
||||
|
||||
@Test
|
||||
void findIdsForFilter_returnsEmpty_whenFtsHasNoMatches() {
|
||||
when(documentRepository.findRankedIdsByFts("xyz")).thenReturn(List.of());
|
||||
|
||||
List<UUID> result = documentService.findIdsForFilter(
|
||||
"xyz", null, null, null, null, null, null, null, null);
|
||||
|
||||
assertThat(result).isEmpty();
|
||||
verify(documentRepository, never()).findAll(any(org.springframework.data.jpa.domain.Specification.class));
|
||||
}
|
||||
|
||||
// ─── batchMetadata ───────────────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
void batchMetadata_returnsEmpty_whenIdsIsNull() {
|
||||
assertThat(documentService.batchMetadata(null)).isEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void batchMetadata_returnsEmpty_whenIdsIsEmpty() {
|
||||
assertThat(documentService.batchMetadata(List.of())).isEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void batchMetadata_returnsSummariesWithPdfUrl_forExistingIds() {
|
||||
UUID id1 = UUID.randomUUID();
|
||||
UUID id2 = UUID.randomUUID();
|
||||
Document d1 = Document.builder().id(id1).title("Brief 1").build();
|
||||
Document d2 = Document.builder().id(id2).title("Brief 2").build();
|
||||
when(documentRepository.findAllById(List.of(id1, id2))).thenReturn(List.of(d1, d2));
|
||||
|
||||
var result = documentService.batchMetadata(List.of(id1, id2));
|
||||
|
||||
assertThat(result).hasSize(2);
|
||||
assertThat(result.get(0).id()).isEqualTo(id1);
|
||||
assertThat(result.get(0).title()).isEqualTo("Brief 1");
|
||||
assertThat(result.get(0).pdfUrl()).isEqualTo("/api/documents/" + id1 + "/file");
|
||||
}
|
||||
|
||||
@Test
|
||||
void batchMetadata_silentlyDropsUnknownIds() {
|
||||
UUID known = UUID.randomUUID();
|
||||
UUID missing = UUID.randomUUID();
|
||||
Document d = Document.builder().id(known).title("Found").build();
|
||||
when(documentRepository.findAllById(List.of(known, missing))).thenReturn(List.of(d));
|
||||
|
||||
var result = documentService.batchMetadata(List.of(known, missing));
|
||||
|
||||
assertThat(result).hasSize(1);
|
||||
assertThat(result.get(0).id()).isEqualTo(known);
|
||||
}
|
||||
|
||||
@Test
|
||||
void batchMetadata_fallsBackToOriginalFilename_whenTitleIsNull() {
|
||||
UUID id = UUID.randomUUID();
|
||||
Document d = Document.builder().id(id).originalFilename("scan001.pdf").build();
|
||||
when(documentRepository.findAllById(List.of(id))).thenReturn(List.of(d));
|
||||
|
||||
var result = documentService.batchMetadata(List.of(id));
|
||||
|
||||
assertThat(result.get(0).title()).isEqualTo("scan001.pdf");
|
||||
}
|
||||
|
||||
@Test
|
||||
void applyBulkEditToDocument_skipsLocationFields_whenBlankOrNull() {
|
||||
UUID id = UUID.randomUUID();
|
||||
Document doc = Document.builder().id(id).title("T")
|
||||
.archiveBox("KeepBox")
|
||||
.archiveFolder("KeepFolder")
|
||||
.documentLocation("KeepLocation")
|
||||
.receivers(new HashSet<>())
|
||||
.build();
|
||||
when(documentRepository.findById(id)).thenReturn(Optional.of(doc));
|
||||
when(documentRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));
|
||||
|
||||
var dto = bulkDto();
|
||||
dto.setArchiveBox(" ");
|
||||
dto.setArchiveFolder("");
|
||||
// documentLocation left null
|
||||
documentService.applyBulkEditToDocument(id, dto, null);
|
||||
|
||||
assertThat(doc.getArchiveBox()).isEqualTo("KeepBox");
|
||||
assertThat(doc.getArchiveFolder()).isEqualTo("KeepFolder");
|
||||
assertThat(doc.getDocumentLocation()).isEqualTo("KeepLocation");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -197,39 +197,4 @@ class FileServiceTest {
|
||||
.isInstanceOf(IOException.class)
|
||||
.hasMessageContaining("Failed to download");
|
||||
}
|
||||
|
||||
// ─── downloadFileStream ────────────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
void downloadFileStream_returnsStreamableContent() throws IOException {
|
||||
byte[] content = "streamed bytes".getBytes();
|
||||
GetObjectResponse response = GetObjectResponse.builder().contentType("application/pdf").build();
|
||||
ResponseInputStream<GetObjectResponse> stream = new ResponseInputStream<>(
|
||||
response, AbortableInputStream.create(new ByteArrayInputStream(content)));
|
||||
when(s3Client.getObject(any(GetObjectRequest.class))).thenReturn(stream);
|
||||
|
||||
try (java.io.InputStream result = fileService.downloadFileStream("documents/file.pdf")) {
|
||||
assertThat(result.readAllBytes()).isEqualTo(content);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void downloadFileStream_throwsStorageFileNotFoundException_whenNoSuchKey() {
|
||||
NoSuchKeyException ex = NoSuchKeyException.builder().message("not found").statusCode(404).build();
|
||||
when(s3Client.getObject(any(GetObjectRequest.class))).thenThrow(ex);
|
||||
|
||||
assertThatThrownBy(() -> fileService.downloadFileStream("missing/key.pdf"))
|
||||
.isInstanceOf(FileService.StorageFileNotFoundException.class)
|
||||
.hasMessageContaining("missing/key.pdf");
|
||||
}
|
||||
|
||||
@Test
|
||||
void downloadFileStream_throwsIOException_whenS3Exception() {
|
||||
S3Exception ex = (S3Exception) S3Exception.builder().message("storage error").statusCode(503).build();
|
||||
when(s3Client.getObject(any(GetObjectRequest.class))).thenThrow(ex);
|
||||
|
||||
assertThatThrownBy(() -> fileService.downloadFileStream("documents/file.pdf"))
|
||||
.isInstanceOf(IOException.class)
|
||||
.hasMessageContaining("Failed to open stream");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -39,13 +39,12 @@ class MassImportServiceTest {
|
||||
@Mock PersonService personService;
|
||||
@Mock TagService tagService;
|
||||
@Mock S3Client s3Client;
|
||||
@Mock ThumbnailAsyncRunner thumbnailAsyncRunner;
|
||||
|
||||
MassImportService service;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
service = new MassImportService(documentRepository, personService, tagService, s3Client, thumbnailAsyncRunner);
|
||||
service = new MassImportService(documentRepository, personService, tagService, s3Client);
|
||||
ReflectionTestUtils.setField(service, "bucketName", "test-bucket");
|
||||
ReflectionTestUtils.setField(service, "colIndex", 0);
|
||||
ReflectionTestUtils.setField(service, "colBox", 1);
|
||||
|
||||
@@ -114,43 +114,6 @@ class PersonServiceTest {
|
||||
assertThat(result.getAlias()).isEqualTo("Hans Müller");
|
||||
}
|
||||
|
||||
// ─── personType + title in createPerson(PersonUpdateDTO) ─────────────────
|
||||
|
||||
@Test
|
||||
void createPerson_dto_persistsPersonType() {
|
||||
when(personRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));
|
||||
|
||||
PersonUpdateDTO dto = new PersonUpdateDTO();
|
||||
dto.setFirstName("Walter"); dto.setLastName("de Gruyter"); dto.setPersonType(PersonType.INSTITUTION);
|
||||
|
||||
Person result = personService.createPerson(dto);
|
||||
|
||||
assertThat(result.getPersonType()).isEqualTo(PersonType.INSTITUTION);
|
||||
}
|
||||
|
||||
@Test
|
||||
void createPerson_dto_throwsInvalidPersonType_whenSkip() {
|
||||
PersonUpdateDTO dto = new PersonUpdateDTO();
|
||||
dto.setFirstName("Anna"); dto.setLastName("Test"); dto.setPersonType(PersonType.SKIP);
|
||||
|
||||
assertThatThrownBy(() -> personService.createPerson(dto))
|
||||
.isInstanceOf(DomainException.class)
|
||||
.extracting(e -> ((DomainException) e).getStatus().value())
|
||||
.isEqualTo(400);
|
||||
}
|
||||
|
||||
@Test
|
||||
void createPerson_dto_persistsTitle() {
|
||||
when(personRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));
|
||||
|
||||
PersonUpdateDTO dto = new PersonUpdateDTO();
|
||||
dto.setFirstName("Dr."); dto.setLastName("Müller"); dto.setTitle("Prof."); dto.setPersonType(PersonType.PERSON);
|
||||
|
||||
Person result = personService.createPerson(dto);
|
||||
|
||||
assertThat(result.getTitle()).isEqualTo("Prof.");
|
||||
}
|
||||
|
||||
// ─── Phase 2.1: createPerson(PersonUpdateDTO) ─────────────────────────────
|
||||
|
||||
@Test
|
||||
@@ -182,36 +145,6 @@ class PersonServiceTest {
|
||||
.isEqualTo(400);
|
||||
}
|
||||
|
||||
// ─── updatePerson (personType) ───────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
void updatePerson_throwsInvalidPersonType_whenSkip() {
|
||||
UUID id = UUID.randomUUID();
|
||||
|
||||
PersonUpdateDTO dto = new PersonUpdateDTO();
|
||||
dto.setFirstName("Anna"); dto.setLastName("Alt"); dto.setPersonType(PersonType.SKIP);
|
||||
|
||||
assertThatThrownBy(() -> personService.updatePerson(id, dto))
|
||||
.isInstanceOf(DomainException.class)
|
||||
.extracting(e -> ((DomainException) e).getStatus().value())
|
||||
.isEqualTo(400);
|
||||
}
|
||||
|
||||
@Test
|
||||
void updatePerson_persistsPersonType() {
|
||||
UUID id = UUID.randomUUID();
|
||||
Person person = Person.builder().id(id).firstName("Anna").lastName("Alt").personType(PersonType.PERSON).build();
|
||||
when(personRepository.findById(id)).thenReturn(Optional.of(person));
|
||||
when(personRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));
|
||||
|
||||
PersonUpdateDTO dto = new PersonUpdateDTO();
|
||||
dto.setFirstName("Anna"); dto.setLastName("Alt"); dto.setPersonType(PersonType.INSTITUTION);
|
||||
|
||||
Person result = personService.updatePerson(id, dto);
|
||||
|
||||
assertThat(result.getPersonType()).isEqualTo(PersonType.INSTITUTION);
|
||||
}
|
||||
|
||||
// ─── updatePerson (alias) ─────────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
|
||||
@@ -1,118 +0,0 @@
|
||||
package org.raddatz.familienarchiv.service;
|
||||
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.repository.DocumentRepository;
|
||||
import org.springframework.test.util.ReflectionTestUtils;
|
||||
import org.springframework.transaction.support.TransactionSynchronization;
|
||||
import org.springframework.transaction.support.TransactionSynchronizationManager;
|
||||
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
class ThumbnailAsyncRunnerTest {
|
||||
|
||||
private DocumentRepository documentRepository;
|
||||
private ThumbnailService thumbnailService;
|
||||
private ThumbnailAsyncRunner runner;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
documentRepository = mock(DocumentRepository.class);
|
||||
thumbnailService = mock(ThumbnailService.class);
|
||||
runner = new ThumbnailAsyncRunner(documentRepository, thumbnailService);
|
||||
}
|
||||
|
||||
@Test
|
||||
void dispatchAfterCommit_whenNoTransaction_dispatchesImmediately() {
|
||||
UUID id = UUID.randomUUID();
|
||||
Document doc = Document.builder().id(id).originalFilename("f.pdf").title("t").build();
|
||||
when(documentRepository.findById(id)).thenReturn(Optional.of(doc));
|
||||
|
||||
runner.dispatchAfterCommit(id);
|
||||
|
||||
verify(thumbnailService).generate(doc);
|
||||
}
|
||||
|
||||
@Test
|
||||
void dispatchAfterCommit_whenTransactionActive_registersAfterCommitSynchronization() {
|
||||
UUID id = UUID.randomUUID();
|
||||
Document doc = Document.builder().id(id).originalFilename("f.pdf").title("t").build();
|
||||
when(documentRepository.findById(id)).thenReturn(Optional.of(doc));
|
||||
|
||||
TransactionSynchronizationManager.initSynchronization();
|
||||
try {
|
||||
runner.dispatchAfterCommit(id);
|
||||
|
||||
// Nothing fired yet — registered, not executed
|
||||
verify(thumbnailService, never()).generate(any());
|
||||
|
||||
// Simulate commit
|
||||
ArgumentCaptor<TransactionSynchronization> captor =
|
||||
ArgumentCaptor.forClass(TransactionSynchronization.class);
|
||||
assertThat(TransactionSynchronizationManager.getSynchronizations()).hasSize(1);
|
||||
TransactionSynchronizationManager.getSynchronizations().get(0).afterCommit();
|
||||
|
||||
verify(thumbnailService).generate(doc);
|
||||
} finally {
|
||||
TransactionSynchronizationManager.clearSynchronization();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void dispatchAfterCommit_whenRollback_doesNotDispatch() {
|
||||
UUID id = UUID.randomUUID();
|
||||
Document doc = Document.builder().id(id).originalFilename("f.pdf").title("t").build();
|
||||
when(documentRepository.findById(id)).thenReturn(Optional.of(doc));
|
||||
|
||||
TransactionSynchronizationManager.initSynchronization();
|
||||
try {
|
||||
runner.dispatchAfterCommit(id);
|
||||
|
||||
// Simulate rollback — afterCompletion with STATUS_ROLLED_BACK, no afterCommit fired
|
||||
TransactionSynchronizationManager.getSynchronizations().get(0)
|
||||
.afterCompletion(TransactionSynchronization.STATUS_ROLLED_BACK);
|
||||
|
||||
verify(thumbnailService, never()).generate(any());
|
||||
} finally {
|
||||
TransactionSynchronizationManager.clearSynchronization();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void generateAsync_skipsWhenDocumentMissing() {
|
||||
UUID id = UUID.randomUUID();
|
||||
when(documentRepository.findById(id)).thenReturn(Optional.empty());
|
||||
|
||||
runner.generateAsync(id);
|
||||
|
||||
verifyNoInteractions(thumbnailService);
|
||||
}
|
||||
|
||||
@Test
|
||||
void generateAsync_timesOutWhenGenerateExceedsLimit() throws Exception {
|
||||
UUID id = UUID.randomUUID();
|
||||
Document doc = Document.builder().id(id).originalFilename("f.pdf").title("t").build();
|
||||
when(documentRepository.findById(id)).thenReturn(Optional.of(doc));
|
||||
// generate sleeps longer than the timeout — simulates a hung PDFBox render
|
||||
when(thumbnailService.generate(doc)).thenAnswer(inv -> {
|
||||
Thread.sleep(5_000);
|
||||
return ThumbnailService.Outcome.SUCCESS;
|
||||
});
|
||||
// Shrink timeout for the test
|
||||
ReflectionTestUtils.setField(runner, "generateTimeoutSeconds", 1L);
|
||||
|
||||
long start = System.currentTimeMillis();
|
||||
runner.generateAsync(id);
|
||||
long elapsed = System.currentTimeMillis() - start;
|
||||
|
||||
// Must return before the 5s sleep — within ~2s with timeout=1s plus overhead
|
||||
assertThat(elapsed).isLessThan(3_000);
|
||||
}
|
||||
}
|
||||
@@ -1,154 +0,0 @@
|
||||
package org.raddatz.familienarchiv.service;
|
||||
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.raddatz.familienarchiv.exception.DomainException;
|
||||
import org.raddatz.familienarchiv.exception.ErrorCode;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.repository.DocumentRepository;
|
||||
import org.springframework.test.util.ReflectionTestUtils;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.assertj.core.api.Assertions.assertThatThrownBy;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
/**
 * Unit tests for {@code ThumbnailBackfillService}: batch regeneration of
 * thumbnails for documents that have a file but no thumbnail key, with
 * progress reported through {@code BackfillStatus}.
 *
 * <p>Pure Mockito tests. The assertions check final status immediately after
 * {@code runBackfillAsync()} returns — without Spring's async proxying the
 * method runs inline here.
 */
class ThumbnailBackfillServiceTest {

    private DocumentRepository documentRepository;
    private ThumbnailService thumbnailService;
    private ThumbnailBackfillService backfillService;

    @BeforeEach
    void setUp() {
        documentRepository = mock(DocumentRepository.class);
        thumbnailService = mock(ThumbnailService.class);
        backfillService = new ThumbnailBackfillService(documentRepository, thumbnailService);
    }

    // A fresh service reports IDLE with zeroed counters and no start timestamp.
    @Test
    void initialStatus_isIdle() {
        ThumbnailBackfillService.BackfillStatus status = backfillService.getStatus();

        assertThat(status.state()).isEqualTo(ThumbnailBackfillService.State.IDLE);
        assertThat(status.total()).isZero();
        assertThat(status.processed()).isZero();
        assertThat(status.startedAt()).isNull();
    }

    // Happy path: every candidate document is processed and the run ends DONE.
    @Test
    void runBackfillAsync_processesAllDocumentsAndFinishesDone() {
        Document a = doc();
        Document b = doc();
        Document c = doc();
        when(documentRepository.findByFilePathIsNotNullAndThumbnailKeyIsNull())
                .thenReturn(List.of(a, b, c));
        when(thumbnailService.generate(any())).thenReturn(ThumbnailService.Outcome.SUCCESS);

        backfillService.runBackfillAsync();

        ThumbnailBackfillService.BackfillStatus status = backfillService.getStatus();
        assertThat(status.state()).isEqualTo(ThumbnailBackfillService.State.DONE);
        assertThat(status.total()).isEqualTo(3);
        assertThat(status.processed()).isEqualTo(3);
        assertThat(status.skipped()).isZero();
        assertThat(status.failed()).isZero();
        verify(thumbnailService, times(3)).generate(any());
    }

    // SKIPPED outcomes increment the skipped counter, not processed.
    @Test
    void runBackfillAsync_countsSkippedSeparately() {
        Document a = doc();
        Document b = doc();
        when(documentRepository.findByFilePathIsNotNullAndThumbnailKeyIsNull())
                .thenReturn(List.of(a, b));
        when(thumbnailService.generate(a)).thenReturn(ThumbnailService.Outcome.SUCCESS);
        when(thumbnailService.generate(b)).thenReturn(ThumbnailService.Outcome.SKIPPED);

        backfillService.runBackfillAsync();

        ThumbnailBackfillService.BackfillStatus status = backfillService.getStatus();
        assertThat(status.state()).isEqualTo(ThumbnailBackfillService.State.DONE);
        assertThat(status.processed()).isEqualTo(1);
        assertThat(status.skipped()).isEqualTo(1);
        assertThat(status.failed()).isZero();
    }

    // A FAILED outcome for one document must not abort the rest of the batch.
    @Test
    void runBackfillAsync_continuesAfterFailureAndCountsIt() {
        Document a = doc();
        Document b = doc();
        Document c = doc();
        when(documentRepository.findByFilePathIsNotNullAndThumbnailKeyIsNull())
                .thenReturn(List.of(a, b, c));
        when(thumbnailService.generate(a)).thenReturn(ThumbnailService.Outcome.SUCCESS);
        when(thumbnailService.generate(b)).thenReturn(ThumbnailService.Outcome.FAILED);
        when(thumbnailService.generate(c)).thenReturn(ThumbnailService.Outcome.SUCCESS);

        backfillService.runBackfillAsync();

        ThumbnailBackfillService.BackfillStatus status = backfillService.getStatus();
        assertThat(status.state()).isEqualTo(ThumbnailBackfillService.State.DONE);
        assertThat(status.processed()).isEqualTo(2);
        assertThat(status.failed()).isEqualTo(1);
        verify(thumbnailService, times(3)).generate(any());
    }

    // Even an unexpected RuntimeException is counted as failed and the loop continues.
    @Test
    void runBackfillAsync_continuesWhenServiceThrowsUnexpectedException() {
        Document a = doc();
        Document b = doc();
        when(documentRepository.findByFilePathIsNotNullAndThumbnailKeyIsNull())
                .thenReturn(List.of(a, b));
        when(thumbnailService.generate(a)).thenThrow(new RuntimeException("boom"));
        when(thumbnailService.generate(b)).thenReturn(ThumbnailService.Outcome.SUCCESS);

        backfillService.runBackfillAsync();

        ThumbnailBackfillService.BackfillStatus status = backfillService.getStatus();
        assertThat(status.state()).isEqualTo(ThumbnailBackfillService.State.DONE);
        assertThat(status.processed()).isEqualTo(1);
        assertThat(status.failed()).isEqualTo(1);
    }

    // Starting while a run is in progress raises the dedicated domain error.
    @Test
    void runBackfillAsync_rejectsConcurrentStart() {
        // Force state=RUNNING via reflection
        ThumbnailBackfillService.BackfillStatus running = new ThumbnailBackfillService.BackfillStatus(
                ThumbnailBackfillService.State.RUNNING, "running", 10, 5, 0, 0, LocalDateTime.now());
        ReflectionTestUtils.setField(backfillService, "currentStatus", running);

        assertThatThrownBy(() -> backfillService.runBackfillAsync())
                .isInstanceOf(DomainException.class)
                .satisfies(ex -> assertThat(((DomainException) ex).getCode())
                        .isEqualTo(ErrorCode.THUMBNAIL_BACKFILL_ALREADY_RUNNING));
    }

    // The final status carries a start timestamp and a human-readable message.
    @Test
    void runBackfillAsync_setsStartedAtAndMessage() {
        when(documentRepository.findByFilePathIsNotNullAndThumbnailKeyIsNull())
                .thenReturn(List.of(doc()));
        when(thumbnailService.generate(any())).thenReturn(ThumbnailService.Outcome.SUCCESS);

        LocalDateTime before = LocalDateTime.now().minusSeconds(1);
        backfillService.runBackfillAsync();

        ThumbnailBackfillService.BackfillStatus status = backfillService.getStatus();
        assertThat(status.startedAt()).isAfter(before);
        assertThat(status.message()).isNotBlank();
    }

    // Minimal backfill candidate: has a stored file, no thumbnail key.
    private Document doc() {
        return Document.builder()
                .id(UUID.randomUUID())
                .title("t")
                .originalFilename("f.pdf")
                .filePath("documents/f.pdf")
                .contentType("application/pdf")
                .build();
    }
}
|
||||
@@ -1,142 +0,0 @@
|
||||
package org.raddatz.familienarchiv.service;
|
||||
|
||||
import org.apache.pdfbox.pdmodel.PDDocument;
|
||||
import org.apache.pdfbox.pdmodel.PDPage;
|
||||
import org.apache.pdfbox.pdmodel.common.PDRectangle;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.raddatz.familienarchiv.PostgresContainerConfig;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.model.DocumentStatus;
|
||||
import org.raddatz.familienarchiv.repository.DocumentRepository;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
import org.springframework.context.annotation.Import;
|
||||
import org.springframework.test.context.DynamicPropertyRegistry;
|
||||
import org.springframework.test.context.DynamicPropertySource;
|
||||
import org.testcontainers.containers.GenericContainer;
|
||||
import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
|
||||
import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
|
||||
import software.amazon.awssdk.core.sync.RequestBody;
|
||||
import software.amazon.awssdk.regions.Region;
|
||||
import software.amazon.awssdk.services.s3.S3Client;
|
||||
import software.amazon.awssdk.services.s3.S3Configuration;
|
||||
import software.amazon.awssdk.services.s3.model.CreateBucketRequest;
|
||||
import software.amazon.awssdk.services.s3.model.GetObjectRequest;
|
||||
import software.amazon.awssdk.services.s3.model.PutObjectRequest;
|
||||
|
||||
import javax.imageio.ImageIO;
|
||||
import java.awt.image.BufferedImage;
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URI;
|
||||
import java.util.UUID;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
/**
 * Full round-trip integration test against real MinIO and real Postgres. Catches S3
 * signing / presigning issues that a mocked S3Client would miss — the rest of the
 * test pyramid mocks at the FileService boundary.
 *
 * <p>The MinIO container is started once in a static initializer so its mapped
 * port is available when {@code @DynamicPropertySource} runs; Testcontainers'
 * Ryuk sidecar reaps it after the JVM exits.
 */
@SpringBootTest
@Import(PostgresContainerConfig.class)
class ThumbnailServiceIntegrationTest {

    private static final String BUCKET = "archive-documents";
    private static final String ACCESS_KEY = "minioadmin";
    private static final String SECRET_KEY = "minioadmin";

    // Pinned MinIO release so the test is reproducible across environments.
    static GenericContainer<?> minio = new GenericContainer<>("minio/minio:RELEASE.2024-06-13T22-53-53Z")
            .withEnv("MINIO_ROOT_USER", ACCESS_KEY)
            .withEnv("MINIO_ROOT_PASSWORD", SECRET_KEY)
            .withCommand("server /data")
            .withExposedPorts(9000);

    static {
        // Must start before @DynamicPropertySource reads the mapped port.
        minio.start();
    }

    // Point the application's S3 client at the throwaway MinIO container.
    @DynamicPropertySource
    static void s3Properties(DynamicPropertyRegistry registry) {
        registry.add("app.s3.endpoint", () -> "http://" + minio.getHost() + ":" + minio.getMappedPort(9000));
        registry.add("app.s3.access-key", () -> ACCESS_KEY);
        registry.add("app.s3.secret-key", () -> SECRET_KEY);
        registry.add("app.s3.bucket", () -> BUCKET);
        registry.add("app.s3.region", () -> "eu-central-1");
    }

    @Autowired S3Client s3Client;
    @Autowired ThumbnailService thumbnailService;
    @Autowired DocumentRepository documentRepository;

    @Test
    void generate_writesDecodableJpegToMinio_readbackMatches() throws IOException {
        // Ensure bucket exists (the real app has a bootstrap container for this; in tests we do it here).
        // Re-creating is a no-op; wrap in try/catch because the SDK throws on "already owned".
        try (S3Client bootstrap = buildClient()) {
            try {
                bootstrap.createBucket(CreateBucketRequest.builder().bucket(BUCKET).build());
            } catch (Exception ignored) {
                // already exists
            }
        }

        // Persist first so Hibernate assigns the UUID — avoids StaleObjectState on a pre-set id
        Document persisted = documentRepository.save(Document.builder()
                .title("IT Doc")
                .originalFilename("test.pdf")
                .status(DocumentStatus.UPLOADED)
                .contentType("application/pdf")
                .build());
        UUID docId = persisted.getId();
        String pdfKey = "documents/" + docId + "_test.pdf";

        // Upload a one-page PDF for the service to render.
        s3Client.putObject(PutObjectRequest.builder()
                        .bucket(BUCKET)
                        .key(pdfKey)
                        .contentType("application/pdf")
                        .build(),
                RequestBody.fromBytes(createSamplePdf()));

        persisted.setFilePath(pdfKey);
        persisted = documentRepository.save(persisted);

        ThumbnailService.Outcome outcome = thumbnailService.generate(persisted);

        assertThat(outcome).isEqualTo(ThumbnailService.Outcome.SUCCESS);

        // Entity bookkeeping: stable thumbnail key plus a generation timestamp.
        Document reloaded = documentRepository.findById(docId).orElseThrow();
        assertThat(reloaded.getThumbnailKey()).isEqualTo("thumbnails/" + docId + ".jpg");
        assertThat(reloaded.getThumbnailGeneratedAt()).isNotNull();

        // Read back from MinIO and verify it decodes as a JPEG of the expected width
        try (InputStream in = s3Client.getObject(GetObjectRequest.builder()
                .bucket(BUCKET).key(reloaded.getThumbnailKey()).build())) {
            byte[] jpegBytes = in.readAllBytes();
            BufferedImage decoded = ImageIO.read(new ByteArrayInputStream(jpegBytes));
            assertThat(decoded).isNotNull();
            assertThat(decoded.getWidth()).isEqualTo(240);
        }
    }

    // Standalone client for bucket bootstrap; path-style access is required by MinIO.
    private static S3Client buildClient() {
        return S3Client.builder()
                .endpointOverride(URI.create("http://" + minio.getHost() + ":" + minio.getMappedPort(9000)))
                .serviceConfiguration(S3Configuration.builder().pathStyleAccessEnabled(true).build())
                .region(Region.of("eu-central-1"))
                .credentialsProvider(StaticCredentialsProvider.create(
                        AwsBasicCredentials.create(ACCESS_KEY, SECRET_KEY)))
                .build();
    }

    // Minimal valid one-page A4 PDF built with PDFBox, returned as raw bytes.
    private static byte[] createSamplePdf() throws IOException {
        try (PDDocument pdf = new PDDocument()) {
            pdf.addPage(new PDPage(PDRectangle.A4));
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            pdf.save(bos);
            return bos.toByteArray();
        }
    }
}
|
||||
@@ -1,367 +0,0 @@
|
||||
package org.raddatz.familienarchiv.service;
|
||||
|
||||
import org.apache.pdfbox.pdmodel.PDDocument;
|
||||
import org.apache.pdfbox.pdmodel.PDPage;
|
||||
import org.apache.pdfbox.pdmodel.PDPageContentStream;
|
||||
import org.apache.pdfbox.pdmodel.common.PDRectangle;
|
||||
import org.apache.pdfbox.pdmodel.font.PDType1Font;
|
||||
import org.apache.pdfbox.pdmodel.font.Standard14Fonts;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.model.DocumentStatus;
|
||||
import org.raddatz.familienarchiv.model.ThumbnailAspect;
|
||||
import org.raddatz.familienarchiv.repository.DocumentRepository;
|
||||
import org.springframework.test.util.ReflectionTestUtils;
|
||||
import software.amazon.awssdk.core.sync.RequestBody;
|
||||
import software.amazon.awssdk.services.s3.S3Client;
|
||||
import software.amazon.awssdk.services.s3.model.PutObjectRequest;
|
||||
import software.amazon.awssdk.services.s3.model.S3Exception;
|
||||
|
||||
import javax.imageio.ImageIO;
|
||||
import java.awt.Color;
|
||||
import java.awt.Graphics2D;
|
||||
import java.awt.image.BufferedImage;
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.UUID;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.anyString;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
class ThumbnailServiceTest {
|
||||
|
||||
private FileService fileService;
|
||||
private S3Client s3Client;
|
||||
private DocumentRepository documentRepository;
|
||||
private ThumbnailService thumbnailService;
|
||||
|
||||
@BeforeEach
void setUp() {
    // All collaborators are mocked; ThumbnailService is exercised as a pure unit.
    fileService = mock(FileService.class);
    s3Client = mock(S3Client.class);
    documentRepository = mock(DocumentRepository.class);
    thumbnailService = new ThumbnailService(fileService, s3Client, documentRepository);
    // Bucket name is injected from configuration in production; set it directly here.
    ReflectionTestUtils.setField(thumbnailService, "bucketName", "test-bucket");
    // save() echoes its argument so tests can assert on the mutated entity.
    when(documentRepository.save(any(Document.class))).thenAnswer(i -> i.getArgument(0));
}
|
||||
|
||||
@Test
|
||||
void generate_returnsSkipped_whenDocumentHasNoFilePath() {
|
||||
Document doc = makeDoc("application/pdf", null);
|
||||
|
||||
ThumbnailService.Outcome outcome = thumbnailService.generate(doc);
|
||||
|
||||
assertThat(outcome).isEqualTo(ThumbnailService.Outcome.SKIPPED);
|
||||
verifyNoInteractions(s3Client);
|
||||
assertThat(doc.getThumbnailKey()).isNull();
|
||||
}
|
||||
|
||||
@Test
|
||||
void generate_returnsSkipped_forUnsupportedContentType() throws IOException {
|
||||
Document doc = makeDoc("application/msword", "documents/letter.doc");
|
||||
when(fileService.downloadFileStream(anyString()))
|
||||
.thenReturn(new ByteArrayInputStream(new byte[]{1, 2, 3}));
|
||||
|
||||
ThumbnailService.Outcome outcome = thumbnailService.generate(doc);
|
||||
|
||||
assertThat(outcome).isEqualTo(ThumbnailService.Outcome.SKIPPED);
|
||||
verifyNoInteractions(s3Client);
|
||||
assertThat(doc.getThumbnailKey()).isNull();
|
||||
}
|
||||
|
||||
@Test
void generate_rendersPdf_uploadsJpeg_updatesEntity() throws IOException {
    // Happy path: PDF source -> rendered JPEG uploaded to S3 -> entity updated and saved.
    Document doc = makeDoc("application/pdf", "documents/letter.pdf");
    byte[] pdfBytes = createSamplePdf();
    when(fileService.downloadFileStream(anyString()))
            .thenReturn(new ByteArrayInputStream(pdfBytes));

    ThumbnailService.Outcome outcome = thumbnailService.generate(doc);

    assertThat(outcome).isEqualTo(ThumbnailService.Outcome.SUCCESS);

    // Capture the exact S3 upload to validate bucket, key, content type and payload.
    ArgumentCaptor<PutObjectRequest> putCaptor = ArgumentCaptor.forClass(PutObjectRequest.class);
    ArgumentCaptor<RequestBody> bodyCaptor = ArgumentCaptor.forClass(RequestBody.class);
    verify(s3Client).putObject(putCaptor.capture(), bodyCaptor.capture());

    PutObjectRequest req = putCaptor.getValue();
    assertThat(req.bucket()).isEqualTo("test-bucket");
    assertThat(req.key()).isEqualTo("thumbnails/" + doc.getId() + ".jpg");
    assertThat(req.contentType()).isEqualTo("image/jpeg");

    // The uploaded bytes must decode as a real JPEG at the fixed 240px thumbnail width.
    byte[] uploaded = readAll(bodyCaptor.getValue().contentStreamProvider().newStream());
    BufferedImage jpg = ImageIO.read(new ByteArrayInputStream(uploaded));
    assertThat(jpg).isNotNull();
    assertThat(jpg.getWidth()).isEqualTo(240);

    // Entity bookkeeping: key and timestamp set on the document, then persisted.
    assertThat(doc.getThumbnailKey()).isEqualTo("thumbnails/" + doc.getId() + ".jpg");
    assertThat(doc.getThumbnailGeneratedAt()).isNotNull();
    verify(documentRepository).save(doc);
}
|
||||
|
||||
@Test
|
||||
void generate_rendersPng_uploadsJpegAtWidth240() throws IOException {
|
||||
Document doc = makeDoc("image/png", "documents/scan.png");
|
||||
when(fileService.downloadFileStream(anyString()))
|
||||
.thenReturn(new ByteArrayInputStream(createSamplePng(600, 800)));
|
||||
|
||||
ThumbnailService.Outcome outcome = thumbnailService.generate(doc);
|
||||
|
||||
assertThat(outcome).isEqualTo(ThumbnailService.Outcome.SUCCESS);
|
||||
ArgumentCaptor<RequestBody> bodyCaptor = ArgumentCaptor.forClass(RequestBody.class);
|
||||
verify(s3Client).putObject(any(PutObjectRequest.class), bodyCaptor.capture());
|
||||
byte[] uploaded = readAll(bodyCaptor.getValue().contentStreamProvider().newStream());
|
||||
BufferedImage jpg = ImageIO.read(new ByteArrayInputStream(uploaded));
|
||||
assertThat(jpg.getWidth()).isEqualTo(240);
|
||||
assertThat(jpg.getHeight()).isEqualTo(320); // 600x800 -> 240x320
|
||||
}
|
||||
|
||||
@Test
|
||||
void generate_rendersJpeg_uploadsScaledJpeg() throws IOException {
|
||||
Document doc = makeDoc("image/jpeg", "documents/photo.jpg");
|
||||
when(fileService.downloadFileStream(anyString()))
|
||||
.thenReturn(new ByteArrayInputStream(createSampleJpeg(800, 400)));
|
||||
|
||||
ThumbnailService.Outcome outcome = thumbnailService.generate(doc);
|
||||
|
||||
assertThat(outcome).isEqualTo(ThumbnailService.Outcome.SUCCESS);
|
||||
ArgumentCaptor<RequestBody> bodyCaptor = ArgumentCaptor.forClass(RequestBody.class);
|
||||
verify(s3Client).putObject(any(PutObjectRequest.class), bodyCaptor.capture());
|
||||
BufferedImage jpg = ImageIO.read(new ByteArrayInputStream(
|
||||
readAll(bodyCaptor.getValue().contentStreamProvider().newStream())));
|
||||
assertThat(jpg.getWidth()).isEqualTo(240);
|
||||
assertThat(jpg.getHeight()).isEqualTo(120); // 800x400 -> 240x120
|
||||
}
|
||||
|
||||
@Test
void generate_returnsFailed_whenS3PutThrows() throws IOException {
    // When the S3 upload fails, the outcome is FAILED and the document entity
    // must remain untouched (no thumbnail key, no save).
    Document doc = makeDoc("application/pdf", "documents/letter.pdf");
    when(fileService.downloadFileStream(anyString()))
            .thenReturn(new ByteArrayInputStream(createSamplePdf()));
    // Cast needed: the generic builder's build() return type is not S3Exception.
    when(s3Client.putObject(any(PutObjectRequest.class), any(RequestBody.class)))
            .thenThrow((S3Exception) S3Exception.builder().message("quota exceeded").statusCode(507).build());

    ThumbnailService.Outcome outcome = thumbnailService.generate(doc);

    assertThat(outcome).isEqualTo(ThumbnailService.Outcome.FAILED);
    assertThat(doc.getThumbnailKey()).isNull();
    verify(documentRepository, never()).save(any());
}
|
||||
|
||||
@Test
|
||||
void generate_returnsFailed_whenSourceStreamThrows() throws IOException {
|
||||
Document doc = makeDoc("application/pdf", "documents/letter.pdf");
|
||||
when(fileService.downloadFileStream(anyString()))
|
||||
.thenThrow(new IOException("network blip"));
|
||||
|
||||
ThumbnailService.Outcome outcome = thumbnailService.generate(doc);
|
||||
|
||||
assertThat(outcome).isEqualTo(ThumbnailService.Outcome.FAILED);
|
||||
verifyNoInteractions(s3Client);
|
||||
verify(documentRepository, never()).save(any());
|
||||
}
|
||||
|
||||
@Test
void generate_persistsPageCount_ofOne_forSingleImageUpload() throws IOException {
    // Image uploads are always a single page from the user's perspective.
    Document doc = makeDoc("image/png", "documents/scan.png");
    when(fileService.downloadFileStream(anyString()))
            .thenReturn(new ByteArrayInputStream(createSamplePng(600, 800)));

    ThumbnailService.Outcome outcome = thumbnailService.generate(doc);

    assertThat(outcome).isEqualTo(ThumbnailService.Outcome.SUCCESS);
    // generate() mutates the passed entity in place.
    assertThat(doc.getPageCount()).isEqualTo(1);
}
|
||||
|
||||
@Test
void generate_persistsPageCount_fromPdfDocument() throws IOException {
    // For PDFs the page count comes from the document itself (3-page fixture).
    Document doc = makeDoc("application/pdf", "documents/multi.pdf");
    when(fileService.downloadFileStream(anyString()))
            .thenReturn(new ByteArrayInputStream(createSamplePdf(3)));

    ThumbnailService.Outcome outcome = thumbnailService.generate(doc);

    assertThat(outcome).isEqualTo(ThumbnailService.Outcome.SUCCESS);
    assertThat(doc.getPageCount()).isEqualTo(3);
}
|
||||
|
||||
@Test
void generate_persistsPortraitAspect_forTypicalPortraitSourceImage() throws IOException {
    // 600x800 → ratio w/h = 0.75 → below 1.1 threshold → PORTRAIT.
    Document doc = makeDoc("image/png", "documents/portrait.png");
    when(fileService.downloadFileStream(anyString()))
            .thenReturn(new ByteArrayInputStream(createSamplePng(600, 800)));

    ThumbnailService.Outcome outcome = thumbnailService.generate(doc);

    assertThat(outcome).isEqualTo(ThumbnailService.Outcome.SUCCESS);
    assertThat(doc.getThumbnailAspect()).isEqualTo(ThumbnailAspect.PORTRAIT);
}
|
||||
|
||||
@Test
|
||||
void generate_persistsLandscapeAspect_whenWidthIsWellAboveHeight() throws IOException {
|
||||
// 800x400 → ratio 2.0 → clearly above 1.1 → LANDSCAPE.
|
||||
Document doc = makeDoc("image/jpeg", "documents/wide.jpg");
|
||||
when(fileService.downloadFileStream(anyString()))
|
||||
.thenReturn(new ByteArrayInputStream(createSampleJpeg(800, 400)));
|
||||
|
||||
ThumbnailService.Outcome outcome = thumbnailService.generate(doc);
|
||||
|
||||
assertThat(outcome).isEqualTo(ThumbnailService.Outcome.SUCCESS);
|
||||
assertThat(doc.getThumbnailAspect()).isEqualTo(ThumbnailAspect.LANDSCAPE);
|
||||
}
|
||||
|
||||
@Test
void generate_persistsPortraitAspect_whenSquareImage_belowLandscapeThreshold() throws IOException {
    // 500x500 → ratio 1.0 → below 1.1 threshold → PORTRAIT (A4 scans often
    // come in at near-square and we want them to live in the portrait tile).
    Document doc = makeDoc("image/png", "documents/square.png");
    when(fileService.downloadFileStream(anyString()))
            .thenReturn(new ByteArrayInputStream(createSamplePng(500, 500)));

    ThumbnailService.Outcome outcome = thumbnailService.generate(doc);

    assertThat(outcome).isEqualTo(ThumbnailService.Outcome.SUCCESS);
    assertThat(doc.getThumbnailAspect()).isEqualTo(ThumbnailAspect.PORTRAIT);
}
|
||||
|
||||
@Test
void generate_persistsPortraitAspect_justUnderLandscapeThreshold() throws IOException {
    // Boundary case: 1099x1000 → ratio 1.099 → just under 1.1 threshold → PORTRAIT.
    Document doc = makeDoc("image/png", "documents/near_threshold.png");
    when(fileService.downloadFileStream(anyString()))
            .thenReturn(new ByteArrayInputStream(createSamplePng(1099, 1000)));

    ThumbnailService.Outcome outcome = thumbnailService.generate(doc);

    assertThat(outcome).isEqualTo(ThumbnailService.Outcome.SUCCESS);
    assertThat(doc.getThumbnailAspect()).isEqualTo(ThumbnailAspect.PORTRAIT);
}
|
||||
|
||||
@Test
void generate_returnsFailed_whenImageBytesAreCorrupt() throws IOException {
    // Truncated JPEG header — ImageIO returns null rather than throwing.
    // Without the corrupt-image guard this would later NPE inside the aspect /
    // dimension computation in scaleToWidth.
    Document doc = makeDoc("image/jpeg", "documents/corrupt.jpg");
    // 0xFFD8FFE0 is the JFIF SOI/APP0 marker prefix with no image data behind it.
    byte[] truncated = new byte[]{(byte) 0xFF, (byte) 0xD8, (byte) 0xFF, (byte) 0xE0};
    when(fileService.downloadFileStream(anyString()))
            .thenReturn(new ByteArrayInputStream(truncated));

    ThumbnailService.Outcome outcome = thumbnailService.generate(doc);

    assertThat(outcome).isEqualTo(ThumbnailService.Outcome.FAILED);
    verifyNoInteractions(s3Client);
    verify(documentRepository, never()).save(any());
}
|
||||
|
||||
@Test
|
||||
void generate_returnsFailed_whenPdfBytesAreCorrupt() throws IOException {
|
||||
// "PDF" header but no body — PDFBox throws IOException while loading.
|
||||
Document doc = makeDoc("application/pdf", "documents/corrupt.pdf");
|
||||
byte[] fakePdf = "%PDF-1.4\n".getBytes();
|
||||
when(fileService.downloadFileStream(anyString()))
|
||||
.thenReturn(new ByteArrayInputStream(fakePdf));
|
||||
|
||||
ThumbnailService.Outcome outcome = thumbnailService.generate(doc);
|
||||
|
||||
assertThat(outcome).isEqualTo(ThumbnailService.Outcome.FAILED);
|
||||
verifyNoInteractions(s3Client);
|
||||
verify(documentRepository, never()).save(any());
|
||||
}
|
||||
|
||||
@Test
void generate_returnsFailed_whenPersistThrows_butUploadSucceeded() throws IOException {
    // Covers the "orphan thumbnail" edge case: S3 upload succeeded but the
    // entity update blew up. We must still return FAILED so the backfill
    // tally is honest, without losing the fact that we already put bytes in S3.
    Document doc = makeDoc("application/pdf", "documents/letter.pdf");
    when(fileService.downloadFileStream(anyString()))
            .thenReturn(new ByteArrayInputStream(createSamplePdf()));
    when(documentRepository.save(any()))
            .thenThrow(new RuntimeException("constraint violation"));

    ThumbnailService.Outcome outcome = thumbnailService.generate(doc);

    assertThat(outcome).isEqualTo(ThumbnailService.Outcome.FAILED);
    // Both interactions happened: upload first, then the failing save attempt.
    verify(s3Client).putObject(any(PutObjectRequest.class), any(RequestBody.class));
    verify(documentRepository).save(any());
}
|
||||
|
||||
// ─── helpers ──────────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Builds a minimal UPLOADED Document fixture with the given content type and
 * storage path; id is random, timestamps are "now".
 */
private Document makeDoc(String contentType, String filePath) {
    Document doc = Document.builder()
            .id(UUID.randomUUID())
            .title("Test Doc")
            // NOTE(review): filename is always "test.pdf" even for image
            // fixtures; presumably harmless because the service keys off
            // contentType — confirm.
            .originalFilename("test.pdf")
            .status(DocumentStatus.UPLOADED)
            .contentType(contentType)
            .filePath(filePath)
            .build();
    doc.setCreatedAt(LocalDateTime.now());
    doc.setUpdatedAt(LocalDateTime.now());
    return doc;
}
|
||||
|
||||
/** Single-page convenience overload of {@link #createSamplePdf(int)}. */
private static byte[] createSamplePdf() throws IOException {
    return createSamplePdf(1);
}
|
||||
|
||||
/**
 * Renders a pageCount-page A4 PDF (one Helvetica text line per page) and
 * returns it serialized to a byte array.
 */
private static byte[] createSamplePdf(int pageCount) throws IOException {
    try (PDDocument doc = new PDDocument()) {
        for (int i = 0; i < pageCount; i++) {
            PDPage page = new PDPage(PDRectangle.A4);
            doc.addPage(page);
            // Content stream must be closed before saving the document.
            try (PDPageContentStream content = new PDPageContentStream(doc, page)) {
                content.beginText();
                content.setFont(new PDType1Font(Standard14Fonts.FontName.HELVETICA), 24);
                content.newLineAtOffset(100, 700);
                content.showText("Lieber Hans,");
                content.endText();
            }
        }
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        doc.save(bos);
        return bos.toByteArray();
    }
}
|
||||
|
||||
private static byte[] createSamplePng(int width, int height) throws IOException {
|
||||
BufferedImage img = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
|
||||
Graphics2D g = img.createGraphics();
|
||||
g.setColor(Color.LIGHT_GRAY);
|
||||
g.fillRect(0, 0, width, height);
|
||||
g.setColor(Color.DARK_GRAY);
|
||||
g.fillRect(0, 0, width, height / 4);
|
||||
g.dispose();
|
||||
ByteArrayOutputStream bos = new ByteArrayOutputStream();
|
||||
ImageIO.write(img, "png", bos);
|
||||
return bos.toByteArray();
|
||||
}
|
||||
|
||||
private static byte[] createSampleJpeg(int width, int height) throws IOException {
|
||||
BufferedImage img = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
|
||||
Graphics2D g = img.createGraphics();
|
||||
g.setColor(Color.WHITE);
|
||||
g.fillRect(0, 0, width, height);
|
||||
g.dispose();
|
||||
ByteArrayOutputStream bos = new ByteArrayOutputStream();
|
||||
ImageIO.write(img, "jpg", bos);
|
||||
return bos.toByteArray();
|
||||
}
|
||||
|
||||
private static byte[] readAll(InputStream stream) throws IOException {
|
||||
try (stream) {
|
||||
return stream.readAllBytes();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -506,86 +506,4 @@ class TranscriptionServiceTest {
|
||||
|
||||
verify(auditService, never()).logAfterCommit(any(), any(), any(), any());
|
||||
}
|
||||
|
||||
// ─── markAllBlocksReviewed ───────────────────────────────────────────────────
|
||||
|
||||
@Test
void markAllBlocksReviewed_setsAllUnreviewedBlocksToReviewed() {
    // Bulk review flips every unreviewed block and persists the whole set.
    UUID docId = UUID.randomUUID();
    UUID userId = UUID.randomUUID();
    TranscriptionBlock block1 = TranscriptionBlock.builder()
            .id(UUID.randomUUID()).documentId(docId).reviewed(false).build();
    TranscriptionBlock block2 = TranscriptionBlock.builder()
            .id(UUID.randomUUID()).documentId(docId).reviewed(false).build();
    when(blockRepository.findByDocumentIdOrderBySortOrderAsc(docId))
            .thenReturn(List.of(block1, block2));
    // Echo back whatever is saved so the return value reflects the mutation.
    when(blockRepository.saveAll(any())).thenAnswer(inv -> inv.getArgument(0));

    List<TranscriptionBlock> result = transcriptionService.markAllBlocksReviewed(docId, userId);

    assertThat(result).allMatch(TranscriptionBlock::isReviewed);
    verify(blockRepository).saveAll(List.of(block1, block2));
}
|
||||
|
||||
@Test
|
||||
void markAllBlocksReviewed_isIdempotent_whenAllBlocksAlreadyReviewed() {
|
||||
UUID docId = UUID.randomUUID();
|
||||
UUID userId = UUID.randomUUID();
|
||||
TranscriptionBlock block = TranscriptionBlock.builder()
|
||||
.id(UUID.randomUUID()).documentId(docId).reviewed(true).build();
|
||||
when(blockRepository.findByDocumentIdOrderBySortOrderAsc(docId))
|
||||
.thenReturn(List.of(block));
|
||||
when(blockRepository.saveAll(any())).thenAnswer(inv -> inv.getArgument(0));
|
||||
|
||||
List<TranscriptionBlock> result = transcriptionService.markAllBlocksReviewed(docId, userId);
|
||||
|
||||
assertThat(result).allMatch(TranscriptionBlock::isReviewed);
|
||||
verify(blockRepository).saveAll(any());
|
||||
}
|
||||
|
||||
@Test
void markAllBlocksReviewed_emitsBlockReviewedAuditEvent_forEachUnreviewedBlock() {
    // One BLOCK_REVIEWED audit event per block that actually changed state.
    UUID docId = UUID.randomUUID();
    UUID userId = UUID.randomUUID();
    TranscriptionBlock block1 = TranscriptionBlock.builder()
            .id(UUID.randomUUID()).documentId(docId).reviewed(false).build();
    TranscriptionBlock block2 = TranscriptionBlock.builder()
            .id(UUID.randomUUID()).documentId(docId).reviewed(false).build();
    when(blockRepository.findByDocumentIdOrderBySortOrderAsc(docId))
            .thenReturn(List.of(block1, block2));
    when(blockRepository.saveAll(any())).thenAnswer(inv -> inv.getArgument(0));

    transcriptionService.markAllBlocksReviewed(docId, userId);

    verify(auditService, times(2)).logAfterCommit(AuditKind.BLOCK_REVIEWED, userId, docId, null);
}
|
||||
|
||||
@Test
void markAllBlocksReviewed_doesNotEmitAuditEvent_forAlreadyReviewedBlocks() {
    // Mixed set: only the block that transitions unreviewed -> reviewed may
    // produce an audit event; the already-reviewed one must stay silent.
    UUID docId = UUID.randomUUID();
    UUID userId = UUID.randomUUID();
    TranscriptionBlock alreadyReviewed = TranscriptionBlock.builder()
            .id(UUID.randomUUID()).documentId(docId).reviewed(true).build();
    TranscriptionBlock unreviewed = TranscriptionBlock.builder()
            .id(UUID.randomUUID()).documentId(docId).reviewed(false).build();
    when(blockRepository.findByDocumentIdOrderBySortOrderAsc(docId))
            .thenReturn(List.of(alreadyReviewed, unreviewed));
    when(blockRepository.saveAll(any())).thenAnswer(inv -> inv.getArgument(0));

    transcriptionService.markAllBlocksReviewed(docId, userId);

    verify(auditService, times(1)).logAfterCommit(AuditKind.BLOCK_REVIEWED, userId, docId, null);
}
|
||||
|
||||
@Test
void markAllBlocksReviewed_returnsEmptyList_whenNoBlocksExist() {
    // Degenerate case: no blocks for the document → empty result.
    UUID docId = UUID.randomUUID();
    UUID userId = UUID.randomUUID();
    when(blockRepository.findByDocumentIdOrderBySortOrderAsc(docId)).thenReturn(List.of());
    // NOTE(review): if the service short-circuits on an empty block list this
    // saveAll stub is unused and would trip strict-stubs
    // UnnecessaryStubbingException — confirm against the service impl.
    when(blockRepository.saveAll(any())).thenAnswer(inv -> inv.getArgument(0));

    List<TranscriptionBlock> result = transcriptionService.markAllBlocksReviewed(docId, userId);

    assertThat(result).isEmpty();
}
|
||||
}
|
||||
|
||||
@@ -2,12 +2,9 @@ package org.raddatz.familienarchiv.service;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.raddatz.familienarchiv.audit.AuditKind;
|
||||
import org.raddatz.familienarchiv.audit.AuditService;
|
||||
import org.raddatz.familienarchiv.dto.AdminUpdateUserRequest;
|
||||
import org.raddatz.familienarchiv.dto.ChangePasswordDTO;
|
||||
import org.raddatz.familienarchiv.dto.CreateUserRequest;
|
||||
@@ -37,7 +34,6 @@ class UserServiceTest {
|
||||
@Mock AppUserRepository userRepository;
|
||||
@Mock UserGroupRepository groupRepository;
|
||||
@Mock PasswordEncoder passwordEncoder;
|
||||
@Mock AuditService auditService;
|
||||
@InjectMocks UserService userService;
|
||||
|
||||
// ─── findByEmail ──────────────────────────────────────────────────────────
|
||||
@@ -65,7 +61,7 @@ class UserServiceTest {
|
||||
UUID id = UUID.randomUUID();
|
||||
when(userRepository.findById(id)).thenReturn(Optional.empty());
|
||||
|
||||
assertThatThrownBy(() -> userService.deleteUser(UUID.randomUUID(), id))
|
||||
assertThatThrownBy(() -> userService.deleteUser(id))
|
||||
.isInstanceOf(DomainException.class);
|
||||
}
|
||||
|
||||
@@ -75,7 +71,7 @@ class UserServiceTest {
|
||||
AppUser user = AppUser.builder().id(id).email("gast@example.com").build();
|
||||
when(userRepository.findById(id)).thenReturn(Optional.of(user));
|
||||
|
||||
userService.deleteUser(UUID.randomUUID(), id);
|
||||
userService.deleteUser(id);
|
||||
|
||||
verify(userRepository).delete(user);
|
||||
}
|
||||
@@ -94,7 +90,7 @@ class UserServiceTest {
|
||||
AppUser saved = AppUser.builder().id(UUID.randomUUID()).email("new@example.com").build();
|
||||
when(userRepository.save(any())).thenReturn(saved);
|
||||
|
||||
AppUser result = userService.createUserOrUpdate(UUID.randomUUID(), req);
|
||||
AppUser result = userService.createUserOrUpdate(req);
|
||||
|
||||
assertThat(result).isEqualTo(saved);
|
||||
verify(userRepository).save(any());
|
||||
@@ -112,7 +108,7 @@ class UserServiceTest {
|
||||
when(passwordEncoder.encode(any())).thenReturn("encoded");
|
||||
when(userRepository.save(any())).thenReturn(existing);
|
||||
|
||||
userService.createUserOrUpdate(UUID.randomUUID(), req);
|
||||
userService.createUserOrUpdate(req);
|
||||
|
||||
verify(userRepository, times(1)).save(existing);
|
||||
}
|
||||
@@ -233,7 +229,7 @@ class UserServiceTest {
|
||||
AdminUpdateUserRequest dto = new AdminUpdateUserRequest();
|
||||
dto.setFirstName("Ada"); dto.setLastName("Lovelace");
|
||||
|
||||
AppUser result = userService.adminUpdateUser(UUID.randomUUID(), id, dto);
|
||||
AppUser result = userService.adminUpdateUser(id, dto);
|
||||
|
||||
assertThat(result.getFirstName()).isEqualTo("Ada");
|
||||
assertThat(result.getLastName()).isEqualTo("Lovelace");
|
||||
@@ -250,7 +246,7 @@ class UserServiceTest {
|
||||
AdminUpdateUserRequest dto = new AdminUpdateUserRequest();
|
||||
dto.setFirstName("Ada");
|
||||
|
||||
AppUser result = userService.adminUpdateUser(UUID.randomUUID(), id, dto);
|
||||
AppUser result = userService.adminUpdateUser(id, dto);
|
||||
|
||||
assertThat(result.getGroups()).containsExactly(adminGroup);
|
||||
}
|
||||
@@ -268,7 +264,7 @@ class UserServiceTest {
|
||||
AdminUpdateUserRequest dto = new AdminUpdateUserRequest();
|
||||
dto.setGroupIds(List.of(newGroup.getId()));
|
||||
|
||||
AppUser result = userService.adminUpdateUser(UUID.randomUUID(), id, dto);
|
||||
AppUser result = userService.adminUpdateUser(id, dto);
|
||||
|
||||
assertThat(result.getGroups()).containsExactly(newGroup);
|
||||
}
|
||||
@@ -285,7 +281,7 @@ class UserServiceTest {
|
||||
AdminUpdateUserRequest dto = new AdminUpdateUserRequest();
|
||||
dto.setGroupIds(List.of());
|
||||
|
||||
AppUser result = userService.adminUpdateUser(UUID.randomUUID(), id, dto);
|
||||
AppUser result = userService.adminUpdateUser(id, dto);
|
||||
|
||||
assertThat(result.getGroups()).isEmpty();
|
||||
}
|
||||
@@ -317,7 +313,7 @@ class UserServiceTest {
|
||||
AppUser saved = AppUser.builder().id(UUID.randomUUID()).email("u@example.com").build();
|
||||
when(userRepository.save(any())).thenReturn(saved);
|
||||
|
||||
AppUser result = userService.createUserOrUpdate(UUID.randomUUID(), req);
|
||||
AppUser result = userService.createUserOrUpdate(req);
|
||||
|
||||
assertThat(result).isEqualTo(saved);
|
||||
verify(groupRepository).findAllById(List.of(group.getId()));
|
||||
@@ -382,7 +378,7 @@ class UserServiceTest {
|
||||
AdminUpdateUserRequest dto = new AdminUpdateUserRequest();
|
||||
dto.setNewPassword("newSecret");
|
||||
|
||||
AppUser result = userService.adminUpdateUser(UUID.randomUUID(), id, dto);
|
||||
AppUser result = userService.adminUpdateUser(id, dto);
|
||||
|
||||
assertThat(result.getPassword()).isEqualTo("newHashed");
|
||||
}
|
||||
@@ -397,7 +393,7 @@ class UserServiceTest {
|
||||
AdminUpdateUserRequest dto = new AdminUpdateUserRequest();
|
||||
dto.setNewPassword(" ");
|
||||
|
||||
AppUser result = userService.adminUpdateUser(UUID.randomUUID(), id, dto);
|
||||
AppUser result = userService.adminUpdateUser(id, dto);
|
||||
|
||||
assertThat(result.getPassword()).isEqualTo("original");
|
||||
verify(passwordEncoder, never()).encode(any());
|
||||
@@ -412,7 +408,7 @@ class UserServiceTest {
|
||||
AdminUpdateUserRequest dto = new AdminUpdateUserRequest();
|
||||
dto.setEmail(" ");
|
||||
|
||||
assertThatThrownBy(() -> userService.adminUpdateUser(UUID.randomUUID(), id, dto))
|
||||
assertThatThrownBy(() -> userService.adminUpdateUser(id, dto))
|
||||
.isInstanceOf(DomainException.class)
|
||||
.hasMessageContaining("blank");
|
||||
}
|
||||
@@ -429,7 +425,7 @@ class UserServiceTest {
|
||||
AdminUpdateUserRequest dto = new AdminUpdateUserRequest();
|
||||
dto.setEmail("taken@example.com");
|
||||
|
||||
assertThatThrownBy(() -> userService.adminUpdateUser(UUID.randomUUID(), id, dto))
|
||||
assertThatThrownBy(() -> userService.adminUpdateUser(id, dto))
|
||||
.isInstanceOf(DomainException.class)
|
||||
.hasMessageContaining("E-Mail");
|
||||
}
|
||||
@@ -501,7 +497,7 @@ class UserServiceTest {
|
||||
AppUser saved = AppUser.builder().id(UUID.randomUUID()).email("u@example.com").build();
|
||||
when(userRepository.save(any())).thenReturn(saved);
|
||||
|
||||
userService.createUserOrUpdate(UUID.randomUUID(), req);
|
||||
userService.createUserOrUpdate(req);
|
||||
|
||||
verify(groupRepository, never()).findAllById(any());
|
||||
}
|
||||
@@ -565,7 +561,7 @@ class UserServiceTest {
|
||||
AdminUpdateUserRequest dto = new AdminUpdateUserRequest();
|
||||
dto.setContact(null);
|
||||
|
||||
AppUser result = userService.adminUpdateUser(UUID.randomUUID(), id, dto);
|
||||
AppUser result = userService.adminUpdateUser(id, dto);
|
||||
|
||||
assertThat(result.getContact()).isNull();
|
||||
}
|
||||
@@ -580,7 +576,7 @@ class UserServiceTest {
|
||||
AdminUpdateUserRequest dto = new AdminUpdateUserRequest();
|
||||
dto.setContact(" ");
|
||||
|
||||
AppUser result = userService.adminUpdateUser(UUID.randomUUID(), id, dto);
|
||||
AppUser result = userService.adminUpdateUser(id, dto);
|
||||
|
||||
assertThat(result.getContact()).isNull();
|
||||
}
|
||||
@@ -595,7 +591,7 @@ class UserServiceTest {
|
||||
AdminUpdateUserRequest dto = new AdminUpdateUserRequest();
|
||||
dto.setContact(" phone: 555 ");
|
||||
|
||||
AppUser result = userService.adminUpdateUser(UUID.randomUUID(), id, dto);
|
||||
AppUser result = userService.adminUpdateUser(id, dto);
|
||||
|
||||
assertThat(result.getContact()).isEqualTo("phone: 555");
|
||||
}
|
||||
@@ -610,7 +606,7 @@ class UserServiceTest {
|
||||
AdminUpdateUserRequest dto = new AdminUpdateUserRequest();
|
||||
dto.setEmail(null);
|
||||
|
||||
AppUser result = userService.adminUpdateUser(UUID.randomUUID(), id, dto);
|
||||
AppUser result = userService.adminUpdateUser(id, dto);
|
||||
|
||||
assertThat(result.getEmail()).isEqualTo("keep@example.com");
|
||||
}
|
||||
@@ -626,7 +622,7 @@ class UserServiceTest {
|
||||
AdminUpdateUserRequest dto = new AdminUpdateUserRequest();
|
||||
dto.setEmail("me@example.com");
|
||||
|
||||
AppUser result = userService.adminUpdateUser(UUID.randomUUID(), id, dto);
|
||||
AppUser result = userService.adminUpdateUser(id, dto);
|
||||
assertThat(result.getEmail()).isEqualTo("me@example.com");
|
||||
}
|
||||
|
||||
@@ -644,7 +640,7 @@ class UserServiceTest {
|
||||
AppUser saved = AppUser.builder().id(UUID.randomUUID()).email("ng@example.com").build();
|
||||
when(userRepository.save(any())).thenReturn(saved);
|
||||
|
||||
userService.createUserOrUpdate(UUID.randomUUID(), req);
|
||||
userService.createUserOrUpdate(req);
|
||||
|
||||
verify(groupRepository, never()).findAllById(any());
|
||||
}
|
||||
@@ -703,160 +699,6 @@ class UserServiceTest {
|
||||
assertThat(result).containsExactly(g);
|
||||
}
|
||||
|
||||
// ─── audit: GROUP_MEMBERSHIP_CHANGED ─────────────────────────────────────
|
||||
|
||||
@Test
void adminUpdateUser_logsGroupMembershipChanged_whenGroupSetChanges() {
    // Swapping Viewers -> Editors must emit one GROUP_MEMBERSHIP_CHANGED event
    // whose payload names the affected user plus the added/removed groups.
    UUID actorId = UUID.randomUUID();
    UUID userId = UUID.randomUUID();
    UserGroup oldGroup = UserGroup.builder().id(UUID.randomUUID()).name("Viewers").permissions(Set.of("READ_ALL")).build();
    UserGroup newGroup = UserGroup.builder().id(UUID.randomUUID()).name("Editors").permissions(Set.of("WRITE_ALL")).build();
    AppUser user = AppUser.builder().id(userId).email("u@example.com").groups(Set.of(oldGroup)).build();
    when(userRepository.findById(userId)).thenReturn(Optional.of(user));
    when(groupRepository.findAllById(List.of(newGroup.getId()))).thenReturn(List.of(newGroup));
    when(userRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));

    AdminUpdateUserRequest dto = new AdminUpdateUserRequest();
    dto.setGroupIds(List.of(newGroup.getId()));

    userService.adminUpdateUser(actorId, userId, dto);

    // Captor is raw-generic: unchecked warning is expected and suppressed locally.
    @SuppressWarnings("unchecked")
    ArgumentCaptor<java.util.Map<String, Object>> payloadCaptor = ArgumentCaptor.forClass(java.util.Map.class);
    verify(auditService).logAfterCommit(
            org.mockito.ArgumentMatchers.eq(AuditKind.GROUP_MEMBERSHIP_CHANGED),
            org.mockito.ArgumentMatchers.eq(actorId),
            org.mockito.ArgumentMatchers.isNull(),
            payloadCaptor.capture());
    java.util.Map<String, Object> payload = payloadCaptor.getValue();
    assertThat(payload).containsEntry("email", "u@example.com");
    assertThat((java.util.List<String>) payload.get("addedGroups")).containsExactly("Editors");
    assertThat((java.util.List<String>) payload.get("removedGroups")).containsExactly("Viewers");
}
|
||||
|
||||
@Test
|
||||
void adminUpdateUser_doesNotLogGroupMembershipChanged_whenGroupsUnchanged() {
|
||||
UUID actorId = UUID.randomUUID();
|
||||
UUID userId = UUID.randomUUID();
|
||||
UserGroup group = UserGroup.builder().id(UUID.randomUUID()).name("Admins").build();
|
||||
AppUser user = AppUser.builder().id(userId).email("u@example.com").groups(Set.of(group)).build();
|
||||
when(userRepository.findById(userId)).thenReturn(Optional.of(user));
|
||||
when(groupRepository.findAllById(List.of(group.getId()))).thenReturn(List.of(group));
|
||||
when(userRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));
|
||||
|
||||
AdminUpdateUserRequest dto = new AdminUpdateUserRequest();
|
||||
dto.setGroupIds(List.of(group.getId()));
|
||||
|
||||
userService.adminUpdateUser(actorId, userId, dto);
|
||||
|
||||
verify(auditService, never()).logAfterCommit(any(), any(), any(), any());
|
||||
}
|
||||
|
||||
@Test
void adminUpdateUser_doesNotLogGroupMembershipChanged_whenGroupIdsIsNull() {
    // A request that does not touch group membership (groupIds left null)
    // must not emit any audit event.
    UUID actorId = UUID.randomUUID();
    UUID userId = UUID.randomUUID();
    UserGroup group = UserGroup.builder().id(UUID.randomUUID()).name("Admins").build();
    AppUser user = AppUser.builder().id(userId).email("u@example.com").groups(Set.of(group)).build();
    when(userRepository.findById(userId)).thenReturn(Optional.of(user));
    when(userRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));

    AdminUpdateUserRequest dto = new AdminUpdateUserRequest();
    // groupIds not set → null

    userService.adminUpdateUser(actorId, userId, dto);

    verify(auditService, never()).logAfterCommit(any(), any(), any(), any());
}
|
||||
|
||||
// ─── audit: USER_DELETED ──────────────────────────────────────────────────
|
||||
|
||||
@Test
void deleteUser_logsUserDeleted_withEmailInPayload() {
    // Deleting a user emits USER_DELETED carrying the deleted account's
    // email and id in the payload (entity id argument is null).
    UUID actorId = UUID.randomUUID();
    UUID userId = UUID.randomUUID();
    AppUser user = AppUser.builder().id(userId).email("gone@example.com").build();
    when(userRepository.findById(userId)).thenReturn(Optional.of(user));

    userService.deleteUser(actorId, userId);

    // Captor is raw-generic: unchecked warning is expected and suppressed locally.
    @SuppressWarnings("unchecked")
    ArgumentCaptor<java.util.Map<String, Object>> payloadCaptor = ArgumentCaptor.forClass(java.util.Map.class);
    verify(auditService).logAfterCommit(
            org.mockito.ArgumentMatchers.eq(AuditKind.USER_DELETED),
            org.mockito.ArgumentMatchers.eq(actorId),
            org.mockito.ArgumentMatchers.isNull(),
            payloadCaptor.capture());
    assertThat(payloadCaptor.getValue()).containsEntry("email", "gone@example.com");
    assertThat(payloadCaptor.getValue()).containsKey("userId");
}
|
||||
|
||||
// ─── audit: USER_CREATED ──────────────────────────────────────────────────
|
||||
|
||||
@Test
void createUserOrUpdate_logsUserCreated_whenUserIsNew() {
    // Creating a brand-new account emits USER_CREATED with id and email
    // in the payload.
    UUID actorId = UUID.randomUUID();
    CreateUserRequest req = new CreateUserRequest();
    req.setEmail("new@example.com");
    req.setInitialPassword("secret");
    req.setGroupIds(List.of());

    when(userRepository.findByEmail("new@example.com")).thenReturn(Optional.empty());
    when(passwordEncoder.encode("secret")).thenReturn("encoded");
    AppUser saved = AppUser.builder().id(UUID.randomUUID()).email("new@example.com").build();
    when(userRepository.save(any())).thenReturn(saved);

    userService.createUserOrUpdate(actorId, req);

    // Captor is raw-generic: unchecked warning is expected and suppressed locally.
    @SuppressWarnings("unchecked")
    ArgumentCaptor<java.util.Map<String, Object>> payloadCaptor = ArgumentCaptor.forClass(java.util.Map.class);
    verify(auditService).logAfterCommit(
            org.mockito.ArgumentMatchers.eq(AuditKind.USER_CREATED),
            org.mockito.ArgumentMatchers.eq(actorId),
            org.mockito.ArgumentMatchers.isNull(),
            payloadCaptor.capture());
    assertThat(payloadCaptor.getValue()).containsKey("userId");
    assertThat(payloadCaptor.getValue()).containsEntry("email", "new@example.com");
}
|
||||
|
||||
@Test
|
||||
void createUserOrUpdate_doesNotLogUserCreated_whenUserAlreadyExists() {
|
||||
UUID actorId = UUID.randomUUID();
|
||||
CreateUserRequest req = new CreateUserRequest();
|
||||
req.setEmail("existing@example.com");
|
||||
req.setInitialPassword("pass");
|
||||
req.setGroupIds(List.of());
|
||||
|
||||
AppUser existing = AppUser.builder().id(UUID.randomUUID()).email("existing@example.com").build();
|
||||
when(userRepository.findByEmail("existing@example.com")).thenReturn(Optional.of(existing));
|
||||
when(passwordEncoder.encode(any())).thenReturn("encoded");
|
||||
when(userRepository.save(any())).thenReturn(existing);
|
||||
|
||||
userService.createUserOrUpdate(actorId, req);
|
||||
|
||||
verify(auditService, never()).logAfterCommit(any(), any(), any(), any());
|
||||
}
|
||||
|
||||
// ─── createUserForBootstrap ───────────────────────────────────────────────
|
||||
|
||||
@Test
void createUserForBootstrap_createsUserWithoutAuditEvent() {
    // Bootstrap user creation happens before any actor exists, so it must
    // persist the user without emitting any audit event.
    CreateUserRequest req = new CreateUserRequest();
    req.setEmail("bootstrap@example.com");
    req.setInitialPassword("secret");
    req.setGroupIds(List.of());

    when(userRepository.findByEmail("bootstrap@example.com")).thenReturn(Optional.empty());
    when(passwordEncoder.encode("secret")).thenReturn("encoded");
    AppUser saved = AppUser.builder().id(UUID.randomUUID()).email("bootstrap@example.com").build();
    when(userRepository.save(any())).thenReturn(saved);

    AppUser result = userService.createUserForBootstrap(req);

    assertThat(result).isEqualTo(saved);
    verify(auditService, never()).logAfterCommit(any(), any(), any(), any());
}
|
||||
|
||||
// ─── createGroup ──────────────────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
|
||||
@@ -1,59 +0,0 @@
|
||||
# ADR-003: Session-Rollup Unified Activity Feed on `/chronik`
|
||||
|
||||
## Status
|
||||
|
||||
Accepted
|
||||
|
||||
## Context
|
||||
|
||||
The app had two disconnected ways to see what was happening in the archive:
|
||||
|
||||
1. `/notifications` — personal mentions/replies only, delivered via the `notifications` table and a Bell dropdown.
|
||||
2. Dashboard activity feed — ambient events (uploads, transcription, annotations, comments, mentions) via `/api/dashboard/activity`, which deduplicated using `DISTINCT ON (actor_id, document_id, kind, date_trunc('hour', happened_at))`.
|
||||
|
||||
Having two separate lists was a poor mental model (personal vs. ambient feel the same to the user), the `/notifications` page wasted horizontal space, the dashboard's "Alle anzeigen" pointed to `/documents` (dead-end), and the hour-trunc dedupe produced ugly splits on natural sessions — saving 20 transcription blocks at 08:58, 08:59, 09:01 yielded two rows.
|
||||
|
||||
We needed one page that merges both streams, keeps personal mentions visually loud, and aggregates ambient noise coherently.
|
||||
|
||||
## Decision
|
||||
|
||||
**One page `/chronik` backed by two endpoints.** The SvelteKit `+page.server.ts` composes data from `/api/dashboard/activity` (for the ambient timeline) and `/api/notifications` (for the "Für dich" box). No new `/api/chronik` orchestrator — the frontend load function is the composition seam.
|
||||
|
||||
**Session-style rollup replaces hour-trunc dedupe everywhere.** `AuditLogQueryRepository.findDedupedActivityFeed` is renamed to `findRolledUpActivityFeed` and rewritten using a `LAG()`-based session algorithm:
|
||||
|
||||
```
|
||||
LAG(happened_at) OVER (PARTITION BY actor_id, document_id, kind ORDER BY happened_at)
|
||||
→ is_new_session = gap > 7200s (or first event in partition, or kind ∈ {COMMENT_ADDED, MENTION_CREATED})
|
||||
→ SUM(is_new_session) OVER (... ROWS UNBOUNDED PRECEDING) = session_id
|
||||
→ GROUP BY (actor_id, document_id, kind, session_id) → MIN(happened_at), MAX(...), COUNT(*)
|
||||
```
|
||||
|
||||
Events within 120 min on the same `(actor, document, kind)` become one row with `count` and `happenedAtUntil` fields. `COMMENT_ADDED` and `MENTION_CREATED` always start a new session — these kinds never roll up. No hard cap on total session span (a 4-hour transcription sitting is one row). The hour-trunc dedupe SQL is **deleted**, not kept alongside — one aggregation strategy per query.
|
||||
|
||||
**URL is universal German `/chronik` across all locales**, matching the existing convention (`/dokumente`, `/personen`, `/briefwechsel`). Content is translated via Paraglide; the URL is a stable German identifier, not a translatable route.
|
||||
|
||||
**DTO extended, not replaced.** `ActivityFeedItemDTO` gains `count: int` (required, `1` for singletons) and `happenedAtUntil: OffsetDateTime?` (null for singletons, end-of-session for rollups). One DTO shape serves both the Chronik timeline and the dashboard side-rail.
|
||||
|
||||
**`/notifications` route is deleted outright.** The app is pre-production — no 301 redirect, no zombie page.
|
||||
|
||||
## Alternatives Considered
|
||||
|
||||
| Alternative | Why rejected |
|
||||
|---|---|
|
||||
| Fixed 2-hour wall-clock buckets (`to_timestamp(floor(extract(epoch from happened_at) / 7200) * 7200)`) | Splits natural sessions at bucket boundaries (e.g. events at 13:58 / 13:59 / 14:01 land in two rollup rows) |
|
||||
| Keep `DISTINCT ON hour-trunc` alongside new rollup query | Two aggregation strategies = zombie logic; dashboard and Chronik would drift |
|
||||
| New `/api/chronik` endpoint that merges both streams | Couples two domains (notifications + audit) at the API layer; composition belongs in `+page.server.ts` |
|
||||
| Localized URL slugs (`/chronik` / `/chronicle` / `/crónica`) | Breaks the project's existing German-URL convention and adds Paraglide routing overhead for zero UX value |
|
||||
| Per-locale rollup in the SQL (e.g. align to local-day boundaries) | Timezone-aware SQL is brittle; rollup is a time-gap concept, not a calendar-day concept |
|
||||
|
||||
## Consequences
|
||||
|
||||
**Easier:**
|
||||
- One hot path — `/api/dashboard/activity` is backed by a single partial covering index (`V49__add_audit_log_rollup_index.sql`) that matches the rollup query's WHERE clause exactly.
|
||||
- Dashboard side-rail gets rollup for free — 20 block-saves appear as one "Papa transkribierte 20 Blöcke" row with a time range, not 20 dedup'd hour buckets.
|
||||
- Component reuse — `ChronikRow.svelte` renders both singleton and rollup variants via a `$derived` discriminator; `DashboardActivityFeed.svelte` consumes the same DTO shape.
|
||||
|
||||
**Harder:**
|
||||
- The session SQL is ~15 lines longer than `DISTINCT ON`. That's the price for not splitting natural sessions at fixed boundaries — worth it on day one.
|
||||
- Historical `/api/dashboard/activity` consumers now see `count` and `happenedAtUntil`. No breaking change — `count` defaults to `1`, `happenedAtUntil` is nullable — but pre-existing tests needed updating.
|
||||
- Rollup is load-bearing for the UX — if the index is missing or the query regresses, the page either runs slow or returns duplicate rows. Covered by the rolledUp integration tests and the partial covering index; worth a follow-up Grafana panel on `/api/dashboard/activity` p95 latency.
|
||||
@@ -1,49 +0,0 @@
|
||||
# ADR-004: In-Process PDFBox Thumbnails (not ocr-service)
|
||||
|
||||
## Status
|
||||
|
||||
Accepted
|
||||
|
||||
## Context
|
||||
|
||||
The archive lists documents as text-only rows everywhere (home search, person detail, conversation timeline, Chronik). For a fundamentally visual archive — letters, scans, handwritten pages — this is a real discoverability problem. Issue #307 introduces a small JPEG thumbnail for every document.
|
||||
|
||||
A viable alternative to rendering in Spring Boot is delegating to the existing `ocr-service` (Python), which already has PyMuPDF/PIL available and is the project's designated place for PDF pixel work. The comparison is not obvious: either place works.
|
||||
|
||||
## Decision
|
||||
|
||||
Render thumbnails in-process in Spring Boot using **Apache PDFBox 3.0.4** (already a dependency for training-data export). A dedicated `thumbnailExecutor` pool isolates the work from the shared task pool used by OCR.
|
||||
|
||||
- PDF first page rendered via `PDFRenderer.renderImageWithDPI(0, 100, ImageType.RGB)`, scaled to 240 px width (bilinear) and encoded as JPEG quality 85.
|
||||
- Non-PDF image types (JPEG, PNG, TIFF) decoded via `javax.imageio` — TIFF requires the `twelvemonkeys-imageio-tiff` plugin on the classpath.
|
||||
- Upload paths fire-and-forget via `ThumbnailAsyncRunner.dispatchAfterCommit(docId)`; a `ThumbnailBackfillService` covers anything the async task missed or that pre-dates this feature.
|
||||
|
||||
## Alternatives Considered
|
||||
|
||||
| Alternative | Why rejected |
|
||||
|---|---|
|
||||
| Delegate to `ocr-service` (PyMuPDF) | Adds a network hop and a failure mode to every document upload. `ocr-service` is not guaranteed healthy at upload time (model-loading start period is 60 s). PDFBox is already a backend dependency — delegating is a net complexity increase. |
|
||||
| Render on the frontend with `pdfjs-dist` at display time | Would work for PDFs but not for scans / images; list pages would need to render dozens of PDFs on first paint; no server-side caching. |
|
||||
| Thumbor / imaginary / a dedicated thumbnail service | Overkill for a single-operator household tool; new container to operate and secure. |
|
||||
|
||||
## Consequences
|
||||
|
||||
**Easier:**
|
||||
- Zero new infrastructure. `thumbnails/` is a prefix in the existing MinIO bucket — production migration to Hetzner Object Storage works identically.
|
||||
- Backfill is a plain sequential loop; no inter-service retry semantics.
|
||||
- Integration test runs against real MinIO without needing `ocr-service` to be healthy.
|
||||
|
||||
**Harder:**
|
||||
- PDFBox is a parser attack surface. Mitigated by a 30-second watchdog timeout in `ThumbnailAsyncRunner` and by the fire-and-forget contract (failures never break upload).
|
||||
- Memory ceiling: the `thumbnailExecutor` is capped at 2 threads on the CX32 (8 GB). A busy backfill alongside OCR can approach the 3 GB heap — acceptable but not comfortable. Streaming via `FileService.downloadFileStream` keeps this bounded for PDFs up to 50 MB.
|
||||
|
||||
### Operational caveats (intentional)
|
||||
|
||||
**Backfill state is in-memory and single-node.** `ThumbnailBackfillService.currentStatus` is a volatile reference updated on the thumbnail executor thread. Restarting the backend mid-run loses progress and the next `runBackfillAsync()` starts over. This mirrors `MassImportService.ImportStatus` and is acceptable because the household archive runs as a single Spring Boot process, backfill is a rare one-shot admin action, and re-running the backfill is idempotent (`findByFilePathIsNotNullAndThumbnailKeyIsNull()` naturally skips completed documents).
|
||||
|
||||
**`ThumbnailService` and `ThumbnailBackfillService` inject `DocumentRepository` directly.** This is a deliberate exception to the project's "services never reach into another domain's repository" rule. Treating thumbnails as a cross-cutting aspect of `Document` rather than a sub-domain avoids a circular dependency (`DocumentService` → `ThumbnailAsyncRunner` → `DocumentService` would close the loop). If thumbnail state grows beyond two columns into its own domain model, extract a proper `ThumbnailRepository` at that point — not before.
|
||||
|
||||
## Future Direction
|
||||
|
||||
- If a second image-processing job (OCR region crops, sharing previews) arrives, revisit moving all image work to `ocr-service` so the two share a single PyMuPDF instance.
|
||||
- If thumbnails ever need to be generated at multiple sizes, switch the key pattern from `thumbnails/{docId}.jpg` to `thumbnails/{docId}/{width}.jpg` — the endpoint and cache-bust URL are already structured to accommodate that.
|
||||
@@ -1,52 +0,0 @@
|
||||
# ADR-005: thumbnailAspect + pageCount alongside the thumbnail
|
||||
|
||||
## Status
|
||||
|
||||
Accepted
|
||||
|
||||
## Context
|
||||
|
||||
Issue #305 rebalances the /briefwechsel correspondence list into PDF-thumbnail rows. Two pieces of metadata are needed at row-render time:
|
||||
|
||||
- **Aspect ratio** — postcards are landscape (7:5), letters are portrait (5:7). Forcing landscape scans into a portrait tile crops away the signature; forcing portrait scans into a landscape tile wastes horizontal real estate.
|
||||
- **Page count** — multi-page letters should show a "N" badge on their thumbnail so the reader can tell a single-page note from a seven-page letter without clicking in.
|
||||
|
||||
Both values are cheap to derive at the point the thumbnail is generated (the source image is already decoded; the PDF is already loaded) and impossible to derive cheaply later (requires re-reading the S3 object).
|
||||
|
||||
## Decision
|
||||
|
||||
Persist both values as columns on `documents` and populate them inside `ThumbnailService.generate()` — the same code path that writes the JPEG to S3 and stamps `thumbnail_generated_at`.
|
||||
|
||||
- `thumbnail_aspect VARCHAR(16)` mapped to a Java enum `ThumbnailAspect` with two values: `PORTRAIT`, `LANDSCAPE`.
|
||||
- `page_count INTEGER` — `PDDocument.getNumberOfPages()` for PDFs, `1` for image uploads.
|
||||
- Aspect threshold is `source.width / source.height > 1.1` → `LANDSCAPE`; everything else (including near-square A4 scans at ratio ≈ 1.0) stays `PORTRAIT`. The 1.1 margin keeps borderline scans from flipping across the threshold on a rounding error.
|
||||
- Both columns are nullable and remain `null` for historical documents until the existing `/api/admin/generate-thumbnails` backfill rerun populates them.
|
||||
|
||||
## Alternatives Considered
|
||||
|
||||
| Alternative | Why rejected |
|
||||
|---|---|
|
||||
| Derive aspect client-side after image load | First-paint would have all tiles in portrait, then reshuffle into landscape when the JPEG decodes — a visible jank on slow networks. The backend already has the dimensions; client-side recomputation is a waste. |
|
||||
| Store full `width` / `height` columns | Not needed anywhere — consumers want the categorical answer. If a future feature needs exact dimensions, they can be added later without migrating existing rows. |
|
||||
| A separate `thumbnail_metadata` table | Two scalar nullable columns aren't worth a join. See ADR-004 — thumbnails are modeled as a cross-cutting aspect of `Document`, not a sub-domain. |
|
||||
| Derive page count from the existing PDF at render time on the frontend | Duplicates work already done on the backend and requires a separate byte-range fetch of the PDF header. Frontend already gets `pageCount` "for free" via the Document response. |
|
||||
|
||||
## Consequences
|
||||
|
||||
**Easier:**
|
||||
- `ConversationThumbnail.svelte` picks the tile dimensions from `thumbnailAspect` directly — no async measurement, no layout shift.
|
||||
- `ThumbnailRow` reads `pageCount` synchronously for the badge. Multi-page letters are distinguishable at first paint.
|
||||
- Backfill runs the same migration path for every old document — re-executing populates the aspect + pageCount columns while regenerating the JPEG, so operators don't have a second admin button to click.
|
||||
|
||||
**Harder:**
|
||||
- Both columns are `null` for every document until the backfill runs on a given instance. Frontend components guard with `?? 'PORTRAIT'` / `?? 1` so the UI stays sensible during the rollout window. The backfill is idempotent and cheap (reuses existing S3 object), so re-running it is the simplest recovery path.
|
||||
- The aspect threshold is a single constant in Java. A future need to tune per-type (e.g. postcards vs photos) means a code change, not a configuration change — acceptable for a single-operator archive.
|
||||
|
||||
### Ordering inside `ThumbnailService.generate()`
|
||||
|
||||
Aspect computation happens AFTER the JPEG upload succeeds but BEFORE the entity save — if the save throws, the columns rewind with it. Page count is captured while the `PDDocument` is still open; the `SourcePreview` record carries both the rendered first-page image and the page count back to the top of the pipeline so the PDF isn't reopened later.
|
||||
|
||||
## Future Direction
|
||||
|
||||
- If a postcard-specific "photo" chip is ever reintroduced, reuse `thumbnailAspect === 'LANDSCAPE' && pageCount === 1` rather than adding a new `kind` column.
|
||||
- If multi-size thumbnails are introduced (per ADR-004's future note), the aspect + pageCount are per-document and do not need to be duplicated per size.
|
||||
@@ -1,195 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="de">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>Spec 1 — Rich Rows · Briefwechsel</title>
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||
<link href="https://fonts.googleapis.com/css2?family=Merriweather:wght@400;700&family=Montserrat:wght@400;500;600;700;800;900&display=swap" rel="stylesheet">
|
||||
<link rel="stylesheet" href="_shared.css">
|
||||
<style>
|
||||
/* Spec 1 specific */
|
||||
.rlist{background:#fff;border:1px solid var(--line);border-radius:2px;overflow:hidden}
|
||||
.row{display:grid;grid-template-columns:20px minmax(0,1fr) auto;column-gap:12px;align-items:stretch;padding:14px 18px;border-bottom:1px solid var(--line-2);border-left:3px solid transparent;cursor:pointer;transition:background .1s}
|
||||
.row:hover{background:var(--muted)}
|
||||
.row:last-child{border-bottom:0}
|
||||
.row.out{border-left-color:var(--primary)}
|
||||
.row.in{border-left-color:var(--accent)}
|
||||
.row-arrow{align-self:center;font-size:14px;opacity:.55;display:flex;justify-content:center}
|
||||
.row-body{min-width:0;display:flex;flex-direction:column;gap:4px}
|
||||
.row-title{font-family:'Merriweather',serif;font-size:15px;font-weight:700;color:var(--ink);white-space:nowrap;overflow:hidden;text-overflow:ellipsis}
|
||||
.row-summary{font-size:12.5px;color:#555;font-style:italic;white-space:nowrap;overflow:hidden;text-overflow:ellipsis;max-width:90%}
|
||||
.row-meta{display:flex;flex-wrap:wrap;gap:4px 10px;font-size:11.5px;color:var(--ink-3);align-items:center}
|
||||
.row-meta .sep{color:#bbb}
|
||||
.row-meta .ico{width:12px;height:12px;opacity:.55;display:inline-flex;align-items:center;justify-content:center}
|
||||
.row-tags{display:flex;flex-wrap:wrap;gap:4px;margin-top:2px}
|
||||
.row-right{display:flex;flex-direction:column;align-items:flex-end;justify-content:center;gap:4px;min-width:130px;padding-left:16px;border-left:1px dashed var(--line)}
|
||||
.row-archive{font-size:10px;font-weight:800;letter-spacing:.8px;color:#888;text-transform:uppercase;background:#F4F1EA;padding:3px 8px;border-radius:2px}
|
||||
.row-archive small{display:block;font-weight:600;color:#aaa;margin-top:1px;text-transform:none;letter-spacing:0;font-size:9.5px}
|
||||
@media (max-width: 900px){ .row-right{display:none} }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<div class="spec-meta">
|
||||
<div class="spec-meta-inner">
|
||||
<div>
|
||||
<h1>Briefwechsel — <span>Fill the Empty Rows</span></h1>
|
||||
<p>Five approaches to turning the empty right-hand space into information that helps users scan and decide.</p>
|
||||
</div>
|
||||
<div class="spec-meta-right">
|
||||
<div><strong>Concept</strong>Rich Rows</div>
|
||||
<div><strong>Spec</strong>1 / 5</div>
|
||||
<div><strong>Effort</strong>Small — no new backend data</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<nav class="spec-nav">
|
||||
<div class="spec-nav-inner">
|
||||
<span class="lbl">Specs</span>
|
||||
<a href="index.html">Overview</a>
|
||||
<a class="on" href="01-rich-rows.html">1 · Rich Rows</a>
|
||||
<a href="02-thumbnail-rows.html">2 · Thumbnail Rows</a>
|
||||
<a href="03-master-detail.html">3 · Master-Detail Split</a>
|
||||
<a href="04-gallery-cards.html">4 · Gallery Cards</a>
|
||||
<a href="05-person-dashboard.html">5 · Person Dashboard</a>
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
<div class="page-wrap">
|
||||
|
||||
<!-- Real Familienarchiv chrome -->
|
||||
<div class="hdr">
|
||||
<div class="hdr-logo">FAMILIENARCHIV</div>
|
||||
<div class="hdr-nav">
|
||||
<a>Documents</a><a>Persons</a><a class="on">Letters</a><a>Admin</a>
|
||||
</div>
|
||||
<div class="hdr-right">
|
||||
<div class="hdr-upload">⬆ UPLOAD</div>
|
||||
<span>DE · EN · ES</span>
|
||||
<div class="hdr-avatar">MR</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="page">
|
||||
|
||||
<div class="concept-intro">
|
||||
<h2>Concept 1 · Rich Rows — pack more metadata into each row</h2>
|
||||
No visuals, no structural change. Each row grows from a single line to a layered block: title (serif), summary (italic), meta row with icons, tag chips, and a right-hand column with archive box, script type and status.
|
||||
<div><span class="gain">✚ Zero backend changes</span><span class="gain">✚ Still one scrollable list</span><span class="cost">− Heavier rows; 10-row view becomes ~6–7 rows</span><span class="cost">− Empty-looking when a doc has no summary/tags</span></div>
|
||||
</div>
|
||||
|
||||
<!-- Filter card (same as production) -->
|
||||
<div class="card">
|
||||
<div class="filter-row">
|
||||
<div><div class="fl">Person</div><div class="fi">Walter de Gruyter</div></div>
|
||||
<div><div class="fl">Korrespondent — optional</div><div class="fi empty">Alle Korrespondenten</div></div>
|
||||
</div>
|
||||
<div class="filter-actions">
|
||||
<div class="btn">Newest ↓</div>
|
||||
<div class="btn">▾ Filter</div>
|
||||
<div class="count"><b>851</b> Briefe</div>
|
||||
</div>
|
||||
<div class="hintbar">📋 Alle Briefe von <b>Walter de Gruyter</b> — wähle einen Korrespondenten oben um einzugrenzen</div>
|
||||
</div>
|
||||
|
||||
<div class="rlist">
|
||||
<div class="year-divider"><span class="y">1940</span><span class="n">1 Brief</span></div>
|
||||
|
||||
<div class="row in">
|
||||
<div class="row-arrow">←</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">Demo leserlicher Brief</div>
|
||||
<div class="row-summary">„letzte Lebenstage von W. Dörpfeld in Griechenland"</div>
|
||||
<div class="row-meta"><span>31. Mai 1940</span><span class="sep">·</span><span>📍 Belgard</span><span class="sep">·</span><span>von <b>Gertrud von Rofden</b></span></div>
|
||||
<div class="row-tags"><span class="tag">Dörpfeld</span><span class="tag">Griechenland</span><span class="tag muted">privat</span></div>
|
||||
</div>
|
||||
<div class="row-right">
|
||||
<div class="row-archive">Kasten VII · Mappe 5</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="year-divider"><span class="y">1923</span><span class="n">5 Briefe</span></div>
|
||||
|
||||
<div class="row out">
|
||||
<div class="row-arrow">→</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">W-0397 – 2. September 1923 – B.Lichterfelde</div>
|
||||
<div class="row-summary">„von Elsbeth geschriebener Kommentar, den Herbert zum Brief erzählte"</div>
|
||||
<div class="row-meta"><span>2. September 1923</span><span class="sep">·</span><span>📍 B.Lichterfelde</span><span class="sep">·</span><span>an <b>Herbert Cram</b></span></div>
|
||||
<div class="row-tags"><span class="tag">Verlag</span><span class="tag">Familie</span></div>
|
||||
</div>
|
||||
<div class="row-right">
|
||||
<div class="row-archive">Kasten VI · Mappe 7</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="row out">
|
||||
<div class="row-arrow">→</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">W-0396 – 2. September 1923 – B.Lichterfelde</div>
|
||||
<div class="row-summary">—</div>
|
||||
<div class="row-meta"><span>2. September 1923</span><span class="sep">·</span><span>📍 B.Lichterfelde</span><span class="sep">·</span><span>an <b>Herbert Cram</b></span></div>
|
||||
</div>
|
||||
<div class="row-right">
|
||||
<div class="row-archive">Kasten VI · Mappe 7</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="row out">
|
||||
<div class="row-arrow">→</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">W-0524 – 31. Juli 1923 – Berlin</div>
|
||||
<div class="row-summary">„Glückwunsch zum 60. Geburtstag, Bericht über den Verlag"</div>
|
||||
<div class="row-meta"><span>31. Juli 1923</span><span class="sep">·</span><span>📍 Berlin</span><span class="sep">·</span><span>an <b>Walter Dieckmann</b></span></div>
|
||||
<div class="row-tags"><span class="tag">Geburtstag</span><span class="tag">Verlag</span></div>
|
||||
</div>
|
||||
<div class="row-right">
|
||||
<div class="row-archive">Kasten VI · Mappe 7</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="year-divider"><span class="y">1922</span><span class="n">37 Briefe</span></div>
|
||||
|
||||
<div class="row out">
|
||||
<div class="row-arrow">→</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">W-0521 – 24. Dezember 1922 – Berlin</div>
|
||||
<div class="row-summary">„Weihnachtsbrief, Erinnerungen an das Jahr und Bitte um ein Bild der Kinder"</div>
|
||||
<div class="row-meta"><span>24. Dezember 1922</span><span class="sep">·</span><span>📍 Berlin</span><span class="sep">·</span><span>an <b>Walter Dieckmann</b></span></div>
|
||||
<div class="row-tags"><span class="tag">Weihnachten</span><span class="tag">Familie</span><span class="tag muted">persönlich</span></div>
|
||||
</div>
|
||||
<div class="row-right">
|
||||
<div class="row-archive">Kasten V · Mappe 3</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="row out">
|
||||
<div class="row-arrow">→</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">W-0392 – 23. November 1921 – Bad Kissingen</div>
|
||||
<div class="row-summary">„Kurbericht, Gesundheitsupdate, Grüße an die Familie Cram"</div>
|
||||
<div class="row-meta"><span>23. November 1921</span><span class="sep">·</span><span>📍 Bad Kissingen</span><span class="sep">·</span><span>an <b>Herbert Cram</b></span></div>
|
||||
<div class="row-tags"><span class="tag">Kuraufenthalt</span></div>
|
||||
</div>
|
||||
<div class="row-right">
|
||||
<div class="row-archive">Kasten V · Mappe 1</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="row out">
|
||||
<div class="row-arrow">→</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">W-0501 – 13. Dezember 1920 – Berlin</div>
|
||||
<div class="row-summary">—</div>
|
||||
<div class="row-meta"><span>13. Dezember 1920</span><span class="sep">·</span><span>📍 Berlin</span><span class="sep">·</span><span>an <b>Walter Dieckmann</b></span></div>
|
||||
</div>
|
||||
<div class="row-right">
|
||||
<div class="row-archive">Kasten IV · Mappe 8</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,391 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="de">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>Spec 2 — Thumbnail Rows · Briefwechsel</title>
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||
<link href="https://fonts.googleapis.com/css2?family=Merriweather:wght@400;700&family=Montserrat:wght@400;500;600;700;800;900&display=swap" rel="stylesheet">
|
||||
<link rel="stylesheet" href="_shared.css">
|
||||
<style>
|
||||
/* Spec 2 v2 — bigger thumbnails, postcard support, bilateral distribution bar */
|
||||
.rlist{background:#fff;border:1px solid var(--line);border-radius:2px;overflow:hidden}
|
||||
|
||||
.row{display:grid;grid-template-columns:104px 1fr auto;column-gap:20px;align-items:center;padding:14px 20px;border-bottom:1px solid var(--line-2);border-left:3px solid transparent;cursor:pointer;transition:background .12s,box-shadow .12s}
|
||||
.row:hover{background:var(--muted)}
|
||||
.row:hover .row-thumb .thumb{transform:translateY(-1px);box-shadow:0 4px 12px rgba(0,0,0,.1),inset 0 0 0 1px #fff}
|
||||
.row:last-child{border-bottom:0}
|
||||
.row.out{border-left-color:var(--primary)}
|
||||
.row.in{border-left-color:var(--accent)}
|
||||
|
||||
/* Thumbnail wrapper — fixed 104×104 cell, thumb centered */
|
||||
.row-thumb{width:104px;height:120px;display:flex;align-items:center;justify-content:center;position:relative}
|
||||
.thumb{transition:transform .12s,box-shadow .12s;box-shadow:0 1px 3px rgba(0,0,0,.08),inset 0 0 0 1px #fff}
|
||||
.thumb.portrait{width:82px;height:106px}
|
||||
.thumb.landscape{width:104px;height:72px}
|
||||
.thumb.postcard{width:104px;height:66px}
|
||||
.thumb-badge{position:absolute;top:2px;right:0;background:var(--brand-navy);color:#fff;font-size:9px;font-weight:800;padding:2px 6px;border-radius:10px;box-shadow:0 0 0 2px #fff}
|
||||
|
||||
/* Subtle paper variations for natural feel */
|
||||
.thumb.paper-1{background:linear-gradient(180deg,#fdfcf7 0%,#f4efdf 100%)}
|
||||
.thumb.paper-2{background:linear-gradient(180deg,#fefdf8 0%,#eee8d3 100%)}
|
||||
.thumb.paper-3{background:linear-gradient(180deg,#fbf8ed 0%,#efe7cb 100%)}
|
||||
.thumb.paper-4{background:linear-gradient(180deg,#fdfcf5 0%,#f0e9d5 100%)}
|
||||
|
||||
/* Kurrent-style handwriting — denser, angled */
|
||||
.thumb.kurrent .thumb-lines{padding:14% 9%;gap:3.5px}
|
||||
.thumb.kurrent .thumb-lines i{height:1.3px;background:rgba(24,40,70,.45);transform:rotate(-.5deg)}
|
||||
.thumb.kurrent .thumb-lines i:nth-child(3n){width:65%}
|
||||
.thumb.kurrent .thumb-lines i:nth-child(4n){width:92%}
|
||||
.thumb.kurrent .thumb-lines i:nth-child(5n){width:48%;transform:rotate(.4deg)}
|
||||
|
||||
/* Typewriter — regular, crisp */
|
||||
.thumb.typed .thumb-lines{padding:16% 12%;gap:2.5px}
|
||||
.thumb.typed .thumb-lines i{height:1px;background:rgba(40,40,40,.45)}
|
||||
.thumb.typed .thumb-lines i:nth-child(odd){width:93%}
|
||||
.thumb.typed .thumb-lines i:nth-child(even){width:88%}
|
||||
.thumb.typed .thumb-lines i:nth-child(7n){width:45%}
|
||||
|
||||
/* Postcard — stamp corner + postmark + short address lines */
|
||||
.thumb.postcard .thumb-lines{padding:10% 10% 14% 10%;gap:4px}
|
||||
.thumb.postcard .thumb-lines i{height:1.1px;background:rgba(24,40,70,.45)}
|
||||
.thumb.postcard .thumb-lines i:nth-child(1){width:60%}
|
||||
.thumb.postcard .thumb-lines i:nth-child(2){width:45%}
|
||||
.thumb.postcard .thumb-lines i:nth-child(3){width:70%}
|
||||
.thumb.postcard .thumb-lines i:nth-child(4){width:40%}
|
||||
.thumb.postcard::after{content:'';position:absolute;top:6px;right:6px;width:16px;height:18px;background:linear-gradient(135deg,#b6c9d3,#8ba9b6);border:1px dashed rgba(0,0,0,.15);box-shadow:0 0 0 1px #fff}
|
||||
.thumb.postcard::before{content:'';position:absolute;top:10px;right:26px;width:14px;height:14px;border:1.5px solid rgba(150,30,30,.4);border-radius:50%;background:radial-gradient(circle,rgba(150,30,30,.1) 40%,transparent 60%)}
|
||||
|
||||
/* Letter heading (typed with date/address at top) */
|
||||
.thumb.typed::before{content:'';position:absolute;top:10%;left:12%;right:12%;height:2px;background:transparent;border-bottom:1.5px solid rgba(40,40,40,.35)}
|
||||
|
||||
.row-body{min-width:0;display:flex;flex-direction:column;gap:4px}
|
||||
.row-title{font-family:'Merriweather',serif;font-size:16px;font-weight:700;color:var(--ink);line-height:1.35;white-space:nowrap;overflow:hidden;text-overflow:ellipsis}
|
||||
.row-summary{font-family:'Merriweather',serif;font-size:14px;color:#444;line-height:1.55;font-style:italic;display:-webkit-box;-webkit-line-clamp:2;-webkit-box-orient:vertical;overflow:hidden}
|
||||
.row-summary::before{content:'„';color:var(--brand-mint);font-size:22px;font-weight:700;line-height:0;position:relative;top:6px;margin-right:2px}
|
||||
.row-summary::after{content:'”';color:var(--brand-mint);font-size:22px;font-weight:700;line-height:0;position:relative;top:6px;margin-left:2px}
|
||||
.row-meta{display:flex;flex-wrap:wrap;gap:4px 12px;font-size:12px;color:var(--ink-3);align-items:center;margin-top:2px}
|
||||
.row-meta .sep{color:#ccc}
|
||||
.row-meta .dir-ch{color:var(--primary);font-weight:800;font-size:13px}
|
||||
.row-meta .dir-ch.in{color:var(--accent)}
|
||||
.row-meta .kind-chip{display:inline-flex;align-items:center;gap:3px;background:#F4F1EA;color:#666;font-size:10px;font-weight:700;padding:2px 7px;border-radius:10px;letter-spacing:.3px;text-transform:uppercase}
|
||||
.row-tags{display:flex;gap:4px;flex-wrap:wrap}
|
||||
|
||||
.row-right{display:flex;flex-direction:column;align-items:flex-end;gap:2px}
|
||||
.row-date{font-family:'Merriweather',serif;font-size:14px;color:#444;white-space:nowrap;font-weight:700}
|
||||
.row-date-rel{font-size:10.5px;color:#aaa;font-weight:600;letter-spacing:.3px}
|
||||
|
||||
/* Bilateral distribution bar — lifted from production ConversationTimeline */
|
||||
.distbar{display:flex;flex-direction:column;gap:6px;background:var(--muted);border:1px solid var(--line);border-bottom:0;padding:12px 20px}
|
||||
.distbar-labels{display:flex;justify-content:space-between;font-size:13px;font-weight:700}
|
||||
.distbar-labels .out{color:var(--primary);display:inline-flex;align-items:center;gap:6px}
|
||||
.distbar-labels .in{color:var(--accent);display:inline-flex;align-items:center;gap:6px}
|
||||
.distbar-labels .cnt{font-variant-numeric:tabular-nums}
|
||||
.distbar-bar{height:6px;display:flex;border-radius:3px;overflow:hidden;background:var(--line)}
|
||||
.distbar-bar .out{background:var(--primary)}
|
||||
.distbar-bar .in{background:var(--accent)}
|
||||
.distbar + .rlist{border-radius:0 0 2px 2px}
|
||||
|
||||
/* Section headings within the spec */
|
||||
.example-h{font-family:'Merriweather',serif;font-size:18px;color:var(--brand-navy);margin:36px 0 10px;padding-top:24px;border-top:1px dashed var(--line);font-weight:700;display:flex;align-items:baseline;gap:10px}
|
||||
.example-h .lbl{font-family:'Montserrat',sans-serif;font-size:10px;font-weight:800;color:#888;letter-spacing:1px;text-transform:uppercase}
|
||||
.example-h:first-of-type{border-top:0;padding-top:0;margin-top:20px}
|
||||
.example-sub{font-size:12px;color:#777;margin-bottom:14px;line-height:1.55}
|
||||
|
||||
/* Swap-buttons and filter chrome for bilateral filter card */
|
||||
.swap-inline{display:inline-flex;align-items:center;justify-content:center;width:32px;height:32px;border:1px solid #C8C4BE;border-radius:50%;background:#F0EDE8;font-size:13px;color:var(--brand-navy);margin:0 -12px;position:relative;z-index:1}
|
||||
|
||||
@media (max-width: 760px){
|
||||
.row{grid-template-columns:82px 1fr;column-gap:14px}
|
||||
.row-right{grid-column:2;align-items:flex-start;margin-top:4px}
|
||||
.thumb.portrait{width:72px;height:94px}
|
||||
.thumb.landscape, .thumb.postcard{width:82px;height:58px}
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<div class="spec-meta">
|
||||
<div class="spec-meta-inner">
|
||||
<div>
|
||||
<h1>Briefwechsel — <span>Fill the Empty Rows</span></h1>
|
||||
<p>Five approaches to turning the empty right-hand space into information that helps users scan and decide.</p>
|
||||
</div>
|
||||
<div class="spec-meta-right">
|
||||
<div><strong>Concept</strong>Thumbnail Rows <span style="color:var(--brand-mint);margin-left:6px">v2</span></div>
|
||||
<div><strong>Spec</strong>2 / 5</div>
|
||||
<div><strong>Effort</strong>Medium — needs PDF thumbnail service</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<nav class="spec-nav">
|
||||
<div class="spec-nav-inner">
|
||||
<span class="lbl">Specs</span>
|
||||
<a href="index.html">Overview</a>
|
||||
<a href="01-rich-rows.html">1 · Rich Rows</a>
|
||||
<a class="on" href="02-thumbnail-rows.html">2 · Thumbnail Rows</a>
|
||||
<a href="03-master-detail.html">3 · Master-Detail Split</a>
|
||||
<a href="04-gallery-cards.html">4 · Gallery Cards</a>
|
||||
<a href="05-person-dashboard.html">5 · Person Dashboard</a>
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
<div class="page-wrap">
|
||||
|
||||
<div class="hdr">
|
||||
<div class="hdr-logo">FAMILIENARCHIV</div>
|
||||
<div class="hdr-nav"><a>Documents</a><a>Persons</a><a class="on">Letters</a><a>Admin</a></div>
|
||||
<div class="hdr-right"><div class="hdr-upload">⬆ UPLOAD</div><span>DE · EN · ES</span><div class="hdr-avatar">MR</div></div>
|
||||
</div>
|
||||
|
||||
<div class="page">
|
||||
|
||||
<div class="concept-intro">
|
||||
<h2>Concept 2 · Thumbnail Rows — discovery through visual + summary</h2>
|
||||
/briefwechsel is for fun discovery, not dense scanning. The row gets a bigger first-page thumbnail (portrait for letters, landscape for postcards); the <b>summary</b> reads like a quote next to it; the right column stays calm — just the date. Rows without a summary remain clean and uncrowded.
|
||||
<div><span class="gain">✚ Visual recognition — letters and postcards look like what they are</span><span class="gain">✚ Summary reads as a quote, invites opening the letter</span><span class="gain">✚ Distribution bar gives the bilateral pair its own identity</span><span class="cost">− Depends on the PDF-thumbnail service (open issue)</span></div>
|
||||
</div>
|
||||
|
||||
<!-- ───────── Example 1 · single person ───────── -->
|
||||
<div class="example-h">Beispiel 1 <span class="lbl">alle Briefe von Walter de Gruyter · 851</span></div>
|
||||
<div class="example-sub">Single-sender case: sender is filled, correspondent is open. Direction arrows tell sent vs received.</div>
|
||||
|
||||
<div class="card">
|
||||
<div class="filter-row">
|
||||
<div><div class="fl">Person</div><div class="fi">Walter de Gruyter</div></div>
|
||||
<div><div class="fl">Korrespondent — optional</div><div class="fi empty">Alle Korrespondenten</div></div>
|
||||
</div>
|
||||
<div class="filter-actions">
|
||||
<div class="btn">Newest ↓</div><div class="btn">▾ Filter</div>
|
||||
<div class="count"><b>851</b> Briefe</div>
|
||||
</div>
|
||||
<div class="hintbar">📋 Alle Briefe von <b>Walter de Gruyter</b> — wähle einen Korrespondenten oben, um einzugrenzen</div>
|
||||
</div>
|
||||
|
||||
<div class="rlist">
|
||||
<div class="year-divider"><span class="y">1940</span><span class="n">1 Brief</span></div>
|
||||
|
||||
<div class="row in">
|
||||
<div class="row-thumb">
|
||||
<div class="thumb portrait typed paper-1">
|
||||
<div class="thumb-lines"><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">Demo leserlicher Brief</div>
|
||||
<div class="row-summary">letzte Lebenstage von W. Dörpfeld in Griechenland — ausführlicher Bericht aus Belgard mit persönlichen Anmerkungen</div>
|
||||
<div class="row-meta"><span class="dir-ch in">← eingehend</span><span>Gertrud von Rofden</span><span class="sep">·</span><span>📍 Belgard</span><span class="sep">·</span><span class="row-tags"><span class="tag">Dörpfeld</span><span class="tag">Griechenland</span></span></div>
|
||||
</div>
|
||||
<div class="row-right">
|
||||
<div class="row-date">31. Mai 1940</div>
|
||||
<div class="row-date-rel">vor 85 Jahren</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="year-divider"><span class="y">1923</span><span class="n">5 Briefe</span></div>
|
||||
|
||||
<div class="row out">
|
||||
<div class="row-thumb">
|
||||
<div class="thumb portrait kurrent paper-2">
|
||||
<div class="thumb-lines"><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">W-0397 – 2. September 1923 – B.Lichterfelde</div>
|
||||
<div class="row-summary">von Elsbeth geschriebener Kommentar, den Herbert zum Brief erzählte — Notiz auf der Rückseite mit Korrekturen</div>
|
||||
<div class="row-meta"><span class="dir-ch">→ ausgehend</span><span>an Herbert Cram</span><span class="sep">·</span><span>📍 B.Lichterfelde</span><span class="sep">·</span><span class="row-tags"><span class="tag">Verlag</span><span class="tag">Familie</span></span></div>
|
||||
</div>
|
||||
<div class="row-right">
|
||||
<div class="row-date">2. September 1923</div>
|
||||
<div class="row-date-rel">vor 102 Jahren</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Postcard example -->
|
||||
<div class="row out">
|
||||
<div class="row-thumb">
|
||||
<div class="thumb postcard kurrent paper-4">
|
||||
<div class="thumb-lines"><i></i><i></i><i></i><i></i></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">Ansichtskarte – 2. September 1923 – B.Lichterfelde</div>
|
||||
<div class="row-summary">kurze Grüße aus B.Lichterfelde, Hinweis auf den kommenden Besuch</div>
|
||||
<div class="row-meta"><span class="dir-ch">→ ausgehend</span><span>an Herbert Cram</span><span class="sep">·</span><span>📍 B.Lichterfelde</span><span class="sep">·</span><span class="kind-chip">✉ Postkarte</span></div>
|
||||
</div>
|
||||
<div class="row-right">
|
||||
<div class="row-date">2. September 1923</div>
|
||||
<div class="row-date-rel">vor 102 Jahren</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Multi-page letter -->
|
||||
<div class="row out">
|
||||
<div class="row-thumb">
|
||||
<div class="thumb portrait kurrent paper-3">
|
||||
<div class="thumb-lines"><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i></div>
|
||||
</div>
|
||||
<span class="thumb-badge">4 S.</span>
|
||||
</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">W-0524 – 31. Juli 1923 – Berlin</div>
|
||||
<div class="row-summary">Glückwunsch zum 60. Geburtstag, Bericht über den Verlag und den anstehenden Umzug nach B.Lichterfelde im kommenden Herbst</div>
|
||||
<div class="row-meta"><span class="dir-ch">→ ausgehend</span><span>an Walter Dieckmann</span><span class="sep">·</span><span>📍 Berlin</span><span class="sep">·</span><span class="row-tags"><span class="tag">Geburtstag</span><span class="tag">Verlag</span></span></div>
|
||||
</div>
|
||||
<div class="row-right">
|
||||
<div class="row-date">31. Juli 1923</div>
|
||||
<div class="row-date-rel">vor 102 Jahren</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Without summary — still clean -->
|
||||
<div class="row out">
|
||||
<div class="row-thumb">
|
||||
<div class="thumb portrait kurrent paper-1">
|
||||
<div class="thumb-lines"><i></i><i></i><i></i><i></i><i></i><i></i></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">W-0396 – 2. September 1923 – B.Lichterfelde</div>
|
||||
<div class="row-meta"><span class="dir-ch">→ ausgehend</span><span>an Herbert Cram</span><span class="sep">·</span><span>📍 B.Lichterfelde</span></div>
|
||||
</div>
|
||||
<div class="row-right">
|
||||
<div class="row-date">2. September 1923</div>
|
||||
<div class="row-date-rel">vor 102 Jahren</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="year-divider"><span class="y">1922</span><span class="n">37 Briefe</span></div>
|
||||
|
||||
<div class="row out">
|
||||
<div class="row-thumb">
|
||||
<div class="thumb portrait kurrent paper-2">
|
||||
<div class="thumb-lines"><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">W-0521 – 24. Dezember 1922 – Berlin</div>
|
||||
<div class="row-summary">Weihnachtsbrief, Erinnerungen an das Jahr und Bitte um ein Bild der Kinder zum Christfest</div>
|
||||
<div class="row-meta"><span class="dir-ch">→ ausgehend</span><span>an Walter Dieckmann</span><span class="sep">·</span><span>📍 Berlin</span><span class="sep">·</span><span class="row-tags"><span class="tag">Weihnachten</span><span class="tag">Familie</span></span></div>
|
||||
</div>
|
||||
<div class="row-right">
|
||||
<div class="row-date">24. Dezember 1922</div>
|
||||
<div class="row-date-rel">vor 103 Jahren</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- ───────── Example 2 · bilateral ───────── -->
|
||||
<div class="example-h">Beispiel 2 <span class="lbl">Briefwechsel Walter ↔ Herbert · 143</span></div>
|
||||
<div class="example-sub">Bilateral case: both filters are set. The distribution bar above the list shows how the correspondence is split — instantly visible who wrote more.</div>
|
||||
|
||||
<div class="card">
|
||||
<div class="filter-row">
|
||||
<div><div class="fl">Person</div><div class="fi">Walter de Gruyter</div></div>
|
||||
<div><div class="fl">Korrespondent</div><div class="fi">Herbert Cram</div></div>
|
||||
</div>
|
||||
<div class="filter-actions">
|
||||
<div class="btn">⇄ Tauschen</div>
|
||||
<div class="btn">Newest ↓</div><div class="btn">▾ Filter</div>
|
||||
<div class="count"><b>143</b> Briefe im Zeitraum</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="distbar" role="img" aria-label="Briefverteilung: 87 von Walter de Gruyter, 56 von Herbert Cram">
|
||||
<div class="distbar-labels">
|
||||
<span class="out"><span class="cnt">87</span> von Walter de Gruyter →</span>
|
||||
<span class="in">← <span class="cnt">56</span> von Herbert Cram</span>
|
||||
</div>
|
||||
<div class="distbar-bar">
|
||||
<span class="out" style="width:60.8%"></span>
|
||||
<span class="in" style="width:39.2%"></span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="rlist" style="border-radius:0 0 2px 2px">
|
||||
<div class="year-divider"><span class="y">1923</span><span class="n">12 Briefe</span></div>
|
||||
|
||||
<div class="row out">
|
||||
<div class="row-thumb">
|
||||
<div class="thumb portrait kurrent paper-2">
|
||||
<div class="thumb-lines"><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">W-0397 – 2. September 1923 – B.Lichterfelde</div>
|
||||
<div class="row-summary">von Elsbeth geschriebener Kommentar, den Herbert zum Brief erzählte</div>
|
||||
<div class="row-meta"><span class="dir-ch">→</span><span>Walter an Herbert</span><span class="sep">·</span><span>📍 B.Lichterfelde</span><span class="sep">·</span><span class="row-tags"><span class="tag">Verlag</span></span></div>
|
||||
</div>
|
||||
<div class="row-right">
|
||||
<div class="row-date">2. September 1923</div>
|
||||
<div class="row-date-rel">vor 102 Jahren</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="row in">
|
||||
<div class="row-thumb">
|
||||
<div class="thumb portrait kurrent paper-3">
|
||||
<div class="thumb-lines"><i></i><i></i><i></i><i></i><i></i><i></i><i></i></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">H-0213 – 29. August 1923 – Leipzig</div>
|
||||
<div class="row-summary">Antwort auf Walters Anfrage zur Herbstauslieferung, Bitte um Rückmeldung bis Monatsende</div>
|
||||
<div class="row-meta"><span class="dir-ch in">←</span><span>Herbert an Walter</span><span class="sep">·</span><span>📍 Leipzig</span><span class="sep">·</span><span class="row-tags"><span class="tag">Verlag</span></span></div>
|
||||
</div>
|
||||
<div class="row-right">
|
||||
<div class="row-date">29. August 1923</div>
|
||||
<div class="row-date-rel">vor 102 Jahren</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="row in">
|
||||
<div class="row-thumb">
|
||||
<div class="thumb postcard kurrent paper-4">
|
||||
<div class="thumb-lines"><i></i><i></i><i></i><i></i></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">Ansichtskarte – 20. August 1923 – Thüringer Wald</div>
|
||||
<div class="row-summary">Urlaubsgruß, kurze Notiz über Wetter und geplante Rückkehr</div>
|
||||
<div class="row-meta"><span class="dir-ch in">←</span><span>Herbert an Walter</span><span class="sep">·</span><span>📍 Thüringer Wald</span><span class="sep">·</span><span class="kind-chip">✉ Postkarte</span></div>
|
||||
</div>
|
||||
<div class="row-right">
|
||||
<div class="row-date">20. August 1923</div>
|
||||
<div class="row-date-rel">vor 102 Jahren</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="row out">
|
||||
<div class="row-thumb">
|
||||
<div class="thumb portrait kurrent paper-1">
|
||||
<div class="thumb-lines"><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i></div>
|
||||
</div>
|
||||
<span class="thumb-badge">3 S.</span>
|
||||
</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">W-0392 – 23. November 1921 – Bad Kissingen</div>
|
||||
<div class="row-summary">Kurbericht aus Bad Kissingen, Gesundheitsupdate nach der ersten Woche, Grüße an die Familie Cram</div>
|
||||
<div class="row-meta"><span class="dir-ch">→</span><span>Walter an Herbert</span><span class="sep">·</span><span>📍 Bad Kissingen</span><span class="sep">·</span><span class="row-tags"><span class="tag">Kuraufenthalt</span></span></div>
|
||||
</div>
|
||||
<div class="row-right">
|
||||
<div class="row-date">23. November 1921</div>
|
||||
<div class="row-date-rel">vor 104 Jahren</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Notes footer -->
|
||||
<div style="margin-top:32px;padding:16px 20px;background:#fff;border-left:4px solid var(--brand-navy);font-size:13px;color:#333;line-height:1.7">
|
||||
<b style="color:var(--brand-navy)">Details:</b>
|
||||
<ul style="margin:8px 0 0 20px;padding:0">
|
||||
<li><b>Thumbnail</b> — 82×106 for portrait, 104×72 for landscape/postcards. Postcards also get a stamp + postmark corner. Kurrent handwriting rendered with slight line skew; typewriter rendered with clean parallel lines. Multi-page letters get a "<code>4 S.</code>" badge.</li>
|
||||
<li><b>Summary</b> — shown in serif italic with colored quote marks. Reads like a quote from the letter. If empty, the row simply omits the line — no apologetic placeholder.</li>
|
||||
<li><b>Right column</b> — date only, in serif. We dropped archive box (only meaningful for one family archive) and any lookup metadata. The right column stays calm on purpose.</li>
|
||||
<li><b>Distribution bar</b> — appears only in bilateral mode (both sender and receiver set). Pattern lifted from the existing <code>ConversationTimeline</code> so it's familiar.</li>
|
||||
<li><b>Mobile</b> — thumbnail shrinks (72×94 portrait / 82×58 landscape) and the right column wraps under the body.</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,222 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="de">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>Spec 3 — Master-Detail · Briefwechsel</title>
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||
<link href="https://fonts.googleapis.com/css2?family=Merriweather:wght@400;700&family=Montserrat:wght@400;500;600;700;800;900&display=swap" rel="stylesheet">
|
||||
<link rel="stylesheet" href="_shared.css">
|
||||
<style>
|
||||
/* Spec 3 specific */
|
||||
.split{display:grid;grid-template-columns:minmax(0,1fr) minmax(0,1.15fr);gap:24px;align-items:start}
|
||||
@media (max-width: 1100px){ .split{grid-template-columns:1fr} .preview-col{position:static !important} }
|
||||
|
||||
/* List (compact) */
|
||||
.rlist{background:#fff;border:1px solid var(--line);border-radius:2px;overflow:hidden}
|
||||
.row{display:grid;grid-template-columns:16px 1fr auto;column-gap:10px;align-items:center;padding:10px 14px;border-bottom:1px solid var(--line-2);border-left:3px solid transparent;cursor:pointer}
|
||||
.row:hover{background:var(--muted)}
|
||||
.row.out{border-left-color:var(--primary)}
|
||||
.row.in{border-left-color:var(--accent)}
|
||||
.row.sel{background:#e7f4f3;border-left-color:var(--brand-mint);box-shadow:inset 2px 0 0 var(--accent)}
|
||||
.row-arrow{font-size:13px;opacity:.55}
|
||||
.row-body{min-width:0}
|
||||
.row-title{font-family:'Merriweather',serif;font-size:13.5px;font-weight:700;color:var(--ink);white-space:nowrap;overflow:hidden;text-overflow:ellipsis;margin-bottom:2px}
|
||||
.row-sub{font-size:11px;color:var(--ink-3);display:flex;gap:6px;align-items:center}
|
||||
.row-sub .sep{color:#ccc}
|
||||
.row-right{font-size:11px;color:var(--ink-3);display:flex;align-items:center;gap:6px}
|
||||
|
||||
/* Preview panel */
|
||||
.preview-col{position:sticky;top:20px}
|
||||
.preview{background:#fff;border:1px solid var(--line);border-radius:2px;overflow:hidden;box-shadow:0 2px 8px rgba(0,0,0,.04)}
|
||||
.prev-hdr{padding:16px 20px;background:#fafaf5;border-bottom:1px solid var(--line);display:flex;justify-content:space-between;align-items:flex-start;gap:12px}
|
||||
.prev-hdr-left .prev-kind{font-size:10px;font-weight:800;text-transform:uppercase;letter-spacing:1px;color:var(--accent);margin-bottom:4px}
|
||||
.prev-hdr-left h2{font-family:'Merriweather',serif;font-size:18px;color:var(--brand-navy);line-height:1.35}
|
||||
.prev-hdr .close{font-size:18px;color:#999;cursor:pointer}
|
||||
.prev-body{display:grid;grid-template-columns:200px 1fr;gap:20px;padding:20px}
|
||||
.prev-thumb{width:200px;height:260px;flex-shrink:0}
|
||||
.prev-thumb .thumb{width:100%;height:100%}
|
||||
.prev-meta{display:flex;flex-direction:column;gap:14px}
|
||||
.mkv{display:grid;grid-template-columns:92px 1fr;row-gap:6px;column-gap:10px;font-size:12px;color:#444}
|
||||
.mkv .k{font-size:10px;font-weight:800;text-transform:uppercase;letter-spacing:.6px;color:#888;padding-top:2px}
|
||||
.mkv .v{color:var(--ink)}
|
||||
.mkv .v b{color:var(--brand-navy)}
|
||||
.prev-summary{background:#fbfaf5;border-left:3px solid var(--brand-mint);padding:10px 14px;font-family:'Merriweather',serif;font-size:13px;color:#333;line-height:1.65;font-style:italic}
|
||||
.prev-tags{display:flex;flex-wrap:wrap;gap:5px}
|
||||
.prev-excerpt{padding:0 20px 18px;font-family:'Merriweather',serif;font-size:13px;color:#444;line-height:1.75;border-top:1px dashed var(--line);padding-top:18px}
|
||||
.prev-excerpt .lbl{font-family:'Montserrat',sans-serif;font-size:10px;font-weight:800;text-transform:uppercase;letter-spacing:1px;color:#888;margin-bottom:8px;display:block;font-style:normal}
|
||||
.prev-actions{border-top:1px solid var(--line);padding:12px 20px;background:#fafaf5;display:flex;gap:8px;justify-content:flex-end}
|
||||
.prev-actions .btn.primary{background:var(--brand-navy);color:#fff;border-color:var(--brand-navy)}
|
||||
.prev-hint{padding:14px 20px;background:#f7f5f2;color:#777;font-size:11.5px;border-top:1px dashed var(--line);font-style:italic}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<div class="spec-meta">
|
||||
<div class="spec-meta-inner">
|
||||
<div>
|
||||
<h1>Briefwechsel — <span>Fill the Empty Rows</span></h1>
|
||||
<p>Five approaches to turning the empty right-hand space into information that helps users scan and decide.</p>
|
||||
</div>
|
||||
<div class="spec-meta-right">
|
||||
<div><strong>Concept</strong>Master-Detail Split</div>
|
||||
<div><strong>Spec</strong>3 / 5</div>
|
||||
<div><strong>Effort</strong>Medium — requires selection state</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<nav class="spec-nav">
|
||||
<div class="spec-nav-inner">
|
||||
<span class="lbl">Specs</span>
|
||||
<a href="index.html">Overview</a>
|
||||
<a href="01-rich-rows.html">1 · Rich Rows</a>
|
||||
<a href="02-thumbnail-rows.html">2 · Thumbnail Rows</a>
|
||||
<a class="on" href="03-master-detail.html">3 · Master-Detail Split</a>
|
||||
<a href="04-gallery-cards.html">4 · Gallery Cards</a>
|
||||
<a href="05-person-dashboard.html">5 · Person Dashboard</a>
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
<div class="page-wrap">
|
||||
|
||||
<div class="hdr">
|
||||
<div class="hdr-logo">FAMILIENARCHIV</div>
|
||||
<div class="hdr-nav"><a>Documents</a><a>Persons</a><a class="on">Letters</a><a>Admin</a></div>
|
||||
<div class="hdr-right"><div class="hdr-upload">⬆ UPLOAD</div><span>DE · EN · ES</span><div class="hdr-avatar">MR</div></div>
|
||||
</div>
|
||||
|
||||
<div class="page">
|
||||
|
||||
<div class="concept-intro">
|
||||
<h2>Concept 3 · Master-Detail Split — list on left, preview panel on right</h2>
|
||||
Keep the list compact (it stays scannable). Add a persistent right-hand panel that fills the empty space with a preview of the selected letter — thumbnail, metadata, summary, and a transcription excerpt when available. Click a row to swap its content; "Open" button still navigates to the full document page.
|
||||
<div><span class="gain">✚ Users browse 10× more letters per session (no back-nav)</span><span class="gain">✚ Right column is always doing work</span><span class="cost">− On narrow viewports collapses to stacked</span><span class="cost">− Mobile pattern differs — needs sheet/drawer</span></div>
|
||||
</div>
|
||||
|
||||
<!-- Filter card -->
|
||||
<div class="card">
|
||||
<div class="filter-row">
|
||||
<div><div class="fl">Person</div><div class="fi">Walter de Gruyter</div></div>
|
||||
<div><div class="fl">Korrespondent — optional</div><div class="fi empty">Alle Korrespondenten</div></div>
|
||||
</div>
|
||||
<div class="filter-actions">
|
||||
<div class="btn">Newest ↓</div><div class="btn">▾ Filter</div>
|
||||
<div class="count"><b>851</b> Briefe</div>
|
||||
</div>
|
||||
<div class="hintbar">📋 Alle Briefe von <b>Walter de Gruyter</b> — wähle einen Korrespondenten oben, um einzugrenzen</div>
|
||||
</div>
|
||||
|
||||
<div class="split">
|
||||
<!-- Master list -->
|
||||
<div class="rlist">
|
||||
<div class="year-divider"><span class="y">1940</span><span class="n">1 Brief</span></div>
|
||||
<div class="row in">
|
||||
<div class="row-arrow">←</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">Demo leserlicher Brief</div>
|
||||
<div class="row-sub"><span>31. Mai 1940</span><span class="sep">·</span><span>Belgard</span><span class="sep">·</span><span>Gertrud von Rofden</span></div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
<div class="year-divider"><span class="y">1923</span><span class="n">5 Briefe</span></div>
|
||||
|
||||
<div class="row out sel">
|
||||
<div class="row-arrow">→</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">W-0397 – 2. September 1923 – B.Lichterfelde</div>
|
||||
<div class="row-sub"><span>2. Sep 1923</span><span class="sep">·</span><span>B.Lichterfelde</span><span class="sep">·</span><span>Herbert Cram</span></div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
<div class="row out">
|
||||
<div class="row-arrow">→</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">W-0396 – 2. September 1923 – B.Lichterfelde</div>
|
||||
<div class="row-sub"><span>2. Sep 1923</span><span class="sep">·</span><span>B.Lichterfelde</span><span class="sep">·</span><span>Herbert Cram</span></div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
<div class="row out">
|
||||
<div class="row-arrow">→</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">W-0524 – 31. Juli 1923 – Berlin</div>
|
||||
<div class="row-sub"><span>31. Juli 1923</span><span class="sep">·</span><span>Berlin</span><span class="sep">·</span><span>Walter Dieckmann</span></div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
<div class="row out">
|
||||
<div class="row-arrow">→</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">W-0523 – 12. Mai 1923 – Berlin</div>
|
||||
<div class="row-sub"><span>12. Mai 1923</span><span class="sep">·</span><span>Berlin</span><span class="sep">·</span><span>Walter Dieckmann</span></div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
<div class="row out">
|
||||
<div class="row-arrow">→</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">W-0522 – 7. März 1923 – Lindau Bodensee</div>
|
||||
<div class="row-sub"><span>7. März 1923</span><span class="sep">·</span><span>Lindau Bodensee</span><span class="sep">·</span><span>Walter Dieckmann</span></div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
<div class="year-divider"><span class="y">1922</span><span class="n">37 Briefe</span></div>
|
||||
|
||||
<div class="row out">
|
||||
<div class="row-arrow">→</div>
|
||||
<div class="row-body">
|
||||
<div class="row-title">W-0521 – 24. Dezember 1922 – Berlin</div>
|
||||
<div class="row-sub"><span>24. Dez 1922</span><span class="sep">·</span><span>Berlin</span><span class="sep">·</span><span>Walter Dieckmann</span></div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Detail preview -->
|
||||
<div class="preview-col">
|
||||
<div class="preview">
|
||||
<div class="prev-hdr">
|
||||
<div class="prev-hdr-left">
|
||||
<div class="prev-kind">→ Ausgehend · an Herbert Cram</div>
|
||||
<h2>W-0397 – 2. September 1923 – B.Lichterfelde</h2>
|
||||
</div>
|
||||
<div class="close">✕</div>
|
||||
</div>
|
||||
<div class="prev-body">
|
||||
<div class="prev-thumb">
|
||||
<div class="thumb handwritten"><div class="thumb-lines"><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i></div></div>
|
||||
</div>
|
||||
<div class="prev-meta">
|
||||
<div class="mkv">
|
||||
<div class="k">Datum</div><div class="v"><b>2. September 1923</b></div>
|
||||
<div class="k">Ort</div><div class="v">B.Lichterfelde</div>
|
||||
<div class="k">Absender</div><div class="v"><b>Walter de Gruyter</b></div>
|
||||
<div class="k">Empfänger</div><div class="v">Herbert Cram</div>
|
||||
<div class="k">Archiv</div><div class="v">Kasten VI · Mappe 7</div>
|
||||
</div>
|
||||
<div class="prev-summary">„von Elsbeth geschriebener Kommentar, den Herbert zum Brief erzählte — Notiz auf der Rückseite mit Korrekturen zur Publikationsliste.“</div>
|
||||
<div class="prev-tags"><span class="tag">Verlag</span><span class="tag">Familie</span><span class="tag muted">Korrespondenz</span></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="prev-excerpt">
|
||||
<span class="lbl">Transkription (Auszug)</span>
|
||||
„Lieber Herbert, heute erreichte mich Dein Brief vom 29. August, und ich danke Dir herzlich für die ausführliche Schilderung. Die Angelegenheit mit dem Verlag soll am Montag abschließend besprochen werden — ich werde Dir sogleich Bescheid geben. In Belgard waren wir alle wohl, und die Kinder grüßen …“
|
||||
</div>
|
||||
<div class="prev-actions">
|
||||
<div class="btn">◎ In Briefwechsel öffnen</div>
|
||||
<div class="btn primary">↗ Vollständiges Dokument</div>
|
||||
</div>
|
||||
</div>
|
||||
<div style="margin-top:8px;font-size:11px;color:#888;padding:0 4px">Tipp: Pfeiltasten ↑↓ zum Durchblättern, Enter zum Öffnen.</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,227 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="de">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>Spec 4 — Gallery Cards · Briefwechsel</title>
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||
<link href="https://fonts.googleapis.com/css2?family=Merriweather:wght@400;700&family=Montserrat:wght@400;500;600;700;800;900&display=swap" rel="stylesheet">
|
||||
<link rel="stylesheet" href="_shared.css">
|
||||
<style>
|
||||
/* Spec 4 specific */
|
||||
.view-toggle{display:flex;gap:4px;background:#F0EDE5;padding:3px;border-radius:3px;margin-left:8px}
|
||||
.view-toggle span{padding:5px 10px;font-size:11px;font-weight:700;color:#888;cursor:pointer;border-radius:2px}
|
||||
.view-toggle span.on{background:#fff;color:var(--brand-navy);box-shadow:0 1px 2px rgba(0,0,0,.08)}
|
||||
|
||||
.year-band{font-family:'Merriweather',serif;font-size:28px;font-weight:900;color:var(--brand-navy);letter-spacing:-.5px;margin:24px 0 10px 2px;display:flex;align-items:baseline;gap:12px}
|
||||
.year-band .n{font-family:'Montserrat',sans-serif;font-size:12px;font-weight:700;color:#888;letter-spacing:0}
|
||||
|
||||
.grid{display:grid;grid-template-columns:repeat(4, 1fr);gap:18px}
|
||||
@media (max-width: 1280px){ .grid{grid-template-columns:repeat(3, 1fr)} }
|
||||
@media (max-width: 900px){ .grid{grid-template-columns:repeat(2, 1fr)} }
|
||||
|
||||
.card-doc{background:#fff;border:1px solid var(--line);border-radius:2px;overflow:hidden;cursor:pointer;transition:transform .12s,box-shadow .12s;display:flex;flex-direction:column}
|
||||
.card-doc:hover{transform:translateY(-2px);box-shadow:0 6px 20px rgba(0,0,0,.08)}
|
||||
.card-doc .thumb-wrap{position:relative;aspect-ratio:3/4;overflow:hidden;background:#f5f2ea;border-bottom:1px solid var(--line)}
|
||||
.card-doc .thumb{width:100%;height:100%}
|
||||
.card-doc .dir-badge{position:absolute;top:8px;left:8px;background:rgba(0,40,80,.9);color:#fff;font-size:10px;font-weight:800;padding:3px 8px;border-radius:10px;display:flex;align-items:center;gap:3px;letter-spacing:.3px}
|
||||
.card-doc .dir-badge.in{background:rgba(47,158,149,.9)}
|
||||
.card-doc .body{padding:12px 14px 14px;display:flex;flex-direction:column;gap:4px;flex:1}
|
||||
.card-doc .body h3{font-family:'Merriweather',serif;font-size:13px;font-weight:700;color:var(--brand-navy);line-height:1.4;display:-webkit-box;-webkit-line-clamp:2;-webkit-box-orient:vertical;overflow:hidden}
|
||||
.card-doc .body .date{font-size:11px;color:var(--ink-3);font-weight:600}
|
||||
.card-doc .body .party{font-size:11px;color:var(--ink-3);font-style:italic;margin-top:2px;display:-webkit-box;-webkit-line-clamp:1;-webkit-box-orient:vertical;overflow:hidden}
|
||||
.card-doc .body .summary{font-size:11.5px;color:#666;font-family:'Merriweather',serif;font-style:italic;line-height:1.45;margin-top:6px;display:-webkit-box;-webkit-line-clamp:3;-webkit-box-orient:vertical;overflow:hidden}
|
||||
.card-doc .body .tags{margin-top:auto;padding-top:10px;display:flex;gap:4px;flex-wrap:wrap}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<div class="spec-meta">
|
||||
<div class="spec-meta-inner">
|
||||
<div>
|
||||
<h1>Briefwechsel — <span>Fill the Empty Rows</span></h1>
|
||||
<p>Five approaches to turning the empty right-hand space into information that helps users scan and decide.</p>
|
||||
</div>
|
||||
<div class="spec-meta-right">
|
||||
<div><strong>Concept</strong>Gallery Cards</div>
|
||||
<div><strong>Spec</strong>4 / 5</div>
|
||||
<div><strong>Effort</strong>Large — structural change + thumbnail service</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<nav class="spec-nav">
|
||||
<div class="spec-nav-inner">
|
||||
<span class="lbl">Specs</span>
|
||||
<a href="index.html">Overview</a>
|
||||
<a href="01-rich-rows.html">1 · Rich Rows</a>
|
||||
<a href="02-thumbnail-rows.html">2 · Thumbnail Rows</a>
|
||||
<a href="03-master-detail.html">3 · Master-Detail Split</a>
|
||||
<a class="on" href="04-gallery-cards.html">4 · Gallery Cards</a>
|
||||
<a href="05-person-dashboard.html">5 · Person Dashboard</a>
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
<div class="page-wrap">
|
||||
|
||||
<div class="hdr">
|
||||
<div class="hdr-logo">FAMILIENARCHIV</div>
|
||||
<div class="hdr-nav"><a>Documents</a><a>Persons</a><a class="on">Letters</a><a>Admin</a></div>
|
||||
<div class="hdr-right"><div class="hdr-upload">⬆ UPLOAD</div><span>DE · EN · ES</span><div class="hdr-avatar">MR</div></div>
|
||||
</div>
|
||||
|
||||
<div class="page">
|
||||
|
||||
<div class="concept-intro">
|
||||
<h2>Concept 4 · Gallery Cards — grid of letter cards, album style</h2>
|
||||
Replace the list with a 4-column (collapses to 3 → 2) grid. Each card is a miniature letter card with thumbnail, title, date, correspondent, and a 3-line summary. Year bands replace dividers. The page becomes a family-letter-album.
|
||||
<div><span class="gain">✚ Beautiful browsing for archive overview</span><span class="gain">✚ Most "filled" of all concepts — no empty space</span><span class="cost">− Chronological scanning is worse than lists</span><span class="cost">− 851 letters → ~213 rows in grid, long page or needs paging</span></div>
|
||||
</div>
|
||||
|
||||
<!-- Filter card -->
|
||||
<div class="card">
|
||||
<div class="filter-row">
|
||||
<div><div class="fl">Person</div><div class="fi">Walter de Gruyter</div></div>
|
||||
<div><div class="fl">Korrespondent — optional</div><div class="fi empty">Alle Korrespondenten</div></div>
|
||||
</div>
|
||||
<div class="filter-actions">
|
||||
<div class="btn">Newest ↓</div><div class="btn">▾ Filter</div>
|
||||
<div class="view-toggle"><span>☰ Liste</span><span class="on">▦ Galerie</span></div>
|
||||
<div class="count"><b>851</b> Briefe</div>
|
||||
</div>
|
||||
<div class="hintbar">📋 Alle Briefe von <b>Walter de Gruyter</b> — wähle einen Korrespondenten oben um einzugrenzen</div>
|
||||
</div>
|
||||
|
||||
<div class="year-band">1940 <span class="n">1 Brief</span></div>
|
||||
<div class="grid">
|
||||
<div class="card-doc">
|
||||
<div class="thumb-wrap">
|
||||
<div class="thumb"><div class="thumb-lines"><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i></div></div>
|
||||
<span class="dir-badge in">← eingehend</span>
|
||||
</div>
|
||||
<div class="body">
|
||||
<h3>Demo leserlicher Brief</h3>
|
||||
<div class="date">31. Mai 1940 · Belgard</div>
|
||||
<div class="party">von <b>Gertrud von Rofden</b></div>
|
||||
<div class="summary">„letzte Lebenstage von W. Dörpfeld in Griechenland — ausführlicher Bericht"</div>
|
||||
<div class="tags"><span class="tag">Dörpfeld</span><span class="tag">Griechenland</span></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="year-band">1923 <span class="n">5 Briefe</span></div>
|
||||
<div class="grid">
|
||||
<div class="card-doc">
|
||||
<div class="thumb-wrap">
|
||||
<div class="thumb handwritten"><div class="thumb-lines"><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i></div></div>
|
||||
<span class="dir-badge">→ ausgehend</span>
|
||||
</div>
|
||||
<div class="body">
|
||||
<h3>W-0397 – 2. September 1923 – B.Lichterfelde</h3>
|
||||
<div class="date">2. September 1923 · B.Lichterfelde</div>
|
||||
<div class="party">an <b>Herbert Cram</b></div>
|
||||
<div class="summary">„von Elsbeth geschriebener Kommentar, den Herbert zum Brief erzählte"</div>
|
||||
<div class="tags"><span class="tag">Verlag</span><span class="tag">Familie</span></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="card-doc">
|
||||
<div class="thumb-wrap">
|
||||
<div class="thumb handwritten"><div class="thumb-lines"><i></i><i></i><i></i><i></i><i></i><i></i></div></div>
|
||||
<span class="dir-badge">→ ausgehend</span>
|
||||
</div>
|
||||
<div class="body">
|
||||
<h3>W-0396 – 2. September 1923 – B.Lichterfelde</h3>
|
||||
<div class="date">2. September 1923 · B.Lichterfelde</div>
|
||||
<div class="party">an <b>Herbert Cram</b></div>
|
||||
<div class="tags"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="card-doc">
|
||||
<div class="thumb-wrap">
|
||||
<div class="thumb handwritten"><div class="thumb-lines"><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i></div></div>
|
||||
<span class="dir-badge">→ ausgehend</span>
|
||||
</div>
|
||||
<div class="body">
|
||||
<h3>W-0524 – 31. Juli 1923 – Berlin</h3>
|
||||
<div class="date">31. Juli 1923 · Berlin</div>
|
||||
<div class="party">an <b>Walter Dieckmann</b></div>
|
||||
<div class="summary">„Glückwunsch zum 60. Geburtstag, Bericht über den Verlag"</div>
|
||||
<div class="tags"><span class="tag">Geburtstag</span><span class="tag">Verlag</span></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="card-doc">
|
||||
<div class="thumb-wrap">
|
||||
<div class="thumb handwritten"><div class="thumb-lines"><i></i><i></i><i></i><i></i><i></i><i></i><i></i></div></div>
|
||||
<span class="dir-badge">→ ausgehend</span>
|
||||
</div>
|
||||
<div class="body">
|
||||
<h3>W-0523 – 12. Mai 1923 – Berlin</h3>
|
||||
<div class="date">12. Mai 1923 · Berlin</div>
|
||||
<div class="party">an <b>Walter Dieckmann</b></div>
|
||||
<div class="summary">„Routinebericht des Verlagsgeschäfts, Rückfragen zu Auftragslage"</div>
|
||||
<div class="tags"><span class="tag">Verlag</span></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="year-band">1922 <span class="n">37 Briefe</span></div>
|
||||
<div class="grid">
|
||||
<div class="card-doc">
|
||||
<div class="thumb-wrap">
|
||||
<div class="thumb handwritten"><div class="thumb-lines"><i></i><i></i><i></i><i></i><i></i><i></i><i></i></div></div>
|
||||
<span class="dir-badge">→ ausgehend</span>
|
||||
</div>
|
||||
<div class="body">
|
||||
<h3>W-0521 – 24. Dezember 1922 – Berlin</h3>
|
||||
<div class="date">24. Dezember 1922 · Berlin</div>
|
||||
<div class="party">an <b>Walter Dieckmann</b></div>
|
||||
<div class="summary">„Weihnachtsbrief, Erinnerungen an das Jahr und Bitte um ein Bild der Kinder"</div>
|
||||
<div class="tags"><span class="tag">Weihnachten</span><span class="tag">Familie</span></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="card-doc">
|
||||
<div class="thumb-wrap">
|
||||
<div class="thumb handwritten"><div class="thumb-lines"><i></i><i></i><i></i><i></i><i></i><i></i></div></div>
|
||||
<span class="dir-badge">→ ausgehend</span>
|
||||
</div>
|
||||
<div class="body">
|
||||
<h3>W-0392 – 23. Nov 1921 – Bad Kissingen</h3>
|
||||
<div class="date">23. November 1921 · Bad Kissingen</div>
|
||||
<div class="party">an <b>Herbert Cram</b></div>
|
||||
<div class="summary">„Kurbericht, Gesundheitsupdate, Grüße an die Familie"</div>
|
||||
<div class="tags"><span class="tag">Kuraufenthalt</span></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="card-doc">
|
||||
<div class="thumb-wrap">
|
||||
<div class="thumb"><div class="thumb-lines"><i></i><i></i><i></i><i></i><i></i><i></i><i></i><i></i></div></div>
|
||||
<span class="dir-badge">→ ausgehend</span>
|
||||
</div>
|
||||
<div class="body">
|
||||
<h3>W-0501 – 13. Dez 1920 – Berlin</h3>
|
||||
<div class="date">13. Dezember 1920 · Berlin</div>
|
||||
<div class="party">an <b>Walter Dieckmann</b></div>
|
||||
<div class="tags"></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="card-doc">
|
||||
<div class="thumb-wrap">
|
||||
<div class="thumb handwritten"><div class="thumb-lines"><i></i><i></i><i></i><i></i><i></i><i></i><i></i></div></div>
|
||||
<span class="dir-badge">→ ausgehend</span>
|
||||
</div>
|
||||
<div class="body">
|
||||
<h3>W-0484 – 27. Dez 1919 – Berlin</h3>
|
||||
<div class="date">27. Dezember 1919 · Berlin</div>
|
||||
<div class="party">an <b>Walter Dieckmann</b></div>
|
||||
<div class="summary">„Jahreswechselgrüße, kurzer Bericht über den Zustand des Verlags"</div>
|
||||
<div class="tags"><span class="tag">Neujahr</span></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,303 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="de">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>Spec 5 — Person Dashboard · Briefwechsel Insights</title>
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||
<link href="https://fonts.googleapis.com/css2?family=Merriweather:wght@400;700&family=Montserrat:wght@400;500;600;700;800;900&display=swap" rel="stylesheet">
|
||||
<link rel="stylesheet" href="_shared.css">
|
||||
<style>
|
||||
/* Spec 5 specific */
|
||||
.person-grid{display:grid;grid-template-columns:35% 1fr;gap:32px;align-items:start}
|
||||
@media (max-width: 1100px){ .person-grid{grid-template-columns:1fr} }
|
||||
|
||||
/* Person card (left) */
|
||||
.pcard{background:#fff;border:1px solid var(--line);border-radius:2px;padding:24px;display:flex;flex-direction:column;align-items:center;gap:14px}
|
||||
.pavatar{width:100px;height:100px;border-radius:50%;background:var(--brand-mint);color:var(--brand-navy);display:flex;align-items:center;justify-content:center;font-size:36px;font-weight:900;font-family:'Merriweather',serif}
|
||||
.pname{font-family:'Merriweather',serif;font-size:22px;font-weight:700;color:var(--brand-navy);text-align:center;line-height:1.3}
|
||||
.pdates{font-size:12px;color:#888;font-weight:600;letter-spacing:.5px}
|
||||
.pnotes{font-size:13px;color:#555;line-height:1.6;margin-top:6px;padding-top:14px;border-top:1px dashed var(--line);width:100%;font-family:'Merriweather',serif;font-style:italic}
|
||||
.pactions{margin-top:14px;display:flex;gap:8px;width:100%}
|
||||
.pactions .btn{flex:1;justify-content:center;font-size:10px}
|
||||
.pactions .btn.primary{background:var(--brand-navy);color:#fff;border-color:var(--brand-navy)}
|
||||
|
||||
/* Name history card */
|
||||
.ncard{background:#fff;border:1px solid var(--line);border-radius:2px;padding:18px 20px;margin-top:20px}
|
||||
.ncard h3{font-size:10px;font-weight:800;text-transform:uppercase;letter-spacing:1.2px;color:#888;margin-bottom:10px}
|
||||
.ncard ul{list-style:none}
|
||||
.ncard li{padding:5px 0;font-size:12.5px;color:#444;border-top:1px dashed var(--line);display:flex;justify-content:space-between}
|
||||
.ncard li:first-child{border-top:0}
|
||||
|
||||
/* Right column */
|
||||
.dash{background:#fff;border:1px solid var(--line);border-radius:2px;overflow:hidden}
|
||||
.dash-hdr{background:var(--brand-navy);color:#fff;padding:14px 20px;display:flex;justify-content:space-between;align-items:center}
|
||||
.dash-hdr h2{font-family:'Merriweather',serif;font-size:16px;font-weight:700}
|
||||
.dash-hdr .open-conv{background:var(--brand-mint);color:var(--brand-navy);font-size:11px;font-weight:800;padding:6px 14px;border-radius:2px;text-transform:uppercase;letter-spacing:.6px;text-decoration:none}
|
||||
|
||||
/* Stat strip */
|
||||
.stats{display:grid;grid-template-columns:repeat(4,1fr);gap:1px;background:var(--line-2);border-bottom:1px solid var(--line)}
|
||||
.stats div{background:#fafaf5;padding:14px 18px;text-align:center}
|
||||
.stats .v{font-family:'Merriweather',serif;font-size:22px;font-weight:900;color:var(--brand-navy);letter-spacing:-.5px}
|
||||
.stats .k{font-size:10px;color:#888;font-weight:700;text-transform:uppercase;letter-spacing:.6px;margin-top:2px}
|
||||
.stats .out{color:var(--primary)}
|
||||
.stats .in{color:var(--accent)}
|
||||
|
||||
/* Sections */
|
||||
.dsec{padding:18px 22px;border-top:1px solid var(--line-2)}
|
||||
.dsec:first-of-type{border-top:0}
|
||||
.dsec h3{font-size:10px;font-weight:800;text-transform:uppercase;letter-spacing:1.2px;color:#888;margin-bottom:12px;display:flex;justify-content:space-between;align-items:baseline}
|
||||
.dsec h3 .note{font-size:11px;color:#555;text-transform:none;letter-spacing:0;font-weight:600}
|
||||
|
||||
/* Activity histogram */
|
||||
.hist{display:flex;align-items:flex-end;gap:2px;height:90px;padding:4px 0 0}
|
||||
.hist .bar{flex:1;background:var(--brand-mint);opacity:.55;border-radius:1px 1px 0 0;position:relative;cursor:pointer;transition:opacity .12s}
|
||||
.hist .bar:hover{opacity:1}
|
||||
.hist .bar.peak{background:var(--brand-navy);opacity:.85}
|
||||
.hist-labels{display:flex;justify-content:space-between;font-size:10px;color:#888;margin-top:6px;font-weight:700}
|
||||
|
||||
/* Split bar direction */
|
||||
.dsplit{display:flex;justify-content:space-between;font-size:12px;font-weight:700;margin-bottom:8px}
|
||||
.dsplit .out{color:var(--primary)}
|
||||
.dsplit .in{color:var(--accent)}
|
||||
.dbar{height:10px;display:flex;border-radius:5px;overflow:hidden;background:#F0EDE5}
|
||||
.dbar .out{background:var(--brand-navy)}
|
||||
.dbar .in{background:var(--accent)}
|
||||
|
||||
/* Top list */
|
||||
.toplist{display:flex;flex-direction:column;gap:8px}
|
||||
.toplist .ti{display:flex;align-items:center;gap:10px;font-size:13px;padding:4px 6px;border-radius:2px;cursor:pointer}
|
||||
.toplist .ti:hover{background:var(--muted)}
|
||||
.toplist .ti .name{flex:1;color:var(--ink);font-weight:600;white-space:nowrap;overflow:hidden;text-overflow:ellipsis}
|
||||
.toplist .ti .bar-wrap{width:120px;height:7px;background:#F0EDE5;border-radius:4px;overflow:hidden;flex-shrink:0}
|
||||
.toplist .ti .bar{height:100%;background:var(--brand-navy);border-radius:4px}
|
||||
.toplist .ti .val{width:38px;text-align:right;font-size:12px;color:#888;font-weight:700;font-variant-numeric:tabular-nums}
|
||||
.toplist .ti .dir{font-size:13px;width:16px;font-weight:800}
|
||||
.toplist .ti .dir.out{color:var(--primary)}
|
||||
.toplist .ti .dir.in{color:var(--accent)}
|
||||
.toplist .ti .dir.both{color:#888}
|
||||
|
||||
/* Cloud */
|
||||
.cloud{display:flex;flex-wrap:wrap;gap:6px}
|
||||
.cloud .tag{cursor:pointer;padding:3px 10px;border-radius:12px;font-weight:700;transition:transform .1s}
|
||||
.cloud .tag:hover{transform:translateY(-1px)}
|
||||
.cloud .tag.s-xl{font-size:15px;padding:4px 12px}
|
||||
.cloud .tag.s-l{font-size:13px}
|
||||
.cloud .tag.s-m{font-size:12px}
|
||||
.cloud .tag.s-s{font-size:11px}
|
||||
|
||||
/* Two-col arrangement */
|
||||
.twocol{display:grid;grid-template-columns:1fr 1fr;gap:32px}
|
||||
@media (max-width: 900px){ .twocol{grid-template-columns:1fr} }
|
||||
|
||||
/* Existing doc lists below (compressed) */
|
||||
.doclist-card{background:#fff;border:1px solid var(--line);border-radius:2px;margin-top:20px;overflow:hidden}
|
||||
.doclist-card .dh{padding:14px 20px;border-bottom:1px solid var(--line-2);font-size:13px;font-weight:700;color:var(--brand-navy);display:flex;justify-content:space-between;align-items:center}
|
||||
.doclist-card .dh .cnt{font-size:11px;color:#888;font-weight:600}
|
||||
.doclist-card .di{padding:10px 20px;border-bottom:1px solid var(--line-2);display:flex;justify-content:space-between;align-items:center;font-size:12.5px}
|
||||
.doclist-card .di:last-child{border-bottom:0}
|
||||
.doclist-card .di .t{font-family:'Merriweather',serif;font-weight:700;color:var(--ink);min-width:0;white-space:nowrap;overflow:hidden;text-overflow:ellipsis;flex:1;margin-right:10px}
|
||||
.doclist-card .di .m{color:#888;font-size:11px;white-space:nowrap}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<div class="spec-meta">
|
||||
<div class="spec-meta-inner">
|
||||
<div>
|
||||
<h1>Briefwechsel — <span>Fill the Empty Rows</span></h1>
|
||||
<p>Insights belong on the person detail page, not on the letter list. This spec mocks the dashboard that replaces the "empty feeling" problem at its proper home.</p>
|
||||
</div>
|
||||
<div class="spec-meta-right">
|
||||
<div><strong>Concept</strong>Person Dashboard</div>
|
||||
<div><strong>Spec</strong>5 / 5</div>
|
||||
<div><strong>Page</strong>/persons/[id] (not /briefwechsel)</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<nav class="spec-nav">
|
||||
<div class="spec-nav-inner">
|
||||
<span class="lbl">Specs</span>
|
||||
<a href="index.html">Overview</a>
|
||||
<a href="01-rich-rows.html">1 · Rich Rows</a>
|
||||
<a href="02-thumbnail-rows.html">2 · Thumbnail Rows</a>
|
||||
<a href="03-master-detail.html">3 · Master-Detail Split</a>
|
||||
<a href="04-gallery-cards.html">4 · Gallery Cards</a>
|
||||
<a class="on" href="05-person-dashboard.html">5 · Person Dashboard</a>
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
<div class="page-wrap">
|
||||
|
||||
<div class="hdr">
|
||||
<div class="hdr-logo">FAMILIENARCHIV</div>
|
||||
<div class="hdr-nav"><a>Documents</a><a class="on">Persons</a><a>Letters</a><a>Admin</a></div>
|
||||
<div class="hdr-right"><div class="hdr-upload">⬆ UPLOAD</div><span>DE · EN · ES</span><div class="hdr-avatar">MR</div></div>
|
||||
</div>
|
||||
|
||||
<div class="page">
|
||||
|
||||
<div class="concept-intro">
|
||||
<h2>Concept 5 · Person Dashboard — insights live on /persons/[id], not on /briefwechsel</h2>
|
||||
The /briefwechsel page stays focused on reading letters. The archive-level understanding (how much, with whom, when, about what) is moved to the person detail page, where it's useful in all contexts — not only when a correspondent is selected. A "Briefwechsel öffnen" button ties the two pages together.
|
||||
<div><span class="gain">✚ /briefwechsel stays calm and focused</span><span class="gain">✚ Dashboard is useful on every visit to a person, not just during letter review</span><span class="gain">✚ Each dashboard element links back into /briefwechsel with filters</span><span class="cost">− Separates the work over two pages — needs a clear handoff button</span></div>
|
||||
</div>
|
||||
|
||||
<div style="font-size:12px;color:#888;margin-bottom:18px"><a href="#" style="color:#888;text-decoration:none">← Zurück</a></div>
|
||||
|
||||
<div class="person-grid">
|
||||
<!-- Left column: person identity -->
|
||||
<div>
|
||||
<div class="pcard">
|
||||
<div class="pavatar">WG</div>
|
||||
<div class="pname">Walter de Gruyter</div>
|
||||
<div class="pdates">1862 – 1923</div>
|
||||
<div class="pnotes">Verleger und Namensgeber des Verlags Walter de Gruyter. Langjährige Korrespondenz mit Familie Dieckmann und Herbert Cram rund um Verlag, Familie und Kuraufenthalte.</div>
|
||||
<div class="pactions">
|
||||
<a class="btn">◎ Bearbeiten</a>
|
||||
<a class="btn primary">↗ Briefwechsel öffnen</a>
|
||||
</div>
|
||||
</div>
|
||||
<div class="ncard">
|
||||
<h3>Namensvarianten</h3>
|
||||
<ul>
|
||||
<li><span>Walter de Gruyter</span><span style="color:#888">Hauptname</span></li>
|
||||
<li><span>W. de Gruyter</span><span style="color:#888">Abkürzung</span></li>
|
||||
<li><span>Dr. Walter de Gruyter</span><span style="color:#888">mit Titel</span></li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Right column: dashboard -->
|
||||
<div>
|
||||
<div class="dash">
|
||||
<div class="dash-hdr">
|
||||
<h2>Korrespondenz-Überblick</h2>
|
||||
<a class="open-conv" href="#">↗ Briefwechsel öffnen</a>
|
||||
</div>
|
||||
|
||||
<div class="stats">
|
||||
<div><div class="v">851</div><div class="k">Briefe gesamt</div></div>
|
||||
<div><div class="v out">612</div><div class="k out">ausgehend</div></div>
|
||||
<div><div class="v in">239</div><div class="k in">eingehend</div></div>
|
||||
<div><div class="v">42</div><div class="k">Jahre</div></div>
|
||||
</div>
|
||||
|
||||
<div class="dsec">
|
||||
<h3>Aktivität über die Jahre <span class="note">Spitzenjahr <b style="color:var(--brand-navy)">1922 · 78 Briefe</b></span></h3>
|
||||
<div class="hist">
|
||||
<div class="bar" style="height:12%" title="1898 · 8"></div>
|
||||
<div class="bar" style="height:18%" title="1899 · 12"></div>
|
||||
<div class="bar" style="height:26%" title="1900 · 18"></div>
|
||||
<div class="bar" style="height:38%" title="1901 · 26"></div>
|
||||
<div class="bar" style="height:44%" title="1902 · 30"></div>
|
||||
<div class="bar" style="height:52%" title="1903 · 36"></div>
|
||||
<div class="bar" style="height:60%" title="1904 · 42"></div>
|
||||
<div class="bar" style="height:68%" title="1905 · 48"></div>
|
||||
<div class="bar" style="height:80%" title="1920 · 62"></div>
|
||||
<div class="bar" style="height:88%" title="1921 · 68"></div>
|
||||
<div class="bar peak" style="height:100%" title="1922 · 78"></div>
|
||||
<div class="bar" style="height:72%" title="1923 · 54"></div>
|
||||
<div class="bar" style="height:58%" title="1924 · 42"></div>
|
||||
<div class="bar" style="height:48%" title="1925 · 34"></div>
|
||||
<div class="bar" style="height:38%" title="1926 · 27"></div>
|
||||
<div class="bar" style="height:28%" title="1927 · 20"></div>
|
||||
<div class="bar" style="height:22%" title="1928 · 16"></div>
|
||||
<div class="bar" style="height:18%" title="1929 · 13"></div>
|
||||
<div class="bar" style="height:14%" title="1930 · 10"></div>
|
||||
<div class="bar" style="height:10%" title="1932 · 7"></div>
|
||||
<div class="bar" style="height:6%" title="1935 · 4"></div>
|
||||
<div class="bar" style="height:4%" title="1938 · 3"></div>
|
||||
<div class="bar" style="height:2%" title="1940 · 1"></div>
|
||||
</div>
|
||||
<div class="hist-labels"><span>1898</span><span>1922 ▲</span><span>1940</span></div>
|
||||
</div>
|
||||
|
||||
<div class="dsec">
|
||||
<h3>Richtungsverteilung</h3>
|
||||
<div class="dsplit">
|
||||
<span class="out">→ 612 ausgehend · 72%</span>
|
||||
<span class="in">← 239 eingehend · 28%</span>
|
||||
</div>
|
||||
<div class="dbar">
|
||||
<span class="out" style="width:72%"></span>
|
||||
<span class="in" style="width:28%"></span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="twocol">
|
||||
<div class="dsec" style="border-top:1px solid var(--line-2)">
|
||||
<h3>Häufigste Korrespondenten <span class="note">Top 6 von 87</span></h3>
|
||||
<div class="toplist">
|
||||
<div class="ti"><span class="dir both">⇄</span><span class="name">Walter Dieckmann</span><span class="bar-wrap"><span class="bar" style="width:100%"></span></span><span class="val">184</span></div>
|
||||
<div class="ti"><span class="dir both">⇄</span><span class="name">Herbert Cram</span><span class="bar-wrap"><span class="bar" style="width:78%"></span></span><span class="val">143</span></div>
|
||||
<div class="ti"><span class="dir both">⇄</span><span class="name">Ella Dieckmann</span><span class="bar-wrap"><span class="bar" style="width:48%"></span></span><span class="val">88</span></div>
|
||||
<div class="ti"><span class="dir both">⇄</span><span class="name">Eugenie de Gruyter</span><span class="bar-wrap"><span class="bar" style="width:42%"></span></span><span class="val">77</span></div>
|
||||
<div class="ti"><span class="dir both">⇄</span><span class="name">Gertrud von Rofden</span><span class="bar-wrap"><span class="bar" style="width:32%"></span></span><span class="val">58</span></div>
|
||||
<div class="ti"><span class="dir both">⇄</span><span class="name">Käthe Dieckmann</span><span class="bar-wrap"><span class="bar" style="width:26%"></span></span><span class="val">47</span></div>
|
||||
</div>
|
||||
<div style="margin-top:10px"><a style="font-size:11px;color:var(--primary);font-weight:700;text-decoration:none;border-bottom:1px dashed var(--primary)">Alle 87 Korrespondenten →</a></div>
|
||||
</div>
|
||||
|
||||
<div class="dsec" style="border-top:1px solid var(--line-2)">
|
||||
<h3>Häufigste Orte <span class="note">Top 5 von 42</span></h3>
|
||||
<div class="toplist">
|
||||
<div class="ti"><span class="name">📍 Berlin</span><span class="bar-wrap"><span class="bar" style="width:100%"></span></span><span class="val">412</span></div>
|
||||
<div class="ti"><span class="name">📍 B.Lichterfelde</span><span class="bar-wrap"><span class="bar" style="width:44%"></span></span><span class="val">180</span></div>
|
||||
<div class="ti"><span class="name">📍 Bad Kissingen</span><span class="bar-wrap"><span class="bar" style="width:14%"></span></span><span class="val">58</span></div>
|
||||
<div class="ti"><span class="name">📍 Cöln</span><span class="bar-wrap"><span class="bar" style="width:9%"></span></span><span class="val">37</span></div>
|
||||
<div class="ti"><span class="name">📍 Belgard</span><span class="bar-wrap"><span class="bar" style="width:6%"></span></span><span class="val">26</span></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="dsec">
|
||||
<h3>Beliebte Schlagwörter <span class="note">Klick filtert den Briefwechsel</span></h3>
|
||||
<div class="cloud">
|
||||
<span class="tag s-xl">Verlag</span>
|
||||
<span class="tag s-xl">Familie</span>
|
||||
<span class="tag s-l">Geburtstag</span>
|
||||
<span class="tag s-l">Weihnachten</span>
|
||||
<span class="tag s-m">Kuraufenthalt</span>
|
||||
<span class="tag s-m">Reise</span>
|
||||
<span class="tag s-m">Geschäft</span>
|
||||
<span class="tag s-s">Krieg</span>
|
||||
<span class="tag s-s muted">Krankheit</span>
|
||||
<span class="tag s-s muted">Schule</span>
|
||||
<span class="tag s-s muted">Hochzeit</span>
|
||||
<span class="tag s-s muted">Tod</span>
|
||||
<span class="tag s-s muted">Namenstag</span>
|
||||
<span class="tag s-s muted">Neujahr</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Existing doc lists stay below the dashboard -->
|
||||
<div class="doclist-card">
|
||||
<div class="dh">Geschriebene Briefe <span class="cnt">612 Briefe · <a style="color:var(--primary);text-decoration:none;border-bottom:1px dashed">Alle anzeigen →</a></span></div>
|
||||
<div class="di"><span class="t">W-0397 – 2. September 1923 – B.Lichterfelde</span><span class="m">an Herbert Cram</span></div>
|
||||
<div class="di"><span class="t">W-0521 – 24. Dezember 1922 – Berlin</span><span class="m">an Walter Dieckmann</span></div>
|
||||
<div class="di"><span class="t">W-0392 – 23. November 1921 – Bad Kissingen</span><span class="m">an Herbert Cram</span></div>
|
||||
</div>
|
||||
|
||||
<div class="doclist-card">
|
||||
<div class="dh">Empfangene Briefe <span class="cnt">239 Briefe · <a style="color:var(--primary);text-decoration:none;border-bottom:1px dashed">Alle anzeigen →</a></span></div>
|
||||
<div class="di"><span class="t">Demo leserlicher Brief</span><span class="m">von Gertrud von Rofden</span></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div style="margin-top:36px;padding:16px 20px;background:#fff;border-left:4px solid var(--brand-navy);font-size:13px;color:#333;line-height:1.65">
|
||||
<b style="color:var(--brand-navy)">How this ties back to /briefwechsel:</b>
|
||||
<ul style="margin:8px 0 0 18px;padding:0">
|
||||
<li>Every correspondent row → <code>/briefwechsel?senderId=<person>&receiverId=<other></code> (bilateral view)</li>
|
||||
<li>Every location → <code>/briefwechsel?senderId=<person>&location=<x></code></li>
|
||||
<li>Every tag → <code>/briefwechsel?senderId=<person>&tag=<x></code></li>
|
||||
<li>Every histogram year → <code>/briefwechsel?…&from=YYYY-01-01&to=YYYY-12-31</code></li>
|
||||
</ul>
|
||||
The dashboard is the discovery surface; /briefwechsel is the reading surface.
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,102 +0,0 @@
|
||||
/* Shared CSS for briefwechsel-fill specs */
|
||||
:root {
|
||||
--brand-navy: #002850;
|
||||
--brand-mint: #A6DAD8;
|
||||
--brand-sand: #E4E2D7;
|
||||
--bg: #ECEAE4;
|
||||
--surface: #ffffff;
|
||||
--line: #E4E2D7;
|
||||
--line-2: #EFEDE6;
|
||||
--muted: #F7F5F2;
|
||||
--ink: #1A1A1A;
|
||||
--ink-3: #666;
|
||||
--primary: #002850;
|
||||
--accent: #2F9E95;
|
||||
}
|
||||
*,*::before,*::after{box-sizing:border-box;margin:0;padding:0}
|
||||
body{font-family:'Montserrat',system-ui,sans-serif;background:var(--bg);color:var(--ink);line-height:1.5}
|
||||
.font-serif{font-family:'Merriweather',Georgia,serif}
|
||||
|
||||
/* ── Spec meta header ─── */
|
||||
.spec-meta{background:var(--brand-navy);color:#fff;padding:18px 28px;border-bottom:3px solid var(--brand-mint)}
|
||||
.spec-meta-inner{max-width:1440px;margin:0 auto;display:flex;justify-content:space-between;align-items:flex-end;gap:24px;flex-wrap:wrap}
|
||||
.spec-meta h1{font-size:18px;font-weight:800;letter-spacing:.4px}
|
||||
.spec-meta h1 span{opacity:.5;font-weight:400}
|
||||
.spec-meta p{font-size:12px;color:rgba(255,255,255,.7);margin-top:4px;max-width:600px}
|
||||
.spec-meta-right{font-size:11px;display:flex;gap:20px;flex-wrap:wrap}
|
||||
.spec-meta-right div strong{display:block;font-size:9px;text-transform:uppercase;letter-spacing:.8px;color:rgba(255,255,255,.45);font-weight:800;margin-bottom:2px}
|
||||
.spec-nav{background:#0a1e36;padding:10px 28px;border-bottom:1px solid rgba(255,255,255,.08)}
|
||||
.spec-nav-inner{max-width:1440px;margin:0 auto;display:flex;gap:4px;font-size:11px;flex-wrap:wrap;align-items:center}
|
||||
.spec-nav a{color:rgba(255,255,255,.55);text-decoration:none;padding:4px 10px;border-radius:3px;font-weight:600}
|
||||
.spec-nav a:hover{color:#fff;background:rgba(255,255,255,.06)}
|
||||
.spec-nav a.on{color:var(--brand-navy);background:var(--brand-mint)}
|
||||
.spec-nav .lbl{color:rgba(255,255,255,.3);font-size:9px;text-transform:uppercase;letter-spacing:1px;margin-right:8px}
|
||||
|
||||
/* ── Real page chrome ─── */
|
||||
.page-wrap{background:var(--bg);min-height:calc(100vh - 120px)}
|
||||
.hdr{background:var(--brand-navy);color:#fff;padding:0 32px;height:64px;display:flex;align-items:center;gap:40px}
|
||||
.hdr-logo{font-size:16px;font-weight:900;letter-spacing:1.5px}
|
||||
.hdr-nav{display:flex;gap:28px;font-size:12px;font-weight:700;text-transform:uppercase;letter-spacing:.8px}
|
||||
.hdr-nav a{color:rgba(255,255,255,.7);text-decoration:none;padding:4px 0;border-bottom:2px solid transparent}
|
||||
.hdr-nav a.on{color:#fff;border-color:var(--brand-mint)}
|
||||
.hdr-right{margin-left:auto;display:flex;gap:14px;align-items:center;font-size:11px;color:rgba(255,255,255,.65)}
|
||||
.hdr-upload{border:1px solid rgba(255,255,255,.3);border-radius:4px;padding:6px 12px;font-weight:700;letter-spacing:.5px}
|
||||
.hdr-avatar{width:32px;height:32px;border-radius:50%;background:var(--brand-mint);color:var(--brand-navy);display:flex;align-items:center;justify-content:center;font-weight:800;font-size:12px}
|
||||
|
||||
.page{max-width:1440px;margin:0 auto;padding:28px 32px}
|
||||
|
||||
.card{background:var(--surface);border:1px solid var(--line);border-radius:2px;box-shadow:0 1px 2px rgba(0,0,0,.02);padding:20px 24px;margin-bottom:24px}
|
||||
.filter-row{display:grid;grid-template-columns:1fr 1fr;gap:24px;margin-bottom:14px}
|
||||
.fl{font-size:10px;font-weight:800;text-transform:uppercase;letter-spacing:1px;color:#888;margin-bottom:6px}
|
||||
.fi{height:40px;border:1px solid #D1D5DB;border-radius:2px;background:#fff;padding:0 12px;font-size:13px;display:flex;align-items:center;font-family:'Merriweather',serif}
|
||||
.fi.empty{color:#bbb;font-style:italic}
|
||||
.filter-actions{display:flex;gap:8px;align-items:center}
|
||||
.btn{height:34px;border:1px solid #D1D5DB;background:#F7F5F2;border-radius:2px;padding:0 12px;font-size:11px;font-weight:800;text-transform:uppercase;letter-spacing:.5px;color:#444;display:inline-flex;align-items:center;gap:6px}
|
||||
.count{margin-left:auto;font-size:13px;color:#555}
|
||||
.count b{color:var(--brand-navy)}
|
||||
|
||||
.hintbar{margin-top:14px;background:#e9f5f4;border:1px solid #c8e6e4;border-radius:2px;padding:10px 14px;font-size:12px;color:#1a3e3c;display:flex;align-items:center;gap:8px}
|
||||
.hintbar b{color:var(--brand-navy)}
|
||||
|
||||
/* ── Year divider ─── */
|
||||
.year-divider{display:flex;align-items:baseline;gap:12px;background:var(--muted);border-top:2px solid var(--line);border-bottom:1px solid var(--line);padding:8px 14px}
|
||||
.year-divider .y{font-size:22px;font-weight:900;color:var(--brand-navy);letter-spacing:-.5px}
|
||||
.year-divider .n{font-size:12px;font-weight:700;color:#888}
|
||||
|
||||
/* ── Status dots ─── */
|
||||
.dot{display:inline-block;width:7px;height:7px;border-radius:50%;flex-shrink:0}
|
||||
.dot.uploaded{background:var(--brand-mint)}
|
||||
.dot.transcribed{background:var(--brand-mint)}
|
||||
.dot.reviewed{background:rgba(0,40,80,.55)}
|
||||
.dot.archived{background:var(--brand-navy)}
|
||||
|
||||
/* ── Direction indicator ─── */
|
||||
.dir{width:16px;height:16px;flex-shrink:0;opacity:.6}
|
||||
.dir-out{color:var(--primary)}
|
||||
.dir-in{color:var(--accent)}
|
||||
|
||||
/* ── Concept intro banner ─── */
|
||||
.concept-intro{background:#fff;border-left:4px solid var(--brand-mint);padding:14px 20px;margin-bottom:24px;font-size:13px;color:#333;border-radius:2px;box-shadow:0 1px 2px rgba(0,0,0,.04)}
|
||||
.concept-intro h2{font-size:14px;font-weight:800;color:var(--brand-navy);margin-bottom:4px}
|
||||
.concept-intro .gain,.concept-intro .cost{display:inline-block;font-size:11px;margin-right:14px;margin-top:6px}
|
||||
.concept-intro .gain{color:#166534}
|
||||
.concept-intro .cost{color:#92400E}
|
||||
|
||||
/* ── Mock letter thumbnail ─── */
|
||||
.thumb{background:linear-gradient(180deg,#fdfcf7 0%,#f6f3ea 100%);border:1px solid #d9d4c6;box-shadow:inset 0 0 0 1px #fff,0 1px 2px rgba(0,0,0,.05);position:relative;overflow:hidden;flex-shrink:0;border-radius:1px}
|
||||
.thumb::before{content:'';position:absolute;top:0;left:0;right:0;height:28%;background:linear-gradient(135deg,rgba(166,218,216,.25) 0%,transparent 60%)}
|
||||
.thumb-lines{position:absolute;inset:0;display:flex;flex-direction:column;justify-content:center;gap:2px;padding:20% 12% 20% 12%}
|
||||
.thumb-lines i{display:block;height:1px;background:rgba(0,40,80,.2)}
|
||||
.thumb-lines i:nth-child(odd){width:95%}
|
||||
.thumb-lines i:nth-child(3n){width:70%}
|
||||
.thumb-lines i:nth-child(5n){width:80%}
|
||||
.thumb.handwritten .thumb-lines{padding:15% 10%;gap:4px}
|
||||
.thumb.handwritten .thumb-lines i{height:1.2px;background:rgba(0,40,80,.35);transform:rotate(-.6deg)}
|
||||
|
||||
/* ── Tag chip ─── */
|
||||
.tag{display:inline-flex;align-items:center;font-size:10px;font-weight:700;background:var(--brand-mint);color:var(--brand-navy);padding:2px 7px;border-radius:10px;letter-spacing:.3px}
|
||||
.tag.muted{background:#EEE8DC;color:#666}
|
||||
|
||||
/* ── Links & generic ─── */
|
||||
a{color:inherit}
|
||||
.hide{display:none}
|
||||
@@ -1,290 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="de">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>Briefwechsel — Fill the Empty Rows · Overview</title>
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||
<link href="https://fonts.googleapis.com/css2?family=Merriweather:wght@400;700&family=Montserrat:wght@400;500;600;700;800;900&display=swap" rel="stylesheet">
|
||||
<link rel="stylesheet" href="_shared.css">
|
||||
<style>
|
||||
.ov{max-width:1440px;margin:0 auto;padding:36px 32px 80px}
|
||||
.lead{display:grid;grid-template-columns:1fr 320px;gap:40px;margin-bottom:36px;align-items:start}
|
||||
@media (max-width: 900px){ .lead{grid-template-columns:1fr} }
|
||||
.lead h2{font-family:'Merriweather',serif;font-size:24px;font-weight:700;color:var(--brand-navy);margin-bottom:10px;line-height:1.3}
|
||||
.lead p{color:#444;font-size:14px;line-height:1.65}
|
||||
.lead .kit{background:#fff;border:1px solid var(--line);border-radius:2px;padding:16px 18px}
|
||||
.lead .kit h3{font-size:10px;font-weight:800;text-transform:uppercase;letter-spacing:1.2px;color:#888;margin-bottom:10px}
|
||||
.lead .kit li{font-size:12px;padding:5px 0;border-top:1px dashed var(--line);display:flex;justify-content:space-between}
|
||||
.lead .kit li:first-child{border-top:0}
|
||||
.lead .kit li b{color:var(--brand-navy);font-weight:700}
|
||||
|
||||
.section-h{font-family:'Merriweather',serif;font-size:16px;font-weight:700;color:var(--brand-navy);margin:28px 0 12px;padding-top:20px;border-top:1px dashed var(--line)}
|
||||
.section-h .sub{font-family:'Montserrat',sans-serif;font-size:12px;color:#888;font-weight:400;margin-left:10px}
|
||||
|
||||
.grid4{display:grid;grid-template-columns:repeat(4,1fr);gap:16px}
|
||||
@media (max-width: 1200px){ .grid4{grid-template-columns:repeat(2,1fr)} }
|
||||
.grid1{display:grid;grid-template-columns:1fr;gap:16px}
|
||||
.spec-card{background:#fff;border:1px solid var(--line);border-radius:2px;overflow:hidden;text-decoration:none;color:inherit;display:flex;flex-direction:column;transition:transform .12s,box-shadow .12s}
|
||||
.spec-card:hover{transform:translateY(-3px);box-shadow:0 8px 26px rgba(0,0,0,.1)}
|
||||
.spec-card.wide{flex-direction:row}
|
||||
.spec-card.wide .mini{width:45%;aspect-ratio:auto;min-height:280px;border-right:1px solid var(--line);border-bottom:0}
|
||||
.spec-card.wide .sc-body{flex:1}
|
||||
.spec-card .sc-num{background:var(--brand-navy);color:#fff;padding:3px 8px;font-size:10px;font-weight:800;letter-spacing:.8px;display:inline-block;border-radius:0 0 3px 0;width:max-content}
|
||||
.spec-card .sc-body{padding:14px 16px;flex:1;display:flex;flex-direction:column;gap:8px}
|
||||
.spec-card h3{font-family:'Merriweather',serif;font-size:16px;font-weight:700;color:var(--brand-navy);line-height:1.35}
|
||||
.spec-card p{font-size:12px;color:#555;line-height:1.55}
|
||||
.spec-card .tags{display:flex;gap:4px;flex-wrap:wrap;margin-top:auto;padding-top:8px}
|
||||
.spec-card .tag{font-size:9.5px;letter-spacing:.3px}
|
||||
|
||||
/* Mini previews */
|
||||
.mini{aspect-ratio:16/10;border-bottom:1px solid var(--line);background:#fafaf5;display:flex;padding:8px;gap:4px}
|
||||
.mini.col{flex-direction:column}
|
||||
.mini .minirow{background:#fff;border:1px solid var(--line-2);border-left:2px solid var(--primary);padding:4px 6px;display:flex;flex-direction:column;gap:2px;flex:1;font-size:7px;color:#888}
|
||||
.mini .minirow b{color:var(--brand-navy);font-size:8px;font-weight:700}
|
||||
.mini .minirow.in{border-left-color:var(--accent)}
|
||||
|
||||
.mini-1{display:flex;flex-direction:column;gap:3px;padding:8px}
|
||||
.mini-1 .r{background:#fff;border:1px solid var(--line-2);border-left:2px solid var(--primary);padding:4px 6px;display:grid;grid-template-columns:1fr 48px;gap:4px;flex:1;font-size:7px;color:#888;align-items:center}
|
||||
.mini-1 .r b{color:var(--brand-navy);font-size:8px;font-weight:700;font-family:'Merriweather',serif}
|
||||
.mini-1 .r .ar{font-size:6.5px;color:#888;background:#F4F1EA;padding:2px 4px;text-align:center}
|
||||
.mini-1 .r .tg{display:flex;gap:2px;margin-top:1px}
|
||||
.mini-1 .r .tg span{background:var(--brand-mint);padding:0 3px;font-size:6px;border-radius:3px;color:var(--brand-navy)}
|
||||
|
||||
.mini-2{display:flex;flex-direction:column;gap:3px;padding:8px}
|
||||
.mini-2 .r2{background:#fff;border:1px solid var(--line-2);border-left:2px solid var(--primary);padding:4px;display:flex;gap:4px;align-items:center;flex:1}
|
||||
.mini-2 .th2{width:22px;height:28px;background:linear-gradient(#fdfcf7,#f1eadb);border:1px solid #d9d4c6;flex-shrink:0;position:relative}
|
||||
.mini-2 .th2::before{content:'';position:absolute;inset:2px 3px;border-top:1px solid rgba(0,40,80,.25);border-bottom:1px solid rgba(0,40,80,.25);opacity:.6}
|
||||
.mini-2 .content{flex:1;display:flex;flex-direction:column;gap:2px;font-size:7px;color:#888}
|
||||
.mini-2 .content b{color:var(--brand-navy);font-size:8px;font-family:'Merriweather',serif}
|
||||
.mini-2 .content i{color:#555;font-style:italic}
|
||||
.mini-2 .content .dt{font-size:7px;color:#888;align-self:flex-end;font-family:'Merriweather',serif}
|
||||
|
||||
.mini-3{display:grid;grid-template-columns:1fr 1.1fr;gap:4px;padding:8px}
|
||||
.mini-3 .left-list{display:flex;flex-direction:column;gap:2px}
|
||||
.mini-3 .left-list .r{background:#fff;border:1px solid var(--line-2);border-left:2px solid var(--primary);padding:2px 4px;font-size:6.5px;color:#888}
|
||||
.mini-3 .left-list .r.sel{background:#e7f4f3;border-left-color:var(--accent)}
|
||||
.mini-3 .left-list .r b{color:var(--brand-navy);font-size:7px}
|
||||
.mini-3 .preview-mini{background:#fff;border:1px solid var(--line-2);padding:5px;display:flex;gap:4px}
|
||||
.mini-3 .pv-th{width:28px;height:38px;background:linear-gradient(#fdfcf7,#f1eadb);border:1px solid #d9d4c6;flex-shrink:0}
|
||||
.mini-3 .pv-meta{font-size:6px;color:#888;line-height:1.4}
|
||||
.mini-3 .pv-meta b{color:var(--brand-navy);font-size:7px;display:block;font-family:'Merriweather',serif}
|
||||
|
||||
.mini-4{display:grid;grid-template-columns:repeat(3,1fr);gap:3px;padding:8px}
|
||||
.mini-4 .card-s{background:#fff;border:1px solid var(--line-2);overflow:hidden;display:flex;flex-direction:column}
|
||||
.mini-4 .card-s .th{aspect-ratio:3/4;background:linear-gradient(#fdfcf7,#f1eadb);border-bottom:1px solid var(--line-2)}
|
||||
.mini-4 .card-s .t{font-size:5.5px;color:var(--brand-navy);font-weight:700;padding:3px 4px;font-family:'Merriweather',serif}
|
||||
|
||||
.mini-5{display:grid;grid-template-columns:32% 1fr;gap:6px;padding:10px;align-items:start;min-height:260px}
|
||||
.mini-5 .person-left{background:#fff;border:1px solid var(--line-2);padding:8px;display:flex;flex-direction:column;align-items:center;gap:4px}
|
||||
.mini-5 .person-left .av{width:28px;height:28px;border-radius:50%;background:var(--brand-mint);color:var(--brand-navy);display:flex;align-items:center;justify-content:center;font-size:8px;font-weight:800;font-family:'Merriweather',serif}
|
||||
.mini-5 .person-left .nm{font-family:'Merriweather',serif;font-size:7.5px;font-weight:700;color:var(--brand-navy);text-align:center}
|
||||
.mini-5 .person-left .dt{font-size:6px;color:#888}
|
||||
.mini-5 .dash-mini{background:#fff;border:1px solid var(--line-2);display:flex;flex-direction:column}
|
||||
.mini-5 .dh{background:var(--brand-navy);color:#fff;font-family:'Merriweather',serif;font-size:6.5px;padding:4px 6px;display:flex;justify-content:space-between;align-items:center}
|
||||
.mini-5 .dh span{background:var(--brand-mint);color:var(--brand-navy);font-size:5.5px;padding:1px 4px;border-radius:2px;font-weight:800}
|
||||
.mini-5 .stats-m{display:grid;grid-template-columns:repeat(4,1fr);gap:1px;background:var(--line-2);border-bottom:1px solid var(--line-2)}
|
||||
.mini-5 .stats-m div{background:#fafaf5;padding:3px;text-align:center}
|
||||
.mini-5 .stats-m .v{font-family:'Merriweather',serif;font-size:8px;font-weight:900;color:var(--brand-navy)}
|
||||
.mini-5 .stats-m .k{font-size:5px;color:#888;font-weight:700}
|
||||
.mini-5 .hist-m{display:flex;align-items:flex-end;gap:1px;height:22px;padding:4px 5px 0}
|
||||
.mini-5 .hist-m i{flex:1;background:var(--brand-mint);opacity:.6}
|
||||
.mini-5 .hist-m i.p{background:var(--brand-navy);opacity:.9}
|
||||
.mini-5 .bars-m{display:flex;flex-direction:column;gap:1.5px;padding:4px 5px}
|
||||
.mini-5 .bars-m span{display:flex;align-items:center;gap:2px;font-size:5.5px;color:#555}
|
||||
.mini-5 .bars-m span .bar{flex:1;height:2px;background:var(--brand-navy);border-radius:1px}
|
||||
.mini-5 .cloud-m{padding:4px 5px;display:flex;flex-wrap:wrap;gap:2px}
|
||||
.mini-5 .cloud-m span{background:var(--brand-mint);padding:0 3px;border-radius:3px;font-size:5.5px;color:var(--brand-navy);font-weight:700}
|
||||
|
||||
/* Comparison table */
|
||||
.cmp{background:#fff;border:1px solid var(--line);border-radius:2px;overflow:hidden;margin-top:36px}
|
||||
.cmp table{width:100%;border-collapse:collapse}
|
||||
.cmp th,.cmp td{padding:10px 14px;text-align:left;font-size:12px;border-bottom:1px solid var(--line-2);vertical-align:top}
|
||||
.cmp th{background:#fafaf5;font-size:10px;text-transform:uppercase;letter-spacing:.8px;color:#888;font-weight:800}
|
||||
.cmp td:first-child{font-weight:700;color:var(--brand-navy);font-family:'Merriweather',serif}
|
||||
.cmp .yes{color:#166534}
|
||||
.cmp .no{color:#B91C1C}
|
||||
.cmp .mid{color:#92400E}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<div class="spec-meta">
|
||||
<div class="spec-meta-inner">
|
||||
<div>
|
||||
<h1>Briefwechsel — <span>Fill the Empty Rows</span></h1>
|
||||
<p>Five approaches. The first four change the <b>/briefwechsel</b> row itself; the fifth puts insights where they belong — on the person detail page.</p>
|
||||
</div>
|
||||
<div class="spec-meta-right">
|
||||
<div><strong>Page</strong>/briefwechsel · /persons/[id]</div>
|
||||
<div><strong>Brief</strong>Rows feel sparse — ~60% of row width is empty</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<nav class="spec-nav">
|
||||
<div class="spec-nav-inner">
|
||||
<span class="lbl">Specs</span>
|
||||
<a class="on" href="index.html">Overview</a>
|
||||
<a href="01-rich-rows.html">1 · Rich Rows</a>
|
||||
<a href="02-thumbnail-rows.html">2 · Thumbnail Rows</a>
|
||||
<a href="03-master-detail.html">3 · Master-Detail Split</a>
|
||||
<a href="04-gallery-cards.html">4 · Gallery Cards</a>
|
||||
<a href="05-person-dashboard.html">5 · Person Dashboard</a>
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
<div class="ov">
|
||||
|
||||
<div class="lead">
|
||||
<div>
|
||||
<h2>The situation</h2>
|
||||
<p>On <code>/briefwechsel?senderId=…</code> (e.g. 851 letters for Walter de Gruyter), each row shows only title, date, location and counterpart — leaving the right half empty. The question is: what belongs there?</p>
|
||||
<p style="margin-top:10px">The five specs answer differently. Specs 1–4 rework the row itself. <b>Spec 5</b> argues the archive-level view (top correspondents, activity, tag cloud) belongs on <code>/persons/[id]</code>, not here — and mocks the dashboard that lives there instead.</p>
|
||||
</div>
|
||||
<div class="kit">
|
||||
<h3>Data we can use today</h3>
|
||||
<ul style="list-style:none">
|
||||
<li>Title / Filename <b>✓</b></li>
|
||||
<li>Document date <b>✓</b></li>
|
||||
<li>Location <b>✓</b></li>
|
||||
<li>Sender / receivers <b>✓</b></li>
|
||||
<li>Summary text <b>✓</b></li>
|
||||
<li>Tags <b>✓</b></li>
|
||||
<li>Archive box & folder <b>✓</b></li>
|
||||
<li>PDF thumbnail <b>open issue</b></li>
|
||||
</ul>
|
||||
<p style="font-size:11px;color:#888;margin-top:10px;font-style:italic">Removed from earlier drafts: status lifecycle (will be dropped from the product) and script type (only set after OCR, unreliable).</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="section-h">Concepts that rework /briefwechsel <span class="sub">Specs 1–4</span></div>
|
||||
|
||||
<div class="grid4">
|
||||
<a class="spec-card" href="01-rich-rows.html">
|
||||
<span class="sc-num">01</span>
|
||||
<div class="mini mini-1">
|
||||
<div class="r"><span><b>Demo leserlicher Brief</b><br>31. Mai 1940 · Belgard<br>„letzte Lebenstage…"<div class="tg"><span>Dörpfeld</span></div></span><div class="ar">VII · 5</div></div>
|
||||
<div class="r"><span><b>W-0397 – 2. Sep 1923</b><br>B.Lichterfelde · H. Cram<br>„von Elsbeth…"<div class="tg"><span>Verlag</span></div></span><div class="ar">VI · 7</div></div>
|
||||
<div class="r"><span><b>W-0521 – 24. Dez 1922</b><br>Berlin · W. Dieckmann<br>„Weihnachtsbrief…"<div class="tg"><span>Weihn.</span></div></span><div class="ar">V · 3</div></div>
|
||||
</div>
|
||||
<div class="sc-body">
|
||||
<h3>Rich Rows</h3>
|
||||
<p>Pack summary, tags and archive box into each row. No images, no structural change. Fastest to ship.</p>
|
||||
<div class="tags"><span class="tag">Scanning</span><span class="tag muted">Small effort</span></div>
|
||||
</div>
|
||||
</a>
|
||||
|
||||
<a class="spec-card" href="02-thumbnail-rows.html">
|
||||
<span class="sc-num">02</span>
|
||||
<div class="mini mini-2">
|
||||
<div class="r2"><div class="th2"></div><div class="content"><b>Demo leserlicher Brief</b><i>„letzte Lebenstage von W. Dörpfeld…"</i><span>← Gertrud · Belgard</span><span class="dt">31. Mai 1940</span></div></div>
|
||||
<div class="r2"><div class="th2"></div><div class="content"><b>W-0397 – 2. Sep 1923</b><i>„von Elsbeth geschriebener Kommentar…"</i><span>→ H. Cram · B.Lichterfelde</span><span class="dt">2. Sep 1923</span></div></div>
|
||||
<div class="r2"><div class="th2"></div><div class="content"><b>W-0521 – 24. Dez 1922</b><i>„Weihnachtsbrief, Bitte um Bild…"</i><span>→ W. Dieckmann · Berlin</span><span class="dt">24. Dez 1922</span></div></div>
|
||||
</div>
|
||||
<div class="sc-body">
|
||||
<h3>Thumbnail Rows</h3>
|
||||
<p>PDF preview on the left anchors each row. Summary (when filled) becomes the readable context line.</p>
|
||||
<div class="tags"><span class="tag">Recognition</span><span class="tag muted">Needs thumbnails</span></div>
|
||||
</div>
|
||||
</a>
|
||||
|
||||
<a class="spec-card" href="03-master-detail.html">
|
||||
<span class="sc-num">03</span>
|
||||
<div class="mini mini-3">
|
||||
<div class="left-list">
|
||||
<div class="r"><b>Demo Brief</b></div>
|
||||
<div class="r sel"><b>W-0397</b></div>
|
||||
<div class="r"><b>W-0396</b></div>
|
||||
<div class="r"><b>W-0524</b></div>
|
||||
<div class="r"><b>W-0523</b></div>
|
||||
</div>
|
||||
<div class="preview-mini">
|
||||
<div class="pv-th"></div>
|
||||
<div class="pv-meta"><b>W-0397</b>2. Sep 1923<br>B.Lichterfelde<br>→ H. Cram<br><br>„von Elsbeth…"</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="sc-body">
|
||||
<h3>Master-Detail Split</h3>
|
||||
<p>Compact list left, sticky preview right. Click a row → thumbnail, metadata, summary, excerpt. Browse without losing context.</p>
|
||||
<div class="tags"><span class="tag">Reading flow</span><span class="tag muted">Mobile pattern needed</span></div>
|
||||
</div>
|
||||
</a>
|
||||
|
||||
<a class="spec-card" href="04-gallery-cards.html">
|
||||
<span class="sc-num">04</span>
|
||||
<div class="mini mini-4">
|
||||
<div class="card-s"><div class="th"></div><div class="t">Demo Brief</div></div>
|
||||
<div class="card-s"><div class="th"></div><div class="t">W-0397 · 2 Sep</div></div>
|
||||
<div class="card-s"><div class="th"></div><div class="t">W-0396 · 2 Sep</div></div>
|
||||
<div class="card-s"><div class="th"></div><div class="t">W-0524 · 31 Jul</div></div>
|
||||
<div class="card-s"><div class="th"></div><div class="t">W-0523 · 12 Mai</div></div>
|
||||
<div class="card-s"><div class="th"></div><div class="t">W-0522 · 7 Mär</div></div>
|
||||
</div>
|
||||
<div class="sc-body">
|
||||
<h3>Gallery Cards</h3>
|
||||
<p>Abandon the list for a 4-column grid. Thumbnail-first, family-album feel. Biggest visual change.</p>
|
||||
<div class="tags"><span class="tag">Browsing</span><span class="tag muted">Poor date-scanning</span></div>
|
||||
</div>
|
||||
</a>
|
||||
</div>
|
||||
|
||||
<div class="section-h">The archive-level view moves to /persons/[id] <span class="sub">Spec 5 · separate page</span></div>
|
||||
|
||||
<div class="grid1">
|
||||
<a class="spec-card wide" href="05-person-dashboard.html">
|
||||
<span class="sc-num" style="position:absolute">05</span>
|
||||
<div class="mini mini-5">
|
||||
<div class="person-left">
|
||||
<div class="av">WG</div>
|
||||
<div class="nm">Walter de Gruyter</div>
|
||||
<div class="dt">1862–1923</div>
|
||||
</div>
|
||||
<div class="dash-mini">
|
||||
<div class="dh">Korrespondenz-Überblick <span>↗ Briefwechsel</span></div>
|
||||
<div class="stats-m">
|
||||
<div><div class="v">851</div><div class="k">gesamt</div></div>
|
||||
<div><div class="v" style="color:var(--primary)">612</div><div class="k">→</div></div>
|
||||
<div><div class="v" style="color:var(--accent)">239</div><div class="k">←</div></div>
|
||||
<div><div class="v">42J</div><div class="k">Jahre</div></div>
|
||||
</div>
|
||||
<div class="hist-m"><i style="height:15%"></i><i style="height:30%"></i><i style="height:45%"></i><i style="height:60%"></i><i style="height:80%"></i><i class="p" style="height:100%"></i><i style="height:75%"></i><i style="height:55%"></i><i style="height:40%"></i><i style="height:25%"></i><i style="height:15%"></i><i style="height:8%"></i></div>
|
||||
<div class="bars-m"><span>W. Dieckmann<span class="bar" style="flex:1"></span>184</span><span>H. Cram<span class="bar" style="flex:.78"></span>143</span><span>E. Dieckmann<span class="bar" style="flex:.48"></span>88</span></div>
|
||||
<div class="cloud-m"><span>Verlag</span><span>Familie</span><span>Weihn.</span><span>Kur</span><span>Reise</span></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="sc-body">
|
||||
<h3>Person Dashboard (/persons/[id])</h3>
|
||||
<p>The /briefwechsel list stays calm and reading-focused; the archive-level view — activity over years, top correspondents, top locations, tag cloud — lives on the person detail page, where it's useful <i>every time</i> you open a person, not only during letter review.</p>
|
||||
<p>Every correspondent, location, tag and year on the dashboard links into <code>/briefwechsel</code> with pre-filled filters, so the dashboard is the discovery surface and /briefwechsel is the reading surface.</p>
|
||||
<div class="tags"><span class="tag">Discovery</span><span class="tag">Works for bilateral too (always scoped to the person)</span><span class="tag muted">Needs aggregation endpoints</span></div>
|
||||
</div>
|
||||
</a>
|
||||
</div>
|
||||
|
||||
<div class="cmp">
|
||||
<table>
|
||||
<thead><tr>
|
||||
<th>Concept</th><th>Best for user who wants to…</th><th>Visual change</th><th>New backend</th><th>Effort</th><th>Mobile</th>
|
||||
</tr></thead>
|
||||
<tbody>
|
||||
<tr><td>1 · Rich Rows</td><td>Scan quickly, see summary + tags on every letter</td><td>Row height 2× current</td><td>None</td><td class="yes">Small</td><td class="yes">Right column collapses</td></tr>
|
||||
<tr><td>2 · Thumbnail Rows</td><td>Recognise letters visually, pick up where they left off</td><td>Thumbnail on the left</td><td>PDF thumbnail service (open issue)</td><td class="mid">Medium</td><td class="yes">Fine</td></tr>
|
||||
<tr><td>3 · Master-Detail</td><td>Flip through letters in a reading session</td><td>Two-column split</td><td>None mandatory</td><td class="mid">Medium</td><td class="no">Drawer / sheet needed</td></tr>
|
||||
<tr><td>4 · Gallery Cards</td><td>Browse the collection as an album</td><td>Full structural change (list → grid)</td><td>PDF thumbnail service</td><td class="no">Large</td><td class="yes">Grid reflows 4 → 2</td></tr>
|
||||
<tr><td>5 · Person Dashboard</td><td>Understand a person's correspondence at a glance</td><td>New section on /persons/[id]</td><td>Aggregation endpoints (per-year, per-correspondent, per-location, per-tag)</td><td class="mid">Medium</td><td class="yes">Stacks naturally</td></tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<div style="margin-top:28px;padding:16px 20px;background:#fff;border-left:4px solid var(--brand-navy);font-size:13px;color:#333;line-height:1.65">
|
||||
<b style="color:var(--brand-navy)">Proposed path forward:</b>
|
||||
<ol style="margin:8px 0 0 20px;padding:0">
|
||||
<li><b>Ship Spec 1 (Rich Rows)</b> now — it uses data we already have and tests whether "empty rows" is really the problem, or whether it's "not enough context to decide which letter to open".</li>
|
||||
<li><b>Build Spec 5 (Person Dashboard)</b> next — it's independent of /briefwechsel and turns the person page into a real archive overview.</li>
|
||||
<li><b>Upgrade to Spec 2 (Thumbnail Rows)</b> once the thumbnail service lands — it layers cleanly on top of Spec 1 without throwing work away.</li>
|
||||
</ol>
|
||||
<div style="margin-top:10px">Specs 3 and 4 remain on the table but are bigger re-architectures — revisit after watching how users behave with 1 + 5 + 2.</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,996 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8"/>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0"/>
|
||||
<title>Bulk Upload — 3 Concept Designs · Familienarchiv</title>
|
||||
<link href="https://fonts.googleapis.com/css2?family=Merriweather:ital,wght@0,300;0,400;0,700;1,300&family=Montserrat:wght@400;500;600;700;800;900&display=swap" rel="stylesheet"/>
|
||||
<style>
|
||||
/* ── Reset ── */
|
||||
*,*::before,*::after{box-sizing:border-box;margin:0;padding:0}
|
||||
body{font-family:'Montserrat',system-ui,sans-serif;background:#ECEAE4;color:#1A1A1A;line-height:1.5;font-size:13px}
|
||||
.doc{max-width:1300px;margin:0 auto;padding:48px 32px 120px}
|
||||
|
||||
/* ── Masthead ── */
|
||||
.mh{padding-bottom:24px;border-bottom:3px solid #002850;margin-bottom:60px}
|
||||
.mh .kicker{font-size:9px;font-weight:800;letter-spacing:2px;text-transform:uppercase;color:#A6DAD8}
|
||||
.mh h1{font-size:28px;font-weight:900;color:#002850;letter-spacing:-.4px;margin-top:6px}
|
||||
.mh p{font-size:13px;color:#555;max-width:780px;line-height:1.75;margin-top:10px}
|
||||
.mh .byline{font-size:9px;color:#999;font-weight:700;letter-spacing:1.5px;text-transform:uppercase;margin-top:14px}
|
||||
.tag-row{display:flex;gap:6px;margin-top:10px;flex-wrap:wrap}
|
||||
.tag{background:#002850;color:#A6DAD8;padding:3px 9px;border-radius:2px;font-size:8.5px;font-weight:700;letter-spacing:.8px;text-transform:uppercase}
|
||||
.tag.amber{background:#7c4a00;color:#fde68a}
|
||||
.tag.green{background:#1e5e34;color:#d1fae5}
|
||||
.tag.gray{background:#4b5563;color:#e5e7eb}
|
||||
.tag.mint{background:#A6DAD8;color:#002850}
|
||||
|
||||
/* ── Goals card ── */
|
||||
.goals{background:#fff;border:1px solid #E4E2D7;border-radius:4px;padding:22px 26px;margin:0 0 60px;box-shadow:0 1px 3px rgba(0,0,0,.04)}
|
||||
.goals h2{font-size:11px;font-weight:800;letter-spacing:1.5px;text-transform:uppercase;color:#002850;margin-bottom:14px}
|
||||
.goals ul{list-style:none;display:grid;grid-template-columns:1fr 1fr;gap:10px 28px}
|
||||
.goals li{font-size:12.5px;color:#333;padding-left:20px;position:relative;line-height:1.55}
|
||||
.goals li::before{content:"→";position:absolute;left:0;top:0;color:#A6DAD8;font-weight:800}
|
||||
|
||||
/* ── Concept section ── */
|
||||
.concept{margin-bottom:88px;padding-bottom:88px;border-bottom:2px dashed #C8C4BE}
|
||||
.concept:last-of-type{border-bottom:none;margin-bottom:0;padding-bottom:0}
|
||||
.concept-header{display:flex;align-items:flex-start;gap:24px;margin-bottom:36px}
|
||||
.concept-num{font-size:84px;font-weight:900;color:#E0DDD6;line-height:1;flex-shrink:0;width:96px}
|
||||
.concept-label{font-size:8.5px;font-weight:800;text-transform:uppercase;letter-spacing:1.2px;color:#A6DAD8;margin-bottom:5px}
|
||||
.concept-title{font-family:'Merriweather',Georgia,serif;font-size:24px;font-weight:700;color:#002850;margin-bottom:10px}
|
||||
.concept-desc{font-size:13.5px;color:#555;max-width:740px;line-height:1.75}
|
||||
.concept-best{margin-top:14px;display:flex;align-items:center;gap:8px;flex-wrap:wrap}
|
||||
.best-label{background:#A6DAD8;color:#002850;padding:3px 9px;border-radius:2px;font-size:8.5px;font-weight:800;letter-spacing:.6px;text-transform:uppercase}
|
||||
.best-text{font-size:12px;font-weight:600;color:#444}
|
||||
.concept-tradeoff{margin-top:8px;font-size:12px;color:#888;font-style:italic;max-width:680px;line-height:1.7}
|
||||
|
||||
/* ── Browser chrome ── */
|
||||
.screen{max-width:980px;margin:0 auto}
|
||||
.screen.narrow{max-width:400px}
|
||||
.chrome{background:#F5F4EE;border:1.5px solid #C4C0BA;border-radius:8px;overflow:hidden;box-shadow:0 4px 20px rgba(0,0,0,.1)}
|
||||
.chrome-bar{height:22px;background:#E8E6E0;border-bottom:1px solid #C4C0BA;display:flex;align-items:center;padding:0 9px;gap:5px;flex-shrink:0}
|
||||
.chrome-dot{width:7px;height:7px;border-radius:50%;background:#BDB8B1}
|
||||
.chrome-url{flex:1;height:10px;background:#CCC8C2;border-radius:5px;margin-left:8px}
|
||||
.viewport-hint{font-size:7.5px;font-weight:800;color:#A6DAD8;letter-spacing:1px;text-transform:uppercase;padding:4px 9px;background:#002850;border-radius:2px;margin-left:8px}
|
||||
|
||||
/* ── App nav ── */
|
||||
.app-nav{height:32px;background:#002850;display:flex;align-items:center;padding:0 14px;gap:12px;flex-shrink:0}
|
||||
.app-logo{font-family:'Merriweather',Georgia,serif;font-size:8px;font-weight:700;color:#fff;border-bottom:2px solid #A6DAD8;padding-bottom:1px}
|
||||
.app-link{font-size:6px;font-weight:700;text-transform:uppercase;letter-spacing:.5px;color:rgba(255,255,255,.45);white-space:nowrap}
|
||||
.app-link.on{color:rgba(255,255,255,.9)}
|
||||
.app-nav-r{margin-left:auto;display:flex;gap:8px;align-items:center}
|
||||
.app-avatar{width:18px;height:18px;border-radius:50%;background:rgba(255,255,255,.12);display:flex;align-items:center;justify-content:center;font-size:6px;font-weight:800;color:rgba(255,255,255,.5)}
|
||||
|
||||
/* ── Common form element styles ── */
|
||||
.f-label{font-size:6.5px;font-weight:700;color:#666;letter-spacing:.2px;text-transform:uppercase}
|
||||
.f-req{color:#C0392B}
|
||||
.f-input{height:20px;border:1px solid #D4D0CA;border-radius:2px;background:#fff;font-size:7.5px;padding:0 7px;color:#333;display:flex;align-items:center}
|
||||
.f-input.focus{border-color:#002850;box-shadow:0 0 0 2px rgba(0,40,80,.12)}
|
||||
.f-input.filled{color:#002850;font-weight:600;background:#FAFBFF}
|
||||
.f-input.suggested{border-color:#A6DAD8;background:#F0FAFA;color:#005858;font-weight:600}
|
||||
.f-input.empty{color:#BBB;font-style:italic}
|
||||
.f-input.tall{height:28px}
|
||||
|
||||
.f-tags{display:flex;gap:3px;flex-wrap:wrap;min-height:20px;border:1px solid #D4D0CA;border-radius:2px;padding:2px 4px;background:#fff;align-items:center}
|
||||
.f-chip{background:#002850;color:#A6DAD8;border-radius:2px;font-size:6px;font-weight:700;padding:1px 4px 1px 5px;display:flex;align-items:center;gap:2px}
|
||||
.f-chip-rm{color:rgba(166,218,216,.5);font-weight:400}
|
||||
|
||||
/* ── Action bar ── */
|
||||
.action-bar{height:46px;background:#F5F4EE;border-top:1px solid #E4E2D7;display:flex;align-items:center;padding:0 14px;gap:8px;flex-shrink:0}
|
||||
.btn-skip{font-size:7px;font-weight:700;color:#AAA;letter-spacing:.2px;cursor:pointer}
|
||||
.btn-spacer{flex:1}
|
||||
.btn-outline{height:24px;padding:0 12px;border:1px solid #C0BDB6;border-radius:2px;font-size:6.5px;font-weight:800;text-transform:uppercase;letter-spacing:.5px;color:#777;display:flex;align-items:center;cursor:pointer;background:#fff}
|
||||
.btn-primary{height:24px;padding:0 12px;border-radius:2px;font-size:6.5px;font-weight:800;text-transform:uppercase;letter-spacing:.5px;background:#002850;color:#fff;display:flex;align-items:center;cursor:pointer;gap:4px}
|
||||
.btn-primary.green{background:#1A7040}
|
||||
|
||||
/* ─────────────────────────────────────── */
|
||||
/* ── CONCEPT A — Stack (mobile-first) ── */
|
||||
/* ─────────────────────────────────────── */
|
||||
.ca-top-bar{height:34px;background:#F5F4EE;border-bottom:1px solid #E4E2D7;display:flex;align-items:center;padding:0 12px;gap:8px}
|
||||
.ca-back{font-size:7px;font-weight:800;text-transform:uppercase;letter-spacing:.8px;color:#888}
|
||||
.ca-title{flex:1;text-align:center;font-family:'Merriweather',Georgia,serif;font-size:9px;color:#002850;font-weight:600}
|
||||
.ca-count{font-size:7px;font-weight:700;color:#002850;background:#A6DAD8;padding:2px 6px;border-radius:10px;letter-spacing:.3px}
|
||||
|
||||
.ca-body{background:#ECEAE4;padding:14px 12px;overflow-y:auto}
|
||||
|
||||
.ca-drop{background:#fff;border:2px dashed #A6DAD8;border-radius:4px;padding:14px;text-align:center;margin-bottom:14px}
|
||||
.ca-drop-icon{font-size:18px;color:#A6DAD8;margin-bottom:4px}
|
||||
.ca-drop-title{font-size:8.5px;font-weight:700;color:#002850;margin-bottom:2px}
|
||||
.ca-drop-sub{font-size:6.5px;color:#999}
|
||||
|
||||
.ca-shared-card{background:#fff;border:1px solid #E4E2D7;border-radius:3px;padding:12px 14px;margin-bottom:14px;box-shadow:0 1px 2px rgba(0,0,0,.03)}
|
||||
.ca-shared-head{display:flex;align-items:center;gap:6px;margin-bottom:11px}
|
||||
.ca-shared-badge{background:#A6DAD8;color:#002850;padding:2px 7px;border-radius:2px;font-size:6px;font-weight:800;letter-spacing:.4px;text-transform:uppercase}
|
||||
.ca-shared-title{font-family:'Merriweather',Georgia,serif;font-size:9.5px;color:#002850;font-weight:700}
|
||||
.ca-shared-grid{display:grid;grid-template-columns:1fr 1fr;gap:8px 10px}
|
||||
.ca-shared-grid .full{grid-column:1/-1}
|
||||
.ca-shared-field{display:flex;flex-direction:column;gap:3px}
|
||||
|
||||
.ca-files-head{display:flex;align-items:center;justify-content:space-between;margin-bottom:8px;padding:0 2px}
|
||||
.ca-files-title{font-size:7px;font-weight:800;letter-spacing:1px;text-transform:uppercase;color:#B0ADA6}
|
||||
.ca-files-add{font-size:7px;font-weight:700;color:#002850;display:flex;align-items:center;gap:3px}
|
||||
|
||||
.ca-file{background:#fff;border:1px solid #E4E2D7;border-radius:3px;padding:9px 10px;margin-bottom:7px;display:flex;align-items:center;gap:10px}
|
||||
.ca-file.active{border-color:#002850;box-shadow:0 0 0 2px rgba(0,40,80,.08)}
|
||||
.ca-thumb{width:28px;height:36px;background:#FFFEF8;border:1px solid #E4E2D7;border-radius:1px;flex-shrink:0;display:flex;flex-direction:column;padding:3px;gap:1px}
|
||||
.ca-thumb .tl{height:2px;background:#C4BDB0;opacity:.6;border-radius:1px}
|
||||
.ca-thumb .tl.s{width:60%;opacity:.35}
|
||||
.ca-thumb .tl.m{width:82%}
|
||||
.ca-file-body{flex:1;min-width:0;display:flex;flex-direction:column;gap:2px}
|
||||
.ca-file-title{font-size:8px;color:#002850;font-weight:700;white-space:nowrap;overflow:hidden;text-overflow:ellipsis}
|
||||
.ca-file-title.placeholder{color:#888;font-weight:400;font-style:italic}
|
||||
.ca-file-meta{font-size:6.5px;color:#AAA}
|
||||
.ca-file-rm{font-size:10px;color:#B0ADA6;padding:0 4px;cursor:pointer}
|
||||
|
||||
/* ───────────────────────────────────────────── */
|
||||
/* ── CONCEPT B — Split-panel + file switcher ── */
|
||||
/* ───────────────────────────────────────────── */
|
||||
.cb-top-bar{height:38px;background:#F5F4EE;border-bottom:1px solid #E4E2D7;display:flex;align-items:center;padding:0 14px;gap:10px}
|
||||
.cb-back{font-size:7px;font-weight:800;text-transform:uppercase;letter-spacing:.8px;color:#888}
|
||||
.cb-title{font-family:'Merriweather',Georgia,serif;font-size:9px;font-weight:700;color:#002850}
|
||||
.cb-count{background:#A6DAD8;color:#002850;padding:2px 7px;border-radius:10px;font-size:7px;font-weight:800;letter-spacing:.3px}
|
||||
.cb-discard{margin-left:auto;font-size:7px;font-weight:700;color:#C0392B;letter-spacing:.2px}
|
||||
|
||||
.cb-split{display:flex;min-height:440px}
|
||||
.cb-pdf{flex:55;background:#5E5C59;display:flex;flex-direction:column;border-right:1px solid #3A3836}
|
||||
.cb-pdf-toolbar{height:28px;background:#3A3836;display:flex;align-items:center;padding:0 10px;gap:8px}
|
||||
.cb-pdf-btn{width:16px;height:16px;border-radius:2px;background:rgba(255,255,255,.1);display:flex;align-items:center;justify-content:center;font-size:7px;color:rgba(255,255,255,.6)}
|
||||
.cb-pdf-page{font-size:6.5px;color:rgba(255,255,255,.4);margin-left:auto;font-weight:700;letter-spacing:.5px}
|
||||
.cb-pdf-view{flex:1;display:flex;justify-content:center;padding:14px;overflow:hidden}
|
||||
.cb-paper{background:#FFFEF8;box-shadow:0 2px 10px rgba(0,0,0,.3);border-radius:1px;padding:14px 16px;display:flex;flex-direction:column;gap:0;width:180px;flex-shrink:0}
|
||||
.pl{height:4px;background:#C4BDB0;border-radius:1px;opacity:.55;margin-bottom:3px}
|
||||
.pl.h{height:6px;opacity:.75;margin-bottom:5px}
|
||||
.pl.s{width:55%;opacity:.3}
|
||||
.pl.m{width:80%}
|
||||
.pl.sp{height:7px;background:transparent}
|
||||
.cb-filebar{background:#434140;border-top:1px solid #3A3836;display:flex;align-items:center;padding:0 8px;gap:3px;height:36px;flex-shrink:0}
|
||||
.cb-fb-arrow{width:18px;height:22px;border-radius:2px;background:rgba(255,255,255,.08);display:flex;align-items:center;justify-content:center;font-size:9px;color:rgba(255,255,255,.6)}
|
||||
.cb-fb-track{flex:1;display:flex;gap:3px;padding:0 3px;overflow:hidden}
|
||||
.cb-fb-item{padding:3px 6px;border-radius:2px;font-size:6px;font-weight:700;color:rgba(255,255,255,.55);background:rgba(255,255,255,.06);display:flex;align-items:center;gap:4px;white-space:nowrap}
|
||||
.cb-fb-item.on{background:#A6DAD8;color:#002850}
|
||||
.cb-fb-num{background:rgba(0,0,0,.15);border-radius:2px;padding:0 3px;font-size:5.5px;font-weight:800}
|
||||
.cb-fb-item.on .cb-fb-num{background:rgba(0,40,80,.25);color:#002850}
|
||||
|
||||
.cb-form{flex:45;background:#fff;display:flex;flex-direction:column}
|
||||
.cb-form-scroll{flex:1;overflow-y:auto;padding:14px}
|
||||
|
||||
.cb-only-card{background:#F0FAFA;border:1px solid #A6DAD8;border-radius:3px;padding:10px 12px;margin-bottom:12px}
|
||||
.cb-only-head{display:flex;align-items:center;gap:6px;margin-bottom:7px}
|
||||
.cb-only-badge{background:#005858;color:#A6DAD8;padding:2px 7px;border-radius:2px;font-size:6px;font-weight:800;letter-spacing:.4px;text-transform:uppercase}
|
||||
.cb-only-subtitle{font-size:6.5px;color:#005858;font-weight:600;letter-spacing:.3px}
|
||||
|
||||
.cb-shared-card{background:#F9F8F5;border:1px solid #E4E2D7;border-radius:3px;padding:10px 12px;margin-bottom:10px}
|
||||
.cb-shared-head{display:flex;align-items:center;gap:6px;margin-bottom:9px}
|
||||
.cb-shared-badge{background:#A6DAD8;color:#002850;padding:2px 7px;border-radius:2px;font-size:6px;font-weight:800;letter-spacing:.4px;text-transform:uppercase}
|
||||
.cb-shared-subtitle{font-size:6.5px;color:#002850;font-weight:600}
|
||||
.cb-row{display:grid;grid-template-columns:1fr 1fr;gap:7px;margin-bottom:7px}
|
||||
.cb-row.full{grid-template-columns:1fr}
|
||||
.cb-field{display:flex;flex-direction:column;gap:3px}
|
||||
|
||||
/* ─────────────────────────────────────── */
|
||||
/* ── CONCEPT C — Progressive accordion ── */
|
||||
/* ─────────────────────────────────────── */
|
||||
.cc-top-bar{height:34px;background:#F5F4EE;border-bottom:1px solid #E4E2D7;display:flex;align-items:center;padding:0 14px;gap:8px}
|
||||
|
||||
.cc-body{background:#ECEAE4;padding:14px;display:flex;flex-direction:column;gap:11px;max-height:540px;overflow-y:auto}
|
||||
|
||||
.cc-shared{background:#fff;border:1px solid #E4E2D7;border-radius:3px;padding:12px 14px;box-shadow:0 1px 2px rgba(0,0,0,.03);position:sticky;top:0;z-index:2}
|
||||
.cc-shared-head{display:flex;align-items:center;gap:7px;margin-bottom:11px}
|
||||
.cc-shared-badge{background:#A6DAD8;color:#002850;padding:2px 7px;border-radius:2px;font-size:6px;font-weight:800;letter-spacing:.4px;text-transform:uppercase}
|
||||
.cc-shared-title{font-family:'Merriweather',Georgia,serif;font-size:10px;color:#002850;font-weight:700}
|
||||
.cc-grid{display:grid;grid-template-columns:1fr 1fr 1fr;gap:8px 10px}
|
||||
.cc-grid .span2{grid-column:span 2}
|
||||
|
||||
.cc-files-label{font-size:7px;font-weight:800;letter-spacing:1px;text-transform:uppercase;color:#B0ADA6;padding:0 2px;margin-top:6px}
|
||||
|
||||
.cc-file{background:#fff;border:1px solid #E4E2D7;border-radius:3px;overflow:hidden}
|
||||
.cc-file.open{border-color:#002850;box-shadow:0 2px 6px rgba(0,40,80,.08)}
|
||||
.cc-file-head{display:flex;align-items:center;gap:10px;padding:9px 12px;cursor:pointer}
|
||||
.cc-file-head.open{border-bottom:1px solid #E4E2D7;background:#F9F8F5}
|
||||
.cc-caret{font-size:9px;color:#A6DAD8;width:10px}
|
||||
.cc-file-thumb{width:22px;height:28px;background:#FFFEF8;border:1px solid #E4E2D7;border-radius:1px;padding:2px;display:flex;flex-direction:column;gap:1px;flex-shrink:0}
|
||||
.cc-file-thumb .tl{height:2px;background:#C4BDB0;opacity:.55;border-radius:1px}
|
||||
.cc-file-body{flex:1;min-width:0}
|
||||
.cc-file-titlerow{display:flex;align-items:center;gap:7px}
|
||||
.cc-file-title{font-size:8.5px;color:#002850;font-weight:700;white-space:nowrap;overflow:hidden;text-overflow:ellipsis}
|
||||
.cc-file-title.placeholder{color:#888;font-weight:400;font-style:italic}
|
||||
.cc-file-meta{font-size:6.5px;color:#AAA;margin-top:2px}
|
||||
.cc-file-rm{font-size:11px;color:#B0ADA6;padding:0 4px}
|
||||
|
||||
.cc-file-open{display:flex;background:#F5F4EE}
|
||||
.cc-preview{flex:45;background:#5E5C59;padding:12px;display:flex;justify-content:center}
|
||||
.cc-preview-paper{background:#FFFEF8;border-radius:1px;padding:8px 10px;width:110px;flex-shrink:0;display:flex;flex-direction:column;box-shadow:0 2px 6px rgba(0,0,0,.25)}
|
||||
.cc-file-form{flex:55;padding:12px 14px;background:#fff;display:flex;flex-direction:column;gap:7px}
|
||||
|
||||
/* ─────────── Decision matrix ─────────── */
|
||||
.decision{background:#fff;border:1px solid #E4E2D7;border-radius:4px;padding:28px 32px;margin:88px 0 60px;box-shadow:0 1px 3px rgba(0,0,0,.04)}
|
||||
.decision h2{font-size:11px;font-weight:800;letter-spacing:1.5px;text-transform:uppercase;color:#002850;margin-bottom:6px}
|
||||
.decision p.lead{font-size:13.5px;color:#555;line-height:1.7;margin-bottom:22px;max-width:820px}
|
||||
.dm{width:100%;border-collapse:collapse;margin-top:12px;font-size:12px}
|
||||
.dm th{text-align:left;font-size:9.5px;font-weight:800;letter-spacing:.5px;text-transform:uppercase;color:#002850;padding:9px 12px;background:#F9F8F5;border-bottom:2px solid #E4E2D7}
|
||||
.dm td{padding:13px 12px;border-bottom:1px solid #EFEDE7;vertical-align:top;line-height:1.6}
|
||||
.dm td:first-child{font-weight:700;color:#002850;width:18%;white-space:nowrap}
|
||||
.dm td.score{font-size:15px;text-align:center;width:12%}
|
||||
.dm td.ok{color:#1A7040}
|
||||
.dm td.mid{color:#A07100}
|
||||
.dm td.bad{color:#C0392B}
|
||||
|
||||
/* ─────────── Recommendation ─────────── */
|
||||
.reco{background:#002850;color:#fff;border-radius:6px;padding:36px 40px;margin:48px 0 64px;box-shadow:0 4px 20px rgba(0,40,80,.15)}
|
||||
.reco .kicker{font-size:9px;font-weight:800;letter-spacing:2px;text-transform:uppercase;color:#A6DAD8}
|
||||
.reco h2{font-family:'Merriweather',Georgia,serif;font-size:26px;font-weight:700;margin-top:6px}
|
||||
.reco .why{font-size:13.5px;line-height:1.85;color:rgba(255,255,255,.88);max-width:780px;margin-top:14px}
|
||||
.reco ul{list-style:none;margin-top:14px;display:grid;grid-template-columns:1fr 1fr;gap:9px 26px}
|
||||
.reco ul li{font-size:12.5px;color:rgba(255,255,255,.9);padding-left:22px;position:relative;line-height:1.6}
|
||||
.reco ul li::before{content:"✓";position:absolute;left:0;top:0;color:#A6DAD8;font-weight:800}
|
||||
|
||||
/* ─────────── Impl-ref ─────────── */
|
||||
.impl{background:#fff;border:1px solid #E4E2D7;border-radius:4px;padding:28px 32px;box-shadow:0 1px 3px rgba(0,0,0,.04)}
|
||||
.impl h2{font-size:11px;font-weight:800;letter-spacing:1.5px;text-transform:uppercase;color:#002850;margin-bottom:16px}
|
||||
.impl h3{font-family:'Merriweather',Georgia,serif;font-size:15px;color:#002850;margin:22px 0 10px}
|
||||
.impl-table{width:100%;border-collapse:collapse;margin-top:6px;font-size:12px}
|
||||
.impl-table th{text-align:left;font-size:9px;font-weight:800;letter-spacing:.6px;text-transform:uppercase;color:#002850;padding:8px 10px;background:#F9F8F5;border-bottom:2px solid #E4E2D7}
|
||||
.impl-table td{padding:10px;border-bottom:1px solid #EFEDE7;vertical-align:top;line-height:1.55}
|
||||
.impl-table td:first-child{font-weight:700;color:#002850;width:22%}
|
||||
.impl-table td code{font-family:'SF Mono','Menlo',monospace;font-size:11px;background:#F0EEE8;padding:1px 6px;border-radius:2px;color:#002850}
|
||||
.impl-table td.px{color:#777;font-size:11.5px;width:16%}
|
||||
.impl-table td.note{color:#888;font-size:11.5px;font-style:italic;width:22%}
|
||||
.impl h3.ix{margin-top:32px}
|
||||
|
||||
.notes{background:#F9F8F5;border-left:3px solid #A6DAD8;padding:16px 22px;border-radius:0 4px 4px 0;margin-top:26px}
|
||||
.notes .nh{font-size:9px;font-weight:800;letter-spacing:1px;text-transform:uppercase;color:#002850;margin-bottom:8px}
|
||||
.notes ul{list-style:none;display:flex;flex-direction:column;gap:6px}
|
||||
.notes li{font-size:12px;color:#333;padding-left:18px;position:relative;line-height:1.7}
|
||||
.notes li::before{content:"•";position:absolute;left:0;top:0;color:#A6DAD8;font-weight:800}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="doc">
|
||||
|
||||
<!-- ════════════════════════════════════════════ -->
|
||||
<!-- ═══════════════ MASTHEAD ══════════════ -->
|
||||
<!-- ════════════════════════════════════════════ -->
|
||||
<div class="mh">
|
||||
<div class="kicker">UX Spec · Bulk Upload</div>
|
||||
<h1>Uploading multiple documents in a single pass</h1>
|
||||
<p>
|
||||
Extends issue <strong>#294</strong> (new-document split-panel) with bulk uploads. When a user drops
|
||||
N files, every metadata field applies once to all of them — only the <em>title</em> is per-file,
|
||||
pre-filled from the filename and editable inline. A single save POST creates N documents.
|
||||
</p>
|
||||
<div class="byline">Prepared by Leonie Voss · 2026-04-24 · Draft 1 · References: #294, #305</div>
|
||||
<div class="tag-row">
|
||||
<span class="tag">feature</span>
|
||||
<span class="tag mint">ui</span>
|
||||
<span class="tag gray">a11y 320px+</span>
|
||||
<span class="tag green">backend ready</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Goals -->
|
||||
<div class="goals">
|
||||
<h2>Design goals</h2>
|
||||
<ul>
|
||||
<li><strong>One-pass feel</strong>: drop → fill shared fields → save. No wizard, no per-file detour.</li>
|
||||
<li><strong>Every field is shared except the title</strong>, which is always set (filename-derived).</li>
|
||||
<li><strong>No mode switch</strong>: 1 file and N files use the same screen — more files reveal more chrome.</li>
|
||||
<li><strong>Scales to 20+ files</strong> without the form losing scan-ability on mobile.</li>
|
||||
<li><strong>Reuses the #294 split-panel layout</strong> (DocumentEditLayout) — minimum new surface.</li>
|
||||
<li><strong>a11y-first</strong>: 44px targets, focus states, <code>aria-current</code> on active file, keyboard-navigable.</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<!-- ════════════════════════════════════════════ -->
|
||||
<!-- ═════════ CONCEPT A — STACK ═════════ -->
|
||||
<!-- ════════════════════════════════════════════ -->
|
||||
<section class="concept">
|
||||
<div class="concept-header">
|
||||
<div class="concept-num">A</div>
|
||||
<div>
|
||||
<div class="concept-label">Concept A</div>
|
||||
<div class="concept-title">Flat Stack — shared header · file cards · sticky save</div>
|
||||
<p class="concept-desc">
|
||||
A single vertical flow: drop zone on top, then a <em>Gilt für alle</em> metadata card,
|
||||
then stacked file cards (thumbnail · editable title · remove). No split panel, no tabs.
|
||||
Scrolling down reveals all files; the save bar sticks to the bottom.
|
||||
</p>
|
||||
<div class="concept-best">
|
||||
<span class="best-label">Best for</span>
|
||||
<span class="best-text">Small-screen workflows. Seniors who prefer linear flows over tabs.</span>
|
||||
</div>
|
||||
<div class="concept-tradeoff">
|
||||
Trade-off: no PDF preview until you click through to the document after save. Harder to verify
|
||||
you grabbed the right files before committing.
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- mobile mockup -->
|
||||
<div class="screen narrow">
|
||||
<div class="chrome">
|
||||
<div class="chrome-bar">
|
||||
<div class="chrome-dot"></div><div class="chrome-dot"></div><div class="chrome-dot"></div>
|
||||
<div class="chrome-url"></div>
|
||||
<div class="viewport-hint">375 · mobile</div>
|
||||
</div>
|
||||
<div class="app-nav">
|
||||
<div class="app-logo">Familienarchiv</div>
|
||||
<div class="app-nav-r">
|
||||
<div class="app-avatar">MR</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="ca-top-bar">
|
||||
<div class="ca-back">← Zurück</div>
|
||||
<div class="ca-title">Neue Dokumente</div>
|
||||
<div class="ca-count">5</div>
|
||||
</div>
|
||||
<div class="ca-body" style="height:500px">
|
||||
<!-- drop zone -->
|
||||
<div class="ca-drop">
|
||||
<div class="ca-drop-icon">⇪</div>
|
||||
<div class="ca-drop-title">Weitere Dateien hinzufügen</div>
|
||||
<div class="ca-drop-sub">PDF, JPEG, PNG, TIFF · max 50 MB</div>
|
||||
</div>
|
||||
|
||||
<!-- shared card -->
|
||||
<div class="ca-shared-card">
|
||||
<div class="ca-shared-head">
|
||||
<span class="ca-shared-badge">Gilt für alle 5</span>
|
||||
<span class="ca-shared-title">Angaben</span>
|
||||
</div>
|
||||
<div class="ca-shared-grid">
|
||||
<div class="ca-shared-field">
|
||||
<span class="f-label">Absender</span>
|
||||
<div class="f-input filled">Hans Müller</div>
|
||||
</div>
|
||||
<div class="ca-shared-field">
|
||||
<span class="f-label">Empfänger</span>
|
||||
<div class="f-input filled">Anna Schmidt</div>
|
||||
</div>
|
||||
<div class="ca-shared-field">
|
||||
<span class="f-label">Datum</span>
|
||||
<div class="f-input filled">1950-06</div>
|
||||
</div>
|
||||
<div class="ca-shared-field">
|
||||
<span class="f-label">Ort</span>
|
||||
<div class="f-input empty">z.B. Berlin</div>
|
||||
</div>
|
||||
<div class="ca-shared-field full">
|
||||
<span class="f-label">Tags</span>
|
||||
<div class="f-tags">
|
||||
<span class="f-chip">Familie <span class="f-chip-rm">×</span></span>
|
||||
<span class="f-chip">Krieg <span class="f-chip-rm">×</span></span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- files list -->
|
||||
<div class="ca-files-head">
|
||||
<div class="ca-files-title">5 Dateien · Titel bearbeiten</div>
|
||||
</div>
|
||||
<div class="ca-file active">
|
||||
<div class="ca-thumb"><div class="tl h"></div><div class="tl"></div><div class="tl m"></div><div class="tl s"></div></div>
|
||||
<div class="ca-file-body">
|
||||
<div class="ca-file-title">Brief_1940_Hans</div>
|
||||
<div class="ca-file-meta">Brief_1940_Hans.pdf · 2.4 MB</div>
|
||||
</div>
|
||||
<div class="ca-file-rm">✕</div>
|
||||
</div>
|
||||
<div class="ca-file">
|
||||
<div class="ca-thumb"><div class="tl h"></div><div class="tl"></div><div class="tl"></div><div class="tl s"></div></div>
|
||||
<div class="ca-file-body">
|
||||
<div class="ca-file-title">Brief_1940_Anna</div>
|
||||
<div class="ca-file-meta">Brief_1940_Anna.pdf · 1.8 MB</div>
|
||||
</div>
|
||||
<div class="ca-file-rm">✕</div>
|
||||
</div>
|
||||
<div class="ca-file">
|
||||
<div class="ca-thumb"><div class="tl h"></div><div class="tl m"></div><div class="tl"></div><div class="tl"></div></div>
|
||||
<div class="ca-file-body">
|
||||
<div class="ca-file-title">Brief_1941_Clara</div>
|
||||
<div class="ca-file-meta">Brief_1941_Clara.pdf · 890 kB</div>
|
||||
</div>
|
||||
<div class="ca-file-rm">✕</div>
|
||||
</div>
|
||||
<div class="ca-file">
|
||||
<div class="ca-thumb"><div class="tl h"></div><div class="tl"></div><div class="tl s"></div><div class="tl m"></div></div>
|
||||
<div class="ca-file-body">
|
||||
<div class="ca-file-title placeholder">Postkarte_Venedig</div>
|
||||
<div class="ca-file-meta">Postkarte_Venedig.jpg · 1.1 MB</div>
|
||||
</div>
|
||||
<div class="ca-file-rm">✕</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="action-bar">
|
||||
<div class="btn-skip">Alle verwerfen</div>
|
||||
<div class="btn-spacer"></div>
|
||||
<div class="btn-outline">Als Platzhalter</div>
|
||||
<div class="btn-primary">5 speichern →</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<!-- ════════════════════════════════════════════ -->
|
||||
<!-- ═══ CONCEPT B — SPLIT-PANEL + SWITCHER ══ -->
|
||||
<!-- ════════════════════════════════════════════ -->
|
||||
<section class="concept">
|
||||
<div class="concept-header">
|
||||
<div class="concept-num">B</div>
|
||||
<div>
|
||||
<div class="concept-label">Concept B · RECOMMENDED</div>
|
||||
<div class="concept-title">Split-Panel with File Switcher</div>
|
||||
<p class="concept-desc">
|
||||
Reuses the <em>DocumentEditLayout</em> from issue #294 and adds a horizontal file-switcher strip
|
||||
under the PDF preview. Right column splits into two cards: <em>Gilt nur für diese Datei</em>
|
||||
(title only, mint accent) and <em>Gilt für alle N Dokumente</em> (everything else).
|
||||
When N=1 the switcher disappears and the screen is byte-identical to #294.
|
||||
</p>
|
||||
<div class="concept-best">
|
||||
<span class="best-label">Best for</span>
|
||||
<span class="best-text">The project's primary use case. Desktop + tablet, matches #294 DNA.</span>
|
||||
</div>
|
||||
<div class="concept-tradeoff">
|
||||
Trade-off: on mobile the split has to collapse into tabs ("Vorschau / Angaben"). We reuse the
|
||||
same responsive pattern that DocumentEditLayout already ships with.
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- desktop mockup -->
|
||||
<div class="screen">
|
||||
<div class="chrome">
|
||||
<div class="chrome-bar">
|
||||
<div class="chrome-dot"></div><div class="chrome-dot"></div><div class="chrome-dot"></div>
|
||||
<div class="chrome-url"></div>
|
||||
<div class="viewport-hint">1280 · desktop</div>
|
||||
</div>
|
||||
<div class="app-nav">
|
||||
<div class="app-logo">Familienarchiv</div>
|
||||
<div class="app-link on">Dokumente</div>
|
||||
<div class="app-link">Personen</div>
|
||||
<div class="app-link">Briefwechsel</div>
|
||||
<div class="app-link">Chronik</div>
|
||||
<div class="app-nav-r">
|
||||
<div class="app-avatar">MR</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="cb-top-bar">
|
||||
<div class="cb-back">← Dokumente</div>
|
||||
<div class="cb-title">Neue Dokumente</div>
|
||||
<div class="cb-count">5 werden erstellt</div>
|
||||
<div class="cb-discard">Alle verwerfen</div>
|
||||
</div>
|
||||
|
||||
<div class="cb-split">
|
||||
<!-- PDF side -->
|
||||
<div class="cb-pdf">
|
||||
<div class="cb-pdf-toolbar">
|
||||
<div class="cb-pdf-btn">◀</div>
|
||||
<div class="cb-pdf-btn">▶</div>
|
||||
<div class="cb-pdf-btn">+</div>
|
||||
<div class="cb-pdf-btn">−</div>
|
||||
<div class="cb-pdf-page">Seite 1 / 2 · Datei 1 von 5</div>
|
||||
</div>
|
||||
<div class="cb-pdf-view">
|
||||
<div class="cb-paper">
|
||||
<div class="pl h"></div><div class="pl h s"></div><div class="pl sp"></div>
|
||||
<div class="pl"></div><div class="pl m"></div><div class="pl"></div>
|
||||
<div class="pl s"></div><div class="pl sp"></div>
|
||||
<div class="pl"></div><div class="pl m"></div><div class="pl"></div>
|
||||
<div class="pl"></div><div class="pl s"></div><div class="pl sp"></div>
|
||||
<div class="pl"></div><div class="pl m"></div><div class="pl s"></div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- file switcher -->
|
||||
<div class="cb-filebar">
|
||||
<div class="cb-fb-arrow">‹</div>
|
||||
<div class="cb-fb-track">
|
||||
<div class="cb-fb-item on"><span class="cb-fb-num">1</span> Brief_1940_Hans.pdf</div>
|
||||
<div class="cb-fb-item"><span class="cb-fb-num">2</span> Brief_1940_Anna.pdf</div>
|
||||
<div class="cb-fb-item"><span class="cb-fb-num">3</span> Brief_1941_Clara.pdf</div>
|
||||
<div class="cb-fb-item"><span class="cb-fb-num">4</span> Postkarte_Venedig.jpg</div>
|
||||
<div class="cb-fb-item"><span class="cb-fb-num">5</span> Urkunde_1942.pdf</div>
|
||||
</div>
|
||||
<div class="cb-fb-arrow">›</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Form side -->
|
||||
<div class="cb-form">
|
||||
<div class="cb-form-scroll">
|
||||
<!-- PER-FILE card -->
|
||||
<div class="cb-only-card">
|
||||
<div class="cb-only-head">
|
||||
<span class="cb-only-badge">Nur diese Datei</span>
|
||||
<span class="cb-only-subtitle">1 / 5 · Brief_1940_Hans.pdf</span>
|
||||
</div>
|
||||
<div class="cb-row full">
|
||||
<div class="cb-field">
|
||||
<span class="f-label">Titel <span class="f-req">*</span></span>
|
||||
<div class="f-input filled tall">Brief an Anna, 1940</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- SHARED card -->
|
||||
<div class="cb-shared-card">
|
||||
<div class="cb-shared-head">
|
||||
<span class="cb-shared-badge">Gilt für alle 5</span>
|
||||
<span class="cb-shared-subtitle">Gemeinsame Angaben</span>
|
||||
</div>
|
||||
<div class="cb-row">
|
||||
<div class="cb-field">
|
||||
<span class="f-label">Absender</span>
|
||||
<div class="f-input filled">Hans Müller</div>
|
||||
</div>
|
||||
<div class="cb-field">
|
||||
<span class="f-label">Empfänger</span>
|
||||
<div class="f-input filled">Anna Schmidt</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="cb-row">
|
||||
<div class="cb-field">
|
||||
<span class="f-label">Datum</span>
|
||||
<div class="f-input filled">15.06.1940</div>
|
||||
</div>
|
||||
<div class="cb-field">
|
||||
<span class="f-label">Ort</span>
|
||||
<div class="f-input empty">z.B. Berlin</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="cb-row full">
|
||||
<div class="cb-field">
|
||||
<span class="f-label">Tags</span>
|
||||
<div class="f-tags">
|
||||
<span class="f-chip">Familie <span class="f-chip-rm">×</span></span>
|
||||
<span class="f-chip">Krieg <span class="f-chip-rm">×</span></span>
|
||||
<span class="f-chip">Briefwechsel <span class="f-chip-rm">×</span></span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="cb-row">
|
||||
<div class="cb-field">
|
||||
<span class="f-label">Archivbox</span>
|
||||
<div class="f-input empty">z.B. B-12</div>
|
||||
</div>
|
||||
<div class="cb-field">
|
||||
<span class="f-label">Mappe</span>
|
||||
<div class="f-input empty">z.B. M-3</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="action-bar">
|
||||
<div class="btn-skip">Alle verwerfen</div>
|
||||
<div class="btn-spacer"></div>
|
||||
<div class="btn-outline">Als Platzhalter</div>
|
||||
<div class="btn-primary green">5 speichern →</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<!-- ════════════════════════════════════════════ -->
|
||||
<!-- ══ CONCEPT C — PROGRESSIVE ACCORDION ══ -->
|
||||
<!-- ════════════════════════════════════════════ -->
|
||||
<section class="concept">
|
||||
<div class="concept-header">
|
||||
<div class="concept-num">C</div>
|
||||
<div>
|
||||
<div class="concept-label">Concept C</div>
|
||||
<div class="concept-title">Progressive Accordion — shared sticky header · file cards expand inline</div>
|
||||
<p class="concept-desc">
|
||||
Shared metadata sticks at the top of the page. Below, each file is a collapsed card; clicking
|
||||
a card expands it to show the PDF preview + title field inline. Only one card is expanded at a
|
||||
time. Scales well to 20+ files — the list stays readable, you only look at the PDFs you want
|
||||
to verify.
|
||||
</p>
|
||||
<div class="concept-best">
|
||||
<span class="best-label">Best for</span>
|
||||
<span class="best-text">Large batches (10+ files) where you want to spot-check a few.</span>
|
||||
</div>
|
||||
<div class="concept-tradeoff">
|
||||
Trade-off: two different visual languages — cards collapsed vs. cards expanded with PDF. New
|
||||
pattern for the project; costs familiarity.
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="screen">
|
||||
<div class="chrome">
|
||||
<div class="chrome-bar">
|
||||
<div class="chrome-dot"></div><div class="chrome-dot"></div><div class="chrome-dot"></div>
|
||||
<div class="chrome-url"></div>
|
||||
<div class="viewport-hint">1280 · desktop</div>
|
||||
</div>
|
||||
<div class="app-nav">
|
||||
<div class="app-logo">Familienarchiv</div>
|
||||
<div class="app-link on">Dokumente</div>
|
||||
<div class="app-link">Personen</div>
|
||||
<div class="app-nav-r"><div class="app-avatar">MR</div></div>
|
||||
</div>
|
||||
<div class="cc-top-bar">
|
||||
<div class="ca-back">← Zurück</div>
|
||||
<div class="ca-title">Neue Dokumente</div>
|
||||
<div class="ca-count">5</div>
|
||||
</div>
|
||||
|
||||
<div class="cc-body">
|
||||
<!-- sticky shared card -->
|
||||
<div class="cc-shared">
|
||||
<div class="cc-shared-head">
|
||||
<span class="cc-shared-badge">Gilt für alle 5</span>
|
||||
<span class="cc-shared-title">Gemeinsame Angaben</span>
|
||||
</div>
|
||||
<div class="cc-grid">
|
||||
<div class="cb-field"><span class="f-label">Absender</span><div class="f-input filled">Hans Müller</div></div>
|
||||
<div class="cb-field"><span class="f-label">Empfänger</span><div class="f-input filled">Anna Schmidt</div></div>
|
||||
<div class="cb-field"><span class="f-label">Datum</span><div class="f-input filled">15.06.1940</div></div>
|
||||
<div class="cb-field span2"><span class="f-label">Tags</span><div class="f-tags"><span class="f-chip">Familie <span class="f-chip-rm">×</span></span><span class="f-chip">Krieg <span class="f-chip-rm">×</span></span></div></div>
|
||||
<div class="cb-field"><span class="f-label">Ort</span><div class="f-input empty">z.B. Berlin</div></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="cc-files-label">5 Dateien</div>
|
||||
|
||||
<!-- collapsed card -->
|
||||
<div class="cc-file">
|
||||
<div class="cc-file-head">
|
||||
<div class="cc-caret">▸</div>
|
||||
<div class="cc-file-thumb"><div class="tl"></div><div class="tl"></div><div class="tl"></div></div>
|
||||
<div class="cc-file-body">
|
||||
<div class="cc-file-titlerow">
|
||||
<div class="cc-file-title">Brief an Anna, 1940</div>
|
||||
</div>
|
||||
<div class="cc-file-meta">Brief_1940_Hans.pdf · 2.4 MB</div>
|
||||
</div>
|
||||
<div class="cc-file-rm">✕</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- expanded card -->
|
||||
<div class="cc-file open">
|
||||
<div class="cc-file-head open">
|
||||
<div class="cc-caret" style="color:#002850">▾</div>
|
||||
<div class="cc-file-thumb"><div class="tl"></div><div class="tl"></div><div class="tl"></div></div>
|
||||
<div class="cc-file-body">
|
||||
<div class="cc-file-titlerow">
|
||||
<div class="cc-file-title">Brief von Anna, Antwort</div>
|
||||
</div>
|
||||
<div class="cc-file-meta">Brief_1940_Anna.pdf · 1.8 MB</div>
|
||||
</div>
|
||||
<div class="cc-file-rm">✕</div>
|
||||
</div>
|
||||
<div class="cc-file-open">
|
||||
<div class="cc-preview">
|
||||
<div class="cc-preview-paper">
|
||||
<div class="pl h"></div><div class="pl h s"></div><div class="pl sp"></div>
|
||||
<div class="pl"></div><div class="pl m"></div><div class="pl s"></div>
|
||||
<div class="pl sp"></div>
|
||||
<div class="pl"></div><div class="pl"></div><div class="pl m"></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="cc-file-form">
|
||||
<div class="cb-only-head">
|
||||
<span class="cb-only-badge">Nur diese Datei</span>
|
||||
<span class="cb-only-subtitle">2 / 5</span>
|
||||
</div>
|
||||
<div class="cb-field">
|
||||
<span class="f-label">Titel <span class="f-req">*</span></span>
|
||||
<div class="f-input filled tall">Brief von Anna, Antwort</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- more collapsed -->
|
||||
<div class="cc-file">
|
||||
<div class="cc-file-head">
|
||||
<div class="cc-caret">▸</div>
|
||||
<div class="cc-file-thumb"><div class="tl"></div><div class="tl"></div><div class="tl"></div></div>
|
||||
<div class="cc-file-body">
|
||||
<div class="cc-file-titlerow">
|
||||
<div class="cc-file-title placeholder">Brief_1941_Clara</div>
|
||||
</div>
|
||||
<div class="cc-file-meta">Brief_1941_Clara.pdf · 890 kB</div>
|
||||
</div>
|
||||
<div class="cc-file-rm">✕</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="cc-file">
|
||||
<div class="cc-file-head">
|
||||
<div class="cc-caret">▸</div>
|
||||
<div class="cc-file-thumb"><div class="tl"></div><div class="tl"></div><div class="tl"></div></div>
|
||||
<div class="cc-file-body">
|
||||
<div class="cc-file-titlerow">
|
||||
<div class="cc-file-title placeholder">Postkarte_Venedig</div>
|
||||
</div>
|
||||
<div class="cc-file-meta">Postkarte_Venedig.jpg · 1.1 MB</div>
|
||||
</div>
|
||||
<div class="cc-file-rm">✕</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="cc-file">
|
||||
<div class="cc-file-head">
|
||||
<div class="cc-caret">▸</div>
|
||||
<div class="cc-file-thumb"><div class="tl"></div><div class="tl"></div><div class="tl"></div></div>
|
||||
<div class="cc-file-body">
|
||||
<div class="cc-file-titlerow">
|
||||
<div class="cc-file-title placeholder">Urkunde_1942</div>
|
||||
</div>
|
||||
<div class="cc-file-meta">Urkunde_1942.pdf · 3.1 MB</div>
|
||||
</div>
|
||||
<div class="cc-file-rm">✕</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="action-bar">
|
||||
<div class="btn-skip">Alle verwerfen</div>
|
||||
<div class="btn-spacer"></div>
|
||||
<div class="btn-outline">Als Platzhalter</div>
|
||||
<div class="btn-primary green">5 speichern →</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<!-- ════════════════════════════════════════════ -->
|
||||
<!-- ══════════ DECISION MATRIX ════════════ -->
|
||||
<!-- ════════════════════════════════════════════ -->
|
||||
<div class="decision">
|
||||
<h2>Decision matrix</h2>
|
||||
<p class="lead">
|
||||
All three concepts meet the core requirement (shared metadata + per-file title + one save).
|
||||
Graded against what matters for the senior audience, the responsive constraint, and the #294
|
||||
architectural commitment.
|
||||
</p>
|
||||
<table class="dm">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Dimension</th>
|
||||
<th>A · Stack</th>
|
||||
<th>B · Split-Panel</th>
|
||||
<th>C · Accordion</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>Reuses #294 layout</td>
|
||||
<td class="score bad">✕</td>
|
||||
<td class="score ok">✓</td>
|
||||
<td class="score bad">✕</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Single-file mode unchanged</td>
|
||||
<td class="score mid">rewrite</td>
|
||||
<td class="score ok">identical</td>
|
||||
<td class="score bad">different</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>PDF visible before save</td>
|
||||
<td class="score bad">no</td>
|
||||
<td class="score ok">always</td>
|
||||
<td class="score mid">one at a time</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Works at 320px</td>
|
||||
<td class="score ok">native</td>
|
||||
<td class="score mid">via tab collapse</td>
|
||||
<td class="score ok">native</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Scales to 20 files</td>
|
||||
<td class="score mid">long scroll</td>
|
||||
<td class="score ok">switcher scrolls</td>
|
||||
<td class="score ok">collapsed list</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>New Svelte components</td>
|
||||
<td class="score bad">3 new</td>
|
||||
<td class="score ok">1 new (switcher)</td>
|
||||
<td class="score bad">4 new</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Familiar pattern</td>
|
||||
<td class="score ok">yes</td>
|
||||
<td class="score ok">yes (post-#294)</td>
|
||||
<td class="score mid">new to app</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<!-- ════════════════════════════════════════════ -->
|
||||
<!-- ══════════ RECOMMENDATION ════════════ -->
|
||||
<!-- ════════════════════════════════════════════ -->
|
||||
<div class="reco">
|
||||
<div class="kicker">Recommendation</div>
|
||||
<h2>Ship Concept B</h2>
|
||||
<p class="why">
|
||||
Concept B treats bulk upload as a <em>polymorphic state</em> of the existing single-document
|
||||
layout rather than a separate screen. A user who drops one file gets exactly the #294 experience.
|
||||
A user who drops five gets the same screen plus a horizontal file-switcher and a two-card split
|
||||
(<em>Nur diese Datei</em> vs. <em>Gilt für alle</em>). Nothing about the single-file flow changes.
|
||||
</p>
|
||||
<ul>
|
||||
<li>Keeps the mental model: "one form, one save" regardless of file count.</li>
|
||||
<li>PDF preview is persistent — you can spot-check each scan before committing.</li>
|
||||
<li>The per-file title is visually promoted with a mint border so it reads as the one thing that differs per file.</li>
|
||||
<li>Reuses DocumentEditLayout: the delta is ~1 new component (<code>FileSwitcherStrip</code>) + two cards in the form.</li>
|
||||
<li>Single-file mode is byte-identical to #294 — no regression risk for existing users.</li>
|
||||
<li>Backend is already ready (<code>POST /api/documents/quick-upload</code> accepts N files in one multipart).</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<!-- ════════════════════════════════════════════ -->
|
||||
<!-- ══════════ IMPL-REF · CONCEPT B ═══════ -->
|
||||
<!-- ════════════════════════════════════════════ -->
|
||||
<div class="impl">
|
||||
<h2>Implementation reference — Concept B</h2>
|
||||
|
||||
<h3>Top bar (when N > 1)</h3>
|
||||
<table class="impl-table">
|
||||
<tr><th>Element</th><th>Tailwind</th><th>Px / value</th><th>Note</th></tr>
|
||||
<tr>
|
||||
<td>Count pill "N werden erstellt"</td>
|
||||
<td><code>bg-accent text-primary rounded-full px-3 py-1 text-sm font-bold</code></td>
|
||||
<td class="px">14px · 700</td>
|
||||
<td class="note">brand-mint on brand-navy</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>"Alle verwerfen" link</td>
|
||||
<td><code>ml-auto text-sm font-bold text-red-600 hover:text-red-800 focus-visible:outline-2 focus-visible:outline-red-600</code></td>
|
||||
<td class="px">14px / 44px target</td>
|
||||
<td class="note">confirm dialog before wiping</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<h3 class="ix">FileSwitcherStrip (new component)</h3>
|
||||
<table class="impl-table">
|
||||
<tr><th>Element</th><th>Tailwind</th><th>Px / value</th><th>Note</th></tr>
|
||||
<tr>
|
||||
<td>Strip container</td>
|
||||
<td><code>flex items-center gap-1 bg-ink/95 px-2 py-2 border-t border-ink/80</code></td>
|
||||
<td class="px">height 48px</td>
|
||||
<td class="note">under the PDF toolbar, on the dark panel</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Arrow buttons</td>
|
||||
<td><code>h-10 w-10 rounded-sm bg-white/8 text-surface/60 hover:bg-white/15 focus-visible:outline-2</code></td>
|
||||
<td class="px">40×40 (44 w/padding)</td>
|
||||
<td class="note"><code>aria-label="Vorherige Datei"</code> / "Nächste Datei"</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>File chip (inactive)</td>
|
||||
<td><code>px-3 py-2 rounded-sm bg-white/6 text-sm font-bold text-surface/55 whitespace-nowrap hover:bg-white/12</code></td>
|
||||
<td class="px">14px / h 40px</td>
|
||||
<td class="note">horizontal scroll container uses <code>snap-x snap-mandatory</code></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>File chip (active)</td>
|
||||
<td><code>... bg-accent text-primary</code> + <code>aria-current="true"</code></td>
|
||||
<td class="px">14px / h 40px</td>
|
||||
<td class="note">mint pill, primary text — 7.2:1 contrast passes AAA</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Chip number prefix</td>
|
||||
<td><code>bg-primary/25 rounded-sm px-1 mr-2 text-xs font-extrabold</code></td>
|
||||
<td class="px">12px / 800</td>
|
||||
<td class="note">"1", "2", … — for quick scanning</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<h3 class="ix">"Nur diese Datei" card (per-file scope)</h3>
|
||||
<table class="impl-table">
|
||||
<tr><th>Element</th><th>Tailwind</th><th>Px / value</th><th>Note</th></tr>
|
||||
<tr>
|
||||
<td>Card container</td>
|
||||
<td><code>bg-accent/20 border border-accent rounded-sm p-4 mb-4</code></td>
|
||||
<td class="px">padding 16px</td>
|
||||
<td class="note">mint tint signals "different per file"</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Scope badge</td>
|
||||
<td><code>bg-primary/90 text-accent rounded-sm px-2 py-1 text-xs font-extrabold uppercase tracking-wide</code></td>
|
||||
<td class="px">12px · 800</td>
|
||||
<td class="note">Paraglide key: <code>bulk_only_this_file</code></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Title input</td>
|
||||
<td><code>h-11 text-base font-semibold text-ink bg-white border border-line rounded-sm px-3 focus-visible:border-ink focus-visible:ring-2 focus-visible:ring-ink/20</code></td>
|
||||
<td class="px">44px min-height · 16px</td>
|
||||
<td class="note">pre-filled from filename <em>without extension</em></td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<h3 class="ix">"Gilt für alle" card (shared scope)</h3>
|
||||
<table class="impl-table">
|
||||
<tr><th>Element</th><th>Tailwind</th><th>Px / value</th><th>Note</th></tr>
|
||||
<tr>
|
||||
<td>Card container</td>
|
||||
<td><code>bg-surface border border-line rounded-sm p-4 mb-3</code></td>
|
||||
<td class="px">padding 16px</td>
|
||||
<td class="note">neutral (no accent tint)</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Scope badge</td>
|
||||
<td><code>bg-accent text-primary rounded-sm px-2 py-1 text-xs font-extrabold uppercase tracking-wide</code></td>
|
||||
<td class="px">12px · 800</td>
|
||||
<td class="note">Paraglide: <code>bulk_shared_count</code> ("Gilt für alle {count}")</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Field grid</td>
|
||||
<td><code>grid grid-cols-1 md:grid-cols-2 gap-3</code></td>
|
||||
<td class="px">12px gap</td>
|
||||
<td class="note">single column at 320px, two at ≥ 768px</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<h3 class="ix">Save bar</h3>
|
||||
<table class="impl-table">
|
||||
<tr><th>Element</th><th>Tailwind</th><th>Px / value</th><th>Note</th></tr>
|
||||
<tr>
|
||||
<td>Primary save button</td>
|
||||
<td><code>h-11 px-5 bg-green-700 hover:bg-green-800 text-white font-extrabold rounded-sm text-sm focus-visible:ring-2 focus-visible:ring-green-900</code></td>
|
||||
<td class="px">44px min · 14px</td>
|
||||
<td class="note">label <code>{count} speichern →</code> (plural-aware Paraglide)</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>"Als Platzhalter" (outline)</td>
|
||||
<td><code>h-11 px-4 border border-line bg-white text-ink-3 font-bold rounded-sm text-sm</code></td>
|
||||
<td class="px">44px</td>
|
||||
<td class="note">posts with <code>metadataComplete=false</code> for all</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<h3 class="ix">Responsive collapse (≤ 767px)</h3>
|
||||
<table class="impl-table">
|
||||
<tr><th>Element</th><th>Tailwind</th><th>Px / value</th><th>Note</th></tr>
|
||||
<tr>
|
||||
<td>Panel mode switch</td>
|
||||
<td>reuses DocumentEditLayout's existing tab collapse — "Vorschau / Angaben" tabs</td>
|
||||
<td class="px">tab height 48px</td>
|
||||
<td class="note">already shipped with #294</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>File switcher stays on "Vorschau" tab</td>
|
||||
<td><code>snap-x snap-mandatory overflow-x-auto</code></td>
|
||||
<td class="px">h 44px</td>
|
||||
<td class="note">horizontal swipe; arrow buttons removed at mobile</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<div class="notes">
|
||||
<div class="nh">Interactions + behaviour</div>
|
||||
<ul>
|
||||
<li><strong>Drop a file after the initial batch</strong>: append to the end of the list and switch focus to the newly added file. No modal, no confirmation.</li>
|
||||
<li><strong>Remove a file</strong> (X on the chip) → confirm only if it's the currently-previewed one; otherwise silent. When count drops to 1 the switcher strip animates away (200ms); when it drops to 0 we redirect back to the drop-zone state.</li>
|
||||
<li><strong>Title auto-fill</strong>: <code>filename.replace(/\.(pdf|jpe?g|png|tiff?)$/i, '').replace(/[_-]+/g, ' ').trim()</code>. Marks the title input as <code>suggested</code> until the user edits it (mint left border, same treatment as #294's filename-derived fields).</li>
|
||||
<li><strong>Title field visibility</strong>: always rendered (never collapsed) even in single-file mode, so there's zero layout jump when N changes from 1 to 2.</li>
|
||||
<li><strong>Save flow</strong>: single POST to <code>/api/documents/quick-upload</code> with N files + JSON metadata object containing shared fields + titles array. Backend maps title[i] to files[i] by index. Response splits into <code>created[] / updated[] / errors[]</code> — show a summary toast + inline error markers per file for the <code>errors[]</code> list.</li>
|
||||
<li><strong>Keyboard navigation</strong>: <kbd>←</kbd>/<kbd>→</kbd> on the switcher strip moves file focus; <kbd>Tab</kbd> cycles through form fields inside whichever card is active; <kbd>Esc</kbd> on the discard button opens the confirm dialog.</li>
|
||||
<li><strong>Focus management on file switch</strong>: when the user clicks a different file, the title input of the new file receives focus automatically (so the main editable field is always reachable).</li>
|
||||
<li><strong>Progress indicator during save</strong>: replace the save button with a determinate progress bar showing "Lade Datei 3 von 5…" for batches that take > 500ms.</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="notes" style="margin-top:14px;border-left-color:#C0392B">
|
||||
<div class="nh" style="color:#C0392B">Edge cases + a11y</div>
|
||||
<ul>
|
||||
<li><strong>Duplicate filenames in the batch</strong>: accept, but show a warning icon next to both — backend will create both with unique IDs.</li>
|
||||
<li><strong>Mixed content types</strong>: PDF + image in the same batch is fine; the preview panel renders whichever the active file is (DocumentEditLayout already handles both).</li>
|
||||
<li><strong>Large batches (> 20 files)</strong>: the switcher strip becomes scrollable; consider a "Jump to file…" combobox at > 30 files (out of scope for v1).</li>
|
||||
<li><strong>Upload failure per file</strong>: mark the chip red (<code>bg-red-600/20 text-red-800 border border-red-600</code>), show inline error in the chip's tooltip, don't block the rest of the batch from retrying.</li>
|
||||
<li><strong>Screen reader announcement</strong>: when file count changes, fire a polite live region announce — "5 Dateien bereit zum Speichern" via <code>role="status" aria-live="polite"</code>.</li>
|
||||
<li><strong>Colour-alone warning</strong>: active file chip uses color + <code>aria-current="true"</code> + the numbered prefix pill ("1", "2", …) so it's distinguishable for color-blind users without relying on color alone.</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user