diff --git a/backend/src/main/java/org/raddatz/familienarchiv/audit/ActivityFeedRow.java b/backend/src/main/java/org/raddatz/familienarchiv/audit/ActivityFeedRow.java
index 384b311e..59cf64c9 100644
--- a/backend/src/main/java/org/raddatz/familienarchiv/audit/ActivityFeedRow.java
+++ b/backend/src/main/java/org/raddatz/familienarchiv/audit/ActivityFeedRow.java
@@ -12,4 +12,6 @@ public interface ActivityFeedRow {
UUID getDocumentId();
Instant getHappenedAt();
boolean isYouMentioned();
+ int getCount();
+ Instant getHappenedAtUntil();
}
diff --git a/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryRepository.java b/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryRepository.java
index 2d725c69..59e930d3 100644
--- a/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryRepository.java
+++ b/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryRepository.java
@@ -23,34 +23,80 @@ public interface AuditLogQueryRepository extends JpaRepository {
Optional findMostRecentDocumentIdByActor(@Param("userId") UUID userId);
@Query(value = """
- SELECT * FROM (
- SELECT DISTINCT ON (a.actor_id, a.document_id, a.kind, date_trunc('hour', a.happened_at))
- a.kind AS kind,
- a.actor_id AS actorId,
- CASE
- WHEN u.first_name IS NOT NULL AND u.last_name IS NOT NULL
- THEN UPPER(LEFT(u.first_name, 1)) || UPPER(LEFT(u.last_name, 1))
- WHEN u.first_name IS NOT NULL THEN UPPER(LEFT(u.first_name, 1))
- WHEN u.last_name IS NOT NULL THEN UPPER(LEFT(u.last_name, 1))
- ELSE '?'
- END AS actorInitials,
- COALESCE(u.color, '') AS actorColor,
- CONCAT_WS(' ', u.first_name, u.last_name) AS actorName,
- a.document_id AS documentId,
- a.happened_at AS happened_at,
- (a.kind = 'MENTION_CREATED'
- AND a.payload->>'mentionedUserId' = :currentUserId) AS youMentioned
+ WITH events AS (
+ SELECT
+ a.kind,
+ a.actor_id,
+ a.document_id,
+ a.happened_at,
+ a.payload,
+ LAG(a.happened_at) OVER (
+ PARTITION BY a.actor_id, a.document_id, a.kind
+ ORDER BY a.happened_at
+ ) AS prev_happened_at
FROM audit_log a
- LEFT JOIN users u ON u.id = a.actor_id
- WHERE a.kind IN ('TEXT_SAVED','FILE_UPLOADED','ANNOTATION_CREATED','COMMENT_ADDED','MENTION_CREATED')
+ WHERE a.kind IN ('TEXT_SAVED','FILE_UPLOADED','ANNOTATION_CREATED',
+ 'BLOCK_REVIEWED','COMMENT_ADDED','MENTION_CREATED')
AND a.document_id IS NOT NULL
- ORDER BY a.actor_id, a.document_id, a.kind,
- date_trunc('hour', a.happened_at), a.happened_at DESC
- ) deduped
- ORDER BY happened_at DESC
+ ),
+ sessions_marked AS (
+ SELECT
+ kind, actor_id, document_id, happened_at, payload,
+ CASE
+ WHEN kind IN ('COMMENT_ADDED','MENTION_CREATED') THEN 1
+ WHEN prev_happened_at IS NULL THEN 1
+ WHEN EXTRACT(EPOCH FROM (happened_at - prev_happened_at)) > 7200 THEN 1
+ ELSE 0
+ END AS is_new_session
+ FROM events
+ ),
+ sessions AS (
+ SELECT
+ kind, actor_id, document_id, happened_at, payload,
+ SUM(is_new_session) OVER (
+ PARTITION BY actor_id, document_id, kind
+ ORDER BY happened_at
+ ROWS UNBOUNDED PRECEDING
+ ) AS session_id
+ FROM sessions_marked
+ ),
+ aggregated AS (
+ SELECT
+ s.kind,
+ s.actor_id,
+ s.document_id,
+ s.session_id,
+ MIN(s.happened_at) AS happened_at,
+ CASE WHEN COUNT(*) > 1 THEN MAX(s.happened_at) ELSE NULL END AS happened_at_until,
+ COUNT(*)::int AS count,
+ BOOL_OR(s.kind = 'MENTION_CREATED'
+ AND s.payload->>'mentionedUserId' = :currentUserId) AS you_mentioned
+ FROM sessions s
+ GROUP BY s.kind, s.actor_id, s.document_id, s.session_id
+ )
+ SELECT
+ ag.kind AS kind,
+ ag.actor_id AS actorId,
+ CASE
+ WHEN u.first_name IS NOT NULL AND u.last_name IS NOT NULL
+ THEN UPPER(LEFT(u.first_name, 1)) || UPPER(LEFT(u.last_name, 1))
+ WHEN u.first_name IS NOT NULL THEN UPPER(LEFT(u.first_name, 1))
+ WHEN u.last_name IS NOT NULL THEN UPPER(LEFT(u.last_name, 1))
+ ELSE '?'
+ END AS actorInitials,
+ COALESCE(u.color, '') AS actorColor,
+ CONCAT_WS(' ', u.first_name, u.last_name) AS actorName,
+ ag.document_id AS documentId,
+ ag.happened_at AS happened_at,
+ ag.you_mentioned AS youMentioned,
+ ag.count AS count,
+ ag.happened_at_until AS happenedAtUntil
+ FROM aggregated ag
+ LEFT JOIN users u ON u.id = ag.actor_id
+ ORDER BY ag.happened_at DESC
LIMIT :limit
""", nativeQuery = true)
- List<ActivityFeedRow> findDedupedActivityFeed(
+ List<ActivityFeedRow> findRolledUpActivityFeed(
@Param("currentUserId") String currentUserId,
@Param("limit") int limit);
diff --git a/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryService.java b/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryService.java
index c007f4bb..930ef1a4 100644
--- a/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryService.java
+++ b/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryService.java
@@ -17,7 +17,7 @@ public class AuditLogQueryService {
}
public List<ActivityFeedRow> findActivityFeed(UUID currentUserId, int limit) {
- return queryRepository.findDedupedActivityFeed(currentUserId.toString(), limit);
+ return queryRepository.findRolledUpActivityFeed(currentUserId.toString(), limit);
}
public PulseStatsRow getPulseStats(OffsetDateTime weekStart, UUID userId) {
diff --git a/backend/src/main/java/org/raddatz/familienarchiv/dashboard/ActivityFeedItemDTO.java b/backend/src/main/java/org/raddatz/familienarchiv/dashboard/ActivityFeedItemDTO.java
index 0fcdd312..444c838d 100644
--- a/backend/src/main/java/org/raddatz/familienarchiv/dashboard/ActivityFeedItemDTO.java
+++ b/backend/src/main/java/org/raddatz/familienarchiv/dashboard/ActivityFeedItemDTO.java
@@ -14,5 +14,7 @@ public record ActivityFeedItemDTO(
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID documentId,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String documentTitle,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) OffsetDateTime happenedAt,
- @Schema(requiredMode = Schema.RequiredMode.REQUIRED) boolean youMentioned
+ @Schema(requiredMode = Schema.RequiredMode.REQUIRED) boolean youMentioned,
+ @Schema(requiredMode = Schema.RequiredMode.REQUIRED) int count,
+ @Nullable OffsetDateTime happenedAtUntil
) {}
diff --git a/backend/src/main/java/org/raddatz/familienarchiv/dashboard/DashboardController.java b/backend/src/main/java/org/raddatz/familienarchiv/dashboard/DashboardController.java
index 1869c2f4..b7b34b7e 100644
--- a/backend/src/main/java/org/raddatz/familienarchiv/dashboard/DashboardController.java
+++ b/backend/src/main/java/org/raddatz/familienarchiv/dashboard/DashboardController.java
@@ -37,6 +37,6 @@ public class DashboardController {
Authentication authentication,
@RequestParam(defaultValue = "7") int limit) {
UUID userId = SecurityUtils.requireUserId(authentication, userService);
- return dashboardService.getActivity(userId, Math.min(limit, 20));
+ return dashboardService.getActivity(userId, Math.min(limit, 40));
}
}
diff --git a/backend/src/main/java/org/raddatz/familienarchiv/dashboard/DashboardService.java b/backend/src/main/java/org/raddatz/familienarchiv/dashboard/DashboardService.java
index d749a164..b7be271f 100644
--- a/backend/src/main/java/org/raddatz/familienarchiv/dashboard/DashboardService.java
+++ b/backend/src/main/java/org/raddatz/familienarchiv/dashboard/DashboardService.java
@@ -130,13 +130,18 @@ public class DashboardService {
? new ActivityActorDTO(row.getActorInitials(), row.getActorColor(), row.getActorName())
: null;
String docTitle = titleCache.getOrDefault(row.getDocumentId(), "");
+ OffsetDateTime happenedAtUntil = row.getHappenedAtUntil() != null
+ ? row.getHappenedAtUntil().atOffset(ZoneOffset.UTC)
+ : null;
return new ActivityFeedItemDTO(
org.raddatz.familienarchiv.audit.AuditKind.valueOf(row.getKind()),
actor,
row.getDocumentId(),
docTitle,
row.getHappenedAt().atOffset(ZoneOffset.UTC),
- row.isYouMentioned()
+ row.isYouMentioned(),
+ row.getCount(),
+ happenedAtUntil
);
}).toList();
}
diff --git a/backend/src/main/resources/db/migration/V49__add_audit_log_rollup_index.sql b/backend/src/main/resources/db/migration/V49__add_audit_log_rollup_index.sql
new file mode 100644
index 00000000..7327df6a
--- /dev/null
+++ b/backend/src/main/resources/db/migration/V49__add_audit_log_rollup_index.sql
@@ -0,0 +1,7 @@
+-- Partial covering index for the session-style activity feed rollup (#285).
+-- Predicate matches the kind filter of findRolledUpActivityFeed; the query's extra
+-- "document_id IS NOT NULL" still implies it, so the planner can use this index.
+CREATE INDEX idx_audit_log_rollup
+ ON audit_log (actor_id, document_id, kind, happened_at DESC)
+ WHERE kind IN ('TEXT_SAVED','FILE_UPLOADED','ANNOTATION_CREATED',
+ 'BLOCK_REVIEWED','COMMENT_ADDED','MENTION_CREATED');
diff --git a/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryIntegrationTest.java b/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryIntegrationTest.java
index 5875baf5..e73553c5 100644
--- a/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryIntegrationTest.java
+++ b/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryIntegrationTest.java
@@ -49,13 +49,15 @@ class AuditLogQueryRepositoryIntegrationTest {
"INSERT INTO documents (id, title, original_filename, status) VALUES ('bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', 'Test Doc', 'test.pdf', 'PLACEHOLDER')",
"INSERT INTO audit_log (kind, actor_id, document_id, payload) VALUES ('ANNOTATION_CREATED', 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', '{\"pageNumber\":1}')"
})
- void findDedupedActivityFeed_returnsAnnotationEntry() {
- List<ActivityFeedRow> rows = auditLogQueryRepository.findDedupedActivityFeed(USER_ID.toString(), 10);
+ void findRolledUpActivityFeed_returnsAnnotationEntry() {
+ List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 10);
assertThat(rows).hasSize(1);
assertThat(rows.get(0).getKind()).isEqualTo("ANNOTATION_CREATED");
assertThat(rows.get(0).getDocumentId()).isEqualTo(DOC_ID);
assertThat(rows.get(0).getHappenedAt()).isNotNull();
+ assertThat(rows.get(0).getCount()).isEqualTo(1);
+ assertThat(rows.get(0).getHappenedAtUntil()).isNull();
}
@Test
diff --git a/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryRolledUpTest.java b/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryRolledUpTest.java
new file mode 100644
index 00000000..1f38a8a0
--- /dev/null
+++ b/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryRolledUpTest.java
@@ -0,0 +1,176 @@
+package org.raddatz.familienarchiv.dashboard;
+
+import org.junit.jupiter.api.Test;
+import org.raddatz.familienarchiv.PostgresContainerConfig;
+import org.raddatz.familienarchiv.audit.ActivityFeedRow;
+import org.raddatz.familienarchiv.audit.AuditLogQueryRepository;
+import org.raddatz.familienarchiv.config.FlywayConfig;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
+import org.springframework.boot.test.autoconfigure.jdbc.AutoConfigureTestDatabase;
+import org.springframework.context.annotation.Import;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
+import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
+import org.springframework.transaction.annotation.Transactional;
+
+import java.time.Instant;
+import java.time.OffsetDateTime;
+import java.util.List;
+import java.util.UUID;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+@DataJpaTest
+@AutoConfigureTestDatabase(replace = AutoConfigureTestDatabase.Replace.NONE)
+@Import({PostgresContainerConfig.class, FlywayConfig.class})
+@Transactional
+class AuditLogQueryRepositoryRolledUpTest {
+
+ static final UUID USER_ID = UUID.fromString("dddddddd-dddd-dddd-dddd-dddddddddddd");
+ static final UUID DOC_ID = UUID.fromString("eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee");
+ static final UUID OTHER_DOC_ID = UUID.fromString("ffffffff-ffff-ffff-ffff-ffffffffffff");
+
+ @Autowired AuditLogQueryRepository auditLogQueryRepository;
+ @Autowired JdbcTemplate jdbcTemplate;
+
+ private NamedParameterJdbcTemplate named() {
+ return new NamedParameterJdbcTemplate(jdbcTemplate);
+ }
+
+ private void insertUserAndDocs() {
+ jdbcTemplate.update(
+ "INSERT INTO users (id, enabled, email, password) VALUES (?, true, ?, 'pw')",
+ USER_ID, "rollup-" + USER_ID + "@test.com");
+ jdbcTemplate.update(
+ "INSERT INTO documents (id, title, original_filename, status) VALUES (?, 'Brief A', 'a.pdf', 'PLACEHOLDER')",
+ DOC_ID);
+ jdbcTemplate.update(
+ "INSERT INTO documents (id, title, original_filename, status) VALUES (?, 'Brief B', 'b.pdf', 'PLACEHOLDER')",
+ OTHER_DOC_ID);
+ }
+
+ private void insertAuditEvent(UUID actorId, UUID docId, String kind, Instant happenedAt) {
+ MapSqlParameterSource params = new MapSqlParameterSource()
+ .addValue("kind", kind)
+ .addValue("actor", actorId)
+ .addValue("doc", docId)
+ .addValue("t", OffsetDateTime.ofInstant(happenedAt, java.time.ZoneOffset.UTC));
+ named().update(
+ "INSERT INTO audit_log (kind, actor_id, document_id, happened_at) "
+ + "VALUES (:kind, :actor, :doc, :t)",
+ params);
+ }
+
+ @Test
+ void rolledUpFeed_combines_same_actor_same_doc_within_2h() {
+ insertUserAndDocs();
+ Instant base = Instant.parse("2026-04-20T09:00:00Z");
+ for (int i = 0; i < 20; i++) {
+ insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", base.plusSeconds(i * 480L));
+ }
+
+ List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40);
+
+ assertThat(rows).hasSize(1);
+ ActivityFeedRow row = rows.get(0);
+ assertThat(row.getKind()).isEqualTo("TEXT_SAVED");
+ assertThat(row.getDocumentId()).isEqualTo(DOC_ID);
+ assertThat(row.getCount()).isEqualTo(20);
+ assertThat(row.getHappenedAt()).isEqualTo(base);
+ assertThat(row.getHappenedAtUntil()).isEqualTo(base.plusSeconds(19 * 480L));
+ }
+
+ @Test
+ void rolledUpFeed_splits_at_2h_boundary() {
+ insertUserAndDocs();
+ Instant sessionOneStart = Instant.parse("2026-04-20T08:00:00Z");
+ Instant sessionOneLast = sessionOneStart.plusSeconds(600);
+ insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", sessionOneStart);
+ insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", sessionOneLast);
+ Instant sessionTwoStart = sessionOneLast.plusSeconds(2L * 60L * 60L + 60L);
+ insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", sessionTwoStart);
+ insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", sessionTwoStart.plusSeconds(300));
+
+ List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40);
+
+ assertThat(rows).hasSize(2);
+ assertThat(rows.get(0).getCount()).isEqualTo(2);
+ assertThat(rows.get(0).getHappenedAt()).isEqualTo(sessionTwoStart);
+ assertThat(rows.get(1).getCount()).isEqualTo(2);
+ assertThat(rows.get(1).getHappenedAt()).isEqualTo(sessionOneStart);
+ }
+
+ @Test
+ void rolledUpFeed_has_no_hard_cap_on_long_session() {
+ insertUserAndDocs();
+ Instant base = Instant.parse("2026-04-20T06:00:00Z");
+ for (int i = 0; i < 30; i++) {
+ insertAuditEvent(USER_ID, DOC_ID, "ANNOTATION_CREATED", base.plusSeconds(i * 60L * 30L));
+ }
+
+ List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40);
+
+ assertThat(rows).hasSize(1);
+ assertThat(rows.get(0).getCount()).isEqualTo(30);
+ assertThat(rows.get(0).getHappenedAt()).isEqualTo(base);
+ assertThat(rows.get(0).getHappenedAtUntil()).isEqualTo(base.plusSeconds(29 * 60L * 30L));
+ }
+
+ @Test
+ void rolledUpFeed_never_rolls_up_COMMENT_ADDED_or_MENTION_CREATED() {
+ insertUserAndDocs();
+ Instant base = Instant.parse("2026-04-20T10:00:00Z");
+ insertAuditEvent(USER_ID, DOC_ID, "COMMENT_ADDED", base);
+ insertAuditEvent(USER_ID, DOC_ID, "COMMENT_ADDED", base.plusSeconds(60));
+ insertAuditEvent(USER_ID, DOC_ID, "COMMENT_ADDED", base.plusSeconds(120));
+
+ List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40);
+
+ assertThat(rows).hasSize(3);
+ assertThat(rows).allSatisfy(r -> {
+ assertThat(r.getKind()).isEqualTo("COMMENT_ADDED");
+ assertThat(r.getCount()).isEqualTo(1);
+ assertThat(r.getHappenedAtUntil()).isNull();
+ });
+ }
+
+ @Test
+ void rolledUpFeed_excludes_non_eligible_kinds() {
+ insertUserAndDocs();
+ Instant base = Instant.parse("2026-04-20T12:00:00Z");
+ insertAuditEvent(USER_ID, DOC_ID, "STATUS_CHANGED", base);
+ insertAuditEvent(USER_ID, DOC_ID, "METADATA_UPDATED", base.plusSeconds(60));
+ insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", base.plusSeconds(120));
+
+ List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40);
+
+ assertThat(rows).hasSize(1);
+ assertThat(rows.get(0).getKind()).isEqualTo("TEXT_SAVED");
+ }
+
+ @Test
+ void rolledUpFeed_exposes_count_and_happenedAtUntil_on_singletons_and_rollups() {
+ insertUserAndDocs();
+ Instant rollupStart = Instant.parse("2026-04-20T11:00:00Z");
+ insertAuditEvent(USER_ID, DOC_ID, "FILE_UPLOADED", rollupStart);
+ insertAuditEvent(USER_ID, DOC_ID, "FILE_UPLOADED", rollupStart.plusSeconds(300));
+ insertAuditEvent(USER_ID, OTHER_DOC_ID, "FILE_UPLOADED", rollupStart.plusSeconds(900));
+
+ List<ActivityFeedRow> rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40);
+
+ assertThat(rows).hasSize(2);
+ assertThat(rows).anySatisfy(r -> {
+ assertThat(r.getDocumentId()).isEqualTo(DOC_ID);
+ assertThat(r.getCount()).isEqualTo(2);
+ assertThat(r.getHappenedAt()).isEqualTo(rollupStart);
+ assertThat(r.getHappenedAtUntil()).isEqualTo(rollupStart.plusSeconds(300));
+ });
+ assertThat(rows).anySatisfy(r -> {
+ assertThat(r.getDocumentId()).isEqualTo(OTHER_DOC_ID);
+ assertThat(r.getCount()).isEqualTo(1);
+ assertThat(r.getHappenedAt()).isEqualTo(rollupStart.plusSeconds(900));
+ assertThat(r.getHappenedAtUntil()).isNull();
+ });
+ }
+}
diff --git a/backend/src/test/java/org/raddatz/familienarchiv/dashboard/DashboardControllerTest.java b/backend/src/test/java/org/raddatz/familienarchiv/dashboard/DashboardControllerTest.java
index 0f1e4922..1bfc37ce 100644
--- a/backend/src/test/java/org/raddatz/familienarchiv/dashboard/DashboardControllerTest.java
+++ b/backend/src/test/java/org/raddatz/familienarchiv/dashboard/DashboardControllerTest.java
@@ -140,4 +140,18 @@ class DashboardControllerTest {
.andExpect(status().isOk())
.andExpect(jsonPath("$").isArray());
}
+
+ @Test
+ @WithMockUser(authorities = "READ_ALL")
+ void activity_clamps_limit_to_40() throws Exception {
+ UUID userId = UUID.randomUUID();
+ when(userService.findByEmail(any())).thenReturn(
+ AppUser.builder().id(userId).email("u@test.com").password("pw").build());
+ when(dashboardService.getActivity(any(UUID.class), anyInt())).thenReturn(List.of());
+
+ mockMvc.perform(get("/api/dashboard/activity").param("limit", "9999"))
+ .andExpect(status().isOk());
+
+ org.mockito.Mockito.verify(dashboardService).getActivity(any(UUID.class), org.mockito.ArgumentMatchers.eq(40));
+ }
}
diff --git a/backend/src/test/java/org/raddatz/familienarchiv/dashboard/DashboardServiceTest.java b/backend/src/test/java/org/raddatz/familienarchiv/dashboard/DashboardServiceTest.java
index c62fdb8c..19692ad4 100644
--- a/backend/src/test/java/org/raddatz/familienarchiv/dashboard/DashboardServiceTest.java
+++ b/backend/src/test/java/org/raddatz/familienarchiv/dashboard/DashboardServiceTest.java
@@ -104,6 +104,8 @@ class DashboardServiceTest {
public UUID getDocumentId() { return docId; }
public Instant getHappenedAt() { return Instant.now(); }
public boolean isYouMentioned() { return false; }
+ public int getCount() { return 1; }
+ public Instant getHappenedAtUntil() { return null; }
};
}
}
diff --git a/docs/adr/003-chronik-unified-activity-feed.md b/docs/adr/003-chronik-unified-activity-feed.md
new file mode 100644
index 00000000..1c6e25de
--- /dev/null
+++ b/docs/adr/003-chronik-unified-activity-feed.md
@@ -0,0 +1,59 @@
+# ADR-003: Session-Rollup Unified Activity Feed on `/chronik`
+
+## Status
+
+Accepted
+
+## Context
+
+The app had two disconnected ways to see what was happening in the archive:
+
+1. `/notifications` — personal mentions/replies only, delivered via the `notifications` table and a Bell dropdown.
+2. Dashboard activity feed — ambient events (uploads, transcription, annotations, comments, mentions) via `/api/dashboard/activity`, which deduplicated using `DISTINCT ON (actor_id, document_id, kind, date_trunc('hour', happened_at))`.
+
+Two separate lists made for a poor mental model (personal vs. ambient feel the same to the user), the `/notifications` page wasted horizontal space, the dashboard's "Alle anzeigen" pointed to `/documents` (dead-end), and the hour-trunc dedupe produced ugly splits on natural sessions — saving 20 transcription blocks at 08:58, 08:59, 09:01 yielded two rows.
+
+We needed one page that merges both streams, keeps personal mentions visually loud, and aggregates ambient noise coherently.
+
+## Decision
+
+**One page `/chronik` backed by two endpoints.** The SvelteKit `+page.server.ts` composes data from `/api/dashboard/activity` (for the ambient timeline) and `/api/notifications` (for the "Für dich" box). No new `/api/chronik` orchestrator — the frontend load function is the composition seam.
+
+**Session-style rollup replaces hour-trunc dedupe everywhere.** `AuditLogQueryRepository.findDedupedActivityFeed` is renamed to `findRolledUpActivityFeed` and rewritten using a `LAG()`-based session algorithm:
+
+```
+LAG(happened_at) OVER (PARTITION BY actor_id, document_id, kind ORDER BY happened_at)
+ → is_new_session = gap > 7200s (or first event in partition, or kind ∈ {COMMENT_ADDED, MENTION_CREATED})
+ → SUM(is_new_session) OVER (... ROWS UNBOUNDED PRECEDING) = session_id
+ → GROUP BY (actor_id, document_id, kind, session_id) → MIN(happened_at), MAX(...), COUNT(*)
+```
+
+Consecutive events gapped ≤ 120 min on the same `(actor, document, kind)` chain into one row with `count` and `happenedAtUntil` fields. `COMMENT_ADDED` and `MENTION_CREATED` always start a new session — these kinds never roll up. No hard cap on total session span (a 4-hour transcription sitting is one row). The hour-trunc dedupe SQL is **deleted**, not kept alongside — one aggregation strategy per query.
+
+**URL is universal German `/chronik` across all locales**, matching the existing convention (`/dokumente`, `/personen`, `/briefwechsel`). Content is translated via Paraglide; the URL is a stable German identifier, not a translatable route.
+
+**DTO extended, not replaced.** `ActivityFeedItemDTO` gains `count: int` (required, `1` for singletons) and `happenedAtUntil: OffsetDateTime?` (null for singletons, end-of-session for rollups). One DTO shape serves both the Chronik timeline and the dashboard side-rail.
+
+**`/notifications` route is deleted outright.** The app is pre-production — no 301 redirect, no zombie page.
+
+## Alternatives Considered
+
+| Alternative | Why rejected |
+|---|---|
+| Fixed 2-hour wall-clock buckets (`date_trunc('hour', happened_at / 2)`) | Splits natural sessions at bucket boundaries (e.g. events at 13:58 / 13:59 / 14:01 land in two rollup rows) |
+| Keep `DISTINCT ON hour-trunc` alongside new rollup query | Two aggregation strategies = zombie logic; dashboard and Chronik would drift |
+| New `/api/chronik` endpoint that merges both streams | Couples two domains (notifications + audit) at the API layer; composition belongs in `+page.server.ts` |
+| Localized URL slugs (`/chronik` / `/chronicle` / `/crónica`) | Breaks the project's existing German-URL convention and adds Paraglide routing overhead for zero UX value |
+| Per-locale rollup in the SQL (e.g. align to local-day boundaries) | Timezone-aware SQL is brittle; rollup is a time-gap concept, not a calendar-day concept |
+
+## Consequences
+
+**Easier:**
+- One hot path — `/api/dashboard/activity` is backed by a single partial covering index (`V49__add_audit_log_rollup_index.sql`) that matches the rollup query's WHERE clause exactly.
+- Dashboard side-rail gets rollup for free — 20 block-saves appear as one "Papa transkribierte 20 Blöcke" row with a time range, not 20 dedup'd hour buckets.
+- Component reuse — `ChronikRow.svelte` renders both singleton and rollup variants via a `$derived` discriminator; `DashboardActivityFeed.svelte` consumes the same DTO shape.
+
+**Harder:**
+- The session SQL is ~15 lines longer than `DISTINCT ON`. That's the price for not splitting natural sessions at fixed boundaries — worth it on day one.
+- Historical `/api/dashboard/activity` consumers now see `count` and `happenedAtUntil`. No breaking change — `count` defaults to `1`, `happenedAtUntil` is nullable — but pre-existing tests needed updating.
+- Rollup is load-bearing for the UX — if the index is missing or the query regresses, the page either runs slow or returns duplicate rows. Covered by the rolledUp integration tests and the partial covering index; worth a follow-up Grafana panel on `/api/dashboard/activity` p95 latency.
diff --git a/frontend/e2e/accessibility.spec.ts b/frontend/e2e/accessibility.spec.ts
index c5b23944..5722390d 100644
--- a/frontend/e2e/accessibility.spec.ts
+++ b/frontend/e2e/accessibility.spec.ts
@@ -10,6 +10,7 @@ import { test, expect } from '@playwright/test';
const AUTHENTICATED_PAGES = [
{ name: 'home', path: '/' },
{ name: 'persons', path: '/persons' },
+ { name: 'chronik', path: '/chronik' },
{ name: 'admin', path: '/admin' }
];
diff --git a/frontend/messages/de.json b/frontend/messages/de.json
index bcf31c72..253449f6 100644
--- a/frontend/messages/de.json
+++ b/frontend/messages/de.json
@@ -751,5 +751,44 @@
"audit_action_comment_added": "hat kommentiert:",
"audit_action_mention_created": "hat dich erwähnt in",
- "dropzone_release": "Loslassen zum Hochladen"
+ "dropzone_release": "Loslassen zum Hochladen",
+
+ "chronik_page_title": "Chronik",
+ "chronik_for_you_caption": "Für dich",
+ "chronik_for_you_count": "{count} neu",
+ "chronik_mark_read_aria": "Als gelesen markieren",
+ "chronik_mark_all_read": "Alle gelesen",
+ "chronik_inbox_zero_title": "Keine neuen Erwähnungen",
+ "chronik_inbox_zero_link": "Ältere Erwähnungen ansehen →",
+ "chronik_filter_label": "Aktivitäten filtern",
+ "chronik_filter_all": "Alle",
+ "chronik_filter_for_you": "Für dich",
+ "chronik_filter_uploaded": "Hochgeladen",
+ "chronik_filter_transcription": "Transkription",
+ "chronik_filter_comments": "Kommentare",
+ "chronik_day_today": "Heute",
+ "chronik_day_yesterday": "Gestern",
+ "chronik_day_this_week": "Diese Woche",
+ "chronik_day_older": "Älter",
+ "chronik_singleton_text_saved": "{actor} transkribierte einen Block in {doc}",
+ "chronik_rollup_text_saved": "{actor} transkribierte {count} Blöcke in {doc}",
+ "chronik_singleton_uploaded": "{actor} lud {doc} hoch",
+ "chronik_rollup_uploaded": "{actor} lud {count} Dokumente hoch",
+ "chronik_singleton_reviewed": "{actor} überprüfte einen Block in {doc}",
+ "chronik_rollup_reviewed": "{actor} überprüfte {count} Blöcke in {doc}",
+ "chronik_singleton_annotated": "{actor} annotierte {doc}",
+ "chronik_rollup_annotated": "{actor} annotierte {doc} {count}×",
+ "chronik_comment_added": "{actor} kommentierte {doc}",
+ "chronik_mention_created": "{actor} erwähnte dich in {doc}",
+ "chronik_reply_received": "{actor} antwortete dir in {doc}",
+ "chronik_empty_first_run_title": "Noch nichts geschehen",
+ "chronik_empty_first_run_body": "Sobald jemand aus der Familie Dokumente hochlädt oder transkribiert, erscheint hier die Aktivität.",
+ "chronik_empty_filter_title": "Nichts in dieser Ansicht",
+ "chronik_empty_filter_body": "In diesem Filter gibt es keine Einträge.",
+ "chronik_error_title": "Die Chronik konnte nicht geladen werden.",
+ "chronik_error_retry": "Erneut versuchen",
+ "chronik_load_more": "Mehr laden",
+ "chronik_loading": "Lädt …",
+ "chronik_load_more_announcement": "{count} weitere Einträge geladen",
+ "chronik_view_all": "Zur Chronik →"
}
diff --git a/frontend/messages/en.json b/frontend/messages/en.json
index ae06a435..80be4863 100644
--- a/frontend/messages/en.json
+++ b/frontend/messages/en.json
@@ -751,5 +751,44 @@
"audit_action_comment_added": "commented:",
"audit_action_mention_created": "mentioned you in",
- "dropzone_release": "Release to upload"
+ "dropzone_release": "Release to upload",
+
+ "chronik_page_title": "Chronicle",
+ "chronik_for_you_caption": "For you",
+ "chronik_for_you_count": "{count} new",
+ "chronik_mark_read_aria": "Mark as read",
+ "chronik_mark_all_read": "Mark all read",
+ "chronik_inbox_zero_title": "No new mentions",
+ "chronik_inbox_zero_link": "See older mentions →",
+ "chronik_filter_label": "Filter activity",
+ "chronik_filter_all": "All",
+ "chronik_filter_for_you": "For you",
+ "chronik_filter_uploaded": "Uploaded",
+ "chronik_filter_transcription": "Transcription",
+ "chronik_filter_comments": "Comments",
+ "chronik_day_today": "Today",
+ "chronik_day_yesterday": "Yesterday",
+ "chronik_day_this_week": "This week",
+ "chronik_day_older": "Older",
+ "chronik_singleton_text_saved": "{actor} transcribed a block in {doc}",
+ "chronik_rollup_text_saved": "{actor} transcribed {count} blocks in {doc}",
+ "chronik_singleton_uploaded": "{actor} uploaded {doc}",
+ "chronik_rollup_uploaded": "{actor} uploaded {count} documents",
+ "chronik_singleton_reviewed": "{actor} reviewed a block in {doc}",
+ "chronik_rollup_reviewed": "{actor} reviewed {count} blocks in {doc}",
+ "chronik_singleton_annotated": "{actor} annotated {doc}",
+ "chronik_rollup_annotated": "{actor} annotated {doc} {count}×",
+ "chronik_comment_added": "{actor} commented on {doc}",
+ "chronik_mention_created": "{actor} mentioned you in {doc}",
+ "chronik_reply_received": "{actor} replied to you in {doc}",
+ "chronik_empty_first_run_title": "Nothing has happened yet",
+ "chronik_empty_first_run_body": "As soon as someone in the family uploads or transcribes a document, the activity will show up here.",
+ "chronik_empty_filter_title": "Nothing in this view",
+ "chronik_empty_filter_body": "There are no entries for this filter.",
+ "chronik_error_title": "The chronicle could not be loaded.",
+ "chronik_error_retry": "Try again",
+ "chronik_load_more": "Load more",
+ "chronik_loading": "Loading …",
+ "chronik_load_more_announcement": "{count} more entries loaded",
+ "chronik_view_all": "Open chronicle →"
}
diff --git a/frontend/messages/es.json b/frontend/messages/es.json
index 57af4ab6..84e2cfeb 100644
--- a/frontend/messages/es.json
+++ b/frontend/messages/es.json
@@ -751,5 +751,44 @@
"audit_action_comment_added": "comentó:",
"audit_action_mention_created": "te mencionó en",
- "dropzone_release": "Suelta para subir"
+ "dropzone_release": "Suelta para subir",
+
+ "chronik_page_title": "Crónica",
+ "chronik_for_you_caption": "Para ti",
+ "chronik_for_you_count": "{count} nuevas",
+ "chronik_mark_read_aria": "Marcar como leído",
+ "chronik_mark_all_read": "Marcar todas leídas",
+ "chronik_inbox_zero_title": "Sin nuevas menciones",
+ "chronik_inbox_zero_link": "Ver menciones anteriores →",
+ "chronik_filter_label": "Filtrar actividad",
+ "chronik_filter_all": "Todas",
+ "chronik_filter_for_you": "Para ti",
+ "chronik_filter_uploaded": "Subidos",
+ "chronik_filter_transcription": "Transcripción",
+ "chronik_filter_comments": "Comentarios",
+ "chronik_day_today": "Hoy",
+ "chronik_day_yesterday": "Ayer",
+ "chronik_day_this_week": "Esta semana",
+ "chronik_day_older": "Anterior",
+ "chronik_singleton_text_saved": "{actor} transcribió un bloque en {doc}",
+ "chronik_rollup_text_saved": "{actor} transcribió {count} bloques en {doc}",
+ "chronik_singleton_uploaded": "{actor} subió {doc}",
+ "chronik_rollup_uploaded": "{actor} subió {count} documentos",
+ "chronik_singleton_reviewed": "{actor} revisó un bloque en {doc}",
+ "chronik_rollup_reviewed": "{actor} revisó {count} bloques en {doc}",
+ "chronik_singleton_annotated": "{actor} anotó {doc}",
+ "chronik_rollup_annotated": "{actor} anotó {doc} {count}×",
+ "chronik_comment_added": "{actor} comentó en {doc}",
+ "chronik_mention_created": "{actor} te mencionó en {doc}",
+ "chronik_reply_received": "{actor} te respondió en {doc}",
+ "chronik_empty_first_run_title": "Aún no ha pasado nada",
+ "chronik_empty_first_run_body": "En cuanto alguien de la familia suba o transcriba un documento, la actividad aparecerá aquí.",
+ "chronik_empty_filter_title": "Nada en esta vista",
+ "chronik_empty_filter_body": "No hay entradas para este filtro.",
+ "chronik_error_title": "No se pudo cargar la crónica.",
+ "chronik_error_retry": "Reintentar",
+ "chronik_load_more": "Cargar más",
+ "chronik_loading": "Cargando …",
+ "chronik_load_more_announcement": "{count} entradas más cargadas",
+ "chronik_view_all": "Abrir crónica →"
}
diff --git a/frontend/src/lib/components/DashboardActivityFeed.svelte b/frontend/src/lib/components/DashboardActivityFeed.svelte
index d24fdd91..2595fd3e 100644
--- a/frontend/src/lib/components/DashboardActivityFeed.svelte
+++ b/frontend/src/lib/components/DashboardActivityFeed.svelte
@@ -1,6 +1,8 @@
@@ -35,7 +56,7 @@ function formatDate(iso: string): string {
{m.feed_caption()}
{m.feed_show_all()}
@@ -66,6 +87,14 @@ function formatDate(iso: string): string {
{item.documentTitle}
+ {#if item.count > 1}
+
+ {item.count}
+
+ {/if}
{#if item.youMentioned}
{/if}
- {formatDate(item.happenedAt)}
+ {timestamp(item)}
{/each}
diff --git a/frontend/src/lib/components/DashboardActivityFeed.svelte.spec.ts b/frontend/src/lib/components/DashboardActivityFeed.svelte.spec.ts
index b12c682c..51bfd384 100644
--- a/frontend/src/lib/components/DashboardActivityFeed.svelte.spec.ts
+++ b/frontend/src/lib/components/DashboardActivityFeed.svelte.spec.ts
@@ -17,7 +17,8 @@ const baseItem: ActivityFeedItemDTO = {
documentId: 'doc-1',
documentTitle: 'Brief 1920',
happenedAt: '2026-04-19T10:00:00Z',
- youMentioned: false
+ youMentioned: false,
+ count: 1
};
describe('DashboardActivityFeed', () => {
@@ -39,4 +40,30 @@ describe('DashboardActivityFeed', () => {
const section = page.getByText('Kommentare & Aktivität');
await expect.element(section).toBeInTheDocument();
});
+
+ it('renders count badge and en-dash time range for rollup rows (count > 1)', async () => {
+ const rollup: ActivityFeedItemDTO = {
+ ...baseItem,
+ count: 20,
+ happenedAtUntil: '2026-04-19T10:32:00Z'
+ };
+ render(DashboardActivityFeed, { feed: [rollup] });
+ const badge = page.getByTestId('feed-rollup-count');
+ await expect.element(badge).toHaveTextContent('20');
+ // "–" is U+2013 en-dash
+ const stamp = page.getByText(/\u2013/);
+ await expect.element(stamp).toBeInTheDocument();
+ });
+
+ it('does not render count badge for singleton rows (count === 1)', async () => {
+ render(DashboardActivityFeed, { feed: [baseItem] });
+ const badge = page.getByTestId('feed-rollup-count');
+ await expect.element(badge).not.toBeInTheDocument();
+ });
+
+ it('links the "show all" footer to /chronik, not /documents', async () => {
+ render(DashboardActivityFeed, { feed: [] });
+ const link = page.getByRole('link', { name: /alle anzeigen/i });
+ await expect.element(link).toHaveAttribute('href', '/chronik');
+ });
});
diff --git a/frontend/src/lib/components/NotificationBell.svelte b/frontend/src/lib/components/NotificationBell.svelte
index 9781e366..03f5398a 100644
--- a/frontend/src/lib/components/NotificationBell.svelte
+++ b/frontend/src/lib/components/NotificationBell.svelte
@@ -3,13 +3,13 @@ import { onMount, onDestroy } from 'svelte';
import { goto } from '$app/navigation';
import { m } from '$lib/paraglide/messages.js';
import { clickOutside } from '$lib/actions/clickOutside';
-import { createNotificationStream } from '$lib/hooks/useNotificationStream.svelte';
+import { notificationStore } from '$lib/stores/notifications.svelte';
import NotificationDropdown from './NotificationDropdown.svelte';
let open = $state(false);
let bellButtonEl: HTMLButtonElement | null = null;
-const stream = createNotificationStream();
+const stream = notificationStore;
async function toggleDropdown() {
open = !open;
diff --git a/frontend/src/lib/components/NotificationDropdown.svelte b/frontend/src/lib/components/NotificationDropdown.svelte
index b3b161fb..4cf775dd 100644
--- a/frontend/src/lib/components/NotificationDropdown.svelte
+++ b/frontend/src/lib/components/NotificationDropdown.svelte
@@ -1,7 +1,7 @@
+
+
+ {#if variant === 'first-run'}
+
+ {:else if variant === 'filter-empty'}
+
+ {:else}
+
+ {/if}
+
+
+ {title}
+
+ {#if body}
+
+ {body}
+
+ {/if}
+
diff --git a/frontend/src/lib/components/chronik/ChronikEmptyState.svelte.spec.ts b/frontend/src/lib/components/chronik/ChronikEmptyState.svelte.spec.ts
new file mode 100644
index 00000000..4e3446a4
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikEmptyState.svelte.spec.ts
@@ -0,0 +1,30 @@
+import { describe, it, expect, afterEach } from 'vitest';
+import { cleanup, render } from 'vitest-browser-svelte';
+import { page } from 'vitest/browser';
+
+import ChronikEmptyState from './ChronikEmptyState.svelte';
+
+afterEach(cleanup);
+
+describe('ChronikEmptyState', () => {
+ it('renders first-run variant title', async () => {
+ render(ChronikEmptyState, { variant: 'first-run' });
+ await expect.element(page.getByText('Noch nichts geschehen')).toBeInTheDocument();
+ });
+
+ it('renders filter-empty variant title', async () => {
+ render(ChronikEmptyState, { variant: 'filter-empty' });
+ await expect.element(page.getByText('Nichts in dieser Ansicht')).toBeInTheDocument();
+ });
+
+ it('renders inbox-zero variant title', async () => {
+ render(ChronikEmptyState, { variant: 'inbox-zero' });
+ await expect.element(page.getByText('Keine neuen Erwähnungen')).toBeInTheDocument();
+ });
+
+ it('applies the expected data-variant attribute', async () => {
+ render(ChronikEmptyState, { variant: 'first-run' });
+ const wrapper = document.querySelector('[data-testid="chronik-empty-state"]');
+ expect(wrapper?.getAttribute('data-variant')).toBe('first-run');
+ });
+});
diff --git a/frontend/src/lib/components/chronik/ChronikErrorCard.svelte b/frontend/src/lib/components/chronik/ChronikErrorCard.svelte
new file mode 100644
index 00000000..98de6321
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikErrorCard.svelte
@@ -0,0 +1,46 @@
+
+
+
+
+
+
+
+
+ {displayMessage}
+
+
+
+
diff --git a/frontend/src/lib/components/chronik/ChronikErrorCard.svelte.spec.ts b/frontend/src/lib/components/chronik/ChronikErrorCard.svelte.spec.ts
new file mode 100644
index 00000000..2fecf383
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikErrorCard.svelte.spec.ts
@@ -0,0 +1,39 @@
+import { describe, it, expect, vi, afterEach } from 'vitest';
+import { cleanup, render } from 'vitest-browser-svelte';
+import { page, userEvent } from 'vitest/browser';
+
+import ChronikErrorCard from './ChronikErrorCard.svelte';
+
+afterEach(cleanup);
+
+describe('ChronikErrorCard', () => {
+ it('renders the default error message', async () => {
+ render(ChronikErrorCard, { onRetry: vi.fn() });
+ await expect
+ .element(page.getByText('Die Chronik konnte nicht geladen werden.'))
+ .toBeInTheDocument();
+ });
+
+ it('renders the retry button with the expected label', async () => {
+ render(ChronikErrorCard, { onRetry: vi.fn() });
+ await expect.element(page.getByText('Erneut versuchen')).toBeInTheDocument();
+ });
+
+ it('renders a custom message when provided', async () => {
+ render(ChronikErrorCard, { onRetry: vi.fn(), message: 'Netzwerkfehler' });
+ await expect.element(page.getByText('Netzwerkfehler')).toBeInTheDocument();
+ });
+
+ it('calls onRetry when the retry button is clicked', async () => {
+ const onRetry = vi.fn();
+ render(ChronikErrorCard, { onRetry });
+ await userEvent.click(page.getByText('Erneut versuchen'));
+ expect(onRetry).toHaveBeenCalledTimes(1);
+ });
+
+ it('has role="alert" on the wrapper', async () => {
+ render(ChronikErrorCard, { onRetry: vi.fn() });
+ const alert = document.querySelector('[role="alert"]');
+ expect(alert).not.toBeNull();
+ });
+});
diff --git a/frontend/src/lib/components/chronik/ChronikFilterPills.svelte b/frontend/src/lib/components/chronik/ChronikFilterPills.svelte
new file mode 100644
index 00000000..b692147c
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikFilterPills.svelte
@@ -0,0 +1,68 @@
+
+
+
+ {#each pills as p (p.value)}
+ {@const active = p.value === value}
+
+ {/each}
+
diff --git a/frontend/src/lib/components/chronik/ChronikFilterPills.svelte.spec.ts b/frontend/src/lib/components/chronik/ChronikFilterPills.svelte.spec.ts
new file mode 100644
index 00000000..54c450ff
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikFilterPills.svelte.spec.ts
@@ -0,0 +1,85 @@
+import { describe, it, expect, vi, afterEach } from 'vitest';
+import { cleanup, render } from 'vitest-browser-svelte';
+import { userEvent } from 'vitest/browser';
+
+import ChronikFilterPills from './ChronikFilterPills.svelte';
+
+afterEach(cleanup);
+
+describe('ChronikFilterPills', () => {
+ it('renders all 5 filter pills', async () => {
+ render(ChronikFilterPills, { value: 'alle', onChange: vi.fn() });
+ const pills = document.querySelectorAll('[role="radio"]');
+ expect(pills.length).toBe(5);
+ });
+
+ it('marks the active pill with aria-checked="true"', async () => {
+ render(ChronikFilterPills, { value: 'hochgeladen', onChange: vi.fn() });
+ const pills = document.querySelectorAll('[role="radio"]');
+ const checked = Array.from(pills).filter((p) => p.getAttribute('aria-checked') === 'true');
+ expect(checked.length).toBe(1);
+ expect(checked[0].getAttribute('data-filter-value')).toBe('hochgeladen');
+ });
+
+ it('calls onChange with the clicked pill value', async () => {
+ const onChange = vi.fn();
+ render(ChronikFilterPills, { value: 'alle', onChange });
+ const pill = document.querySelector(
+ '[data-filter-value="kommentare"]'
+ ) as HTMLButtonElement | null;
+ expect(pill).not.toBeNull();
+ pill?.click();
+ expect(onChange).toHaveBeenCalledWith('kommentare');
+ });
+
+ it('applies active classes to the selected pill', async () => {
+ render(ChronikFilterPills, { value: 'fuer-dich', onChange: vi.fn() });
+ const active = document.querySelector('[data-filter-value="fuer-dich"]');
+ expect(active?.className).toContain('bg-primary');
+ const inactive = document.querySelector('[data-filter-value="alle"]');
+ expect(inactive?.className).toContain('bg-muted');
+ });
+
+ it('ArrowRight moves focus to the next pill', async () => {
+ render(ChronikFilterPills, { value: 'alle', onChange: vi.fn() });
+ const first = document.querySelector('[data-filter-value="alle"]') as HTMLButtonElement | null;
+ const second = document.querySelector(
+ '[data-filter-value="fuer-dich"]'
+ ) as HTMLButtonElement | null;
+ expect(first).not.toBeNull();
+ expect(second).not.toBeNull();
+ first?.focus();
+ await userEvent.keyboard('{ArrowRight}');
+ expect(document.activeElement).toBe(second);
+ });
+
+ it('ArrowLeft moves focus to the previous pill', async () => {
+ render(ChronikFilterPills, { value: 'alle', onChange: vi.fn() });
+ const first = document.querySelector('[data-filter-value="alle"]') as HTMLButtonElement | null;
+ const second = document.querySelector(
+ '[data-filter-value="fuer-dich"]'
+ ) as HTMLButtonElement | null;
+ second?.focus();
+ await userEvent.keyboard('{ArrowLeft}');
+ expect(document.activeElement).toBe(first);
+ });
+
+ it('wraps focus from last to first with ArrowRight', async () => {
+ render(ChronikFilterPills, { value: 'alle', onChange: vi.fn() });
+ const last = document.querySelector(
+ '[data-filter-value="kommentare"]'
+ ) as HTMLButtonElement | null;
+ const first = document.querySelector('[data-filter-value="alle"]') as HTMLButtonElement | null;
+ last?.focus();
+ await userEvent.keyboard('{ArrowRight}');
+ expect(document.activeElement).toBe(first);
+ });
+
+ it('has role="radiogroup" on the container', async () => {
+ render(ChronikFilterPills, { value: 'alle', onChange: vi.fn() });
+ const group = document.querySelector('[role="radiogroup"]');
+ expect(group).not.toBeNull();
+ // Paraglide provides "Aktivitäten filtern" as the filter label
+ expect(group?.getAttribute('aria-label')).toBe('Aktivitäten filtern');
+ });
+});
diff --git a/frontend/src/lib/components/chronik/ChronikFuerDichBox.svelte b/frontend/src/lib/components/chronik/ChronikFuerDichBox.svelte
new file mode 100644
index 00000000..fd4b94f9
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikFuerDichBox.svelte
@@ -0,0 +1,148 @@
+
+
+
+ {#if unread.length === 0}
+
+
+
+ {m.chronik_inbox_zero_title()}
+
+
+ {m.chronik_inbox_zero_link()}
+
+
+ {:else}
+
+
+
+ {m.chronik_for_you_caption()}
+
+
+ {m.chronik_for_you_count({ count: unread.length })}
+
+
+
+
+
+
+ {/if}
+
+
+
diff --git a/frontend/src/lib/components/chronik/ChronikFuerDichBox.svelte.spec.ts b/frontend/src/lib/components/chronik/ChronikFuerDichBox.svelte.spec.ts
new file mode 100644
index 00000000..edb53951
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikFuerDichBox.svelte.spec.ts
@@ -0,0 +1,129 @@
+import { describe, it, expect, vi, afterEach } from 'vitest';
+import { cleanup, render } from 'vitest-browser-svelte';
+import { page, userEvent } from 'vitest/browser';
+
+import ChronikFuerDichBox from './ChronikFuerDichBox.svelte';
+import type { NotificationItem } from '$lib/stores/notifications.svelte';
+
+afterEach(cleanup);
+
+function notif(partial: Partial<NotificationItem>): NotificationItem {
+ return {
+ id: 'n1',
+ type: 'MENTION',
+ documentId: 'doc-1',
+ documentTitle: 'Ein Dokument',
+ referenceId: 'ref-1',
+ annotationId: null,
+ read: false,
+ createdAt: new Date(Date.now() - 5 * 60_000).toISOString(),
+ actorName: 'Anna',
+ ...partial
+ };
+}
+
+describe('ChronikFuerDichBox', () => {
+ it('renders inbox-zero state when there are no unread items', async () => {
+ render(ChronikFuerDichBox, {
+ unread: [],
+ onMarkRead: vi.fn(),
+ onMarkAllRead: vi.fn()
+ });
+ const zero = document.querySelector('[data-testid="chronik-inbox-zero"]');
+ expect(zero).not.toBeNull();
+ await expect.element(page.getByText('Keine neuen Erwähnungen')).toBeInTheDocument();
+ });
+
+ it('links to the archived mentions in the inbox-zero state', async () => {
+ render(ChronikFuerDichBox, {
+ unread: [],
+ onMarkRead: vi.fn(),
+ onMarkAllRead: vi.fn()
+ });
+ const link = document.querySelector('a[href="/chronik?filter=fuer-dich"]');
+ expect(link).not.toBeNull();
+ });
+
+ it('renders the count badge with correct total when unread exists', async () => {
+ render(ChronikFuerDichBox, {
+ unread: [notif({ id: 'a' }), notif({ id: 'b' })],
+ onMarkRead: vi.fn(),
+ onMarkAllRead: vi.fn()
+ });
+ await expect.element(page.getByText('2 neu')).toBeInTheDocument();
+ });
+
+ it('count badge has aria-live=polite when unread exists', async () => {
+ render(ChronikFuerDichBox, {
+ unread: [notif({ id: 'a' })],
+ onMarkRead: vi.fn(),
+ onMarkAllRead: vi.fn()
+ });
+ // Wait for render
+ await expect.element(page.getByText('1 neu')).toBeInTheDocument();
+ const badge = document.querySelector('[data-testid="chronik-fuerdich-count"]');
+ expect(badge?.getAttribute('aria-live')).toBe('polite');
+ expect(badge?.getAttribute('aria-atomic')).toBe('true');
+ });
+
+ it('does not render the "Alle gelesen" button when there are no unread items', async () => {
+ render(ChronikFuerDichBox, {
+ unread: [],
+ onMarkRead: vi.fn(),
+ onMarkAllRead: vi.fn()
+ });
+ await expect.element(page.getByText('Keine neuen Erwähnungen')).toBeInTheDocument();
+ const all = document.querySelector('[data-testid="chronik-mark-all-read"]');
+ expect(all).toBeNull();
+ });
+
+ it('renders the "Alle gelesen" button when unread exists', async () => {
+ render(ChronikFuerDichBox, {
+ unread: [notif({ id: 'a' })],
+ onMarkRead: vi.fn(),
+ onMarkAllRead: vi.fn()
+ });
+ await expect.element(page.getByText('Alle gelesen')).toBeInTheDocument();
+ });
+
+ it('calls onMarkAllRead when the "Alle gelesen" button is clicked', async () => {
+ const onMarkAllRead = vi.fn();
+ render(ChronikFuerDichBox, {
+ unread: [notif({ id: 'a' })],
+ onMarkRead: vi.fn(),
+ onMarkAllRead
+ });
+ await userEvent.click(page.getByText('Alle gelesen'));
+ expect(onMarkAllRead).toHaveBeenCalledTimes(1);
+ });
+
+ it('calls onMarkRead (and not navigation) when a per-item Dismiss button is clicked', async () => {
+ const onMarkRead = vi.fn();
+ const n = notif({ id: 'xyz' });
+ render(ChronikFuerDichBox, {
+ unread: [n],
+ onMarkRead,
+ onMarkAllRead: vi.fn()
+ });
+ const dismiss = document.querySelector(
+ '[data-testid="chronik-fuerdich-dismiss"]'
+ ) as HTMLButtonElement | null;
+ expect(dismiss).not.toBeNull();
+ dismiss?.click();
+ expect(onMarkRead).toHaveBeenCalledTimes(1);
+ expect(onMarkRead.mock.calls[0][0]).toEqual(n);
+ });
+
+ it('Dismiss button is a sibling of the document link, never nested inside <a>', async () => {
+ render(ChronikFuerDichBox, {
+ unread: [notif({ id: 'x' })],
+ onMarkRead: vi.fn(),
+ onMarkAllRead: vi.fn()
+ });
+ const dismiss = document.querySelector('[data-testid="chronik-fuerdich-dismiss"]');
+ expect(dismiss).not.toBeNull();
+ // HTML spec forbids interactive content descendants of <a>.
+ // Prevents the senior-audience tap-drag bug flagged by Leonie.
+ expect(dismiss?.closest('a')).toBeNull();
+ });
+});
diff --git a/frontend/src/lib/components/chronik/ChronikRow.svelte b/frontend/src/lib/components/chronik/ChronikRow.svelte
new file mode 100644
index 00000000..1df3c75e
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikRow.svelte
@@ -0,0 +1,174 @@
+
+
+
+
+ {#if item.actor}
+
+ {item.actor.initials}
+
+ {:else}
+
+ ?
+
+ {/if}
+
+
+ {#if variant === 'for-you'}
+
+ @
+
+ {/if}
+
+
+
+
+ {verbParts.before}{docTitle}{verbParts.after}
+ {#if variant === 'rollup'}
+
+ {item.count}
+
+ {/if}
+
+
+ {#if variant === 'comment'}
+
+
+ „…“
+
+ {/if}
+
+
{timeLabel}
+
+
diff --git a/frontend/src/lib/components/chronik/ChronikRow.svelte.spec.ts b/frontend/src/lib/components/chronik/ChronikRow.svelte.spec.ts
new file mode 100644
index 00000000..a4234361
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikRow.svelte.spec.ts
@@ -0,0 +1,154 @@
+import { describe, it, expect, afterEach } from 'vitest';
+import { cleanup, render } from 'vitest-browser-svelte';
+import { page } from 'vitest/browser';
+
+import ChronikRow from './ChronikRow.svelte';
+import type { components } from '$lib/generated/api';
+
+type ActivityFeedItemDTO = components['schemas']['ActivityFeedItemDTO'];
+
+afterEach(cleanup);
+
+const baseItem: ActivityFeedItemDTO = {
+ kind: 'TEXT_SAVED',
+ actor: { initials: 'MR', color: '#7a4f9a', name: 'Max Raddatz' },
+ documentId: 'doc-1',
+ documentTitle: 'Brief 1920',
+ happenedAt: '2026-04-19T10:00:00Z',
+ youMentioned: false,
+ count: 1
+};
+
+describe('ChronikRow', () => {
+ it('renders the document title', async () => {
+ render(ChronikRow, { item: baseItem });
+ await expect.element(page.getByText('Brief 1920')).toBeInTheDocument();
+ });
+
+ it('renders actor initials in avatar', async () => {
+ render(ChronikRow, { item: baseItem });
+ await expect.element(page.getByText('MR')).toBeInTheDocument();
+ });
+
+ it('renders "?" fallback avatar when actor is missing', async () => {
+ const item: ActivityFeedItemDTO = { ...baseItem, actor: undefined };
+ render(ChronikRow, { item });
+ const fallback = document.querySelector('[data-testid="chronik-avatar-fallback"]');
+ expect(fallback).not.toBeNull();
+ expect(fallback?.textContent?.trim()).toBe('?');
+ });
+
+ it('wraps the row in a link to the document', async () => {
+ render(ChronikRow, { item: baseItem });
+ const link = document.querySelector('a[href="/documents/doc-1"]');
+ expect(link).not.toBeNull();
+ });
+
+ // --- simple variant ---
+ it('renders simple variant when count === 1 and not a mention', async () => {
+ render(ChronikRow, { item: baseItem });
+ // No rollup count badge
+ expect(document.querySelector('[data-testid="chronik-count-badge"]')).toBeNull();
+ // No for-you marker
+ expect(document.querySelector('[data-testid="chronik-foryou-marker"]')).toBeNull();
+ // No comment preview
+ expect(document.querySelector('[data-testid="chronik-comment-preview"]')).toBeNull();
+ });
+
+ // --- rollup variant ---
+ it('renders rollup variant with count badge when count > 1', async () => {
+ const item: ActivityFeedItemDTO = {
+ ...baseItem,
+ kind: 'TEXT_SAVED',
+ count: 3,
+ happenedAt: '2026-04-19T10:00:00Z',
+ happenedAtUntil: '2026-04-19T11:30:00Z'
+ };
+ render(ChronikRow, { item });
+ const badge = document.querySelector('[data-testid="chronik-count-badge"]');
+ expect(badge).not.toBeNull();
+ expect(badge?.textContent).toContain('3');
+ });
+
+ it('renders a time range with an en-dash for rollup variant', async () => {
+ const item: ActivityFeedItemDTO = {
+ ...baseItem,
+ kind: 'FILE_UPLOADED',
+ count: 5,
+ happenedAt: '2026-04-19T10:00:00Z',
+ happenedAtUntil: '2026-04-19T11:30:00Z'
+ };
+ render(ChronikRow, { item });
+ // en-dash character U+2013
+ const body = document.body.textContent ?? '';
+ expect(body).toContain('\u2013');
+ });
+
+ // --- for-you variant ---
+ it('renders for-you marker when youMentioned is true', async () => {
+ const item: ActivityFeedItemDTO = {
+ ...baseItem,
+ kind: 'MENTION_CREATED',
+ youMentioned: true
+ };
+ render(ChronikRow, { item });
+ const marker = document.querySelector('[data-testid="chronik-foryou-marker"]');
+ expect(marker).not.toBeNull();
+ });
+
+ it('applies accent border to for-you variant outer wrapper', async () => {
+ const item: ActivityFeedItemDTO = {
+ ...baseItem,
+ kind: 'MENTION_CREATED',
+ youMentioned: true
+ };
+ render(ChronikRow, { item });
+ const wrapper = document.querySelector('[data-variant="for-you"]');
+ expect(wrapper).not.toBeNull();
+ expect(wrapper?.className).toContain('border-accent');
+ });
+
+ // --- comment variant ---
+ it('renders comment preview for COMMENT_ADDED kind', async () => {
+ const item: ActivityFeedItemDTO = {
+ ...baseItem,
+ kind: 'COMMENT_ADDED'
+ };
+ render(ChronikRow, { item });
+ const preview = document.querySelector('[data-testid="chronik-comment-preview"]');
+ expect(preview).not.toBeNull();
+ });
+
+ it('comment preview does NOT duplicate the document title verbatim', async () => {
+ // Leonie: user sees the title twice otherwise — looks like the comment is quoting itself.
+ // Until the backend exposes item.commentPreview, the placeholder must be distinct.
+ const item: ActivityFeedItemDTO = {
+ ...baseItem,
+ kind: 'COMMENT_ADDED',
+ documentTitle: 'Brief vom 12. Juli 1920'
+ };
+ render(ChronikRow, { item });
+ const preview = document.querySelector('[data-testid="chronik-comment-preview"]');
+ expect(preview).not.toBeNull();
+ expect(preview?.textContent).not.toContain('Brief vom 12. Juli 1920');
+ });
+
+ // --- robustness: title rendering for edge cases ---
+ it('still renders the row link when documentTitle is an empty string', async () => {
+ // Felix: verbText.indexOf(docTitle) returned 0 for empty titles — the span
+ // collapsed and before/after both emptied out. Swap to a sentinel-based
+ // approach so this case renders like every other row.
+ const empty: ActivityFeedItemDTO = { ...baseItem, documentTitle: '' };
+ render(ChronikRow, { item: empty });
+ const link = document.querySelector('a[href="/documents/doc-1"]');
+ expect(link).not.toBeNull();
+ });
+
+ it('renders a short document title that could substring-match the verb', async () => {
+ const short: ActivityFeedItemDTO = { ...baseItem, documentTitle: 'Brief' };
+ render(ChronikRow, { item: short });
+ const titleEls = document.querySelectorAll('[data-testid="chronik-doc-title"]');
+ expect(titleEls.length).toBe(1);
+ expect(titleEls[0].textContent).toBe('Brief');
+ });
+});
diff --git a/frontend/src/lib/components/chronik/ChronikTimeline.svelte b/frontend/src/lib/components/chronik/ChronikTimeline.svelte
new file mode 100644
index 00000000..f083ba7b
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikTimeline.svelte
@@ -0,0 +1,66 @@
+
+
+
+ {#each BUCKET_ORDER as bucket (bucket)}
+ {#if grouped[bucket].length > 0}
+
+
+
+ {bucketLabel(bucket)}
+
+
+
+
+ {#each grouped[bucket] as it (it.kind + it.happenedAt + it.documentId)}
+ -
+
+
+ {/each}
+
+
+ {/if}
+ {/each}
+
diff --git a/frontend/src/lib/components/chronik/ChronikTimeline.svelte.spec.ts b/frontend/src/lib/components/chronik/ChronikTimeline.svelte.spec.ts
new file mode 100644
index 00000000..f65c1e70
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikTimeline.svelte.spec.ts
@@ -0,0 +1,99 @@
+import { describe, it, expect, afterEach } from 'vitest';
+import { cleanup, render } from 'vitest-browser-svelte';
+import { page } from 'vitest/browser';
+
+import ChronikTimeline from './ChronikTimeline.svelte';
+import type { components } from '$lib/generated/api';
+
+type ActivityFeedItemDTO = components['schemas']['ActivityFeedItemDTO'];
+
+afterEach(cleanup);
+
+function item(partial: Partial<ActivityFeedItemDTO>): ActivityFeedItemDTO {
+ return {
+ kind: 'TEXT_SAVED',
+ actor: { initials: 'AB', color: '#123456', name: 'Anna Beta' },
+ documentId: 'doc-x',
+ documentTitle: 'Some document',
+ happenedAt: new Date().toISOString(),
+ youMentioned: false,
+ count: 1,
+ ...partial
+ };
+}
+
+function atOffsetDays(days: number): string {
+ const d = new Date();
+ d.setDate(d.getDate() - days);
+ return d.toISOString();
+}
+
+describe('ChronikTimeline', () => {
+ it('renders nothing / no bucket headers when items is empty', async () => {
+ render(ChronikTimeline, { items: [] });
+ expect(document.querySelector('[data-testid="chronik-bucket-today"]')).toBeNull();
+ expect(document.querySelector('[data-testid="chronik-bucket-yesterday"]')).toBeNull();
+ expect(document.querySelector('[data-testid="chronik-bucket-thisWeek"]')).toBeNull();
+ expect(document.querySelector('[data-testid="chronik-bucket-older"]')).toBeNull();
+ });
+
+ it('places today items in the today bucket with a "Heute" header', async () => {
+ render(ChronikTimeline, {
+ items: [
+ item({
+ documentId: 'doc-today',
+ documentTitle: 'Frisches Dokument',
+ happenedAt: new Date().toISOString()
+ })
+ ]
+ });
+ const today = document.querySelector('[data-testid="chronik-bucket-today"]');
+ expect(today).not.toBeNull();
+ await expect.element(page.getByText('Heute', { exact: true })).toBeInTheDocument();
+ // The row for the today item should be inside the today bucket.
+ expect(today?.textContent).toContain('Frisches Dokument');
+ });
+
+ it('does not render an empty bucket header when no items fall into it', async () => {
+ render(ChronikTimeline, {
+ items: [item({ happenedAt: new Date().toISOString() })]
+ });
+ // Only today bucket should exist.
+ expect(document.querySelector('[data-testid="chronik-bucket-today"]')).not.toBeNull();
+ expect(document.querySelector('[data-testid="chronik-bucket-older"]')).toBeNull();
+ });
+
+ it('places older items in the older bucket', async () => {
+ render(ChronikTimeline, {
+ items: [
+ item({
+ documentId: 'doc-old',
+ documentTitle: 'Alt Doc',
+ happenedAt: atOffsetDays(30)
+ })
+ ]
+ });
+ const older = document.querySelector('[data-testid="chronik-bucket-older"]');
+ expect(older).not.toBeNull();
+ expect(older?.textContent).toContain('Alt Doc');
+ });
+
+ it('groups multiple items into their respective buckets', async () => {
+ render(ChronikTimeline, {
+ items: [
+ item({
+ documentId: 'd1',
+ documentTitle: 'Heute Item',
+ happenedAt: new Date().toISOString()
+ }),
+ item({ documentId: 'd2', documentTitle: 'Alt Item', happenedAt: atOffsetDays(30) })
+ ]
+ });
+ const today = document.querySelector('[data-testid="chronik-bucket-today"]');
+ const older = document.querySelector('[data-testid="chronik-bucket-older"]');
+ expect(today?.textContent).toContain('Heute Item');
+ expect(today?.textContent).not.toContain('Alt Item');
+ expect(older?.textContent).toContain('Alt Item');
+ expect(older?.textContent).not.toContain('Heute Item');
+ });
+});
diff --git a/frontend/src/lib/generated/api.ts b/frontend/src/lib/generated/api.ts
index aa4e5cd1..d15c12e1 100644
--- a/frontend/src/lib/generated/api.ts
+++ b/frontend/src/lib/generated/api.ts
@@ -1799,6 +1799,7 @@ export interface components {
/** Format: uuid */
id?: string;
displayName?: string;
+ personType?: string;
firstName?: string;
lastName?: string;
/** Format: int64 */
@@ -1809,7 +1810,6 @@ export interface components {
deathYear?: number;
alias?: string;
notes?: string;
- personType?: string;
};
SenderModel: {
/** Format: uuid */
@@ -1877,10 +1877,10 @@ export interface components {
timeout?: number;
};
PageNotificationDTO: {
- /** Format: int32 */
- totalPages?: number;
/** Format: int64 */
totalElements?: number;
+ /** Format: int32 */
+ totalPages?: number;
pageable?: components["schemas"]["PageableObject"];
first?: boolean;
last?: boolean;
@@ -2015,6 +2015,10 @@ export interface components {
/** Format: date-time */
happenedAt: string;
youMentioned: boolean;
+ /** Format: int32 */
+ count: number;
+ /** Format: date-time */
+ happenedAtUntil?: string;
};
InvitePrefillDTO: {
firstName: string;
@@ -4455,7 +4459,3 @@ export interface operations {
};
};
}
-
-export type DashboardResumeDTO = components['schemas']['DashboardResumeDTO'];
-export type DashboardPulseDTO = components['schemas']['DashboardPulseDTO'];
-export type ActivityFeedItemDTO = components['schemas']['ActivityFeedItemDTO'];
diff --git a/frontend/src/lib/hooks/__tests__/useNotificationStream.svelte.test.ts b/frontend/src/lib/hooks/__tests__/useNotificationStream.svelte.test.ts
deleted file mode 100644
index 143844c5..00000000
--- a/frontend/src/lib/hooks/__tests__/useNotificationStream.svelte.test.ts
+++ /dev/null
@@ -1,142 +0,0 @@
-import { describe, it, expect, vi, beforeEach } from 'vitest';
-import type { NotificationItem } from '../useNotificationStream.svelte';
-
-// Track the last created EventSource instance
-let lastEventSource: {
- close: ReturnType;
- onopen: (() => void) | null;
- onerror: (() => void) | null;
- simulate: (type: string, data: string) => void;
-} | null = null;
-
-class MockEventSource {
- onopen: (() => void) | null = null;
- onerror: (() => void) | null = null;
- close = vi.fn();
- private listeners: Record void)[]> = {};
-
- constructor() {
- // eslint-disable-next-line @typescript-eslint/no-this-alias
- lastEventSource = this;
- }
-
- addEventListener(type: string, fn: (e: MessageEvent) => void) {
- if (!this.listeners[type]) this.listeners[type] = [];
- this.listeners[type].push(fn);
- }
-
- simulate(type: string, data: string) {
- const event = new MessageEvent(type, { data });
- for (const fn of this.listeners[type] ?? []) {
- fn(event);
- }
- }
-}
-
-vi.stubGlobal('EventSource', MockEventSource);
-
-const mockFetch = vi.fn();
-vi.stubGlobal('fetch', mockFetch);
-
-// Import after stubs are set up
-const { createNotificationStream } = await import('../useNotificationStream.svelte');
-
-beforeEach(() => {
- mockFetch.mockReset();
- lastEventSource = null;
-});
-
-function makeNotification(overrides: Partial = {}): NotificationItem {
- return {
- id: 'n1',
- type: 'REPLY',
- actorName: 'Hans',
- documentId: 'doc-1',
- referenceId: 'ref-1',
- annotationId: null,
- read: false,
- createdAt: new Date().toISOString(),
- ...overrides
- };
-}
-
-describe('createNotificationStream', () => {
- it('starts with empty notifications and zero unreadCount', () => {
- const stream = createNotificationStream();
- expect(stream.notifications).toHaveLength(0);
- expect(stream.unreadCount).toBe(0);
- });
-
- it('fetchUnreadCount updates unreadCount from API', async () => {
- mockFetch.mockResolvedValueOnce(new Response(JSON.stringify({ count: 3 }), { status: 200 }));
- const stream = createNotificationStream();
- await stream.fetchUnreadCount();
- expect(stream.unreadCount).toBe(3);
- });
-
- it('fetchNotifications populates notifications from API', async () => {
- const items = [makeNotification()];
- mockFetch.mockResolvedValueOnce(
- new Response(JSON.stringify({ content: items }), { status: 200 })
- );
- const stream = createNotificationStream();
- await stream.fetchNotifications();
- expect(stream.notifications).toHaveLength(1);
- expect(stream.notifications[0].id).toBe('n1');
- });
-
- it('markRead marks notification as read and decrements unreadCount', async () => {
- mockFetch
- .mockResolvedValueOnce(new Response(JSON.stringify({ count: 2 }), { status: 200 }))
- .mockResolvedValueOnce(new Response(null, { status: 200 }));
- const stream = createNotificationStream();
- await stream.fetchUnreadCount();
-
- const notification = makeNotification({ read: false });
- await stream.markRead(notification);
- expect(notification.read).toBe(true);
- expect(stream.unreadCount).toBe(1);
- });
-
- it('markAllRead calls the API and resets unreadCount', async () => {
- mockFetch.mockResolvedValueOnce(new Response(null, { status: 200 }));
- const stream = createNotificationStream();
- await stream.markAllRead();
- expect(mockFetch).toHaveBeenCalledWith('/api/notifications/read-all', { method: 'POST' });
- expect(stream.unreadCount).toBe(0);
- });
-
- it('destroy closes the EventSource', async () => {
- mockFetch.mockResolvedValue(new Response(JSON.stringify({ count: 0 }), { status: 200 }));
- const stream = createNotificationStream();
- stream.init();
- expect(lastEventSource).not.toBeNull();
- stream.destroy();
- expect(lastEventSource!.close).toHaveBeenCalled();
- });
-
- it('SSE notification event prepends notification and increments unreadCount', async () => {
- mockFetch.mockResolvedValue(new Response(JSON.stringify({ count: 0 }), { status: 200 }));
- const stream = createNotificationStream();
- stream.init();
-
- const notification = makeNotification({ id: 'sse-1', read: false });
- lastEventSource!.simulate('notification', JSON.stringify(notification));
-
- expect(stream.notifications).toHaveLength(1);
- expect(stream.notifications[0].id).toBe('sse-1');
- expect(stream.unreadCount).toBe(1);
- });
-
- it('SSE notification event with read:true does not increment unreadCount', async () => {
- mockFetch.mockResolvedValue(new Response(JSON.stringify({ count: 0 }), { status: 200 }));
- const stream = createNotificationStream();
- stream.init();
-
- const notification = makeNotification({ id: 'sse-2', read: true });
- lastEventSource!.simulate('notification', JSON.stringify(notification));
-
- expect(stream.notifications).toHaveLength(1);
- expect(stream.unreadCount).toBe(0);
- });
-});
diff --git a/frontend/src/lib/hooks/useNotificationStream.svelte.ts b/frontend/src/lib/hooks/useNotificationStream.svelte.ts
deleted file mode 100644
index 0a03dada..00000000
--- a/frontend/src/lib/hooks/useNotificationStream.svelte.ts
+++ /dev/null
@@ -1,95 +0,0 @@
-import { type NotificationItem, parseNotificationEvent } from '$lib/utils/notifications';
-
-export type { NotificationItem };
-
-export function createNotificationStream() {
-  let notifications = $state<NotificationItem[]>([]);
- let unreadCount = $state(0);
- let eventSource: EventSource | null = null;
-
-  async function fetchNotifications(): Promise<void> {
- try {
- const res = await fetch('/api/notifications?size=10');
- if (res.ok) {
- const data = await res.json();
- notifications = data.content ?? [];
- }
- } catch (e) {
- console.error('Failed to fetch notifications', e);
- }
- }
-
-  async function fetchUnreadCount(): Promise<void> {
- try {
- const res = await fetch('/api/notifications/unread-count');
- if (res.ok) {
- const data = await res.json();
- unreadCount = data.count;
- }
- } catch (e) {
- console.error('Failed to fetch unread count', e);
- }
- }
-
-  async function markRead(notification: NotificationItem): Promise<void> {
- if (!notification.read) {
- try {
- await fetch(`/api/notifications/${notification.id}/read`, { method: 'PATCH' });
- notification.read = true;
- unreadCount = Math.max(0, unreadCount - 1);
- } catch (e) {
- console.error('Failed to mark notification as read', e);
- }
- }
- }
-
-  async function markAllRead(): Promise<void> {
- try {
- await fetch('/api/notifications/read-all', { method: 'POST' });
- for (const n of notifications) {
- n.read = true;
- }
- unreadCount = 0;
- } catch (e) {
- console.error('Failed to mark all notifications as read', e);
- }
- }
-
- function init(): void {
- fetchUnreadCount();
- eventSource = new EventSource('/api/notifications/stream');
- eventSource.addEventListener('notification', (e) => {
- const notification = parseNotificationEvent(e.data);
- if (!notification) return;
- notifications = [notification, ...notifications];
- if (!notification.read) unreadCount += 1;
- });
- eventSource.onopen = () => {
- fetchUnreadCount();
- };
- eventSource.onerror = () => {
- // Close on error to avoid repeated reconnect noise
- eventSource?.close();
- };
- }
-
- function destroy(): void {
- eventSource?.close();
- eventSource = null;
- }
-
- return {
- get notifications() {
- return notifications;
- },
- get unreadCount() {
- return unreadCount;
- },
- fetchNotifications,
- fetchUnreadCount,
- markRead,
- markAllRead,
- init,
- destroy
- };
-}
diff --git a/frontend/src/lib/stores/notifications.svelte.spec.ts b/frontend/src/lib/stores/notifications.svelte.spec.ts
new file mode 100644
index 00000000..01acbd21
--- /dev/null
+++ b/frontend/src/lib/stores/notifications.svelte.spec.ts
@@ -0,0 +1,108 @@
+import { describe, it, expect, vi, beforeEach } from 'vitest';
+import type { NotificationItem } from '$lib/utils/notifications';
+
+let lastEventSource: MockEventSource | null = null;
+let eventSourceCount = 0;
+
+class MockEventSource {
+ onopen: (() => void) | null = null;
+ onerror: (() => void) | null = null;
+ close = vi.fn();
+  private listeners: Record<string, ((e: MessageEvent) => void)[]> = {};
+
+ constructor() {
+ eventSourceCount += 1;
+ // eslint-disable-next-line @typescript-eslint/no-this-alias
+ lastEventSource = this;
+ }
+
+ addEventListener(type: string, fn: (e: MessageEvent) => void) {
+ if (!this.listeners[type]) this.listeners[type] = [];
+ this.listeners[type].push(fn);
+ }
+
+ simulate(type: string, data: string) {
+ const event = new MessageEvent(type, { data });
+ for (const fn of this.listeners[type] ?? []) {
+ fn(event);
+ }
+ }
+}
+
+vi.stubGlobal('EventSource', MockEventSource);
+
+const mockFetch = vi.fn();
+vi.stubGlobal('fetch', mockFetch);
+
+const { notificationStore, __resetForTest } = await import('./notifications.svelte');
+
+beforeEach(() => {
+ mockFetch.mockReset();
+ mockFetch.mockResolvedValue(new Response(JSON.stringify({ count: 0 }), { status: 200 }));
+ lastEventSource = null;
+ eventSourceCount = 0;
+ __resetForTest();
+});
+
+function makeNotification(overrides: Partial<NotificationItem> = {}): NotificationItem {
+ return {
+ id: 'n1',
+ type: 'REPLY',
+ actorName: 'Hans',
+ documentId: 'doc-1',
+ documentTitle: null,
+ referenceId: 'ref-1',
+ annotationId: null,
+ read: false,
+ createdAt: new Date().toISOString(),
+ ...overrides
+ };
+}
+
+describe('notificationStore (singleton)', () => {
+ it('opens a single EventSource across multiple init() calls', () => {
+ notificationStore.init();
+ notificationStore.init();
+ notificationStore.init();
+
+ expect(eventSourceCount).toBe(1);
+ });
+
+ it('closes the EventSource only after every init() is matched with destroy()', () => {
+ notificationStore.init();
+ notificationStore.init();
+ const es = lastEventSource!;
+
+ notificationStore.destroy();
+ expect(es.close).not.toHaveBeenCalled();
+
+ notificationStore.destroy();
+ expect(es.close).toHaveBeenCalledTimes(1);
+ });
+
+ it('reopens a fresh EventSource after full teardown', () => {
+ notificationStore.init();
+ notificationStore.destroy();
+ notificationStore.init();
+
+ expect(eventSourceCount).toBe(2);
+ });
+
+ it('SSE notification event prepends notification and increments unreadCount', () => {
+ notificationStore.init();
+
+ const notification = makeNotification({ id: 'sse-1', read: false });
+ lastEventSource!.simulate('notification', JSON.stringify(notification));
+
+ expect(notificationStore.notifications[0].id).toBe('sse-1');
+ expect(notificationStore.unreadCount).toBe(1);
+ });
+
+ it('markAllRead resets unreadCount', async () => {
+ mockFetch.mockResolvedValue(new Response(null, { status: 200 }));
+ await notificationStore.markAllRead();
+
+ expect(mockFetch).toHaveBeenCalledWith('/api/notifications/read-all', { method: 'POST' });
+ expect(notificationStore.unreadCount).toBe(0);
+ });
+});
diff --git a/frontend/src/lib/stores/notifications.svelte.ts b/frontend/src/lib/stores/notifications.svelte.ts
new file mode 100644
index 00000000..28ac9eb9
--- /dev/null
+++ b/frontend/src/lib/stores/notifications.svelte.ts
@@ -0,0 +1,108 @@
+import { type NotificationItem, parseNotificationEvent } from '$lib/utils/notifications';
+
+export type { NotificationItem };
+
+let notifications = $state<NotificationItem[]>([]);
+let unreadCount = $state(0);
+let eventSource: EventSource | null = null;
+let refCount = 0;
+
+async function fetchNotifications(): Promise<void> {
+ try {
+ const res = await fetch('/api/notifications?size=10');
+ if (res.ok) {
+ const data = await res.json();
+ notifications = data.content ?? [];
+ }
+ } catch (e) {
+ console.error('Failed to fetch notifications', e);
+ }
+}
+
+async function fetchUnreadCount(): Promise<void> {
+ try {
+ const res = await fetch('/api/notifications/unread-count');
+ if (res.ok) {
+ const data = await res.json();
+ unreadCount = data.count;
+ }
+ } catch (e) {
+ console.error('Failed to fetch unread count', e);
+ }
+}
+
+async function markRead(notification: NotificationItem): Promise<void> {
+ if (!notification.read) {
+ try {
+ await fetch(`/api/notifications/${notification.id}/read`, { method: 'PATCH' });
+ notification.read = true;
+ unreadCount = Math.max(0, unreadCount - 1);
+ } catch (e) {
+ console.error('Failed to mark notification as read', e);
+ }
+ }
+}
+
+async function markAllRead(): Promise<void> {
+ try {
+ await fetch('/api/notifications/read-all', { method: 'POST' });
+ for (const n of notifications) {
+ n.read = true;
+ }
+ unreadCount = 0;
+ } catch (e) {
+ console.error('Failed to mark all notifications as read', e);
+ }
+}
+
+function init(): void {
+ refCount += 1;
+ if (refCount > 1) return;
+
+ fetchUnreadCount();
+ eventSource = new EventSource('/api/notifications/stream');
+ eventSource.addEventListener('notification', (e) => {
+ const notification = parseNotificationEvent((e as MessageEvent).data);
+ if (!notification) return;
+ notifications = [notification, ...notifications];
+ if (!notification.read) unreadCount += 1;
+ });
+ eventSource.onopen = () => {
+ fetchUnreadCount();
+ };
+ eventSource.onerror = () => {
+ eventSource?.close();
+ };
+}
+
+function destroy(): void {
+ if (refCount === 0) return;
+ refCount -= 1;
+ if (refCount === 0) {
+ eventSource?.close();
+ eventSource = null;
+ }
+}
+
+export function __resetForTest(): void {
+ eventSource?.close();
+ eventSource = null;
+ refCount = 0;
+ notifications = [];
+ unreadCount = 0;
+}
+
+export const notificationStore = {
+ get notifications() {
+ return notifications;
+ },
+ get unreadCount() {
+ return unreadCount;
+ },
+ fetchNotifications,
+ fetchUnreadCount,
+ markRead,
+ markAllRead,
+ init,
+ destroy
+};
diff --git a/frontend/src/lib/utils/date-buckets.spec.ts b/frontend/src/lib/utils/date-buckets.spec.ts
new file mode 100644
index 00000000..3593481a
--- /dev/null
+++ b/frontend/src/lib/utils/date-buckets.spec.ts
@@ -0,0 +1,50 @@
+import { describe, it, expect } from 'vitest';
+import { bucketByDay } from './date-buckets';
+
+function date(iso: string): Date {
+ return new Date(iso);
+}
+
+describe('bucketByDay', () => {
+ // Wednesday 2026-04-22 at 12:00 Berlin. Week start (Mon) = 2026-04-20.
+ const now = date('2026-04-22T12:00:00+02:00');
+
+ it('returns "today" for a time earlier today', () => {
+ expect(bucketByDay(date('2026-04-22T06:00:00+02:00'), now, 'de-DE')).toBe('today');
+ });
+
+ it('returns "today" at exact midnight start of today', () => {
+ expect(bucketByDay(date('2026-04-22T00:00:00+02:00'), now, 'de-DE')).toBe('today');
+ });
+
+ it('returns "yesterday" for any time on the previous day', () => {
+ expect(bucketByDay(date('2026-04-21T23:59:59+02:00'), now, 'de-DE')).toBe('yesterday');
+ expect(bucketByDay(date('2026-04-21T00:00:00+02:00'), now, 'de-DE')).toBe('yesterday');
+ });
+
+ it('returns "thisWeek" for the Monday that starts this week (Monday-anchored, de-DE)', () => {
+ expect(bucketByDay(date('2026-04-20T10:00:00+02:00'), now, 'de-DE')).toBe('thisWeek');
+ });
+
+ it('returns "older" for anything before the start of this week (de-DE)', () => {
+ expect(bucketByDay(date('2026-04-19T23:00:00+02:00'), now, 'de-DE')).toBe('older');
+ expect(bucketByDay(date('2026-04-13T10:00:00+02:00'), now, 'de-DE')).toBe('older');
+ });
+
+ it('uses Sunday-start week for en-US', () => {
+ const sundayRef = date('2026-04-19T12:00:00+02:00');
+ expect(bucketByDay(date('2026-04-19T06:00:00+02:00'), sundayRef, 'en-US')).toBe('today');
+ expect(
+ bucketByDay(date('2026-04-13T10:00:00+02:00'), date('2026-04-18T12:00:00+02:00'), 'en-US')
+ ).toBe('thisWeek');
+ expect(
+ bucketByDay(date('2026-04-11T10:00:00+02:00'), date('2026-04-18T12:00:00+02:00'), 'en-US')
+ ).toBe('older');
+ });
+
+ it('handles DST spring-forward correctly (Europe/Berlin 2026-03-29)', () => {
+ const justAfterDst = date('2026-03-29T03:15:00+02:00');
+ const sameDay = date('2026-03-29T10:00:00+02:00');
+ expect(bucketByDay(justAfterDst, sameDay, 'de-DE')).toBe('today');
+ });
+});
diff --git a/frontend/src/lib/utils/date-buckets.ts b/frontend/src/lib/utils/date-buckets.ts
new file mode 100644
index 00000000..7561380c
--- /dev/null
+++ b/frontend/src/lib/utils/date-buckets.ts
@@ -0,0 +1,35 @@
+export type DayBucket = 'today' | 'yesterday' | 'thisWeek' | 'older';
+
+const DAY_MS = 24 * 60 * 60 * 1000;
+const SUNDAY_START_LOCALES = new Set(['en-us', 'en-ca', 'en-ph', 'ja-jp', 'he-il', 'pt-br']);
+
+function weekStartDay(locale?: string): 0 | 1 {
+ if (!locale) return 1;
+ return SUNDAY_START_LOCALES.has(locale.toLowerCase()) ? 0 : 1;
+}
+
+function startOfDay(d: Date): Date {
+ const x = new Date(d);
+ x.setHours(0, 0, 0, 0);
+ return x;
+}
+
+function startOfWeek(d: Date, firstDay: 0 | 1): Date {
+ const x = startOfDay(d);
+ const diff = (x.getDay() - firstDay + 7) % 7;
+ x.setDate(x.getDate() - diff);
+ return x;
+}
+
+export function bucketByDay(date: Date, now: Date = new Date(), locale?: string): DayBucket {
+ const today = startOfDay(now);
+ const target = startOfDay(date);
+
+ if (target.getTime() === today.getTime()) return 'today';
+  if (Math.round((today.getTime() - target.getTime()) / DAY_MS) === 1) return 'yesterday';
+
+ const weekStart = startOfWeek(today, weekStartDay(locale));
+ if (target.getTime() >= weekStart.getTime()) return 'thisWeek';
+
+ return 'older';
+}
diff --git a/frontend/src/routes/chronik/+page.server.ts b/frontend/src/routes/chronik/+page.server.ts
new file mode 100644
index 00000000..71a03f00
--- /dev/null
+++ b/frontend/src/routes/chronik/+page.server.ts
@@ -0,0 +1,54 @@
+import { createApiClient } from '$lib/api.server';
+import type { components } from '$lib/generated/api';
+
+type ActivityFeedItemDTO = components['schemas']['ActivityFeedItemDTO'];
+type NotificationDTO = components['schemas']['NotificationDTO'];
+
+export type FilterValue = 'alle' | 'fuer-dich' | 'hochgeladen' | 'transkription' | 'kommentare';
+
+const VALID_FILTERS: FilterValue[] = [
+ 'alle',
+ 'fuer-dich',
+ 'hochgeladen',
+ 'transkription',
+ 'kommentare'
+];
+
+function parseFilter(raw: string | null): FilterValue {
+ if (raw && (VALID_FILTERS as string[]).includes(raw)) return raw as FilterValue;
+ return 'alle';
+}
+
+export async function load({ fetch, url }) {
+ const api = createApiClient(fetch);
+ const filter = parseFilter(url.searchParams.get('filter'));
+ const limit = Math.min(Number(url.searchParams.get('limit')) || 40, 40);
+
+ const [activityResult, unreadResult] = await Promise.allSettled([
+ api.GET('/api/dashboard/activity', { params: { query: { limit } } }),
+ api.GET('/api/notifications', {
+ params: { query: { read: false, page: 0, size: 20 } }
+ })
+ ]);
+
+ let activityFeed: ActivityFeedItemDTO[] = [];
+ let unreadNotifications: NotificationDTO[] = [];
+ let loadError: string | null = null;
+
+ if (activityResult.status === 'fulfilled' && activityResult.value.response.ok) {
+ activityFeed = (activityResult.value.data as ActivityFeedItemDTO[]) ?? [];
+ } else if (activityResult.status === 'fulfilled') {
+ loadError = 'activity';
+ }
+
+ if (unreadResult.status === 'fulfilled' && unreadResult.value.response.ok) {
+ unreadNotifications = unreadResult.value.data?.content ?? [];
+ }
+
+ return {
+ filter,
+ activityFeed,
+ unreadNotifications,
+ loadError
+ };
+}
diff --git a/frontend/src/routes/chronik/+page.svelte b/frontend/src/routes/chronik/+page.svelte
new file mode 100644
index 00000000..54b64d63
--- /dev/null
+++ b/frontend/src/routes/chronik/+page.svelte
@@ -0,0 +1,154 @@
+
+
+
+ {m.chronik_page_title()}
+
+
+
+
+ {m.chronik_page_title()}
+
+
+ {#if data.loadError === 'activity'}
+
+ {:else}
+
+
+
+
+
+
+ {#if isEmpty}
+
+
+
+ {:else}
+
+ {/if}
+ {/if}
+
diff --git a/frontend/src/routes/chronik/page.server.spec.ts b/frontend/src/routes/chronik/page.server.spec.ts
new file mode 100644
index 00000000..7a8dc135
--- /dev/null
+++ b/frontend/src/routes/chronik/page.server.spec.ts
@@ -0,0 +1,93 @@
+import { beforeEach, describe, expect, it, vi } from 'vitest';
+import { load } from './+page.server';
+
+const mockApi = {
+ GET: vi.fn()
+};
+
+vi.mock('$lib/api.server', () => ({
+ createApiClient: () => mockApi
+}));
+
+function buildUrl(search = ''): URL {
+ return new URL(`http://localhost/chronik${search}`);
+}
+
+beforeEach(() => {
+ vi.clearAllMocks();
+});
+
+describe('chronik/load', () => {
+ it('requests the activity feed with a 40-item limit', async () => {
+ mockApi.GET.mockImplementation((path: string) => {
+ if (path === '/api/dashboard/activity') {
+ return Promise.resolve({ response: { ok: true }, data: [] });
+ }
+ return Promise.resolve({ response: { ok: true }, data: { content: [] } });
+ });
+
+ await load({ fetch, url: buildUrl() } as never);
+
+ expect(mockApi.GET).toHaveBeenCalledWith('/api/dashboard/activity', {
+ params: { query: { limit: 40 } }
+ });
+ });
+
+ it('requests only unread notifications for Für-dich', async () => {
+ mockApi.GET.mockImplementation((path: string) => {
+ if (path === '/api/dashboard/activity') {
+ return Promise.resolve({ response: { ok: true }, data: [] });
+ }
+ return Promise.resolve({ response: { ok: true }, data: { content: [] } });
+ });
+
+ await load({ fetch, url: buildUrl() } as never);
+
+ expect(mockApi.GET).toHaveBeenCalledWith('/api/notifications', {
+ params: { query: { read: false, page: 0, size: 20 } }
+ });
+ });
+
+ it('returns the activity feed and unread notifications on success', async () => {
+ const feed = [{ kind: 'FILE_UPLOADED', documentId: 'd1' }];
+ const unread = [{ id: 'n1', type: 'MENTION' }];
+ mockApi.GET.mockImplementation((path: string) => {
+ if (path === '/api/dashboard/activity') {
+ return Promise.resolve({ response: { ok: true }, data: feed });
+ }
+ return Promise.resolve({ response: { ok: true }, data: { content: unread } });
+ });
+
+ const result = await load({ fetch, url: buildUrl() } as never);
+
+ expect(result.activityFeed).toEqual(feed);
+ expect(result.unreadNotifications).toEqual(unread);
+ expect(result.filter).toBe('alle');
+ expect(result.loadError).toBeNull();
+ });
+
+ it('surfaces "activity" loadError when the dashboard endpoint returns non-ok', async () => {
+ mockApi.GET.mockImplementation((path: string) => {
+ if (path === '/api/dashboard/activity') {
+ return Promise.resolve({ response: { ok: false, status: 500 }, error: {} });
+ }
+ return Promise.resolve({ response: { ok: true }, data: { content: [] } });
+ });
+
+ const result = await load({ fetch, url: buildUrl() } as never);
+
+ expect(result.loadError).toBe('activity');
+ expect(result.activityFeed).toEqual([]);
+ });
+
+ it('parses the filter query param, falling back to "alle" for invalid values', async () => {
+ mockApi.GET.mockResolvedValue({ response: { ok: true }, data: [] });
+
+ const validResult = await load({ fetch, url: buildUrl('?filter=fuer-dich') } as never);
+ expect(validResult.filter).toBe('fuer-dich');
+
+ mockApi.GET.mockResolvedValue({ response: { ok: true }, data: [] });
+ const invalidResult = await load({ fetch, url: buildUrl('?filter=bogus') } as never);
+ expect(invalidResult.filter).toBe('alle');
+ });
+});
diff --git a/frontend/src/routes/notifications/+page.server.ts b/frontend/src/routes/notifications/+page.server.ts
deleted file mode 100644
index 42485660..00000000
--- a/frontend/src/routes/notifications/+page.server.ts
+++ /dev/null
@@ -1,35 +0,0 @@
-import { error, redirect } from '@sveltejs/kit';
-import { createApiClient } from '$lib/api.server';
-import { getErrorMessage } from '$lib/errors';
-import type { PageServerLoad, Actions } from './$types';
-
-export const load: PageServerLoad = async ({ fetch, url }) => {
- const api = createApiClient(fetch);
-
- const type = url.searchParams.get('type') ?? undefined;
- const readParam = url.searchParams.get('read');
- const read = readParam !== null ? readParam === 'true' : undefined;
-
- const result = await api.GET('/api/notifications', {
- params: { query: { type: type as 'MENTION' | 'REPLY' | undefined, read, page: 0, size: 20 } }
- });
-
- if (!result.response.ok) {
- const code = (result.error as unknown as { code?: string })?.code;
- throw error(result.response.status, getErrorMessage(code));
- }
-
- const page = result.data!;
- const notifications = page.content ?? [];
- const unreadCount = notifications.filter((n) => !n.read).length;
-
- return { notifications, unreadCount, totalPages: page.totalPages ?? 1 };
-};
-
-export const actions: Actions = {
- 'mark-all': async ({ fetch }) => {
- const api = createApiClient(fetch);
- await api.POST('/api/notifications/read-all');
- redirect(303, '/notifications');
- }
-};
diff --git a/frontend/src/routes/notifications/+page.svelte b/frontend/src/routes/notifications/+page.svelte
deleted file mode 100644
index 42a35391..00000000
--- a/frontend/src/routes/notifications/+page.svelte
+++ /dev/null
@@ -1,279 +0,0 @@
-
-
-
- {m.notification_history_heading()}
-
-
-
-
-
-
-
- {m.btn_back_to_overview()}
-
-
-
-
-
- {m.notification_history_heading()}
-
- {#if data.unreadCount > 0}
-
- {/if}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {#if allNotifications.length === 0}
-
-
-
- {m.notification_empty_history()}
-
-
- {m.notification_empty_history_body()}
-
-
- {:else}
-
- {/if}
-
-
- {#if hasMore}
-
- {/if}
-
-
diff --git a/frontend/src/routes/notifications/page.server.spec.ts b/frontend/src/routes/notifications/page.server.spec.ts
deleted file mode 100644
index 05d4fb2a..00000000
--- a/frontend/src/routes/notifications/page.server.spec.ts
+++ /dev/null
@@ -1,136 +0,0 @@
-import { describe, expect, it, vi, beforeEach } from 'vitest';
-
-vi.mock('$lib/api.server', () => ({ createApiClient: vi.fn() }));
-
-import { load, actions } from './+page.server';
-import { createApiClient } from '$lib/api.server';
-
-beforeEach(() => vi.clearAllMocks());
-
-function makeUrl(params: Record<string, string> = {}) {
- const url = new URL('http://localhost/notifications');
- for (const [key, value] of Object.entries(params)) {
- url.searchParams.set(key, value);
- }
- return url;
-}
-
-// ─── load ─────────────────────────────────────────────────────────────────────
-
-describe('notifications page load', () => {
- it('returns notifications and unreadCount from API response', async () => {
- const mockGet = vi.fn().mockResolvedValueOnce({
- response: { ok: true },
- data: {
- content: [
- { id: 'n1', read: false },
- { id: 'n2', read: true },
- { id: 'n3', read: false }
- ],
- totalElements: 3,
- totalPages: 1,
- number: 0
- }
- });
- vi.mocked(createApiClient).mockReturnValue({ GET: mockGet } as ReturnType<
- typeof createApiClient
- >);
-
- const result = await load({ url: makeUrl(), fetch: vi.fn() as unknown as typeof fetch });
-
- expect(result.notifications).toHaveLength(3);
- expect(result.unreadCount).toBe(2);
- });
-
- it('passes type param to API when ?type=MENTION is in URL', async () => {
- const mockGet = vi.fn().mockResolvedValueOnce({
- response: { ok: true },
- data: { content: [], totalElements: 0, totalPages: 0, number: 0 }
- });
- vi.mocked(createApiClient).mockReturnValue({ GET: mockGet } as ReturnType<
- typeof createApiClient
- >);
-
- await load({ url: makeUrl({ type: 'MENTION' }), fetch: vi.fn() as unknown as typeof fetch });
-
- const queryParams = mockGet.mock.calls[0][1].params.query;
- expect(queryParams.type).toBe('MENTION');
- });
-
- it('passes read=false to API when ?read=false is in URL', async () => {
- const mockGet = vi.fn().mockResolvedValueOnce({
- response: { ok: true },
- data: { content: [], totalElements: 0, totalPages: 0, number: 0 }
- });
- vi.mocked(createApiClient).mockReturnValue({ GET: mockGet } as ReturnType<
- typeof createApiClient
- >);
-
- await load({ url: makeUrl({ read: 'false' }), fetch: vi.fn() as unknown as typeof fetch });
-
- const queryParams = mockGet.mock.calls[0][1].params.query;
- expect(queryParams.read).toBe(false);
- });
-
- it('passes no filter params when no search params present', async () => {
- const mockGet = vi.fn().mockResolvedValueOnce({
- response: { ok: true },
- data: { content: [], totalElements: 0, totalPages: 0, number: 0 }
- });
- vi.mocked(createApiClient).mockReturnValue({ GET: mockGet } as ReturnType<
- typeof createApiClient
- >);
-
- await load({ url: makeUrl(), fetch: vi.fn() as unknown as typeof fetch });
-
- const queryParams = mockGet.mock.calls[0][1].params.query;
- expect(queryParams.type).toBeUndefined();
- expect(queryParams.read).toBeUndefined();
- });
-
- it('calls the API exactly once — no separate round-trip for unreadCount', async () => {
- const mockGet = vi.fn().mockResolvedValueOnce({
- response: { ok: true },
- data: { content: [], totalElements: 0, totalPages: 0, number: 0 }
- });
- vi.mocked(createApiClient).mockReturnValue({ GET: mockGet } as ReturnType<
- typeof createApiClient
- >);
-
- await load({ url: makeUrl(), fetch: vi.fn() as unknown as typeof fetch });
-
- expect(mockGet).toHaveBeenCalledTimes(1);
- });
-
- it('throws 401 error when API returns 401', async () => {
- const mockGet = vi.fn().mockResolvedValueOnce({
- response: { ok: false, status: 401 },
- data: null
- });
- vi.mocked(createApiClient).mockReturnValue({ GET: mockGet } as ReturnType<
- typeof createApiClient
- >);
-
- await expect(
- load({ url: makeUrl(), fetch: vi.fn() as unknown as typeof fetch })
- ).rejects.toMatchObject({ status: 401 });
- });
-});
-
-// ─── mark-all action ──────────────────────────────────────────────────────────
-
-describe('notifications mark-all action', () => {
- it('calls POST /api/notifications/read-all and redirects', async () => {
- const mockPost = vi.fn().mockResolvedValueOnce({ response: { ok: true } });
- vi.mocked(createApiClient).mockReturnValue({ POST: mockPost } as ReturnType<
- typeof createApiClient
- >);
-
- const markAll = actions['mark-all'] as (ctx: { fetch: typeof fetch }) => Promise;
- await expect(markAll({ fetch: vi.fn() as unknown as typeof fetch })).rejects.toMatchObject({
- location: '/notifications'
- });
-
- expect(mockPost).toHaveBeenCalledTimes(1);
- });
-});
diff --git a/frontend/src/routes/profile/+page.svelte b/frontend/src/routes/profile/+page.svelte
index 39de19a7..24d25c12 100644
--- a/frontend/src/routes/profile/+page.svelte
+++ b/frontend/src/routes/profile/+page.svelte
@@ -102,10 +102,7 @@ const hasEmail = $derived(!!data.user?.email);