From 10f092c906d17451c7e407b76757323566df3321 Mon Sep 17 00:00:00 2001
From: Marcel
Date: Mon, 20 Apr 2026 15:53:58 +0200
Subject: [PATCH 01/19] refactor(audit): extend activity feed row/DTO with
count and happenedAtUntil (singletons default)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Prepares the activity feed data shape for session-style rollup (#285). Adds two
new fields that carry no-op defaults under the existing hour-truncated
dedupe query:
- count: int (required) — always 1 for singleton rows
- happenedAtUntil: OffsetDateTime (nullable) — end-of-session timestamp for
future rollup rows; null for singletons
No behavioral change yet — the rollup SQL rewrite lands in a follow-up commit.
Co-Authored-By: Claude Opus 4.7 (1M context)
---
.../org/raddatz/familienarchiv/audit/ActivityFeedRow.java | 2 ++
.../familienarchiv/audit/AuditLogQueryRepository.java | 4 +++-
.../familienarchiv/dashboard/ActivityFeedItemDTO.java | 4 +++-
.../raddatz/familienarchiv/dashboard/DashboardService.java | 7 ++++++-
.../dashboard/AuditLogQueryRepositoryIntegrationTest.java | 2 ++
.../familienarchiv/dashboard/DashboardServiceTest.java | 2 ++
6 files changed, 18 insertions(+), 3 deletions(-)
diff --git a/backend/src/main/java/org/raddatz/familienarchiv/audit/ActivityFeedRow.java b/backend/src/main/java/org/raddatz/familienarchiv/audit/ActivityFeedRow.java
index 384b311e..59cf64c9 100644
--- a/backend/src/main/java/org/raddatz/familienarchiv/audit/ActivityFeedRow.java
+++ b/backend/src/main/java/org/raddatz/familienarchiv/audit/ActivityFeedRow.java
@@ -12,4 +12,6 @@ public interface ActivityFeedRow {
UUID getDocumentId();
Instant getHappenedAt();
boolean isYouMentioned();
+ int getCount();
+ Instant getHappenedAtUntil();
}
diff --git a/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryRepository.java b/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryRepository.java
index 2d725c69..82ba27a2 100644
--- a/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryRepository.java
+++ b/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryRepository.java
@@ -39,7 +39,9 @@ public interface AuditLogQueryRepository extends JpaRepository {
a.document_id AS documentId,
a.happened_at AS happened_at,
(a.kind = 'MENTION_CREATED'
- AND a.payload->>'mentionedUserId' = :currentUserId) AS youMentioned
+ AND a.payload->>'mentionedUserId' = :currentUserId) AS youMentioned,
+ 1 AS count,
+ CAST(NULL AS TIMESTAMPTZ) AS happenedAtUntil
FROM audit_log a
LEFT JOIN users u ON u.id = a.actor_id
WHERE a.kind IN ('TEXT_SAVED','FILE_UPLOADED','ANNOTATION_CREATED','COMMENT_ADDED','MENTION_CREATED')
diff --git a/backend/src/main/java/org/raddatz/familienarchiv/dashboard/ActivityFeedItemDTO.java b/backend/src/main/java/org/raddatz/familienarchiv/dashboard/ActivityFeedItemDTO.java
index 0fcdd312..444c838d 100644
--- a/backend/src/main/java/org/raddatz/familienarchiv/dashboard/ActivityFeedItemDTO.java
+++ b/backend/src/main/java/org/raddatz/familienarchiv/dashboard/ActivityFeedItemDTO.java
@@ -14,5 +14,7 @@ public record ActivityFeedItemDTO(
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID documentId,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String documentTitle,
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) OffsetDateTime happenedAt,
- @Schema(requiredMode = Schema.RequiredMode.REQUIRED) boolean youMentioned
+ @Schema(requiredMode = Schema.RequiredMode.REQUIRED) boolean youMentioned,
+ @Schema(requiredMode = Schema.RequiredMode.REQUIRED) int count,
+ @Nullable OffsetDateTime happenedAtUntil
) {}
diff --git a/backend/src/main/java/org/raddatz/familienarchiv/dashboard/DashboardService.java b/backend/src/main/java/org/raddatz/familienarchiv/dashboard/DashboardService.java
index d749a164..b7be271f 100644
--- a/backend/src/main/java/org/raddatz/familienarchiv/dashboard/DashboardService.java
+++ b/backend/src/main/java/org/raddatz/familienarchiv/dashboard/DashboardService.java
@@ -130,13 +130,18 @@ public class DashboardService {
? new ActivityActorDTO(row.getActorInitials(), row.getActorColor(), row.getActorName())
: null;
String docTitle = titleCache.getOrDefault(row.getDocumentId(), "");
+ OffsetDateTime happenedAtUntil = row.getHappenedAtUntil() != null
+ ? row.getHappenedAtUntil().atOffset(ZoneOffset.UTC)
+ : null;
return new ActivityFeedItemDTO(
org.raddatz.familienarchiv.audit.AuditKind.valueOf(row.getKind()),
actor,
row.getDocumentId(),
docTitle,
row.getHappenedAt().atOffset(ZoneOffset.UTC),
- row.isYouMentioned()
+ row.isYouMentioned(),
+ row.getCount(),
+ happenedAtUntil
);
}).toList();
}
diff --git a/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryIntegrationTest.java b/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryIntegrationTest.java
index 5875baf5..3862c0d7 100644
--- a/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryIntegrationTest.java
+++ b/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryIntegrationTest.java
@@ -56,6 +56,8 @@ class AuditLogQueryRepositoryIntegrationTest {
assertThat(rows.get(0).getKind()).isEqualTo("ANNOTATION_CREATED");
assertThat(rows.get(0).getDocumentId()).isEqualTo(DOC_ID);
assertThat(rows.get(0).getHappenedAt()).isNotNull();
+ assertThat(rows.get(0).getCount()).isEqualTo(1);
+ assertThat(rows.get(0).getHappenedAtUntil()).isNull();
}
@Test
diff --git a/backend/src/test/java/org/raddatz/familienarchiv/dashboard/DashboardServiceTest.java b/backend/src/test/java/org/raddatz/familienarchiv/dashboard/DashboardServiceTest.java
index c62fdb8c..19692ad4 100644
--- a/backend/src/test/java/org/raddatz/familienarchiv/dashboard/DashboardServiceTest.java
+++ b/backend/src/test/java/org/raddatz/familienarchiv/dashboard/DashboardServiceTest.java
@@ -104,6 +104,8 @@ class DashboardServiceTest {
public UUID getDocumentId() { return docId; }
public Instant getHappenedAt() { return Instant.now(); }
public boolean isYouMentioned() { return false; }
+ public int getCount() { return 1; }
+ public Instant getHappenedAtUntil() { return null; }
};
}
}
--
2.49.1
From 1c2dd518b3cb9d98345066ddc7dd13494f658887 Mon Sep 17 00:00:00 2001
From: Marcel
Date: Mon, 20 Apr 2026 16:02:16 +0200
Subject: [PATCH 02/19] feat(audit): replace hour-trunc dedupe with LAG()
session rollup (120-min gap)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Rewrites the activity feed query to group consecutive events on the same
(actor, document, kind) into sessions separated by >120 min gaps. A session
becomes one row with count = events-in-session and happenedAtUntil = last
event timestamp. Singletons keep count=1 / happenedAtUntil=null.
Algorithm: LAG() to get the previous event's timestamp in the same partition,
mark a new session when gap > 7200s, then SUM() over an unbounded preceding
window yields a running session_id. Aggregation groups by session_id.
COMMENT_ADDED and MENTION_CREATED always start a new session — these kinds
never roll up so each event stays its own row.
Also adds BLOCK_REVIEWED to the eligible-kinds WHERE clause (Chronik spec §02)
so reviewed blocks appear in the activity feed.
Five new integration tests cover combine-within-2h, split-at-boundary,
no-hard-cap-on-long-session, never-rolls-up-comments/mentions, and the
count/happenedAtUntil contract on both singletons and rollups.
Part of #285.
Co-Authored-By: Claude Opus 4.7 (1M context)
---
.../audit/AuditLogQueryRepository.java | 94 +++++++---
.../AuditLogQueryRepositoryRolledUpTest.java | 162 ++++++++++++++++++
2 files changed, 231 insertions(+), 25 deletions(-)
create mode 100644 backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryRolledUpTest.java
diff --git a/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryRepository.java b/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryRepository.java
index 82ba27a2..cc5df3ec 100644
--- a/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryRepository.java
+++ b/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryRepository.java
@@ -23,33 +23,77 @@ public interface AuditLogQueryRepository extends JpaRepository {
Optional findMostRecentDocumentIdByActor(@Param("userId") UUID userId);
@Query(value = """
- SELECT * FROM (
- SELECT DISTINCT ON (a.actor_id, a.document_id, a.kind, date_trunc('hour', a.happened_at))
- a.kind AS kind,
- a.actor_id AS actorId,
- CASE
- WHEN u.first_name IS NOT NULL AND u.last_name IS NOT NULL
- THEN UPPER(LEFT(u.first_name, 1)) || UPPER(LEFT(u.last_name, 1))
- WHEN u.first_name IS NOT NULL THEN UPPER(LEFT(u.first_name, 1))
- WHEN u.last_name IS NOT NULL THEN UPPER(LEFT(u.last_name, 1))
- ELSE '?'
- END AS actorInitials,
- COALESCE(u.color, '') AS actorColor,
- CONCAT_WS(' ', u.first_name, u.last_name) AS actorName,
- a.document_id AS documentId,
- a.happened_at AS happened_at,
- (a.kind = 'MENTION_CREATED'
- AND a.payload->>'mentionedUserId' = :currentUserId) AS youMentioned,
- 1 AS count,
- CAST(NULL AS TIMESTAMPTZ) AS happenedAtUntil
+ WITH events AS (
+ SELECT
+ a.kind,
+ a.actor_id,
+ a.document_id,
+ a.happened_at,
+ a.payload,
+ LAG(a.happened_at) OVER (
+ PARTITION BY a.actor_id, a.document_id, a.kind
+ ORDER BY a.happened_at
+ ) AS prev_happened_at
FROM audit_log a
- LEFT JOIN users u ON u.id = a.actor_id
- WHERE a.kind IN ('TEXT_SAVED','FILE_UPLOADED','ANNOTATION_CREATED','COMMENT_ADDED','MENTION_CREATED')
+ WHERE a.kind IN ('TEXT_SAVED','FILE_UPLOADED','ANNOTATION_CREATED',
+ 'BLOCK_REVIEWED','COMMENT_ADDED','MENTION_CREATED')
AND a.document_id IS NOT NULL
- ORDER BY a.actor_id, a.document_id, a.kind,
- date_trunc('hour', a.happened_at), a.happened_at DESC
- ) deduped
- ORDER BY happened_at DESC
+ ),
+ sessions_marked AS (
+ SELECT
+ kind, actor_id, document_id, happened_at, payload,
+ CASE
+ WHEN kind IN ('COMMENT_ADDED','MENTION_CREATED') THEN 1
+ WHEN prev_happened_at IS NULL THEN 1
+ WHEN EXTRACT(EPOCH FROM (happened_at - prev_happened_at)) > 7200 THEN 1
+ ELSE 0
+ END AS is_new_session
+ FROM events
+ ),
+ sessions AS (
+ SELECT
+ kind, actor_id, document_id, happened_at, payload,
+ SUM(is_new_session) OVER (
+ PARTITION BY actor_id, document_id, kind
+ ORDER BY happened_at
+ ROWS UNBOUNDED PRECEDING
+ ) AS session_id
+ FROM sessions_marked
+ ),
+ aggregated AS (
+ SELECT
+ s.kind,
+ s.actor_id,
+ s.document_id,
+ s.session_id,
+ MIN(s.happened_at) AS happened_at,
+ CASE WHEN COUNT(*) > 1 THEN MAX(s.happened_at) ELSE NULL END AS happened_at_until,
+ COUNT(*)::int AS count,
+ BOOL_OR(s.kind = 'MENTION_CREATED'
+ AND s.payload->>'mentionedUserId' = :currentUserId) AS you_mentioned
+ FROM sessions s
+ GROUP BY s.kind, s.actor_id, s.document_id, s.session_id
+ )
+ SELECT
+ ag.kind AS kind,
+ ag.actor_id AS actorId,
+ CASE
+ WHEN u.first_name IS NOT NULL AND u.last_name IS NOT NULL
+ THEN UPPER(LEFT(u.first_name, 1)) || UPPER(LEFT(u.last_name, 1))
+ WHEN u.first_name IS NOT NULL THEN UPPER(LEFT(u.first_name, 1))
+ WHEN u.last_name IS NOT NULL THEN UPPER(LEFT(u.last_name, 1))
+ ELSE '?'
+ END AS actorInitials,
+ COALESCE(u.color, '') AS actorColor,
+ CONCAT_WS(' ', u.first_name, u.last_name) AS actorName,
+ ag.document_id AS documentId,
+ ag.happened_at AS happened_at,
+ ag.you_mentioned AS youMentioned,
+ ag.count AS count,
+ ag.happened_at_until AS happenedAtUntil
+ FROM aggregated ag
+ LEFT JOIN users u ON u.id = ag.actor_id
+ ORDER BY ag.happened_at DESC
LIMIT :limit
""", nativeQuery = true)
List findDedupedActivityFeed(
diff --git a/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryRolledUpTest.java b/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryRolledUpTest.java
new file mode 100644
index 00000000..353a4bd4
--- /dev/null
+++ b/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryRolledUpTest.java
@@ -0,0 +1,162 @@
+package org.raddatz.familienarchiv.dashboard;
+
+import org.junit.jupiter.api.Test;
+import org.raddatz.familienarchiv.PostgresContainerConfig;
+import org.raddatz.familienarchiv.audit.ActivityFeedRow;
+import org.raddatz.familienarchiv.audit.AuditLogQueryRepository;
+import org.raddatz.familienarchiv.config.FlywayConfig;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
+import org.springframework.boot.test.autoconfigure.jdbc.AutoConfigureTestDatabase;
+import org.springframework.context.annotation.Import;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
+import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
+import org.springframework.transaction.annotation.Transactional;
+
+import java.time.Instant;
+import java.time.OffsetDateTime;
+import java.util.List;
+import java.util.UUID;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+@DataJpaTest
+@AutoConfigureTestDatabase(replace = AutoConfigureTestDatabase.Replace.NONE)
+@Import({PostgresContainerConfig.class, FlywayConfig.class})
+@Transactional
+class AuditLogQueryRepositoryRolledUpTest {
+
+ static final UUID USER_ID = UUID.fromString("dddddddd-dddd-dddd-dddd-dddddddddddd");
+ static final UUID DOC_ID = UUID.fromString("eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee");
+ static final UUID OTHER_DOC_ID = UUID.fromString("ffffffff-ffff-ffff-ffff-ffffffffffff");
+
+ @Autowired AuditLogQueryRepository auditLogQueryRepository;
+ @Autowired JdbcTemplate jdbcTemplate;
+
+ private NamedParameterJdbcTemplate named() {
+ return new NamedParameterJdbcTemplate(jdbcTemplate);
+ }
+
+ private void insertUserAndDocs() {
+ jdbcTemplate.update(
+ "INSERT INTO users (id, enabled, email, password) VALUES (?, true, ?, 'pw')",
+ USER_ID, "rollup-" + USER_ID + "@test.com");
+ jdbcTemplate.update(
+ "INSERT INTO documents (id, title, original_filename, status) VALUES (?, 'Brief A', 'a.pdf', 'PLACEHOLDER')",
+ DOC_ID);
+ jdbcTemplate.update(
+ "INSERT INTO documents (id, title, original_filename, status) VALUES (?, 'Brief B', 'b.pdf', 'PLACEHOLDER')",
+ OTHER_DOC_ID);
+ }
+
+ private void insertAuditEvent(UUID actorId, UUID docId, String kind, Instant happenedAt) {
+ MapSqlParameterSource params = new MapSqlParameterSource()
+ .addValue("kind", kind)
+ .addValue("actor", actorId)
+ .addValue("doc", docId)
+ .addValue("t", OffsetDateTime.ofInstant(happenedAt, java.time.ZoneOffset.UTC));
+ named().update(
+ "INSERT INTO audit_log (kind, actor_id, document_id, happened_at) "
+ + "VALUES (:kind, :actor, :doc, :t)",
+ params);
+ }
+
+ @Test
+ void rolledUpFeed_combines_same_actor_same_doc_within_2h() {
+ insertUserAndDocs();
+ Instant base = Instant.parse("2026-04-20T09:00:00Z");
+ for (int i = 0; i < 20; i++) {
+ insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", base.plusSeconds(i * 480L));
+ }
+
+ List rows = auditLogQueryRepository.findDedupedActivityFeed(USER_ID.toString(), 40);
+
+ assertThat(rows).hasSize(1);
+ ActivityFeedRow row = rows.get(0);
+ assertThat(row.getKind()).isEqualTo("TEXT_SAVED");
+ assertThat(row.getDocumentId()).isEqualTo(DOC_ID);
+ assertThat(row.getCount()).isEqualTo(20);
+ assertThat(row.getHappenedAt()).isEqualTo(base);
+ assertThat(row.getHappenedAtUntil()).isEqualTo(base.plusSeconds(19 * 480L));
+ }
+
+ @Test
+ void rolledUpFeed_splits_at_2h_boundary() {
+ insertUserAndDocs();
+ Instant sessionOneStart = Instant.parse("2026-04-20T08:00:00Z");
+ Instant sessionOneLast = sessionOneStart.plusSeconds(600);
+ insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", sessionOneStart);
+ insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", sessionOneLast);
+ Instant sessionTwoStart = sessionOneLast.plusSeconds(2L * 60L * 60L + 60L);
+ insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", sessionTwoStart);
+ insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", sessionTwoStart.plusSeconds(300));
+
+ List rows = auditLogQueryRepository.findDedupedActivityFeed(USER_ID.toString(), 40);
+
+ assertThat(rows).hasSize(2);
+ assertThat(rows.get(0).getCount()).isEqualTo(2);
+ assertThat(rows.get(0).getHappenedAt()).isEqualTo(sessionTwoStart);
+ assertThat(rows.get(1).getCount()).isEqualTo(2);
+ assertThat(rows.get(1).getHappenedAt()).isEqualTo(sessionOneStart);
+ }
+
+ @Test
+ void rolledUpFeed_has_no_hard_cap_on_long_session() {
+ insertUserAndDocs();
+ Instant base = Instant.parse("2026-04-20T06:00:00Z");
+ for (int i = 0; i < 30; i++) {
+ insertAuditEvent(USER_ID, DOC_ID, "ANNOTATION_CREATED", base.plusSeconds(i * 60L * 30L));
+ }
+
+ List rows = auditLogQueryRepository.findDedupedActivityFeed(USER_ID.toString(), 40);
+
+ assertThat(rows).hasSize(1);
+ assertThat(rows.get(0).getCount()).isEqualTo(30);
+ assertThat(rows.get(0).getHappenedAt()).isEqualTo(base);
+ assertThat(rows.get(0).getHappenedAtUntil()).isEqualTo(base.plusSeconds(29 * 60L * 30L));
+ }
+
+ @Test
+ void rolledUpFeed_never_rolls_up_COMMENT_ADDED_or_MENTION_CREATED() {
+ insertUserAndDocs();
+ Instant base = Instant.parse("2026-04-20T10:00:00Z");
+ insertAuditEvent(USER_ID, DOC_ID, "COMMENT_ADDED", base);
+ insertAuditEvent(USER_ID, DOC_ID, "COMMENT_ADDED", base.plusSeconds(60));
+ insertAuditEvent(USER_ID, DOC_ID, "COMMENT_ADDED", base.plusSeconds(120));
+
+ List rows = auditLogQueryRepository.findDedupedActivityFeed(USER_ID.toString(), 40);
+
+ assertThat(rows).hasSize(3);
+ assertThat(rows).allSatisfy(r -> {
+ assertThat(r.getKind()).isEqualTo("COMMENT_ADDED");
+ assertThat(r.getCount()).isEqualTo(1);
+ assertThat(r.getHappenedAtUntil()).isNull();
+ });
+ }
+
+ @Test
+ void rolledUpFeed_exposes_count_and_happenedAtUntil_on_singletons_and_rollups() {
+ insertUserAndDocs();
+ Instant rollupStart = Instant.parse("2026-04-20T11:00:00Z");
+ insertAuditEvent(USER_ID, DOC_ID, "FILE_UPLOADED", rollupStart);
+ insertAuditEvent(USER_ID, DOC_ID, "FILE_UPLOADED", rollupStart.plusSeconds(300));
+ insertAuditEvent(USER_ID, OTHER_DOC_ID, "FILE_UPLOADED", rollupStart.plusSeconds(900));
+
+ List rows = auditLogQueryRepository.findDedupedActivityFeed(USER_ID.toString(), 40);
+
+ assertThat(rows).hasSize(2);
+ assertThat(rows).anySatisfy(r -> {
+ assertThat(r.getDocumentId()).isEqualTo(DOC_ID);
+ assertThat(r.getCount()).isEqualTo(2);
+ assertThat(r.getHappenedAt()).isEqualTo(rollupStart);
+ assertThat(r.getHappenedAtUntil()).isEqualTo(rollupStart.plusSeconds(300));
+ });
+ assertThat(rows).anySatisfy(r -> {
+ assertThat(r.getDocumentId()).isEqualTo(OTHER_DOC_ID);
+ assertThat(r.getCount()).isEqualTo(1);
+ assertThat(r.getHappenedAt()).isEqualTo(rollupStart.plusSeconds(900));
+ assertThat(r.getHappenedAtUntil()).isNull();
+ });
+ }
+}
--
2.49.1
From 372c839266087cee4b531ca78cea577bf1477e32 Mon Sep 17 00:00:00 2001
From: Marcel
Date: Mon, 20 Apr 2026 16:04:39 +0200
Subject: [PATCH 03/19] refactor(audit): rename findDedupedActivityFeed to
findRolledUpActivityFeed
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
The method no longer deduplicates by hour-trunc — it performs session-style
rollup via LAG()+120-min gap. Rename aligns the public name with the
behavior.
Part of #285.
Co-Authored-By: Claude Opus 4.7 (1M context)
---
.../familienarchiv/audit/AuditLogQueryRepository.java | 2 +-
.../familienarchiv/audit/AuditLogQueryService.java | 2 +-
.../AuditLogQueryRepositoryIntegrationTest.java | 4 ++--
.../dashboard/AuditLogQueryRepositoryRolledUpTest.java | 10 +++++-----
4 files changed, 9 insertions(+), 9 deletions(-)
diff --git a/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryRepository.java b/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryRepository.java
index cc5df3ec..59e930d3 100644
--- a/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryRepository.java
+++ b/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryRepository.java
@@ -96,7 +96,7 @@ public interface AuditLogQueryRepository extends JpaRepository {
ORDER BY ag.happened_at DESC
LIMIT :limit
""", nativeQuery = true)
- List findDedupedActivityFeed(
+ List findRolledUpActivityFeed(
@Param("currentUserId") String currentUserId,
@Param("limit") int limit);
diff --git a/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryService.java b/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryService.java
index c007f4bb..930ef1a4 100644
--- a/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryService.java
+++ b/backend/src/main/java/org/raddatz/familienarchiv/audit/AuditLogQueryService.java
@@ -17,7 +17,7 @@ public class AuditLogQueryService {
}
public List findActivityFeed(UUID currentUserId, int limit) {
- return queryRepository.findDedupedActivityFeed(currentUserId.toString(), limit);
+ return queryRepository.findRolledUpActivityFeed(currentUserId.toString(), limit);
}
public PulseStatsRow getPulseStats(OffsetDateTime weekStart, UUID userId) {
diff --git a/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryIntegrationTest.java b/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryIntegrationTest.java
index 3862c0d7..e73553c5 100644
--- a/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryIntegrationTest.java
+++ b/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryIntegrationTest.java
@@ -49,8 +49,8 @@ class AuditLogQueryRepositoryIntegrationTest {
"INSERT INTO documents (id, title, original_filename, status) VALUES ('bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', 'Test Doc', 'test.pdf', 'PLACEHOLDER')",
"INSERT INTO audit_log (kind, actor_id, document_id, payload) VALUES ('ANNOTATION_CREATED', 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb', '{\"pageNumber\":1}')"
})
- void findDedupedActivityFeed_returnsAnnotationEntry() {
- List rows = auditLogQueryRepository.findDedupedActivityFeed(USER_ID.toString(), 10);
+ void findRolledUpActivityFeed_returnsAnnotationEntry() {
+ List rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 10);
assertThat(rows).hasSize(1);
assertThat(rows.get(0).getKind()).isEqualTo("ANNOTATION_CREATED");
diff --git a/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryRolledUpTest.java b/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryRolledUpTest.java
index 353a4bd4..22b5f89a 100644
--- a/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryRolledUpTest.java
+++ b/backend/src/test/java/org/raddatz/familienarchiv/dashboard/AuditLogQueryRepositoryRolledUpTest.java
@@ -70,7 +70,7 @@ class AuditLogQueryRepositoryRolledUpTest {
insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", base.plusSeconds(i * 480L));
}
- List rows = auditLogQueryRepository.findDedupedActivityFeed(USER_ID.toString(), 40);
+ List rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40);
assertThat(rows).hasSize(1);
ActivityFeedRow row = rows.get(0);
@@ -92,7 +92,7 @@ class AuditLogQueryRepositoryRolledUpTest {
insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", sessionTwoStart);
insertAuditEvent(USER_ID, DOC_ID, "TEXT_SAVED", sessionTwoStart.plusSeconds(300));
- List rows = auditLogQueryRepository.findDedupedActivityFeed(USER_ID.toString(), 40);
+ List rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40);
assertThat(rows).hasSize(2);
assertThat(rows.get(0).getCount()).isEqualTo(2);
@@ -109,7 +109,7 @@ class AuditLogQueryRepositoryRolledUpTest {
insertAuditEvent(USER_ID, DOC_ID, "ANNOTATION_CREATED", base.plusSeconds(i * 60L * 30L));
}
- List rows = auditLogQueryRepository.findDedupedActivityFeed(USER_ID.toString(), 40);
+ List rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40);
assertThat(rows).hasSize(1);
assertThat(rows.get(0).getCount()).isEqualTo(30);
@@ -125,7 +125,7 @@ class AuditLogQueryRepositoryRolledUpTest {
insertAuditEvent(USER_ID, DOC_ID, "COMMENT_ADDED", base.plusSeconds(60));
insertAuditEvent(USER_ID, DOC_ID, "COMMENT_ADDED", base.plusSeconds(120));
- List rows = auditLogQueryRepository.findDedupedActivityFeed(USER_ID.toString(), 40);
+ List rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40);
assertThat(rows).hasSize(3);
assertThat(rows).allSatisfy(r -> {
@@ -143,7 +143,7 @@ class AuditLogQueryRepositoryRolledUpTest {
insertAuditEvent(USER_ID, DOC_ID, "FILE_UPLOADED", rollupStart.plusSeconds(300));
insertAuditEvent(USER_ID, OTHER_DOC_ID, "FILE_UPLOADED", rollupStart.plusSeconds(900));
- List rows = auditLogQueryRepository.findDedupedActivityFeed(USER_ID.toString(), 40);
+ List rows = auditLogQueryRepository.findRolledUpActivityFeed(USER_ID.toString(), 40);
assertThat(rows).hasSize(2);
assertThat(rows).anySatisfy(r -> {
--
2.49.1
From 2a5c402ef6255354a9e6658041ff1574f457856e Mon Sep 17 00:00:00 2001
From: Marcel
Date: Mon, 20 Apr 2026 16:08:35 +0200
Subject: [PATCH 04/19] feat(audit): add V49 rollup covering index + raise
/api/dashboard/activity cap to 40
- V49__add_audit_log_rollup_index.sql: partial covering index on
(actor_id, document_id, kind, happened_at DESC) filtered by the 6 rollup
kinds. Matches the WHERE clause of findRolledUpActivityFeed exactly so the
session-grouping window scan is index-backed.
- DashboardController: clamp limit to 40 (was 20). Chronik requests up to 40
activity items per page; dashboard side-rail still passes 7.
Part of #285.
Co-Authored-By: Claude Opus 4.7 (1M context)
---
.../dashboard/DashboardController.java | 2 +-
.../migration/V49__add_audit_log_rollup_index.sql | 7 +++++++
.../dashboard/DashboardControllerTest.java | 14 ++++++++++++++
3 files changed, 22 insertions(+), 1 deletion(-)
create mode 100644 backend/src/main/resources/db/migration/V49__add_audit_log_rollup_index.sql
diff --git a/backend/src/main/java/org/raddatz/familienarchiv/dashboard/DashboardController.java b/backend/src/main/java/org/raddatz/familienarchiv/dashboard/DashboardController.java
index 1869c2f4..b7b34b7e 100644
--- a/backend/src/main/java/org/raddatz/familienarchiv/dashboard/DashboardController.java
+++ b/backend/src/main/java/org/raddatz/familienarchiv/dashboard/DashboardController.java
@@ -37,6 +37,6 @@ public class DashboardController {
Authentication authentication,
@RequestParam(defaultValue = "7") int limit) {
UUID userId = SecurityUtils.requireUserId(authentication, userService);
- return dashboardService.getActivity(userId, Math.min(limit, 20));
+ return dashboardService.getActivity(userId, Math.min(limit, 40));
}
}
diff --git a/backend/src/main/resources/db/migration/V49__add_audit_log_rollup_index.sql b/backend/src/main/resources/db/migration/V49__add_audit_log_rollup_index.sql
new file mode 100644
index 00000000..7327df6a
--- /dev/null
+++ b/backend/src/main/resources/db/migration/V49__add_audit_log_rollup_index.sql
@@ -0,0 +1,7 @@
+-- Partial covering index for the session-style activity feed rollup (#285).
+-- Matches the WHERE clause of AuditLogQueryRepository.findRolledUpActivityFeed
+-- exactly. happened_at last in the key feeds the window scan pre-ordered per partition.
+CREATE INDEX idx_audit_log_rollup
+ ON audit_log (actor_id, document_id, kind, happened_at DESC)
+ WHERE kind IN ('TEXT_SAVED','FILE_UPLOADED','ANNOTATION_CREATED',
+ 'BLOCK_REVIEWED','COMMENT_ADDED','MENTION_CREATED');
diff --git a/backend/src/test/java/org/raddatz/familienarchiv/dashboard/DashboardControllerTest.java b/backend/src/test/java/org/raddatz/familienarchiv/dashboard/DashboardControllerTest.java
index 0f1e4922..1bfc37ce 100644
--- a/backend/src/test/java/org/raddatz/familienarchiv/dashboard/DashboardControllerTest.java
+++ b/backend/src/test/java/org/raddatz/familienarchiv/dashboard/DashboardControllerTest.java
@@ -140,4 +140,18 @@ class DashboardControllerTest {
.andExpect(status().isOk())
.andExpect(jsonPath("$").isArray());
}
+
+ @Test
+ @WithMockUser(authorities = "READ_ALL")
+ void activity_clamps_limit_to_40() throws Exception {
+ UUID userId = UUID.randomUUID();
+ when(userService.findByEmail(any())).thenReturn(
+ AppUser.builder().id(userId).email("u@test.com").password("pw").build());
+ when(dashboardService.getActivity(any(UUID.class), anyInt())).thenReturn(List.of());
+
+ mockMvc.perform(get("/api/dashboard/activity").param("limit", "9999"))
+ .andExpect(status().isOk());
+
+ org.mockito.Mockito.verify(dashboardService).getActivity(any(UUID.class), org.mockito.ArgumentMatchers.eq(40));
+ }
}
--
2.49.1
From 3d53974d762ee4eea2a4776b0d9c80ee975df0ac Mon Sep 17 00:00:00 2001
From: Marcel
Date: Mon, 20 Apr 2026 16:11:45 +0200
Subject: [PATCH 05/19] chore(types): regenerate OpenAPI types for
ActivityFeedItemDTO rollup fields
Adds count (required) and happenedAtUntil (optional) to the TypeScript DTO so
Chronik + DashboardActivityFeed can consume rollup rows type-safely.
Part of #285.
Co-Authored-By: Claude Opus 4.7 (1M context)
---
frontend/src/lib/generated/api.ts | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/frontend/src/lib/generated/api.ts b/frontend/src/lib/generated/api.ts
index aa4e5cd1..d15c12e1 100644
--- a/frontend/src/lib/generated/api.ts
+++ b/frontend/src/lib/generated/api.ts
@@ -1799,6 +1799,7 @@ export interface components {
/** Format: uuid */
id?: string;
displayName?: string;
+ personType?: string;
firstName?: string;
lastName?: string;
/** Format: int64 */
@@ -1809,7 +1810,6 @@ export interface components {
deathYear?: number;
alias?: string;
notes?: string;
- personType?: string;
};
SenderModel: {
/** Format: uuid */
@@ -1877,10 +1877,10 @@ export interface components {
timeout?: number;
};
PageNotificationDTO: {
- /** Format: int32 */
- totalPages?: number;
/** Format: int64 */
totalElements?: number;
+ /** Format: int32 */
+ totalPages?: number;
pageable?: components["schemas"]["PageableObject"];
first?: boolean;
last?: boolean;
@@ -2015,6 +2015,10 @@ export interface components {
/** Format: date-time */
happenedAt: string;
youMentioned: boolean;
+ /** Format: int32 */
+ count: number;
+ /** Format: date-time */
+ happenedAtUntil?: string;
};
InvitePrefillDTO: {
firstName: string;
@@ -4455,7 +4459,3 @@ export interface operations {
};
};
}
-
-export type DashboardResumeDTO = components['schemas']['DashboardResumeDTO'];
-export type DashboardPulseDTO = components['schemas']['DashboardPulseDTO'];
-export type ActivityFeedItemDTO = components['schemas']['ActivityFeedItemDTO'];
--
2.49.1
From 2ff5073bfbb2694dbea24bdd7e3897ba52303f18 Mon Sep 17 00:00:00 2001
From: Marcel
Date: Mon, 20 Apr 2026 16:21:47 +0200
Subject: [PATCH 06/19] refactor(notifications): convert per-component stream
hook to module-level singleton
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Replaces the per-component createNotificationStream() factory with a shared
$lib/stores/notifications.svelte.ts singleton. Ref-counted init()/destroy()
ensures one EventSource per tab no matter how many consumers mount
simultaneously.
Motivation: the /chronik "Für dich" box (#285) needs the same live-arrival
stream that NotificationBell already consumes. Two factories would open two
SSE connections per tab — this refactor avoids the silent regression before
it ships.
- New: src/lib/stores/notifications.svelte.ts (module state, refcount)
- New: src/lib/stores/notifications.svelte.spec.ts (proves single EventSource
across multiple consumers + ref-counted teardown)
- Deleted: src/lib/hooks/useNotificationStream.svelte.ts (factory)
- Deleted: src/lib/hooks/__tests__/useNotificationStream.svelte.test.ts
- NotificationBell now imports the singleton
Part of #285.
Co-Authored-By: Claude Opus 4.7 (1M context)
---
.../lib/components/NotificationBell.svelte | 4 +-
.../useNotificationStream.svelte.test.ts | 142 ------------------
.../lib/hooks/useNotificationStream.svelte.ts | 95 ------------
.../lib/stores/notifications.svelte.spec.ts | 108 +++++++++++++
.../src/lib/stores/notifications.svelte.ts | 108 +++++++++++++
5 files changed, 218 insertions(+), 239 deletions(-)
delete mode 100644 frontend/src/lib/hooks/__tests__/useNotificationStream.svelte.test.ts
delete mode 100644 frontend/src/lib/hooks/useNotificationStream.svelte.ts
create mode 100644 frontend/src/lib/stores/notifications.svelte.spec.ts
create mode 100644 frontend/src/lib/stores/notifications.svelte.ts
diff --git a/frontend/src/lib/components/NotificationBell.svelte b/frontend/src/lib/components/NotificationBell.svelte
index 9781e366..03f5398a 100644
--- a/frontend/src/lib/components/NotificationBell.svelte
+++ b/frontend/src/lib/components/NotificationBell.svelte
@@ -3,13 +3,13 @@ import { onMount, onDestroy } from 'svelte';
import { goto } from '$app/navigation';
import { m } from '$lib/paraglide/messages.js';
import { clickOutside } from '$lib/actions/clickOutside';
-import { createNotificationStream } from '$lib/hooks/useNotificationStream.svelte';
+import { notificationStore } from '$lib/stores/notifications.svelte';
import NotificationDropdown from './NotificationDropdown.svelte';
let open = $state(false);
let bellButtonEl: HTMLButtonElement | null = null;
-const stream = createNotificationStream();
+const stream = notificationStore;
async function toggleDropdown() {
open = !open;
diff --git a/frontend/src/lib/hooks/__tests__/useNotificationStream.svelte.test.ts b/frontend/src/lib/hooks/__tests__/useNotificationStream.svelte.test.ts
deleted file mode 100644
index 143844c5..00000000
--- a/frontend/src/lib/hooks/__tests__/useNotificationStream.svelte.test.ts
+++ /dev/null
@@ -1,142 +0,0 @@
-import { describe, it, expect, vi, beforeEach } from 'vitest';
-import type { NotificationItem } from '../useNotificationStream.svelte';
-
-// Track the last created EventSource instance
-let lastEventSource: {
- close: ReturnType;
- onopen: (() => void) | null;
- onerror: (() => void) | null;
- simulate: (type: string, data: string) => void;
-} | null = null;
-
-class MockEventSource {
- onopen: (() => void) | null = null;
- onerror: (() => void) | null = null;
- close = vi.fn();
- private listeners: Record void)[]> = {};
-
- constructor() {
- // eslint-disable-next-line @typescript-eslint/no-this-alias
- lastEventSource = this;
- }
-
- addEventListener(type: string, fn: (e: MessageEvent) => void) {
- if (!this.listeners[type]) this.listeners[type] = [];
- this.listeners[type].push(fn);
- }
-
- simulate(type: string, data: string) {
- const event = new MessageEvent(type, { data });
- for (const fn of this.listeners[type] ?? []) {
- fn(event);
- }
- }
-}
-
-vi.stubGlobal('EventSource', MockEventSource);
-
-const mockFetch = vi.fn();
-vi.stubGlobal('fetch', mockFetch);
-
-// Import after stubs are set up
-const { createNotificationStream } = await import('../useNotificationStream.svelte');
-
-beforeEach(() => {
- mockFetch.mockReset();
- lastEventSource = null;
-});
-
-function makeNotification(overrides: Partial = {}): NotificationItem {
- return {
- id: 'n1',
- type: 'REPLY',
- actorName: 'Hans',
- documentId: 'doc-1',
- referenceId: 'ref-1',
- annotationId: null,
- read: false,
- createdAt: new Date().toISOString(),
- ...overrides
- };
-}
-
-describe('createNotificationStream', () => {
- it('starts with empty notifications and zero unreadCount', () => {
- const stream = createNotificationStream();
- expect(stream.notifications).toHaveLength(0);
- expect(stream.unreadCount).toBe(0);
- });
-
- it('fetchUnreadCount updates unreadCount from API', async () => {
- mockFetch.mockResolvedValueOnce(new Response(JSON.stringify({ count: 3 }), { status: 200 }));
- const stream = createNotificationStream();
- await stream.fetchUnreadCount();
- expect(stream.unreadCount).toBe(3);
- });
-
- it('fetchNotifications populates notifications from API', async () => {
- const items = [makeNotification()];
- mockFetch.mockResolvedValueOnce(
- new Response(JSON.stringify({ content: items }), { status: 200 })
- );
- const stream = createNotificationStream();
- await stream.fetchNotifications();
- expect(stream.notifications).toHaveLength(1);
- expect(stream.notifications[0].id).toBe('n1');
- });
-
- it('markRead marks notification as read and decrements unreadCount', async () => {
- mockFetch
- .mockResolvedValueOnce(new Response(JSON.stringify({ count: 2 }), { status: 200 }))
- .mockResolvedValueOnce(new Response(null, { status: 200 }));
- const stream = createNotificationStream();
- await stream.fetchUnreadCount();
-
- const notification = makeNotification({ read: false });
- await stream.markRead(notification);
- expect(notification.read).toBe(true);
- expect(stream.unreadCount).toBe(1);
- });
-
- it('markAllRead calls the API and resets unreadCount', async () => {
- mockFetch.mockResolvedValueOnce(new Response(null, { status: 200 }));
- const stream = createNotificationStream();
- await stream.markAllRead();
- expect(mockFetch).toHaveBeenCalledWith('/api/notifications/read-all', { method: 'POST' });
- expect(stream.unreadCount).toBe(0);
- });
-
- it('destroy closes the EventSource', async () => {
- mockFetch.mockResolvedValue(new Response(JSON.stringify({ count: 0 }), { status: 200 }));
- const stream = createNotificationStream();
- stream.init();
- expect(lastEventSource).not.toBeNull();
- stream.destroy();
- expect(lastEventSource!.close).toHaveBeenCalled();
- });
-
- it('SSE notification event prepends notification and increments unreadCount', async () => {
- mockFetch.mockResolvedValue(new Response(JSON.stringify({ count: 0 }), { status: 200 }));
- const stream = createNotificationStream();
- stream.init();
-
- const notification = makeNotification({ id: 'sse-1', read: false });
- lastEventSource!.simulate('notification', JSON.stringify(notification));
-
- expect(stream.notifications).toHaveLength(1);
- expect(stream.notifications[0].id).toBe('sse-1');
- expect(stream.unreadCount).toBe(1);
- });
-
- it('SSE notification event with read:true does not increment unreadCount', async () => {
- mockFetch.mockResolvedValue(new Response(JSON.stringify({ count: 0 }), { status: 200 }));
- const stream = createNotificationStream();
- stream.init();
-
- const notification = makeNotification({ id: 'sse-2', read: true });
- lastEventSource!.simulate('notification', JSON.stringify(notification));
-
- expect(stream.notifications).toHaveLength(1);
- expect(stream.unreadCount).toBe(0);
- });
-});
diff --git a/frontend/src/lib/hooks/useNotificationStream.svelte.ts b/frontend/src/lib/hooks/useNotificationStream.svelte.ts
deleted file mode 100644
index 0a03dada..00000000
--- a/frontend/src/lib/hooks/useNotificationStream.svelte.ts
+++ /dev/null
@@ -1,95 +0,0 @@
-import { type NotificationItem, parseNotificationEvent } from '$lib/utils/notifications';
-
-export type { NotificationItem };
-
-export function createNotificationStream() {
- let notifications = $state([]);
- let unreadCount = $state(0);
- let eventSource: EventSource | null = null;
-
- async function fetchNotifications(): Promise {
- try {
- const res = await fetch('/api/notifications?size=10');
- if (res.ok) {
- const data = await res.json();
- notifications = data.content ?? [];
- }
- } catch (e) {
- console.error('Failed to fetch notifications', e);
- }
- }
-
- async function fetchUnreadCount(): Promise {
- try {
- const res = await fetch('/api/notifications/unread-count');
- if (res.ok) {
- const data = await res.json();
- unreadCount = data.count;
- }
- } catch (e) {
- console.error('Failed to fetch unread count', e);
- }
- }
-
- async function markRead(notification: NotificationItem): Promise {
- if (!notification.read) {
- try {
- await fetch(`/api/notifications/${notification.id}/read`, { method: 'PATCH' });
- notification.read = true;
- unreadCount = Math.max(0, unreadCount - 1);
- } catch (e) {
- console.error('Failed to mark notification as read', e);
- }
- }
- }
-
- async function markAllRead(): Promise {
- try {
- await fetch('/api/notifications/read-all', { method: 'POST' });
- for (const n of notifications) {
- n.read = true;
- }
- unreadCount = 0;
- } catch (e) {
- console.error('Failed to mark all notifications as read', e);
- }
- }
-
- function init(): void {
- fetchUnreadCount();
- eventSource = new EventSource('/api/notifications/stream');
- eventSource.addEventListener('notification', (e) => {
- const notification = parseNotificationEvent(e.data);
- if (!notification) return;
- notifications = [notification, ...notifications];
- if (!notification.read) unreadCount += 1;
- });
- eventSource.onopen = () => {
- fetchUnreadCount();
- };
- eventSource.onerror = () => {
- // Close on error to avoid repeated reconnect noise
- eventSource?.close();
- };
- }
-
- function destroy(): void {
- eventSource?.close();
- eventSource = null;
- }
-
- return {
- get notifications() {
- return notifications;
- },
- get unreadCount() {
- return unreadCount;
- },
- fetchNotifications,
- fetchUnreadCount,
- markRead,
- markAllRead,
- init,
- destroy
- };
-}
diff --git a/frontend/src/lib/stores/notifications.svelte.spec.ts b/frontend/src/lib/stores/notifications.svelte.spec.ts
new file mode 100644
index 00000000..01acbd21
--- /dev/null
+++ b/frontend/src/lib/stores/notifications.svelte.spec.ts
@@ -0,0 +1,108 @@
+import { describe, it, expect, vi, beforeEach } from 'vitest';
+import type { NotificationItem } from '$lib/utils/notifications';
+
+let lastEventSource: MockEventSource | null = null;
+let eventSourceCount = 0;
+
+class MockEventSource {
+ onopen: (() => void) | null = null;
+ onerror: (() => void) | null = null;
+ close = vi.fn();
+ private listeners: Record void)[]> = {};
+
+ constructor() {
+ eventSourceCount += 1;
+ // eslint-disable-next-line @typescript-eslint/no-this-alias
+ lastEventSource = this;
+ }
+
+ addEventListener(type: string, fn: (e: MessageEvent) => void) {
+ if (!this.listeners[type]) this.listeners[type] = [];
+ this.listeners[type].push(fn);
+ }
+
+ simulate(type: string, data: string) {
+ const event = new MessageEvent(type, { data });
+ for (const fn of this.listeners[type] ?? []) {
+ fn(event);
+ }
+ }
+}
+
+vi.stubGlobal('EventSource', MockEventSource);
+
+const mockFetch = vi.fn();
+vi.stubGlobal('fetch', mockFetch);
+
+const { notificationStore, __resetForTest } = await import('./notifications.svelte');
+
+beforeEach(() => {
+ mockFetch.mockReset();
+ mockFetch.mockResolvedValue(new Response(JSON.stringify({ count: 0 }), { status: 200 }));
+ lastEventSource = null;
+ eventSourceCount = 0;
+ __resetForTest();
+});
+
+function makeNotification(overrides: Partial = {}): NotificationItem {
+ return {
+ id: 'n1',
+ type: 'REPLY',
+ actorName: 'Hans',
+ documentId: 'doc-1',
+ documentTitle: null,
+ referenceId: 'ref-1',
+ annotationId: null,
+ read: false,
+ createdAt: new Date().toISOString(),
+ ...overrides
+ };
+}
+
+describe('notificationStore (singleton)', () => {
+ it('opens a single EventSource across multiple init() calls', () => {
+ notificationStore.init();
+ notificationStore.init();
+ notificationStore.init();
+
+ expect(eventSourceCount).toBe(1);
+ });
+
+ it('closes the EventSource only after every init() is matched with destroy()', () => {
+ notificationStore.init();
+ notificationStore.init();
+ const es = lastEventSource!;
+
+ notificationStore.destroy();
+ expect(es.close).not.toHaveBeenCalled();
+
+ notificationStore.destroy();
+ expect(es.close).toHaveBeenCalledTimes(1);
+ });
+
+ it('reopens a fresh EventSource after full teardown', () => {
+ notificationStore.init();
+ notificationStore.destroy();
+ notificationStore.init();
+
+ expect(eventSourceCount).toBe(2);
+ });
+
+ it('SSE notification event prepends notification and increments unreadCount', () => {
+ notificationStore.init();
+
+ const notification = makeNotification({ id: 'sse-1', read: false });
+ lastEventSource!.simulate('notification', JSON.stringify(notification));
+
+ expect(notificationStore.notifications[0].id).toBe('sse-1');
+ expect(notificationStore.unreadCount).toBe(1);
+ });
+
+ it('markAllRead resets unreadCount', async () => {
+ mockFetch.mockResolvedValue(new Response(null, { status: 200 }));
+ await notificationStore.markAllRead();
+
+ expect(mockFetch).toHaveBeenCalledWith('/api/notifications/read-all', { method: 'POST' });
+ expect(notificationStore.unreadCount).toBe(0);
+ });
+});
diff --git a/frontend/src/lib/stores/notifications.svelte.ts b/frontend/src/lib/stores/notifications.svelte.ts
new file mode 100644
index 00000000..28ac9eb9
--- /dev/null
+++ b/frontend/src/lib/stores/notifications.svelte.ts
@@ -0,0 +1,108 @@
+import { type NotificationItem, parseNotificationEvent } from '$lib/utils/notifications';
+
+export type { NotificationItem };
+
+let notifications = $state([]);
+let unreadCount = $state(0);
+let eventSource: EventSource | null = null;
+let refCount = 0;
+
+async function fetchNotifications(): Promise {
+ try {
+ const res = await fetch('/api/notifications?size=10');
+ if (res.ok) {
+ const data = await res.json();
+ notifications = data.content ?? [];
+ }
+ } catch (e) {
+ console.error('Failed to fetch notifications', e);
+ }
+}
+
+async function fetchUnreadCount(): Promise {
+ try {
+ const res = await fetch('/api/notifications/unread-count');
+ if (res.ok) {
+ const data = await res.json();
+ unreadCount = data.count;
+ }
+ } catch (e) {
+ console.error('Failed to fetch unread count', e);
+ }
+}
+
+async function markRead(notification: NotificationItem): Promise {
+ if (!notification.read) {
+ try {
+ await fetch(`/api/notifications/${notification.id}/read`, { method: 'PATCH' });
+ notification.read = true;
+ unreadCount = Math.max(0, unreadCount - 1);
+ } catch (e) {
+ console.error('Failed to mark notification as read', e);
+ }
+ }
+}
+
+async function markAllRead(): Promise {
+ try {
+ await fetch('/api/notifications/read-all', { method: 'POST' });
+ for (const n of notifications) {
+ n.read = true;
+ }
+ unreadCount = 0;
+ } catch (e) {
+ console.error('Failed to mark all notifications as read', e);
+ }
+}
+
+function init(): void {
+ refCount += 1;
+ if (refCount > 1) return;
+
+ fetchUnreadCount();
+ eventSource = new EventSource('/api/notifications/stream');
+ eventSource.addEventListener('notification', (e) => {
+ const notification = parseNotificationEvent((e as MessageEvent).data);
+ if (!notification) return;
+ notifications = [notification, ...notifications];
+ if (!notification.read) unreadCount += 1;
+ });
+ eventSource.onopen = () => {
+ fetchUnreadCount();
+ };
+ eventSource.onerror = () => {
+ eventSource?.close();
+ };
+}
+
+function destroy(): void {
+ if (refCount === 0) return;
+ refCount -= 1;
+ if (refCount === 0) {
+ eventSource?.close();
+ eventSource = null;
+ }
+}
+
+export function __resetForTest(): void {
+ eventSource?.close();
+ eventSource = null;
+ refCount = 0;
+ notifications = [];
+ unreadCount = 0;
+}
+
+export const notificationStore = {
+ get notifications() {
+ return notifications;
+ },
+ get unreadCount() {
+ return unreadCount;
+ },
+ fetchNotifications,
+ fetchUnreadCount,
+ markRead,
+ markAllRead,
+ init,
+ destroy
+};
--
2.49.1
From e5be1ecbd29d87fc16df0d9aa45ec550d37948b5 Mon Sep 17 00:00:00 2001
From: Marcel
Date: Mon, 20 Apr 2026 16:25:31 +0200
Subject: [PATCH 07/19] feat(utils): add date-buckets helper for Chronik day
grouping
Pure function bucketByDay(date, now?, locale?) returns one of
'today'|'yesterday'|'thisWeek'|'older' so ChronikTimeline can
bucket activity rows by relative day without pulling a date
library.
Handles:
- midnight boundary (startOfDay comparison)
- locale-aware week start (Monday for most locales, Sunday for en-US,
en-CA, en-PH, ja-JP, he-IL, pt-BR)
- DST spring-forward transitions (day comparisons use local start-of-day; note the
  "yesterday" check uses a fixed 24 h delta, so 25 h fall-back days are not yet covered)
Part of #285.
Co-Authored-By: Claude Opus 4.7 (1M context)
---
frontend/src/lib/utils/date-buckets.spec.ts | 50 +++++++++++++++++++++
frontend/src/lib/utils/date-buckets.ts | 35 +++++++++++++++
2 files changed, 85 insertions(+)
create mode 100644 frontend/src/lib/utils/date-buckets.spec.ts
create mode 100644 frontend/src/lib/utils/date-buckets.ts
diff --git a/frontend/src/lib/utils/date-buckets.spec.ts b/frontend/src/lib/utils/date-buckets.spec.ts
new file mode 100644
index 00000000..3593481a
--- /dev/null
+++ b/frontend/src/lib/utils/date-buckets.spec.ts
@@ -0,0 +1,50 @@
+import { describe, it, expect } from 'vitest';
+import { bucketByDay } from './date-buckets';
+
+function date(iso: string): Date {
+ return new Date(iso);
+}
+
+describe('bucketByDay', () => {
+ // Wednesday 2026-04-22 at 12:00 Berlin. Week start (Mon) = 2026-04-20.
+ const now = date('2026-04-22T12:00:00+02:00');
+
+ it('returns "today" for a time earlier today', () => {
+ expect(bucketByDay(date('2026-04-22T06:00:00+02:00'), now, 'de-DE')).toBe('today');
+ });
+
+ it('returns "today" at exact midnight start of today', () => {
+ expect(bucketByDay(date('2026-04-22T00:00:00+02:00'), now, 'de-DE')).toBe('today');
+ });
+
+ it('returns "yesterday" for any time on the previous day', () => {
+ expect(bucketByDay(date('2026-04-21T23:59:59+02:00'), now, 'de-DE')).toBe('yesterday');
+ expect(bucketByDay(date('2026-04-21T00:00:00+02:00'), now, 'de-DE')).toBe('yesterday');
+ });
+
+ it('returns "thisWeek" for the Monday that starts this week (Monday-anchored, de-DE)', () => {
+ expect(bucketByDay(date('2026-04-20T10:00:00+02:00'), now, 'de-DE')).toBe('thisWeek');
+ });
+
+ it('returns "older" for anything before the start of this week (de-DE)', () => {
+ expect(bucketByDay(date('2026-04-19T23:00:00+02:00'), now, 'de-DE')).toBe('older');
+ expect(bucketByDay(date('2026-04-13T10:00:00+02:00'), now, 'de-DE')).toBe('older');
+ });
+
+ it('uses Sunday-start week for en-US', () => {
+ const sundayRef = date('2026-04-19T12:00:00+02:00');
+ expect(bucketByDay(date('2026-04-19T06:00:00+02:00'), sundayRef, 'en-US')).toBe('today');
+ expect(
+ bucketByDay(date('2026-04-13T10:00:00+02:00'), date('2026-04-18T12:00:00+02:00'), 'en-US')
+ ).toBe('thisWeek');
+ expect(
+ bucketByDay(date('2026-04-11T10:00:00+02:00'), date('2026-04-18T12:00:00+02:00'), 'en-US')
+ ).toBe('older');
+ });
+
+ it('handles DST spring-forward correctly (Europe/Berlin 2026-03-29)', () => {
+ const justAfterDst = date('2026-03-29T03:15:00+02:00');
+ const sameDay = date('2026-03-29T10:00:00+02:00');
+ expect(bucketByDay(justAfterDst, sameDay, 'de-DE')).toBe('today');
+ });
+});
diff --git a/frontend/src/lib/utils/date-buckets.ts b/frontend/src/lib/utils/date-buckets.ts
new file mode 100644
index 00000000..7561380c
--- /dev/null
+++ b/frontend/src/lib/utils/date-buckets.ts
@@ -0,0 +1,35 @@
+export type DayBucket = 'today' | 'yesterday' | 'thisWeek' | 'older';
+
+const DAY_MS = 24 * 60 * 60 * 1000;
+const SUNDAY_START_LOCALES = new Set(['en-us', 'en-ca', 'en-ph', 'ja-jp', 'he-il', 'pt-br']);
+
+function weekStartDay(locale?: string): 0 | 1 {
+ if (!locale) return 1;
+ return SUNDAY_START_LOCALES.has(locale.toLowerCase()) ? 0 : 1;
+}
+
+function startOfDay(d: Date): Date {
+ const x = new Date(d);
+ x.setHours(0, 0, 0, 0);
+ return x;
+}
+
+function startOfWeek(d: Date, firstDay: 0 | 1): Date {
+ const x = startOfDay(d);
+ const diff = (x.getDay() - firstDay + 7) % 7;
+ x.setDate(x.getDate() - diff);
+ return x;
+}
+
+export function bucketByDay(date: Date, now: Date = new Date(), locale?: string): DayBucket {
+ const today = startOfDay(now);
+ const target = startOfDay(date);
+
+ if (target.getTime() === today.getTime()) return 'today';
+ if (today.getTime() - target.getTime() <= DAY_MS) return 'yesterday';
+
+ const weekStart = startOfWeek(today, weekStartDay(locale));
+ if (target.getTime() >= weekStart.getTime()) return 'thisWeek';
+
+ return 'older';
+}
--
2.49.1
From ef9a3d8eb104dfb701c98a71c2f8ba57916bb0bb Mon Sep 17 00:00:00 2001
From: Marcel
Date: Mon, 20 Apr 2026 16:27:53 +0200
Subject: [PATCH 08/19] fix(notifications): retarget NotificationItem import to
singleton store
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Left over from the hook→singleton refactor — NotificationDropdown still
imported from the deleted $lib/hooks path.
Part of #285.
Co-Authored-By: Claude Opus 4.7 (1M context)
---
frontend/src/lib/components/NotificationDropdown.svelte | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/frontend/src/lib/components/NotificationDropdown.svelte b/frontend/src/lib/components/NotificationDropdown.svelte
index b3b161fb..14cc78df 100644
--- a/frontend/src/lib/components/NotificationDropdown.svelte
+++ b/frontend/src/lib/components/NotificationDropdown.svelte
@@ -1,7 +1,7 @@
+
+
+ {#if variant === 'first-run'}
+
+ {:else if variant === 'filter-empty'}
+
+ {:else}
+
+ {/if}
+
+
+ {title}
+
+ {#if body}
+
+ {body}
+
+ {/if}
+
diff --git a/frontend/src/lib/components/chronik/ChronikEmptyState.svelte.spec.ts b/frontend/src/lib/components/chronik/ChronikEmptyState.svelte.spec.ts
new file mode 100644
index 00000000..4e3446a4
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikEmptyState.svelte.spec.ts
@@ -0,0 +1,30 @@
+import { describe, it, expect, afterEach } from 'vitest';
+import { cleanup, render } from 'vitest-browser-svelte';
+import { page } from 'vitest/browser';
+
+import ChronikEmptyState from './ChronikEmptyState.svelte';
+
+afterEach(cleanup);
+
+describe('ChronikEmptyState', () => {
+ it('renders first-run variant title', async () => {
+ render(ChronikEmptyState, { variant: 'first-run' });
+ await expect.element(page.getByText('Noch nichts geschehen')).toBeInTheDocument();
+ });
+
+ it('renders filter-empty variant title', async () => {
+ render(ChronikEmptyState, { variant: 'filter-empty' });
+ await expect.element(page.getByText('Nichts in dieser Ansicht')).toBeInTheDocument();
+ });
+
+ it('renders inbox-zero variant title', async () => {
+ render(ChronikEmptyState, { variant: 'inbox-zero' });
+ await expect.element(page.getByText('Keine neuen Erwähnungen')).toBeInTheDocument();
+ });
+
+ it('applies the expected data-variant attribute', async () => {
+ render(ChronikEmptyState, { variant: 'first-run' });
+ const wrapper = document.querySelector('[data-testid="chronik-empty-state"]');
+ expect(wrapper?.getAttribute('data-variant')).toBe('first-run');
+ });
+});
diff --git a/frontend/src/lib/components/chronik/ChronikErrorCard.svelte b/frontend/src/lib/components/chronik/ChronikErrorCard.svelte
new file mode 100644
index 00000000..98de6321
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikErrorCard.svelte
@@ -0,0 +1,46 @@
+
+
+
+
+
+
+
+
+ {displayMessage}
+
+
+
+
diff --git a/frontend/src/lib/components/chronik/ChronikErrorCard.svelte.spec.ts b/frontend/src/lib/components/chronik/ChronikErrorCard.svelte.spec.ts
new file mode 100644
index 00000000..2fecf383
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikErrorCard.svelte.spec.ts
@@ -0,0 +1,39 @@
+import { describe, it, expect, vi, afterEach } from 'vitest';
+import { cleanup, render } from 'vitest-browser-svelte';
+import { page, userEvent } from 'vitest/browser';
+
+import ChronikErrorCard from './ChronikErrorCard.svelte';
+
+afterEach(cleanup);
+
+describe('ChronikErrorCard', () => {
+ it('renders the default error message', async () => {
+ render(ChronikErrorCard, { onRetry: vi.fn() });
+ await expect
+ .element(page.getByText('Die Chronik konnte nicht geladen werden.'))
+ .toBeInTheDocument();
+ });
+
+ it('renders the retry button with the expected label', async () => {
+ render(ChronikErrorCard, { onRetry: vi.fn() });
+ await expect.element(page.getByText('Erneut versuchen')).toBeInTheDocument();
+ });
+
+ it('renders a custom message when provided', async () => {
+ render(ChronikErrorCard, { onRetry: vi.fn(), message: 'Netzwerkfehler' });
+ await expect.element(page.getByText('Netzwerkfehler')).toBeInTheDocument();
+ });
+
+ it('calls onRetry when the retry button is clicked', async () => {
+ const onRetry = vi.fn();
+ render(ChronikErrorCard, { onRetry });
+ await userEvent.click(page.getByText('Erneut versuchen'));
+ expect(onRetry).toHaveBeenCalledTimes(1);
+ });
+
+ it('has role="alert" on the wrapper', async () => {
+ render(ChronikErrorCard, { onRetry: vi.fn() });
+ const alert = document.querySelector('[role="alert"]');
+ expect(alert).not.toBeNull();
+ });
+});
diff --git a/frontend/src/lib/components/chronik/ChronikFilterPills.svelte b/frontend/src/lib/components/chronik/ChronikFilterPills.svelte
new file mode 100644
index 00000000..b692147c
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikFilterPills.svelte
@@ -0,0 +1,68 @@
+
+
+
+ {#each pills as p (p.value)}
+ {@const active = p.value === value}
+
+ {/each}
+
diff --git a/frontend/src/lib/components/chronik/ChronikFilterPills.svelte.spec.ts b/frontend/src/lib/components/chronik/ChronikFilterPills.svelte.spec.ts
new file mode 100644
index 00000000..54c450ff
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikFilterPills.svelte.spec.ts
@@ -0,0 +1,85 @@
+import { describe, it, expect, vi, afterEach } from 'vitest';
+import { cleanup, render } from 'vitest-browser-svelte';
+import { userEvent } from 'vitest/browser';
+
+import ChronikFilterPills from './ChronikFilterPills.svelte';
+
+afterEach(cleanup);
+
+describe('ChronikFilterPills', () => {
+ it('renders all 5 filter pills', async () => {
+ render(ChronikFilterPills, { value: 'alle', onChange: vi.fn() });
+ const pills = document.querySelectorAll('[role="radio"]');
+ expect(pills.length).toBe(5);
+ });
+
+ it('marks the active pill with aria-checked="true"', async () => {
+ render(ChronikFilterPills, { value: 'hochgeladen', onChange: vi.fn() });
+ const pills = document.querySelectorAll('[role="radio"]');
+ const checked = Array.from(pills).filter((p) => p.getAttribute('aria-checked') === 'true');
+ expect(checked.length).toBe(1);
+ expect(checked[0].getAttribute('data-filter-value')).toBe('hochgeladen');
+ });
+
+ it('calls onChange with the clicked pill value', async () => {
+ const onChange = vi.fn();
+ render(ChronikFilterPills, { value: 'alle', onChange });
+ const pill = document.querySelector(
+ '[data-filter-value="kommentare"]'
+ ) as HTMLButtonElement | null;
+ expect(pill).not.toBeNull();
+ pill?.click();
+ expect(onChange).toHaveBeenCalledWith('kommentare');
+ });
+
+ it('applies active classes to the selected pill', async () => {
+ render(ChronikFilterPills, { value: 'fuer-dich', onChange: vi.fn() });
+ const active = document.querySelector('[data-filter-value="fuer-dich"]');
+ expect(active?.className).toContain('bg-primary');
+ const inactive = document.querySelector('[data-filter-value="alle"]');
+ expect(inactive?.className).toContain('bg-muted');
+ });
+
+ it('ArrowRight moves focus to the next pill', async () => {
+ render(ChronikFilterPills, { value: 'alle', onChange: vi.fn() });
+ const first = document.querySelector('[data-filter-value="alle"]') as HTMLButtonElement | null;
+ const second = document.querySelector(
+ '[data-filter-value="fuer-dich"]'
+ ) as HTMLButtonElement | null;
+ expect(first).not.toBeNull();
+ expect(second).not.toBeNull();
+ first?.focus();
+ await userEvent.keyboard('{ArrowRight}');
+ expect(document.activeElement).toBe(second);
+ });
+
+ it('ArrowLeft moves focus to the previous pill', async () => {
+ render(ChronikFilterPills, { value: 'alle', onChange: vi.fn() });
+ const first = document.querySelector('[data-filter-value="alle"]') as HTMLButtonElement | null;
+ const second = document.querySelector(
+ '[data-filter-value="fuer-dich"]'
+ ) as HTMLButtonElement | null;
+ second?.focus();
+ await userEvent.keyboard('{ArrowLeft}');
+ expect(document.activeElement).toBe(first);
+ });
+
+ it('wraps focus from last to first with ArrowRight', async () => {
+ render(ChronikFilterPills, { value: 'alle', onChange: vi.fn() });
+ const last = document.querySelector(
+ '[data-filter-value="kommentare"]'
+ ) as HTMLButtonElement | null;
+ const first = document.querySelector('[data-filter-value="alle"]') as HTMLButtonElement | null;
+ last?.focus();
+ await userEvent.keyboard('{ArrowRight}');
+ expect(document.activeElement).toBe(first);
+ });
+
+ it('has role="radiogroup" on the container', async () => {
+ render(ChronikFilterPills, { value: 'alle', onChange: vi.fn() });
+ const group = document.querySelector('[role="radiogroup"]');
+ expect(group).not.toBeNull();
+ // Paraglide provides "Aktivitäten filtern" as the filter label
+ expect(group?.getAttribute('aria-label')).toBe('Aktivitäten filtern');
+ });
+});
diff --git a/frontend/src/lib/components/chronik/ChronikFuerDichBox.svelte b/frontend/src/lib/components/chronik/ChronikFuerDichBox.svelte
new file mode 100644
index 00000000..39e7e754
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikFuerDichBox.svelte
@@ -0,0 +1,151 @@
+
+
+
+ {#if unread.length === 0}
+
+
+
+ {m.chronik_inbox_zero_title()}
+
+
+ {m.chronik_inbox_zero_link()}
+
+
+ {:else}
+
+
+
+ {m.chronik_for_you_caption()}
+
+
+ {m.chronik_for_you_count({ count: unread.length })}
+
+
+
+
+
+
+ {/if}
+
+
+
diff --git a/frontend/src/lib/components/chronik/ChronikFuerDichBox.svelte.spec.ts b/frontend/src/lib/components/chronik/ChronikFuerDichBox.svelte.spec.ts
new file mode 100644
index 00000000..c8f709a8
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikFuerDichBox.svelte.spec.ts
@@ -0,0 +1,116 @@
+import { describe, it, expect, vi, afterEach } from 'vitest';
+import { cleanup, render } from 'vitest-browser-svelte';
+import { page, userEvent } from 'vitest/browser';
+
+import ChronikFuerDichBox from './ChronikFuerDichBox.svelte';
+import type { NotificationItem } from '$lib/stores/notifications.svelte';
+
+afterEach(cleanup);
+
+function notif(partial: Partial): NotificationItem {
+ return {
+ id: 'n1',
+ type: 'MENTION',
+ documentId: 'doc-1',
+ documentTitle: 'Ein Dokument',
+ referenceId: 'ref-1',
+ annotationId: null,
+ read: false,
+ createdAt: new Date(Date.now() - 5 * 60_000).toISOString(),
+ actorName: 'Anna',
+ ...partial
+ };
+}
+
+describe('ChronikFuerDichBox', () => {
+ it('renders inbox-zero state when there are no unread items', async () => {
+ render(ChronikFuerDichBox, {
+ unread: [],
+ onMarkRead: vi.fn(),
+ onMarkAllRead: vi.fn()
+ });
+ const zero = document.querySelector('[data-testid="chronik-inbox-zero"]');
+ expect(zero).not.toBeNull();
+ await expect.element(page.getByText('Keine neuen Erwähnungen')).toBeInTheDocument();
+ });
+
+ it('links to the archived mentions in the inbox-zero state', async () => {
+ render(ChronikFuerDichBox, {
+ unread: [],
+ onMarkRead: vi.fn(),
+ onMarkAllRead: vi.fn()
+ });
+ const link = document.querySelector('a[href="/chronik?filter=fuer-dich"]');
+ expect(link).not.toBeNull();
+ });
+
+ it('renders the count badge with correct total when unread exists', async () => {
+ render(ChronikFuerDichBox, {
+ unread: [notif({ id: 'a' }), notif({ id: 'b' })],
+ onMarkRead: vi.fn(),
+ onMarkAllRead: vi.fn()
+ });
+ await expect.element(page.getByText('2 neu')).toBeInTheDocument();
+ });
+
+ it('count badge has aria-live=polite when unread exists', async () => {
+ render(ChronikFuerDichBox, {
+ unread: [notif({ id: 'a' })],
+ onMarkRead: vi.fn(),
+ onMarkAllRead: vi.fn()
+ });
+ // Wait for render
+ await expect.element(page.getByText('1 neu')).toBeInTheDocument();
+ const badge = document.querySelector('[data-testid="chronik-fuerdich-count"]');
+ expect(badge?.getAttribute('aria-live')).toBe('polite');
+ expect(badge?.getAttribute('aria-atomic')).toBe('true');
+ });
+
+ it('does not render the "Alle gelesen" button when there are no unread items', async () => {
+ render(ChronikFuerDichBox, {
+ unread: [],
+ onMarkRead: vi.fn(),
+ onMarkAllRead: vi.fn()
+ });
+ await expect.element(page.getByText('Keine neuen Erwähnungen')).toBeInTheDocument();
+ const all = document.querySelector('[data-testid="chronik-mark-all-read"]');
+ expect(all).toBeNull();
+ });
+
+ it('renders the "Alle gelesen" button when unread exists', async () => {
+ render(ChronikFuerDichBox, {
+ unread: [notif({ id: 'a' })],
+ onMarkRead: vi.fn(),
+ onMarkAllRead: vi.fn()
+ });
+ await expect.element(page.getByText('Alle gelesen')).toBeInTheDocument();
+ });
+
+ it('calls onMarkAllRead when the "Alle gelesen" button is clicked', async () => {
+ const onMarkAllRead = vi.fn();
+ render(ChronikFuerDichBox, {
+ unread: [notif({ id: 'a' })],
+ onMarkRead: vi.fn(),
+ onMarkAllRead
+ });
+ await userEvent.click(page.getByText('Alle gelesen'));
+ expect(onMarkAllRead).toHaveBeenCalledTimes(1);
+ });
+
+ it('calls onMarkRead (and not navigation) when a per-item Dismiss button is clicked', async () => {
+ const onMarkRead = vi.fn();
+ const n = notif({ id: 'xyz' });
+ render(ChronikFuerDichBox, {
+ unread: [n],
+ onMarkRead,
+ onMarkAllRead: vi.fn()
+ });
+ const dismiss = document.querySelector(
+ '[data-testid="chronik-fuerdich-dismiss"]'
+ ) as HTMLButtonElement | null;
+ expect(dismiss).not.toBeNull();
+ dismiss?.click();
+ expect(onMarkRead).toHaveBeenCalledTimes(1);
+ expect(onMarkRead.mock.calls[0][0]).toEqual(n);
+ });
+});
diff --git a/frontend/src/lib/components/chronik/ChronikRow.svelte b/frontend/src/lib/components/chronik/ChronikRow.svelte
new file mode 100644
index 00000000..a739f6db
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikRow.svelte
@@ -0,0 +1,166 @@
+
+
+
+
+ {#if item.actor}
+
+ {item.actor.initials}
+
+ {:else}
+
+ ?
+
+ {/if}
+
+
+ {#if variant === 'for-you'}
+
+ @
+
+ {/if}
+
+
+
+
+ {verbParts.before}{docTitle}{verbParts.after}
+ {#if variant === 'rollup'}
+
+ {item.count}
+
+ {/if}
+
+
+ {#if variant === 'comment'}
+
+
+ „{docTitle}“
+
+ {/if}
+
+
{timeLabel}
+
+
diff --git a/frontend/src/lib/components/chronik/ChronikRow.svelte.spec.ts b/frontend/src/lib/components/chronik/ChronikRow.svelte.spec.ts
new file mode 100644
index 00000000..41800bf8
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikRow.svelte.spec.ts
@@ -0,0 +1,121 @@
+import { describe, it, expect, afterEach } from 'vitest';
+import { cleanup, render } from 'vitest-browser-svelte';
+import { page } from 'vitest/browser';
+
+import ChronikRow from './ChronikRow.svelte';
+import type { components } from '$lib/generated/api';
+
+type ActivityFeedItemDTO = components['schemas']['ActivityFeedItemDTO'];
+
+afterEach(cleanup);
+
+// Canonical singleton feed item (count === 1, no mention) used as the
+// baseline in every test below; tests spread-override individual fields
+// to reach the other render variants.
+const baseItem: ActivityFeedItemDTO = {
+  kind: 'TEXT_SAVED',
+  actor: { initials: 'MR', color: '#7a4f9a', name: 'Max Raddatz' },
+  documentId: 'doc-1',
+  documentTitle: 'Brief 1920',
+  happenedAt: '2026-04-19T10:00:00Z',
+  youMentioned: false,
+  count: 1
+};
+
+// Unit tests for ChronikRow. Covers the document link wrapper, avatar
+// initials and "?" fallback, and the four variants the component derives
+// from the item prop: simple (count === 1), rollup (count > 1),
+// for-you (youMentioned), and comment (COMMENT_ADDED kind).
+describe('ChronikRow', () => {
+  it('renders the document title', async () => {
+    render(ChronikRow, { item: baseItem });
+    await expect.element(page.getByText('Brief 1920')).toBeInTheDocument();
+  });
+
+  it('renders actor initials in avatar', async () => {
+    render(ChronikRow, { item: baseItem });
+    await expect.element(page.getByText('MR')).toBeInTheDocument();
+  });
+
+  it('renders "?" fallback avatar when actor is missing', async () => {
+    const item: ActivityFeedItemDTO = { ...baseItem, actor: undefined };
+    render(ChronikRow, { item });
+    const fallback = document.querySelector('[data-testid="chronik-avatar-fallback"]');
+    expect(fallback).not.toBeNull();
+    expect(fallback?.textContent?.trim()).toBe('?');
+  });
+
+  it('wraps the row in a link to the document', async () => {
+    render(ChronikRow, { item: baseItem });
+    const link = document.querySelector('a[href="/documents/doc-1"]');
+    expect(link).not.toBeNull();
+  });
+
+  // --- simple variant ---
+  it('renders simple variant when count === 1 and not a mention', async () => {
+    render(ChronikRow, { item: baseItem });
+    // No rollup count badge
+    expect(document.querySelector('[data-testid="chronik-count-badge"]')).toBeNull();
+    // No for-you marker
+    expect(document.querySelector('[data-testid="chronik-foryou-marker"]')).toBeNull();
+    // No comment preview
+    expect(document.querySelector('[data-testid="chronik-comment-preview"]')).toBeNull();
+  });
+
+  // --- rollup variant ---
+  it('renders rollup variant with count badge when count > 1', async () => {
+    const item: ActivityFeedItemDTO = {
+      ...baseItem,
+      kind: 'TEXT_SAVED',
+      count: 3,
+      happenedAt: '2026-04-19T10:00:00Z',
+      happenedAtUntil: '2026-04-19T11:30:00Z'
+    };
+    render(ChronikRow, { item });
+    const badge = document.querySelector('[data-testid="chronik-count-badge"]');
+    expect(badge).not.toBeNull();
+    expect(badge?.textContent).toContain('3');
+  });
+
+  it('renders a time range with an en-dash for rollup variant', async () => {
+    const item: ActivityFeedItemDTO = {
+      ...baseItem,
+      kind: 'FILE_UPLOADED',
+      count: 5,
+      happenedAt: '2026-04-19T10:00:00Z',
+      happenedAtUntil: '2026-04-19T11:30:00Z'
+    };
+    render(ChronikRow, { item });
+    // en-dash character U+2013
+    const body = document.body.textContent ?? '';
+    expect(body).toContain('\u2013');
+  });
+
+  // --- for-you variant ---
+  it('renders for-you marker when youMentioned is true', async () => {
+    const item: ActivityFeedItemDTO = {
+      ...baseItem,
+      kind: 'MENTION_CREATED',
+      youMentioned: true
+    };
+    render(ChronikRow, { item });
+    const marker = document.querySelector('[data-testid="chronik-foryou-marker"]');
+    expect(marker).not.toBeNull();
+  });
+
+  it('applies accent border to for-you variant outer wrapper', async () => {
+    const item: ActivityFeedItemDTO = {
+      ...baseItem,
+      kind: 'MENTION_CREATED',
+      youMentioned: true
+    };
+    render(ChronikRow, { item });
+    // NOTE(review): asserting on className couples the test to Tailwind
+    // class names; the data-variant hook is the stable selector.
+    const wrapper = document.querySelector('[data-variant="for-you"]');
+    expect(wrapper).not.toBeNull();
+    expect(wrapper?.className).toContain('border-accent');
+  });
+
+  // --- comment variant ---
+  it('renders comment preview for COMMENT_ADDED kind', async () => {
+    const item: ActivityFeedItemDTO = {
+      ...baseItem,
+      kind: 'COMMENT_ADDED'
+    };
+    render(ChronikRow, { item });
+    const preview = document.querySelector('[data-testid="chronik-comment-preview"]');
+    expect(preview).not.toBeNull();
+  });
+});
diff --git a/frontend/src/lib/components/chronik/ChronikTimeline.svelte b/frontend/src/lib/components/chronik/ChronikTimeline.svelte
new file mode 100644
index 00000000..f083ba7b
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikTimeline.svelte
@@ -0,0 +1,66 @@
+
+
+
+ {#each BUCKET_ORDER as bucket (bucket)}
+ {#if grouped[bucket].length > 0}
+
+
+
+ {bucketLabel(bucket)}
+
+
+
+
+ {#each grouped[bucket] as it (it.kind + it.happenedAt + it.documentId)}
+ -
+
+
+ {/each}
+
+
+ {/if}
+ {/each}
+
diff --git a/frontend/src/lib/components/chronik/ChronikTimeline.svelte.spec.ts b/frontend/src/lib/components/chronik/ChronikTimeline.svelte.spec.ts
new file mode 100644
index 00000000..f65c1e70
--- /dev/null
+++ b/frontend/src/lib/components/chronik/ChronikTimeline.svelte.spec.ts
@@ -0,0 +1,99 @@
+import { describe, it, expect, afterEach } from 'vitest';
+import { cleanup, render } from 'vitest-browser-svelte';
+import { page } from 'vitest/browser';
+
+import ChronikTimeline from './ChronikTimeline.svelte';
+import type { components } from '$lib/generated/api';
+
+type ActivityFeedItemDTO = components['schemas']['ActivityFeedItemDTO'];
+
+afterEach(cleanup);
+
+// Builds a minimal ActivityFeedItemDTO fixture, letting callers override
+// individual fields via spread. The parameter was previously typed as bare
+// `Partial` (missing its type argument) — a TypeScript compile error that
+// also disabled type-checking of the overrides.
+function item(partial: Partial<ActivityFeedItemDTO>): ActivityFeedItemDTO {
+  return {
+    kind: 'TEXT_SAVED',
+    actor: { initials: 'AB', color: '#123456', name: 'Anna Beta' },
+    documentId: 'doc-x',
+    documentTitle: 'Some document',
+    happenedAt: new Date().toISOString(),
+    youMentioned: false,
+    count: 1,
+    ...partial
+  };
+}
+
+// Returns an ISO-8601 timestamp `days` calendar days in the past. Uses
+// setDate (calendar-aware) rather than millisecond arithmetic, so the
+// result lands on the intended day bucket even across DST transitions.
+function atOffsetDays(days: number): string {
+  const when = new Date();
+  when.setDate(when.getDate() - days);
+  return when.toISOString();
+}
+
+// Unit tests for ChronikTimeline: items are grouped into day buckets
+// (today / yesterday / thisWeek / older, per the data-testid hooks) and a
+// bucket with no items renders no header at all.
+describe('ChronikTimeline', () => {
+  it('renders nothing / no bucket headers when items is empty', async () => {
+    render(ChronikTimeline, { items: [] });
+    expect(document.querySelector('[data-testid="chronik-bucket-today"]')).toBeNull();
+    expect(document.querySelector('[data-testid="chronik-bucket-yesterday"]')).toBeNull();
+    expect(document.querySelector('[data-testid="chronik-bucket-thisWeek"]')).toBeNull();
+    expect(document.querySelector('[data-testid="chronik-bucket-older"]')).toBeNull();
+  });
+
+  it('places today items in the today bucket with a "Heute" header', async () => {
+    render(ChronikTimeline, {
+      items: [
+        item({
+          documentId: 'doc-today',
+          documentTitle: 'Frisches Dokument',
+          happenedAt: new Date().toISOString()
+        })
+      ]
+    });
+    const today = document.querySelector('[data-testid="chronik-bucket-today"]');
+    expect(today).not.toBeNull();
+    await expect.element(page.getByText('Heute', { exact: true })).toBeInTheDocument();
+    // The row for the today item should be inside the today bucket.
+    expect(today?.textContent).toContain('Frisches Dokument');
+  });
+
+  it('does not render an empty bucket header when no items fall into it', async () => {
+    render(ChronikTimeline, {
+      items: [item({ happenedAt: new Date().toISOString() })]
+    });
+    // Only today bucket should exist.
+    expect(document.querySelector('[data-testid="chronik-bucket-today"]')).not.toBeNull();
+    expect(document.querySelector('[data-testid="chronik-bucket-older"]')).toBeNull();
+  });
+
+  it('places older items in the older bucket', async () => {
+    render(ChronikTimeline, {
+      items: [
+        item({
+          documentId: 'doc-old',
+          documentTitle: 'Alt Doc',
+          happenedAt: atOffsetDays(30)
+        })
+      ]
+    });
+    const older = document.querySelector('[data-testid="chronik-bucket-older"]');
+    expect(older).not.toBeNull();
+    expect(older?.textContent).toContain('Alt Doc');
+  });
+
+  it('groups multiple items into their respective buckets', async () => {
+    render(ChronikTimeline, {
+      items: [
+        item({
+          documentId: 'd1',
+          documentTitle: 'Heute Item',
+          happenedAt: new Date().toISOString()
+        }),
+        item({ documentId: 'd2', documentTitle: 'Alt Item', happenedAt: atOffsetDays(30) })
+      ]
+    });
+    const today = document.querySelector('[data-testid="chronik-bucket-today"]');
+    const older = document.querySelector('[data-testid="chronik-bucket-older"]');
+    expect(today?.textContent).toContain('Heute Item');
+    expect(today?.textContent).not.toContain('Alt Item');
+    expect(older?.textContent).toContain('Alt Item');
+    expect(older?.textContent).not.toContain('Heute Item');
+  });
+});
--
2.49.1
From c17801e008e412dcce542bbc4eea6524172cf86c Mon Sep 17 00:00:00 2001
From: Marcel
Date: Mon, 20 Apr 2026 17:01:48 +0200
Subject: [PATCH 11/19] feat(chronik): add /chronik route (page.server.ts +
+page.svelte + spec)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
page.server.ts loads /api/dashboard/activity (limit=40) and unread
/api/notifications in parallel via Promise.allSettled so a dashboard-activity
failure still renders the Für-dich box. Form actions ?/dismiss and ?/mark-all
back the Dismiss and "Alle gelesen" controls with CSRF-safe SvelteKit
endpoints.
+page.svelte composes all six chronik components:
- ChronikFuerDichBox at the top, seeded from the SSR unread set on first
render and switching to the live SSE singleton once notifications arrive;
- ChronikFilterPills below, wired to URL via goto(?filter=…) with
replaceState so the browser history stays clean across filter changes;
- ChronikTimeline for the day-bucketed feed, filtered client-side per pill
(alle / fuer-dich / hochgeladen / transkription / kommentare);
- ChronikEmptyState for first-run vs filter-empty states;
- ChronikErrorCard on activity load failure.
"Mehr laden" pagination keeps focus on the button after load (via tick() +
$state-bound ref), renders 3 static skeleton rows with aria-busy, and
announces "{count} weitere Einträge geladen" through a polite aria-live
region. Inbox-zero in the Für-dich box links to /chronik?filter=fuer-dich.
Co-located page.server.spec.ts covers load(): limit=40, unread=read:false,
filter parsing with "alle" fallback, activity-fulfilled-but-not-ok surfaces
loadError, plus the dismiss and mark-all actions (success + missing-id
branch). 8 tests green.
Part of #285.
Co-Authored-By: Claude Opus 4.7 (1M context)
---
frontend/src/routes/chronik/+page.server.ts | 81 +++++++
frontend/src/routes/chronik/+page.svelte | 208 ++++++++++++++++++
.../src/routes/chronik/page.server.spec.ts | 129 +++++++++++
3 files changed, 418 insertions(+)
create mode 100644 frontend/src/routes/chronik/+page.server.ts
create mode 100644 frontend/src/routes/chronik/+page.svelte
create mode 100644 frontend/src/routes/chronik/page.server.spec.ts
diff --git a/frontend/src/routes/chronik/+page.server.ts b/frontend/src/routes/chronik/+page.server.ts
new file mode 100644
index 00000000..83bff5d4
--- /dev/null
+++ b/frontend/src/routes/chronik/+page.server.ts
@@ -0,0 +1,81 @@
+import { fail } from '@sveltejs/kit';
+import { createApiClient } from '$lib/api.server';
+import type { components } from '$lib/generated/api';
+
+type ActivityFeedItemDTO = components['schemas']['ActivityFeedItemDTO'];
+type NotificationDTO = components['schemas']['NotificationDTO'];
+
+export type FilterValue = 'alle' | 'fuer-dich' | 'hochgeladen' | 'transkription' | 'kommentare';
+
+const VALID_FILTERS: FilterValue[] = [
+ 'alle',
+ 'fuer-dich',
+ 'hochgeladen',
+ 'transkription',
+ 'kommentare'
+];
+
+// Narrows a raw ?filter= query value to a known FilterValue, falling back
+// to the catch-all 'alle' pill for a missing or unrecognized value.
+function parseFilter(raw: string | null): FilterValue {
+  const known = VALID_FILTERS.find((f) => f === raw);
+  return known ?? 'alle';
+}
+
+/**
+ * Loads the chronik page data: the activity feed (limit clamped to 40) and
+ * the unread notifications for the Für-dich box, fetched in parallel via
+ * Promise.allSettled so the two requests fail independently. An activity
+ * failure is surfaced as loadError='activity' so the page can render the
+ * error card while the Für-dich box still works; a notifications failure is
+ * tolerated silently (empty unread list).
+ */
+export async function load({ fetch, url }) {
+  const api = createApiClient(fetch);
+  const filter = parseFilter(url.searchParams.get('filter'));
+  // Clamp the requested page size to at most 40; non-numeric values fall
+  // back to the 40-item default.
+  const limit = Math.min(Number(url.searchParams.get('limit')) || 40, 40);
+
+  const [activityResult, unreadResult] = await Promise.allSettled([
+    api.GET('/api/dashboard/activity', { params: { query: { limit } } }),
+    api.GET('/api/notifications', {
+      params: { query: { read: false, page: 0, size: 20 } }
+    })
+  ]);
+
+  let activityFeed: ActivityFeedItemDTO[] = [];
+  let unreadNotifications: NotificationDTO[] = [];
+  let loadError: string | null = null;
+
+  if (activityResult.status === 'fulfilled' && activityResult.value.response.ok) {
+    activityFeed = (activityResult.value.data as ActivityFeedItemDTO[]) ?? [];
+  } else {
+    // Covers both a non-ok HTTP response and a rejected promise (network
+    // error). Previously only the fulfilled-but-not-ok case set loadError,
+    // so a rejected fetch silently rendered an empty feed without the
+    // error card.
+    loadError = 'activity';
+  }
+
+  if (unreadResult.status === 'fulfilled' && unreadResult.value.response.ok) {
+    unreadNotifications = unreadResult.value.data?.content ?? [];
+  }
+
+  return {
+    filter,
+    activityFeed,
+    unreadNotifications,
+    loadError
+  };
+}
+
+// SvelteKit form actions backing the Für-dich controls; posting through
+// these server actions keeps the notification mutations CSRF-safe.
+export const actions = {
+  // ?/dismiss — marks a single notification as read. Requires a non-empty
+  // "id" form field; responds 400 when it is missing.
+  dismiss: async ({ request, fetch }) => {
+    const api = createApiClient(fetch);
+    const form = await request.formData();
+    const id = form.get('id');
+    if (typeof id !== 'string' || id.length === 0) {
+      return fail(400, { error: 'missing id' });
+    }
+    const patched = await api.PATCH('/api/notifications/{id}/read', {
+      params: { path: { id } }
+    });
+    return patched.response.ok
+      ? { success: true }
+      : fail(patched.response.status, { error: 'failed' });
+  },
+  // ?/mark-all — marks every notification as read in one call.
+  'mark-all': async ({ fetch }) => {
+    const api = createApiClient(fetch);
+    const posted = await api.POST('/api/notifications/read-all');
+    return posted.response.ok
+      ? { success: true }
+      : fail(posted.response.status, { error: 'failed' });
+  }
+};
diff --git a/frontend/src/routes/chronik/+page.svelte b/frontend/src/routes/chronik/+page.svelte
new file mode 100644
index 00000000..24bbc394
--- /dev/null
+++ b/frontend/src/routes/chronik/+page.svelte
@@ -0,0 +1,208 @@
+
+
+
+ {m.chronik_page_title()}
+
+
+
+
+ {m.chronik_page_title()}
+
+
+ {#if data.loadError === 'activity'}
+
+ {:else}
+
+
+
+
+
+
+ {#if isEmpty}
+
+
+
+ {:else}
+
+
+ {announcement}
+
+
+
+
+ {#if isLoadingMore}
+
+ {#each [0, 1, 2] as i (i)}
+
+ {/each}
+
+ {/if}
+
+ {/if}
+ {/if}
+
diff --git a/frontend/src/routes/chronik/page.server.spec.ts b/frontend/src/routes/chronik/page.server.spec.ts
new file mode 100644
index 00000000..7966147c
--- /dev/null
+++ b/frontend/src/routes/chronik/page.server.spec.ts
@@ -0,0 +1,129 @@
+import { beforeEach, describe, expect, it, vi } from 'vitest';
+import { actions, load } from './+page.server';
+
+// Shared spy client: vi.mock below swaps $lib/api.server's createApiClient
+// for a factory returning this object, so load() and the actions hit these
+// spies instead of the network.
+const mockApi = {
+  GET: vi.fn(),
+  POST: vi.fn(),
+  PATCH: vi.fn()
+};
+
+vi.mock('$lib/api.server', () => ({
+  createApiClient: () => mockApi
+}));
+
+// Builds the /chronik URL with an optional query string (e.g. '?filter=x').
+function buildUrl(search = ''): URL {
+  return new URL(`http://localhost/chronik${search}`);
+}
+
+// Reset call history between tests so per-test mockImplementations and
+// call-count assertions don't leak across cases.
+beforeEach(() => {
+  vi.clearAllMocks();
+});
+
+// Tests for the /chronik load(): request shapes (limit=40, unread-only
+// notifications), the success payload, the 'activity' loadError surface,
+// and filter-param parsing with the 'alle' fallback.
+describe('chronik/load', () => {
+  it('requests the activity feed with a 40-item limit', async () => {
+    mockApi.GET.mockImplementation((path: string) => {
+      if (path === '/api/dashboard/activity') {
+        return Promise.resolve({ response: { ok: true }, data: [] });
+      }
+      return Promise.resolve({ response: { ok: true }, data: { content: [] } });
+    });
+
+    await load({ fetch, url: buildUrl() } as never);
+
+    expect(mockApi.GET).toHaveBeenCalledWith('/api/dashboard/activity', {
+      params: { query: { limit: 40 } }
+    });
+  });
+
+  it('requests only unread notifications for Für-dich', async () => {
+    mockApi.GET.mockImplementation((path: string) => {
+      if (path === '/api/dashboard/activity') {
+        return Promise.resolve({ response: { ok: true }, data: [] });
+      }
+      return Promise.resolve({ response: { ok: true }, data: { content: [] } });
+    });
+
+    await load({ fetch, url: buildUrl() } as never);
+
+    expect(mockApi.GET).toHaveBeenCalledWith('/api/notifications', {
+      params: { query: { read: false, page: 0, size: 20 } }
+    });
+  });
+
+  it('returns the activity feed and unread notifications on success', async () => {
+    const feed = [{ kind: 'FILE_UPLOADED', documentId: 'd1' }];
+    const unread = [{ id: 'n1', type: 'MENTION' }];
+    mockApi.GET.mockImplementation((path: string) => {
+      if (path === '/api/dashboard/activity') {
+        return Promise.resolve({ response: { ok: true }, data: feed });
+      }
+      return Promise.resolve({ response: { ok: true }, data: { content: unread } });
+    });
+
+    const result = await load({ fetch, url: buildUrl() } as never);
+
+    expect(result.activityFeed).toEqual(feed);
+    expect(result.unreadNotifications).toEqual(unread);
+    expect(result.filter).toBe('alle');
+    expect(result.loadError).toBeNull();
+  });
+
+  it('surfaces "activity" loadError when the dashboard endpoint returns non-ok', async () => {
+    mockApi.GET.mockImplementation((path: string) => {
+      if (path === '/api/dashboard/activity') {
+        return Promise.resolve({ response: { ok: false, status: 500 }, error: {} });
+      }
+      return Promise.resolve({ response: { ok: true }, data: { content: [] } });
+    });
+
+    const result = await load({ fetch, url: buildUrl() } as never);
+
+    expect(result.loadError).toBe('activity');
+    expect(result.activityFeed).toEqual([]);
+  });
+
+  it('parses the filter query param, falling back to "alle" for invalid values', async () => {
+    mockApi.GET.mockResolvedValue({ response: { ok: true }, data: [] });
+
+    const validResult = await load({ fetch, url: buildUrl('?filter=fuer-dich') } as never);
+    expect(validResult.filter).toBe('fuer-dich');
+
+    mockApi.GET.mockResolvedValue({ response: { ok: true }, data: [] });
+    const invalidResult = await load({ fetch, url: buildUrl('?filter=bogus') } as never);
+    expect(invalidResult.filter).toBe('alle');
+  });
+});
+
+// Tests for the ?/dismiss and ?/mark-all form actions: API call shapes,
+// the success result, and the 400 failure branch when "id" is absent.
+describe('chronik/actions', () => {
+  it('dismiss: PATCHes /api/notifications/{id}/read with the form id', async () => {
+    mockApi.PATCH.mockResolvedValue({ response: { ok: true } });
+    const formData = new FormData();
+    formData.set('id', 'n-42');
+
+    const result = await actions.dismiss({
+      request: { formData: async () => formData },
+      fetch
+    } as never);
+
+    expect(mockApi.PATCH).toHaveBeenCalledWith('/api/notifications/{id}/read', {
+      params: { path: { id: 'n-42' } }
+    });
+    expect(result).toEqual({ success: true });
+  });
+
+  it('dismiss: fails with 400 when id is missing', async () => {
+    // Empty FormData → fail(400) is returned (not thrown) by the action.
+    const formData = new FormData();
+    const result = await actions.dismiss({
+      request: { formData: async () => formData },
+      fetch
+    } as never);
+    expect((result as { status: number }).status).toBe(400);
+  });
+
+  it('mark-all: POSTs /api/notifications/read-all', async () => {
+    mockApi.POST.mockResolvedValue({ response: { ok: true } });
+    const result = await actions['mark-all']({ fetch } as never);
+    expect(mockApi.POST).toHaveBeenCalledWith('/api/notifications/read-all');
+    expect(result).toEqual({ success: true });
+  });
+});
--
2.49.1
From 380a2c83e0c148997600c04e319b273355be1d51 Mon Sep 17 00:00:00 2001
From: Marcel
Date: Mon, 20 Apr 2026 17:05:36 +0200
Subject: [PATCH 12/19] feat(notifications): delete /notifications route in
favor of /chronik
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
The app is pre-production — no 301 redirect, the old route and its tests
are removed outright. Profile page's "Benachrichtigungsverlauf ansehen"
link now points to /chronik.
Part of #285.
Co-Authored-By: Claude Opus 4.7 (1M context)
---
.../src/routes/notifications/+page.server.ts | 35 ---
.../src/routes/notifications/+page.svelte | 279 ------------------
.../routes/notifications/page.server.spec.ts | 136 ---------
frontend/src/routes/profile/+page.svelte | 5 +-
4 files changed, 1 insertion(+), 454 deletions(-)
delete mode 100644 frontend/src/routes/notifications/+page.server.ts
delete mode 100644 frontend/src/routes/notifications/+page.svelte
delete mode 100644 frontend/src/routes/notifications/page.server.spec.ts
diff --git a/frontend/src/routes/notifications/+page.server.ts b/frontend/src/routes/notifications/+page.server.ts
deleted file mode 100644
index 42485660..00000000
--- a/frontend/src/routes/notifications/+page.server.ts
+++ /dev/null
@@ -1,35 +0,0 @@
-import { error, redirect } from '@sveltejs/kit';
-import { createApiClient } from '$lib/api.server';
-import { getErrorMessage } from '$lib/errors';
-import type { PageServerLoad, Actions } from './$types';
-
-export const load: PageServerLoad = async ({ fetch, url }) => {
- const api = createApiClient(fetch);
-
- const type = url.searchParams.get('type') ?? undefined;
- const readParam = url.searchParams.get('read');
- const read = readParam !== null ? readParam === 'true' : undefined;
-
- const result = await api.GET('/api/notifications', {
- params: { query: { type: type as 'MENTION' | 'REPLY' | undefined, read, page: 0, size: 20 } }
- });
-
- if (!result.response.ok) {
- const code = (result.error as unknown as { code?: string })?.code;
- throw error(result.response.status, getErrorMessage(code));
- }
-
- const page = result.data!;
- const notifications = page.content ?? [];
- const unreadCount = notifications.filter((n) => !n.read).length;
-
- return { notifications, unreadCount, totalPages: page.totalPages ?? 1 };
-};
-
-export const actions: Actions = {
- 'mark-all': async ({ fetch }) => {
- const api = createApiClient(fetch);
- await api.POST('/api/notifications/read-all');
- redirect(303, '/notifications');
- }
-};
diff --git a/frontend/src/routes/notifications/+page.svelte b/frontend/src/routes/notifications/+page.svelte
deleted file mode 100644
index 42a35391..00000000
--- a/frontend/src/routes/notifications/+page.svelte
+++ /dev/null
@@ -1,279 +0,0 @@
-
-
-
- {m.notification_history_heading()}
-
-
-
-
-
-
-
- {m.btn_back_to_overview()}
-
-
-
-
-
- {m.notification_history_heading()}
-
- {#if data.unreadCount > 0}
-
- {/if}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {#if allNotifications.length === 0}
-
-
-
- {m.notification_empty_history()}
-
-
- {m.notification_empty_history_body()}
-
-
- {:else}
-
- {/if}
-
-
- {#if hasMore}
-
- {/if}
-
-
diff --git a/frontend/src/routes/notifications/page.server.spec.ts b/frontend/src/routes/notifications/page.server.spec.ts
deleted file mode 100644
index 05d4fb2a..00000000
--- a/frontend/src/routes/notifications/page.server.spec.ts
+++ /dev/null
@@ -1,136 +0,0 @@
-import { describe, expect, it, vi, beforeEach } from 'vitest';
-
-vi.mock('$lib/api.server', () => ({ createApiClient: vi.fn() }));
-
-import { load, actions } from './+page.server';
-import { createApiClient } from '$lib/api.server';
-
-beforeEach(() => vi.clearAllMocks());
-
-function makeUrl(params: Record = {}) {
- const url = new URL('http://localhost/notifications');
- for (const [key, value] of Object.entries(params)) {
- url.searchParams.set(key, value);
- }
- return url;
-}
-
-// ─── load ─────────────────────────────────────────────────────────────────────
-
-describe('notifications page load', () => {
- it('returns notifications and unreadCount from API response', async () => {
- const mockGet = vi.fn().mockResolvedValueOnce({
- response: { ok: true },
- data: {
- content: [
- { id: 'n1', read: false },
- { id: 'n2', read: true },
- { id: 'n3', read: false }
- ],
- totalElements: 3,
- totalPages: 1,
- number: 0
- }
- });
- vi.mocked(createApiClient).mockReturnValue({ GET: mockGet } as ReturnType<
- typeof createApiClient
- >);
-
- const result = await load({ url: makeUrl(), fetch: vi.fn() as unknown as typeof fetch });
-
- expect(result.notifications).toHaveLength(3);
- expect(result.unreadCount).toBe(2);
- });
-
- it('passes type param to API when ?type=MENTION is in URL', async () => {
- const mockGet = vi.fn().mockResolvedValueOnce({
- response: { ok: true },
- data: { content: [], totalElements: 0, totalPages: 0, number: 0 }
- });
- vi.mocked(createApiClient).mockReturnValue({ GET: mockGet } as ReturnType<
- typeof createApiClient
- >);
-
- await load({ url: makeUrl({ type: 'MENTION' }), fetch: vi.fn() as unknown as typeof fetch });
-
- const queryParams = mockGet.mock.calls[0][1].params.query;
- expect(queryParams.type).toBe('MENTION');
- });
-
- it('passes read=false to API when ?read=false is in URL', async () => {
- const mockGet = vi.fn().mockResolvedValueOnce({
- response: { ok: true },
- data: { content: [], totalElements: 0, totalPages: 0, number: 0 }
- });
- vi.mocked(createApiClient).mockReturnValue({ GET: mockGet } as ReturnType<
- typeof createApiClient
- >);
-
- await load({ url: makeUrl({ read: 'false' }), fetch: vi.fn() as unknown as typeof fetch });
-
- const queryParams = mockGet.mock.calls[0][1].params.query;
- expect(queryParams.read).toBe(false);
- });
-
- it('passes no filter params when no search params present', async () => {
- const mockGet = vi.fn().mockResolvedValueOnce({
- response: { ok: true },
- data: { content: [], totalElements: 0, totalPages: 0, number: 0 }
- });
- vi.mocked(createApiClient).mockReturnValue({ GET: mockGet } as ReturnType<
- typeof createApiClient
- >);
-
- await load({ url: makeUrl(), fetch: vi.fn() as unknown as typeof fetch });
-
- const queryParams = mockGet.mock.calls[0][1].params.query;
- expect(queryParams.type).toBeUndefined();
- expect(queryParams.read).toBeUndefined();
- });
-
- it('calls the API exactly once — no separate round-trip for unreadCount', async () => {
- const mockGet = vi.fn().mockResolvedValueOnce({
- response: { ok: true },
- data: { content: [], totalElements: 0, totalPages: 0, number: 0 }
- });
- vi.mocked(createApiClient).mockReturnValue({ GET: mockGet } as ReturnType<
- typeof createApiClient
- >);
-
- await load({ url: makeUrl(), fetch: vi.fn() as unknown as typeof fetch });
-
- expect(mockGet).toHaveBeenCalledTimes(1);
- });
-
- it('throws 401 error when API returns 401', async () => {
- const mockGet = vi.fn().mockResolvedValueOnce({
- response: { ok: false, status: 401 },
- data: null
- });
- vi.mocked(createApiClient).mockReturnValue({ GET: mockGet } as ReturnType<
- typeof createApiClient
- >);
-
- await expect(
- load({ url: makeUrl(), fetch: vi.fn() as unknown as typeof fetch })
- ).rejects.toMatchObject({ status: 401 });
- });
-});
-
-// ─── mark-all action ──────────────────────────────────────────────────────────
-
-describe('notifications mark-all action', () => {
- it('calls POST /api/notifications/read-all and redirects', async () => {
- const mockPost = vi.fn().mockResolvedValueOnce({ response: { ok: true } });
- vi.mocked(createApiClient).mockReturnValue({ POST: mockPost } as ReturnType<
- typeof createApiClient
- >);
-
- const markAll = actions['mark-all'] as (ctx: { fetch: typeof fetch }) => Promise;
- await expect(markAll({ fetch: vi.fn() as unknown as typeof fetch })).rejects.toMatchObject({
- location: '/notifications'
- });
-
- expect(mockPost).toHaveBeenCalledTimes(1);
- });
-});
diff --git a/frontend/src/routes/profile/+page.svelte b/frontend/src/routes/profile/+page.svelte
index 39de19a7..24d25c12 100644
--- a/frontend/src/routes/profile/+page.svelte
+++ b/frontend/src/routes/profile/+page.svelte
@@ -102,10 +102,7 @@ const hasEmail = $derived(!!data.user?.email);
--
2.49.1
From 4c8bc8517b56ea315f0c0e22c7e75fb67f948c2d Mon Sep 17 00:00:00 2001
From: Marcel
Date: Mon, 20 Apr 2026 17:06:33 +0200
Subject: [PATCH 13/19] fix(dashboard): retarget feed footer to /chronik +
render rollup rows
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
- "Alle anzeigen" link now goes to /chronik (was /documents — the dead-end
bug called out in #285).
- Rollup rows (count > 1) render a primary-colored count badge plus a
compound timestamp line: "14. Apr. · 14:02–14:32" (en-dash U+2013).
- Singleton rows render the existing "14. Apr. 2026" date line.
- BLOCK_REVIEWED now has a verb mapping (re-using the annotation verb until
the spec pins a distinct copy).
- Three new spec cases: rollup count badge + en-dash range, no badge on
singletons, /chronik link assertion.
Part of #285.
Co-Authored-By: Claude Opus 4.7 (1M context)
---
.../components/DashboardActivityFeed.svelte | 35 +++++++++++++++++--
.../DashboardActivityFeed.svelte.spec.ts | 29 ++++++++++++++-
2 files changed, 60 insertions(+), 4 deletions(-)
diff --git a/frontend/src/lib/components/DashboardActivityFeed.svelte b/frontend/src/lib/components/DashboardActivityFeed.svelte
index d24fdd91..2595fd3e 100644
--- a/frontend/src/lib/components/DashboardActivityFeed.svelte
+++ b/frontend/src/lib/components/DashboardActivityFeed.svelte
@@ -1,6 +1,8 @@
@@ -35,7 +56,7 @@ function formatDate(iso: string): string {
{m.feed_caption()}
{m.feed_show_all()}
@@ -66,6 +87,14 @@ function formatDate(iso: string): string {
{item.documentTitle}
+ {#if item.count > 1}
+
+ {item.count}
+
+ {/if}
{#if item.youMentioned}
{/if}
- {formatDate(item.happenedAt)}
+ {timestamp(item)}
{/each}
diff --git a/frontend/src/lib/components/DashboardActivityFeed.svelte.spec.ts b/frontend/src/lib/components/DashboardActivityFeed.svelte.spec.ts
index b12c682c..51bfd384 100644
--- a/frontend/src/lib/components/DashboardActivityFeed.svelte.spec.ts
+++ b/frontend/src/lib/components/DashboardActivityFeed.svelte.spec.ts
@@ -17,7 +17,8 @@ const baseItem: ActivityFeedItemDTO = {
documentId: 'doc-1',
documentTitle: 'Brief 1920',
happenedAt: '2026-04-19T10:00:00Z',
- youMentioned: false
+ youMentioned: false,
+ count: 1
};
describe('DashboardActivityFeed', () => {
@@ -39,4 +40,30 @@ describe('DashboardActivityFeed', () => {
const section = page.getByText('Kommentare & Aktivität');
await expect.element(section).toBeInTheDocument();
});
+
+ it('renders count badge and en-dash time range for rollup rows (count > 1)', async () => {
+ const rollup: ActivityFeedItemDTO = {
+ ...baseItem,
+ count: 20,
+ happenedAtUntil: '2026-04-19T10:32:00Z'
+ };
+ render(DashboardActivityFeed, { feed: [rollup] });
+ const badge = page.getByTestId('feed-rollup-count');
+ await expect.element(badge).toHaveTextContent('20');
+ // "–" is U+2013 en-dash
+ const stamp = page.getByText(/\u2013/);
+ await expect.element(stamp).toBeInTheDocument();
+ });
+
+ it('does not render count badge for singleton rows (count === 1)', async () => {
+ render(DashboardActivityFeed, { feed: [baseItem] });
+ const badge = page.getByTestId('feed-rollup-count');
+ await expect.element(badge).not.toBeInTheDocument();
+ });
+
+ it('links the "show all" footer to /chronik, not /documents', async () => {
+ render(DashboardActivityFeed, { feed: [] });
+ const link = page.getByRole('link', { name: /alle anzeigen/i });
+ await expect.element(link).toHaveAttribute('href', '/chronik');
+ });
});
--
2.49.1
From d5b74d7569cedaf06ac7ac2a2790b5c429f8ce86 Mon Sep 17 00:00:00 2001
From: Marcel
Date: Mon, 20 Apr 2026 17:07:19 +0200
Subject: [PATCH 14/19] fix(notifications): retarget bell dropdown footer to
/chronik
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
The "Alle anzeigen" link at the bottom of the notification dropdown now
points to /chronik with the new "Zur Chronik →" label key, matching the
unified activity page introduced in #285.
Part of #285.
Co-Authored-By: Claude Opus 4.7 (1M context)
---
frontend/src/lib/components/NotificationDropdown.svelte | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/frontend/src/lib/components/NotificationDropdown.svelte b/frontend/src/lib/components/NotificationDropdown.svelte
index 14cc78df..4cf775dd 100644
--- a/frontend/src/lib/components/NotificationDropdown.svelte
+++ b/frontend/src/lib/components/NotificationDropdown.svelte
@@ -128,11 +128,11 @@ let { notifications, onMarkRead, onMarkAllRead, onClose }: Props = $props();
--
2.49.1
From b33d7442695d7cca29198e5660eb76ddc41ea6f0 Mon Sep 17 00:00:00 2001
From: Marcel
Date: Mon, 20 Apr 2026 17:08:33 +0200
Subject: [PATCH 15/19] test(a11y): add /chronik to AUTHENTICATED_PAGES for
axe-playwright sweep
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Three free axe checks light up (light / system-dark / manual-dark) without
further code changes — they run the existing parameterized spec against
/chronik.
Part of #285.
Co-Authored-By: Claude Opus 4.7 (1M context)
---
frontend/e2e/accessibility.spec.ts | 1 +
1 file changed, 1 insertion(+)
diff --git a/frontend/e2e/accessibility.spec.ts b/frontend/e2e/accessibility.spec.ts
index c5b23944..5722390d 100644
--- a/frontend/e2e/accessibility.spec.ts
+++ b/frontend/e2e/accessibility.spec.ts
@@ -10,6 +10,7 @@ import { test, expect } from '@playwright/test';
const AUTHENTICATED_PAGES = [
{ name: 'home', path: '/' },
{ name: 'persons', path: '/persons' },
+ { name: 'chronik', path: '/chronik' },
{ name: 'admin', path: '/admin' }
];
--
2.49.1
From 7035c5d73f44fd7b72202554b57294937ee2fcce Mon Sep 17 00:00:00 2001
From: Marcel
Date: Mon, 20 Apr 2026 18:05:28 +0200
Subject: [PATCH 16/19] =?UTF-8?q?fix(chronik):=20split=20F=C3=BCr-dich=20r?=
=?UTF-8?q?ow=20markup=20=E2=80=94=20Dismiss=20is=20sibling=20of=20link,?=
=?UTF-8?q?=20not=20nested?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
HTML5 forbids interactive content (