Compare commits
315 Commits
refactor/i
...
feat/issue
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d5e0d2226a | ||
|
|
b6466fcd95 | ||
|
|
e1d51728d9 | ||
|
|
55ce696428 | ||
|
|
12d92c78ea | ||
|
|
d9157b99dd | ||
|
|
3ede42503a | ||
|
|
117044aad9 | ||
|
|
eac025dec1 | ||
|
|
5147973379 | ||
|
|
3589e8659e | ||
|
|
bc762246e5 | ||
|
|
7f6380452f | ||
|
|
267380f714 | ||
|
|
7506f8743a | ||
|
|
520cca58b8 | ||
|
|
4bd1ebfd1e | ||
|
|
647a82b085 | ||
|
|
a3a9ad0471 | ||
|
|
812053cd6b | ||
|
|
20cac8f6d9 | ||
|
|
935a8b16d2 | ||
|
|
24b203ac80 | ||
|
|
5a98edac86 | ||
|
|
d34e8986af | ||
|
|
06c75af96b | ||
|
|
ddd811c634 | ||
|
|
250a00ff3c | ||
|
|
56a44bcef9 | ||
|
|
b3ae379be7 | ||
|
|
74febd37f6 | ||
|
|
70a2bbfaad | ||
|
|
5246638014 | ||
|
|
d6e5d3d1e8 | ||
|
|
94823f85c8 | ||
|
|
6494b13147 | ||
|
|
2bb08b6877 | ||
|
|
148710f2ed | ||
|
|
18e321b1e6 | ||
|
|
3aec856bac | ||
|
|
3f773cd9c3 | ||
|
|
09a8081e35 | ||
|
|
d19116fd05 | ||
|
|
bae07c8171 | ||
|
|
64c5b40eae | ||
|
|
0c65d5d748 | ||
|
|
031f6ea29a | ||
|
|
43f19ebe87 | ||
|
|
77a4cbd188 | ||
|
|
9407cb9dc4 | ||
|
|
80c952cd6c | ||
|
|
615392216c | ||
|
|
37203e96ab | ||
|
|
10dbce1c70 | ||
|
|
99247ed58d | ||
|
|
714f00ef9d | ||
|
|
9e0b72bc10 | ||
|
|
c678432d25 | ||
|
|
19832dc1e0 | ||
|
|
b3013c42c0 | ||
|
|
cb02dc84f6 | ||
|
|
428c63a2f2 | ||
|
|
5a3b5ff3c7 | ||
|
|
2deaaf167e | ||
|
|
9887968236 | ||
|
|
793b863096 | ||
|
|
692c2c0629 | ||
|
|
d07f7debf8 | ||
|
|
1926e8e6e5 | ||
| 18a93f5b38 | |||
|
|
88012a1193 | ||
|
|
9fc4993fca | ||
|
|
f8f5ea634e | ||
|
|
103d454e14 | ||
|
|
daea748a20 | ||
|
|
61fa35df67 | ||
|
|
b4004fce56 | ||
|
|
e1ddd66704 | ||
|
|
d816e94a90 | ||
|
|
5e01db1c74 | ||
|
|
c4444a07d1 | ||
|
|
79259aa348 | ||
|
|
0b0559cbe9 | ||
|
|
fced33e033 | ||
|
|
208c1adc3e | ||
|
|
a7a5123839 | ||
|
|
d31ea12086 | ||
|
|
b0ea5f5552 | ||
|
|
8225bd660b | ||
|
|
fcc4c4665c | ||
|
|
9bad9e807b | ||
|
|
91500c4cf1 | ||
|
|
f7ed154e4d | ||
|
|
3c3680b1e6 | ||
|
|
c4e1f1e599 | ||
|
|
8ed66ae82f | ||
|
|
f0bdcf334b | ||
|
|
fa14a11244 | ||
|
|
0c2435e0a8 | ||
|
|
c62bf9085c | ||
|
|
047b7c71ff | ||
|
|
1d9990715d | ||
|
|
96f8bfd822 | ||
|
|
40db46945f | ||
|
|
f7747ba352 | ||
|
|
88f3f3e7eb | ||
|
|
10eefc48c7 | ||
|
|
af5918b5e8 | ||
|
|
a3a40ed179 | ||
|
|
38a9719bdb | ||
|
|
699d5e5759 | ||
|
|
afe84a6af7 | ||
|
|
3ee4424556 | ||
|
|
b23118268b | ||
|
|
4ddb095cb1 | ||
|
|
af49bf5e7a | ||
|
|
3bbc64cfc6 | ||
|
|
8860f17129 | ||
|
|
5e4f031537 | ||
|
|
8acae8ea4d | ||
|
|
7a500644a9 | ||
|
|
3b3f960a30 | ||
|
|
8e844dd16e | ||
|
|
12c4d433ba | ||
|
|
16787f2771 | ||
|
|
c3939e0f13 | ||
|
|
4f86011ffb | ||
|
|
3ecda655c5 | ||
|
|
68ec66002a | ||
|
|
a296ad527e | ||
|
|
a8bd2606a0 | ||
|
|
607a3567e6 | ||
|
|
7cc90b8a90 | ||
|
|
cd31bf63c1 | ||
|
|
add799c57f | ||
|
|
a146a2ec3c | ||
|
|
548ad0fa68 | ||
|
|
e3c8e1a067 | ||
|
|
1279753ddb | ||
|
|
6c2e7078ba | ||
|
|
cea1234400 | ||
|
|
9ff498a194 | ||
|
|
8128769feb | ||
|
|
16bcd0f73c | ||
|
|
fc892f0f59 | ||
|
|
2466553216 | ||
|
|
794000cbd1 | ||
|
|
269894a47a | ||
|
|
a00617194c | ||
|
|
b879d28761 | ||
|
|
8acb830649 | ||
|
|
0d8ac46639 | ||
|
|
5f4e60a14c | ||
|
|
f533817c7b | ||
|
|
99e7176eac | ||
|
|
c3fa09d12e | ||
|
|
178afcd496 | ||
|
|
b1b7418404 | ||
|
|
a52c8bf079 | ||
|
|
da0a7e9194 | ||
|
|
bbfd234746 | ||
|
|
b396fccd52 | ||
|
|
64a854aad6 | ||
|
|
92f3c04d54 | ||
|
|
0d5f3f38d0 | ||
|
|
4aa477555d | ||
|
|
84c09e41ef | ||
|
|
e16dcdb7dc | ||
|
|
000079fd50 | ||
|
|
a09a9e6043 | ||
|
|
1e289100a1 | ||
|
|
0c2175aa07 | ||
|
|
f76a9cce1f | ||
|
|
e2081b57e7 | ||
|
|
07035b9fa9 | ||
|
|
57ffb7d751 | ||
|
|
eab37b9ac9 | ||
|
|
2459408930 | ||
|
|
09f4601d15 | ||
|
|
1b34a36a77 | ||
|
|
8d041a377d | ||
|
|
18cf839fac | ||
|
|
78eca8e9a1 | ||
|
|
386dc83958 | ||
|
|
60c1ec7b5f | ||
|
|
10a4a4d94b | ||
|
|
e0b7cfdada | ||
|
|
b5e1a8ac2f | ||
|
|
64d27d6d61 | ||
|
|
7a342a07cf | ||
|
|
bd23a76330 | ||
|
|
c5e6ed922b | ||
|
|
ec85f228c1 | ||
|
|
fea24aee25 | ||
|
|
68b57918eb | ||
|
|
77100ab1e6 | ||
|
|
092131930c | ||
|
|
47f9a0bf73 | ||
|
|
30a6cbeb7f | ||
|
|
6faaa3b7d6 | ||
|
|
77747aa556 | ||
|
|
9a64c0698f | ||
|
|
4cb7c975f5 | ||
|
|
97c94c91f8 | ||
|
|
eaefd4091e | ||
|
|
ba36a88b65 | ||
|
|
b310caaeeb | ||
|
|
615d404ba9 | ||
|
|
7183fc4428 | ||
|
|
bf010a23c3 | ||
|
|
b01761d800 | ||
|
|
6b7829d5c8 | ||
|
|
1b617aa08b | ||
|
|
5120dd19a1 | ||
|
|
d075bf390a | ||
|
|
59b7f7cddf | ||
|
|
3b1317af98 | ||
|
|
4442b25a7a | ||
|
|
47d57b96c8 | ||
|
|
902172e4e2 | ||
|
|
654575bf16 | ||
|
|
b5ea04e47a | ||
|
|
4ec4062274 | ||
|
|
3cd6483042 | ||
|
|
aff7afa7cb | ||
|
|
be7009f9ed | ||
|
|
e6497ebff4 | ||
|
|
ba8758c085 | ||
|
|
61976e9479 | ||
|
|
901483ab73 | ||
|
|
6f6ff8e9ed | ||
|
|
7919ba3a57 | ||
|
|
d7a46de1cc | ||
|
|
172c5613ed | ||
|
|
f1889ff20c | ||
|
|
4d670de156 | ||
|
|
b6b1b142dc | ||
|
|
a3660a79e1 | ||
|
|
53d89a44fc | ||
|
|
83629e0c6e | ||
|
|
97fbf1e4ca | ||
|
|
9b5af67780 | ||
|
|
e01733eaf2 | ||
|
|
a669f6368d | ||
|
|
5e5c249aba | ||
|
|
609d242f5d | ||
|
|
c03c391879 | ||
|
|
f921284db6 | ||
|
|
b9b572436a | ||
|
|
a05d9c22ae | ||
|
|
de7c48117b | ||
|
|
06fd5ae2da | ||
|
|
171f06da22 | ||
|
|
89949977c7 | ||
|
|
532692e0fb | ||
|
|
39ed66c97f | ||
|
|
7f53651f13 | ||
|
|
d900480920 | ||
|
|
abba85a451 | ||
|
|
b54d2b0125 | ||
|
|
e03fb38274 | ||
|
|
e8e54cc282 | ||
|
|
e4f21bd896 | ||
|
|
c3e007d421 | ||
|
|
57dc72b51d | ||
|
|
3fba740469 | ||
|
|
f9ac963b9f | ||
|
|
b0c6d15f99 | ||
|
|
e808525312 | ||
|
|
da5c92fe39 | ||
|
|
6c2da648db | ||
|
|
ca660f103d | ||
|
|
06eb1cada8 | ||
|
|
d78685c5a4 | ||
|
|
23410aa4b8 | ||
|
|
e041c75793 | ||
|
|
adea7d498f | ||
|
|
4cf01a0f1d | ||
|
|
2e4d9a8375 | ||
|
|
ff1606f63d | ||
|
|
8980d810d4 | ||
|
|
ca0cf4903c | ||
|
|
9fb1821db5 | ||
|
|
86a216918f | ||
|
|
48152517aa | ||
|
|
4af2e4ad17 | ||
|
|
94b5d1a5a8 | ||
|
|
aa8fb70d10 | ||
|
|
9404ec34ce | ||
|
|
78abc7f726 | ||
|
|
f36bebd1a8 | ||
|
|
53c5d90340 | ||
|
|
2ea603a3bf | ||
|
|
d7b2357834 | ||
|
|
eb18d4f568 | ||
|
|
091f7e5d25 | ||
|
|
32f151ff31 | ||
|
|
9ff8423da6 | ||
|
|
162397d4eb | ||
|
|
fabab6b502 | ||
|
|
bcb2898e5f | ||
|
|
2c64a6d8a4 | ||
|
|
b74ae27171 | ||
|
|
2817410f94 | ||
|
|
63d1a2e1ff | ||
|
|
bb29cac496 | ||
|
|
60dc73ba04 | ||
|
|
6cffd36b22 | ||
|
|
f723a83011 | ||
|
|
c235151075 | ||
|
|
741eebc276 | ||
|
|
8a5ca6868f | ||
|
|
a15b5ebf17 | ||
|
|
ed12a54339 | ||
| 4b8da0024f |
@@ -47,6 +47,26 @@ jobs:
|
|||||||
name: unit-test-screenshots
|
name: unit-test-screenshots
|
||||||
path: frontend/test-results/screenshots/
|
path: frontend/test-results/screenshots/
|
||||||
|
|
||||||
|
# ─── OCR Service Unit Tests ───────────────────────────────────────────────────
|
||||||
|
# Only spell_check.py, test_confidence.py, test_sender_registry.py — no ML stack required.
|
||||||
|
ocr-tests:
|
||||||
|
name: OCR Service Tests
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.11'
|
||||||
|
|
||||||
|
- name: Install test dependencies
|
||||||
|
run: pip install "pyspellchecker==0.9.0" pytest pytest-asyncio
|
||||||
|
working-directory: ocr-service
|
||||||
|
|
||||||
|
- name: Run OCR unit tests (no ML stack required)
|
||||||
|
run: python -m pytest test_spell_check.py test_confidence.py test_sender_registry.py -v
|
||||||
|
working-directory: ocr-service
|
||||||
|
|
||||||
# ─── Backend Unit & Slice Tests ───────────────────────────────────────────────
|
# ─── Backend Unit & Slice Tests ───────────────────────────────────────────────
|
||||||
# Pure Mockito + WebMvcTest — no DB or S3 needed.
|
# Pure Mockito + WebMvcTest — no DB or S3 needed.
|
||||||
backend-unit-tests:
|
backend-unit-tests:
|
||||||
|
|||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -12,3 +12,4 @@ scripts/large-data.sql
|
|||||||
|
|
||||||
.vitest-attachments
|
.vitest-attachments
|
||||||
**/test-results/
|
**/test-results/
|
||||||
|
.worktrees/
|
||||||
|
|||||||
@@ -103,6 +103,11 @@
|
|||||||
<groupId>org.springframework.boot</groupId>
|
<groupId>org.springframework.boot</groupId>
|
||||||
<artifactId>spring-boot-starter-webmvc-test</artifactId>
|
<artifactId>spring-boot-starter-webmvc-test</artifactId>
|
||||||
<scope>test</scope>
|
<scope>test</scope>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.awaitility</groupId>
|
||||||
|
<artifactId>awaitility</artifactId>
|
||||||
|
<scope>test</scope>
|
||||||
</dependency>
|
</dependency>
|
||||||
<!-- Excel Bearbeitung (Apache POI) -->
|
<!-- Excel Bearbeitung (Apache POI) -->
|
||||||
<dependency>
|
<dependency>
|
||||||
@@ -146,6 +151,12 @@
|
|||||||
<artifactId>flyway-database-postgresql</artifactId>
|
<artifactId>flyway-database-postgresql</artifactId>
|
||||||
</dependency>
|
</dependency>
|
||||||
|
|
||||||
|
<!-- Caffeine cache for in-memory rate limiting -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>com.github.ben-manes.caffeine</groupId>
|
||||||
|
<artifactId>caffeine</artifactId>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
<!-- OpenAPI / Swagger UI — enabled only in the dev Spring profile -->
|
<!-- OpenAPI / Swagger UI — enabled only in the dev Spring profile -->
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>org.springdoc</groupId>
|
<groupId>org.springdoc</groupId>
|
||||||
|
|||||||
@@ -0,0 +1,10 @@
|
|||||||
|
package org.raddatz.familienarchiv.audit;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
import jakarta.annotation.Nullable;
|
||||||
|
|
||||||
|
public record ActivityActorDTO(
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String initials,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String color,
|
||||||
|
@Nullable String name
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,15 @@
|
|||||||
|
package org.raddatz.familienarchiv.audit;
|
||||||
|
|
||||||
|
import java.time.Instant;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
public interface ActivityFeedRow {
|
||||||
|
String getKind();
|
||||||
|
UUID getActorId();
|
||||||
|
String getActorInitials();
|
||||||
|
String getActorColor();
|
||||||
|
String getActorName();
|
||||||
|
UUID getDocumentId();
|
||||||
|
Instant getHappenedAt();
|
||||||
|
boolean isYouMentioned();
|
||||||
|
}
|
||||||
@@ -0,0 +1,28 @@
|
|||||||
|
package org.raddatz.familienarchiv.audit;
|
||||||
|
|
||||||
|
public enum AuditKind {
|
||||||
|
|
||||||
|
/** Payload: none */
|
||||||
|
FILE_UPLOADED,
|
||||||
|
|
||||||
|
/** Payload: {@code {"oldStatus": "UPLOADED", "newStatus": "TRANSCRIBED"}} */
|
||||||
|
STATUS_CHANGED,
|
||||||
|
|
||||||
|
/** Payload: none */
|
||||||
|
METADATA_UPDATED,
|
||||||
|
|
||||||
|
/** Payload: {@code {"pageNumber": 3}} */
|
||||||
|
TEXT_SAVED,
|
||||||
|
|
||||||
|
/** Payload: none */
|
||||||
|
BLOCK_REVIEWED,
|
||||||
|
|
||||||
|
/** Payload: {@code {"pageNumber": 3}} */
|
||||||
|
ANNOTATION_CREATED,
|
||||||
|
|
||||||
|
/** Payload: {@code {"commentId": "uuid"}} */
|
||||||
|
COMMENT_ADDED,
|
||||||
|
|
||||||
|
/** Payload: {@code {"commentId": "uuid", "mentionedUserId": "uuid"}} */
|
||||||
|
MENTION_CREATED,
|
||||||
|
}
|
||||||
@@ -0,0 +1,46 @@
|
|||||||
|
package org.raddatz.familienarchiv.audit;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
import jakarta.persistence.*;
|
||||||
|
import lombok.*;
|
||||||
|
import org.hibernate.annotations.CreationTimestamp;
|
||||||
|
import org.hibernate.annotations.JdbcTypeCode;
|
||||||
|
import org.hibernate.type.SqlTypes;
|
||||||
|
|
||||||
|
import java.time.OffsetDateTime;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
@Entity
|
||||||
|
@Table(name = "audit_log")
|
||||||
|
@Data
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
@Builder
|
||||||
|
public class AuditLog {
|
||||||
|
|
||||||
|
@Id
|
||||||
|
@GeneratedValue(strategy = GenerationType.UUID)
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private UUID id;
|
||||||
|
|
||||||
|
@Column(name = "happened_at", nullable = false, updatable = false)
|
||||||
|
@CreationTimestamp
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private OffsetDateTime happenedAt;
|
||||||
|
|
||||||
|
@Column(name = "actor_id")
|
||||||
|
private UUID actorId;
|
||||||
|
|
||||||
|
@Enumerated(EnumType.STRING)
|
||||||
|
@Column(name = "kind", nullable = false)
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private AuditKind kind;
|
||||||
|
|
||||||
|
@Column(name = "document_id")
|
||||||
|
private UUID documentId;
|
||||||
|
|
||||||
|
@JdbcTypeCode(SqlTypes.JSON)
|
||||||
|
@Column(columnDefinition = "jsonb")
|
||||||
|
private Map<String, Object> payload;
|
||||||
|
}
|
||||||
@@ -0,0 +1,109 @@
|
|||||||
|
package org.raddatz.familienarchiv.audit;
|
||||||
|
|
||||||
|
import org.springframework.data.jpa.repository.JpaRepository;
|
||||||
|
import org.springframework.data.jpa.repository.Query;
|
||||||
|
import org.springframework.data.repository.query.Param;
|
||||||
|
|
||||||
|
import java.time.OffsetDateTime;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Optional;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
public interface AuditLogQueryRepository extends JpaRepository<AuditLog, UUID> {
|
||||||
|
|
||||||
|
@Query(value = """
|
||||||
|
SELECT a.document_id
|
||||||
|
FROM audit_log a
|
||||||
|
WHERE a.kind IN ('TEXT_SAVED', 'ANNOTATION_CREATED')
|
||||||
|
AND a.actor_id = :userId
|
||||||
|
AND a.document_id IS NOT NULL
|
||||||
|
ORDER BY a.happened_at DESC
|
||||||
|
LIMIT 1
|
||||||
|
""", nativeQuery = true)
|
||||||
|
Optional<UUID> findMostRecentDocumentIdByActor(@Param("userId") UUID userId);
|
||||||
|
|
||||||
|
@Query(value = """
|
||||||
|
SELECT * FROM (
|
||||||
|
SELECT DISTINCT ON (a.actor_id, a.document_id, a.kind, date_trunc('hour', a.happened_at))
|
||||||
|
a.kind AS kind,
|
||||||
|
a.actor_id AS actorId,
|
||||||
|
CASE
|
||||||
|
WHEN u.first_name IS NOT NULL AND u.last_name IS NOT NULL
|
||||||
|
THEN UPPER(LEFT(u.first_name, 1)) || UPPER(LEFT(u.last_name, 1))
|
||||||
|
WHEN u.first_name IS NOT NULL THEN UPPER(LEFT(u.first_name, 1))
|
||||||
|
WHEN u.last_name IS NOT NULL THEN UPPER(LEFT(u.last_name, 1))
|
||||||
|
ELSE '?'
|
||||||
|
END AS actorInitials,
|
||||||
|
COALESCE(u.color, '') AS actorColor,
|
||||||
|
CONCAT_WS(' ', u.first_name, u.last_name) AS actorName,
|
||||||
|
a.document_id AS documentId,
|
||||||
|
a.happened_at AS happened_at,
|
||||||
|
(a.kind = 'MENTION_CREATED'
|
||||||
|
AND a.payload->>'mentionedUserId' = :currentUserId) AS youMentioned
|
||||||
|
FROM audit_log a
|
||||||
|
LEFT JOIN users u ON u.id = a.actor_id
|
||||||
|
WHERE a.kind IN ('TEXT_SAVED','FILE_UPLOADED','ANNOTATION_CREATED','COMMENT_ADDED','MENTION_CREATED')
|
||||||
|
AND a.document_id IS NOT NULL
|
||||||
|
ORDER BY a.actor_id, a.document_id, a.kind,
|
||||||
|
date_trunc('hour', a.happened_at), a.happened_at DESC
|
||||||
|
) deduped
|
||||||
|
ORDER BY happened_at DESC
|
||||||
|
LIMIT :limit
|
||||||
|
""", nativeQuery = true)
|
||||||
|
List<ActivityFeedRow> findDedupedActivityFeed(
|
||||||
|
@Param("currentUserId") String currentUserId,
|
||||||
|
@Param("limit") int limit);
|
||||||
|
|
||||||
|
@Query(value = """
|
||||||
|
SELECT
|
||||||
|
COUNT(DISTINCT (a.document_id::text || '|' || (a.payload->>'pageNumber'))) AS pages,
|
||||||
|
COUNT(*) FILTER (WHERE a.kind = 'ANNOTATION_CREATED') AS annotated,
|
||||||
|
COUNT(DISTINCT a.payload->>'blockId') FILTER (WHERE a.kind = 'TEXT_SAVED') AS transcribed,
|
||||||
|
COUNT(DISTINCT a.document_id) FILTER (WHERE a.kind = 'FILE_UPLOADED') AS uploaded,
|
||||||
|
COUNT(DISTINCT (a.document_id::text || '|' || (a.payload->>'pageNumber')))
|
||||||
|
FILTER (WHERE (a.kind = 'ANNOTATION_CREATED' OR a.kind = 'TEXT_SAVED')
|
||||||
|
AND a.actor_id::text = :userId) AS yourPages
|
||||||
|
FROM audit_log a
|
||||||
|
WHERE a.happened_at >= :weekStart
|
||||||
|
AND a.kind IN ('ANNOTATION_CREATED','TEXT_SAVED','FILE_UPLOADED')
|
||||||
|
""", nativeQuery = true)
|
||||||
|
PulseStatsRow getPulseStats(
|
||||||
|
@Param("weekStart") OffsetDateTime weekStart,
|
||||||
|
@Param("userId") String userId);
|
||||||
|
|
||||||
|
@Query(value = """
|
||||||
|
SELECT DISTINCT ON (a.document_id)
|
||||||
|
a.document_id AS documentId,
|
||||||
|
a.actor_id AS actorId
|
||||||
|
FROM audit_log a
|
||||||
|
WHERE a.kind = :kind
|
||||||
|
AND a.document_id IN :documentIds
|
||||||
|
AND a.actor_id IS NOT NULL
|
||||||
|
ORDER BY a.document_id, a.happened_at DESC
|
||||||
|
""", nativeQuery = true)
|
||||||
|
List<Object[]> findMostRecentActorPerDocument(
|
||||||
|
@Param("documentIds") List<UUID> documentIds,
|
||||||
|
@Param("kind") String kind);
|
||||||
|
|
||||||
|
@Query(value = """
|
||||||
|
SELECT
|
||||||
|
a.document_id AS documentId,
|
||||||
|
CASE
|
||||||
|
WHEN u.first_name IS NOT NULL AND u.last_name IS NOT NULL
|
||||||
|
THEN UPPER(LEFT(u.first_name, 1)) || UPPER(LEFT(u.last_name, 1))
|
||||||
|
WHEN u.first_name IS NOT NULL THEN UPPER(LEFT(u.first_name, 1))
|
||||||
|
WHEN u.last_name IS NOT NULL THEN UPPER(LEFT(u.last_name, 1))
|
||||||
|
ELSE '?'
|
||||||
|
END AS actorInitials,
|
||||||
|
COALESCE(u.color, '') AS actorColor,
|
||||||
|
CONCAT_WS(' ', u.first_name, u.last_name) AS actorName
|
||||||
|
FROM audit_log a
|
||||||
|
LEFT JOIN users u ON u.id = a.actor_id
|
||||||
|
WHERE a.kind IN ('ANNOTATION_CREATED', 'TEXT_SAVED', 'BLOCK_REVIEWED')
|
||||||
|
AND a.document_id IN :documentIds
|
||||||
|
AND a.actor_id IS NOT NULL
|
||||||
|
GROUP BY a.document_id, a.actor_id, u.first_name, u.last_name, u.color
|
||||||
|
ORDER BY a.document_id, MIN(a.happened_at)
|
||||||
|
""", nativeQuery = true)
|
||||||
|
List<ContributorRow> findContributorsPerDocument(@Param("documentIds") List<UUID> documentIds);
|
||||||
|
}
|
||||||
@@ -0,0 +1,49 @@
|
|||||||
|
package org.raddatz.familienarchiv.audit;
|
||||||
|
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import org.springframework.stereotype.Service;
|
||||||
|
|
||||||
|
import java.time.OffsetDateTime;
|
||||||
|
import java.util.*;
|
||||||
|
|
||||||
|
@Service
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
public class AuditLogQueryService {
|
||||||
|
|
||||||
|
private final AuditLogQueryRepository queryRepository;
|
||||||
|
|
||||||
|
public Optional<UUID> findMostRecentDocumentForUser(UUID userId) {
|
||||||
|
return queryRepository.findMostRecentDocumentIdByActor(userId);
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<ActivityFeedRow> findActivityFeed(UUID currentUserId, int limit) {
|
||||||
|
return queryRepository.findDedupedActivityFeed(currentUserId.toString(), limit);
|
||||||
|
}
|
||||||
|
|
||||||
|
public PulseStatsRow getPulseStats(OffsetDateTime weekStart, UUID userId) {
|
||||||
|
return queryRepository.getPulseStats(weekStart, userId.toString());
|
||||||
|
}
|
||||||
|
|
||||||
|
public Map<UUID, UUID> findMostRecentActorPerDocument(List<UUID> documentIds, String kind) {
|
||||||
|
if (documentIds.isEmpty()) return Map.of();
|
||||||
|
List<Object[]> rows = queryRepository.findMostRecentActorPerDocument(documentIds, kind);
|
||||||
|
Map<UUID, UUID> result = new LinkedHashMap<>();
|
||||||
|
for (Object[] row : rows) {
|
||||||
|
UUID docId = (UUID) row[0];
|
||||||
|
UUID actorId = (UUID) row[1];
|
||||||
|
result.put(docId, actorId);
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Map<UUID, List<ActivityActorDTO>> findContributorsPerDocument(List<UUID> documentIds) {
|
||||||
|
if (documentIds.isEmpty()) return Map.of();
|
||||||
|
List<ContributorRow> rows = queryRepository.findContributorsPerDocument(documentIds);
|
||||||
|
Map<UUID, List<ActivityActorDTO>> result = new LinkedHashMap<>();
|
||||||
|
for (ContributorRow row : rows) {
|
||||||
|
result.computeIfAbsent(row.getDocumentId(), k -> new ArrayList<>())
|
||||||
|
.add(new ActivityActorDTO(row.getActorInitials(), row.getActorColor(), row.getActorName()));
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,8 @@
|
|||||||
|
package org.raddatz.familienarchiv.audit;
|
||||||
|
|
||||||
|
import org.springframework.data.jpa.repository.JpaRepository;
|
||||||
|
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
public interface AuditLogRepository extends JpaRepository<AuditLog, UUID> {
|
||||||
|
}
|
||||||
@@ -0,0 +1,57 @@
|
|||||||
|
package org.raddatz.familienarchiv.audit;
|
||||||
|
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.springframework.beans.factory.annotation.Qualifier;
|
||||||
|
import org.springframework.core.task.TaskExecutor;
|
||||||
|
import org.springframework.scheduling.annotation.Async;
|
||||||
|
import org.springframework.stereotype.Service;
|
||||||
|
import org.springframework.transaction.support.TransactionSynchronization;
|
||||||
|
import org.springframework.transaction.support.TransactionSynchronizationManager;
|
||||||
|
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
@Service
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
@Slf4j
|
||||||
|
public class AuditService {
|
||||||
|
|
||||||
|
private final AuditLogRepository auditLogRepository;
|
||||||
|
@Qualifier("auditExecutor")
|
||||||
|
private final TaskExecutor auditExecutor;
|
||||||
|
|
||||||
|
@Async("auditExecutor")
|
||||||
|
public void log(AuditKind kind, UUID actorId, UUID documentId, Map<String, Object> payload) {
|
||||||
|
writeLog(kind, actorId, documentId, payload);
|
||||||
|
}
|
||||||
|
|
||||||
|
public void logAfterCommit(AuditKind kind, UUID actorId, UUID documentId, Map<String, Object> payload) {
|
||||||
|
if (TransactionSynchronizationManager.isActualTransactionActive()) {
|
||||||
|
TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronization() {
|
||||||
|
@Override
|
||||||
|
public void afterCommit() {
|
||||||
|
// Run on a separate thread: the afterCommit() callback fires while Spring's
|
||||||
|
// transaction synchronizations are still active on the current thread, which
|
||||||
|
// prevents SimpleJpaRepository.save() from starting a new transaction inline.
|
||||||
|
auditExecutor.execute(() -> writeLog(kind, actorId, documentId, payload));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
writeLog(kind, actorId, documentId, payload);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void writeLog(AuditKind kind, UUID actorId, UUID documentId, Map<String, Object> payload) {
|
||||||
|
try {
|
||||||
|
auditLogRepository.save(AuditLog.builder()
|
||||||
|
.kind(kind)
|
||||||
|
.actorId(actorId)
|
||||||
|
.documentId(documentId)
|
||||||
|
.payload(payload)
|
||||||
|
.build());
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.error("Audit log write failed: kind={}, document={}", kind, documentId, e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,10 @@
|
|||||||
|
package org.raddatz.familienarchiv.audit;
|
||||||
|
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
public interface ContributorRow {
|
||||||
|
UUID getDocumentId();
|
||||||
|
String getActorInitials();
|
||||||
|
String getActorColor();
|
||||||
|
String getActorName();
|
||||||
|
}
|
||||||
@@ -0,0 +1,9 @@
|
|||||||
|
package org.raddatz.familienarchiv.audit;
|
||||||
|
|
||||||
|
public interface PulseStatsRow {
|
||||||
|
long getPages();
|
||||||
|
long getAnnotated();
|
||||||
|
long getTranscribed();
|
||||||
|
long getUploaded();
|
||||||
|
long getYourPages();
|
||||||
|
}
|
||||||
@@ -23,4 +23,18 @@ public class AsyncConfig {
|
|||||||
executor.setRejectedExecutionHandler(new ThreadPoolExecutor.AbortPolicy());
|
executor.setRejectedExecutionHandler(new ThreadPoolExecutor.AbortPolicy());
|
||||||
return executor;
|
return executor;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Bean("auditExecutor")
|
||||||
|
public Executor auditExecutor() {
|
||||||
|
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
|
||||||
|
executor.setCorePoolSize(1);
|
||||||
|
executor.setMaxPoolSize(2);
|
||||||
|
executor.setQueueCapacity(50);
|
||||||
|
executor.setThreadNamePrefix("Audit-");
|
||||||
|
// AbortPolicy instead of CallerRunsPolicy: if CallerRunsPolicy ran the task on the
|
||||||
|
// afterCommit() callback thread, Spring's transaction synchronizations would still be
|
||||||
|
// active on that thread and SimpleJpaRepository.save() would throw IllegalStateException.
|
||||||
|
executor.setRejectedExecutionHandler(new ThreadPoolExecutor.AbortPolicy());
|
||||||
|
return executor;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
@@ -31,8 +31,8 @@ import java.util.Set;
|
|||||||
@DependsOn("flyway")
|
@DependsOn("flyway")
|
||||||
public class DataInitializer {
|
public class DataInitializer {
|
||||||
|
|
||||||
@Value("${app.admin.username:admin}")
|
@Value("${app.admin.email:admin@familyarchive.local}")
|
||||||
private String adminUsername;
|
private String adminEmail;
|
||||||
|
|
||||||
@Value("${app.admin.password:admin123}")
|
@Value("${app.admin.password:admin123}")
|
||||||
private String adminPassword;
|
private String adminPassword;
|
||||||
@@ -43,26 +43,23 @@ public class DataInitializer {
|
|||||||
@Bean
|
@Bean
|
||||||
public CommandLineRunner initAdminUser(PasswordEncoder passwordEncoder) {
|
public CommandLineRunner initAdminUser(PasswordEncoder passwordEncoder) {
|
||||||
return args -> {
|
return args -> {
|
||||||
if (userRepository.findByUsername(adminUsername).isEmpty()) {
|
if (userRepository.findByEmail(adminEmail).isEmpty()) {
|
||||||
log.info("Kein Admin-User '{}' gefunden. Erstelle Default-Admin...", adminUsername);
|
log.info("Kein Admin-User '{}' gefunden. Erstelle Default-Admin...", adminEmail);
|
||||||
|
|
||||||
// 1. Admin Gruppe erstellen
|
|
||||||
UserGroup adminGroup = UserGroup.builder()
|
UserGroup adminGroup = UserGroup.builder()
|
||||||
.name("Administrators")
|
.name("Administrators")
|
||||||
.permissions(Set.of("ADMIN", "READ_ALL", "WRITE_ALL", "ANNOTATE_ALL", "ADMIN_USER", "ADMIN_TAG", "ADMIN_PERMISSION"))
|
.permissions(Set.of("ADMIN", "READ_ALL", "WRITE_ALL", "ANNOTATE_ALL", "ADMIN_USER", "ADMIN_TAG", "ADMIN_PERMISSION"))
|
||||||
.build();
|
.build();
|
||||||
groupRepository.save(adminGroup);
|
groupRepository.save(adminGroup);
|
||||||
|
|
||||||
// 2. Admin User erstellen
|
|
||||||
AppUser admin = AppUser.builder()
|
AppUser admin = AppUser.builder()
|
||||||
.username(adminUsername)
|
.email(adminEmail)
|
||||||
.password(passwordEncoder.encode(adminPassword)) // Passwort verschlüsseln!
|
.password(passwordEncoder.encode(adminPassword))
|
||||||
.email("admin@familyarchive.local")
|
|
||||||
.groups(Set.of(adminGroup))
|
.groups(Set.of(adminGroup))
|
||||||
.build();
|
.build();
|
||||||
userRepository.save(admin);
|
userRepository.save(admin);
|
||||||
|
|
||||||
log.info("Default Admin erstellt: User='{}'", adminUsername);
|
log.info("Default Admin erstellt: Email='{}'", adminEmail);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@@ -84,16 +81,13 @@ public class DataInitializer {
|
|||||||
TagRepository tagRepo,
|
TagRepository tagRepo,
|
||||||
PasswordEncoder passwordEncoder) {
|
PasswordEncoder passwordEncoder) {
|
||||||
return args -> {
|
return args -> {
|
||||||
// Always reset the admin password to the configured value so a failed password-reset
|
userRepository.findByEmail(adminEmail).ifPresent(admin -> {
|
||||||
// test from a previous run can never leave the account locked out.
|
|
||||||
userRepository.findByUsername(adminUsername).ifPresent(admin -> {
|
|
||||||
admin.setPassword(passwordEncoder.encode(adminPassword));
|
admin.setPassword(passwordEncoder.encode(adminPassword));
|
||||||
userRepository.save(admin);
|
userRepository.save(admin);
|
||||||
log.info("E2E seed: Admin-Passwort auf konfigurierten Wert zurückgesetzt.");
|
log.info("E2E seed: Admin-Passwort auf konfigurierten Wert zurückgesetzt.");
|
||||||
});
|
});
|
||||||
|
|
||||||
// Always ensure the read-only test user exists, even when seed data was already loaded.
|
if (userRepository.findByEmail("reader@familyarchive.local").isEmpty()) {
|
||||||
if (userRepository.findByUsername("reader").isEmpty()) {
|
|
||||||
log.info("E2E seed: Erstelle 'reader'-Testbenutzer...");
|
log.info("E2E seed: Erstelle 'reader'-Testbenutzer...");
|
||||||
UserGroup leserGroup = groupRepository.findByName("Leser").orElseGet(() ->
|
UserGroup leserGroup = groupRepository.findByName("Leser").orElseGet(() ->
|
||||||
groupRepository.save(UserGroup.builder()
|
groupRepository.save(UserGroup.builder()
|
||||||
@@ -101,7 +95,7 @@ public class DataInitializer {
|
|||||||
.permissions(Set.of("READ_ALL"))
|
.permissions(Set.of("READ_ALL"))
|
||||||
.build()));
|
.build()));
|
||||||
userRepository.save(AppUser.builder()
|
userRepository.save(AppUser.builder()
|
||||||
.username("reader")
|
.email("reader@familyarchive.local")
|
||||||
.password(passwordEncoder.encode("reader123"))
|
.password(passwordEncoder.encode("reader123"))
|
||||||
.groups(Set.of(leserGroup))
|
.groups(Set.of(leserGroup))
|
||||||
.build());
|
.build());
|
||||||
@@ -131,7 +125,6 @@ public class DataInitializer {
|
|||||||
Tag tagUrlaub = tagRepo.save(Tag.builder().name("Urlaub").build());
|
Tag tagUrlaub = tagRepo.save(Tag.builder().name("Urlaub").build());
|
||||||
|
|
||||||
// ── Documents ────────────────────────────────────────────────────
|
// ── Documents ────────────────────────────────────────────────────
|
||||||
// 1. Fully transcribed letter — used by search + detail E2E tests
|
|
||||||
docRepo.save(Document.builder()
|
docRepo.save(Document.builder()
|
||||||
.title("Geburtsurkunde Hans Müller")
|
.title("Geburtsurkunde Hans Müller")
|
||||||
.originalFilename("geburtsurkunde_hans.pdf")
|
.originalFilename("geburtsurkunde_hans.pdf")
|
||||||
@@ -144,7 +137,6 @@ public class DataInitializer {
|
|||||||
.transcription("Hiermit wird beurkundet, dass Hans Müller am 12. April 1923 in Berlin geboren wurde.")
|
.transcription("Hiermit wird beurkundet, dass Hans Müller am 12. April 1923 in Berlin geboren wurde.")
|
||||||
.build());
|
.build());
|
||||||
|
|
||||||
// 2. Letter with multiple receivers and tags — tests multi-receiver display
|
|
||||||
docRepo.save(Document.builder()
|
docRepo.save(Document.builder()
|
||||||
.title("Brief aus dem Krieg")
|
.title("Brief aus dem Krieg")
|
||||||
.originalFilename("brief_krieg_1944.pdf")
|
.originalFilename("brief_krieg_1944.pdf")
|
||||||
@@ -157,7 +149,6 @@ public class DataInitializer {
|
|||||||
.transcription("Liebe Anna, ich schreibe dir aus der Front. Es geht mir den Umständen entsprechend gut.")
|
.transcription("Liebe Anna, ich schreibe dir aus der Front. Es geht mir den Umständen entsprechend gut.")
|
||||||
.build());
|
.build());
|
||||||
|
|
||||||
// 3. Postcard — no transcription, tests PLACEHOLDER status
|
|
||||||
docRepo.save(Document.builder()
|
docRepo.save(Document.builder()
|
||||||
.title("Urlaubspostkarte Ostsee")
|
.title("Urlaubspostkarte Ostsee")
|
||||||
.originalFilename("postkarte_1965.jpg")
|
.originalFilename("postkarte_1965.jpg")
|
||||||
@@ -169,7 +160,6 @@ public class DataInitializer {
|
|||||||
.tags(Set.of(tagUrlaub))
|
.tags(Set.of(tagUrlaub))
|
||||||
.build());
|
.build());
|
||||||
|
|
||||||
// 4. Document with no sender — tests null-sender display ("Unbekannt")
|
|
||||||
docRepo.save(Document.builder()
|
docRepo.save(Document.builder()
|
||||||
.title("Unbekanntes Dokument")
|
.title("Unbekanntes Dokument")
|
||||||
.originalFilename("unbekannt.pdf")
|
.originalFilename("unbekannt.pdf")
|
||||||
@@ -179,7 +169,6 @@ public class DataInitializer {
|
|||||||
.receivers(Set.of(maria))
|
.receivers(Set.of(maria))
|
||||||
.build());
|
.build());
|
||||||
|
|
||||||
// 5. Document with minimal metadata — tests sparse display
|
|
||||||
docRepo.save(Document.builder()
|
docRepo.save(Document.builder()
|
||||||
.title("Scan ohne Titel")
|
.title("Scan ohne Titel")
|
||||||
.originalFilename("scan_ohne_titel.pdf")
|
.originalFilename("scan_ohne_titel.pdf")
|
||||||
|
|||||||
@@ -0,0 +1,69 @@
|
|||||||
|
package org.raddatz.familienarchiv.config;
|
||||||
|
|
||||||
|
import com.github.benmanes.caffeine.cache.Cache;
|
||||||
|
import com.github.benmanes.caffeine.cache.Caffeine;
|
||||||
|
import jakarta.servlet.http.HttpServletRequest;
|
||||||
|
import jakarta.servlet.http.HttpServletResponse;
|
||||||
|
import org.springframework.http.HttpStatus;
|
||||||
|
import org.springframework.web.servlet.HandlerInterceptor;
|
||||||
|
|
||||||
|
import java.util.concurrent.TimeUnit;
|
||||||
|
import java.util.concurrent.atomic.AtomicInteger;
|
||||||
|
|
||||||
|
public class RateLimitInterceptor implements HandlerInterceptor {
|
||||||
|
|
||||||
|
private static final int MAX_REQUESTS_PER_MINUTE = 10;
|
||||||
|
|
||||||
|
// Caffeine cache: per-IP counter that expires 1 minute after first access.
|
||||||
|
// Bounded to 10_000 entries to prevent OOM from IP exhaustion.
|
||||||
|
private final Cache<String, AtomicInteger> requestCounts = Caffeine.newBuilder()
|
||||||
|
.expireAfterAccess(1, TimeUnit.MINUTES)
|
||||||
|
.maximumSize(10_000)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler)
|
||||||
|
throws Exception {
|
||||||
|
String ip = resolveClientIp(request);
|
||||||
|
AtomicInteger count = requestCounts.get(ip, k -> new AtomicInteger(0));
|
||||||
|
if (count.incrementAndGet() > MAX_REQUESTS_PER_MINUTE) {
|
||||||
|
response.setStatus(HttpStatus.TOO_MANY_REQUESTS.value());
|
||||||
|
response.getWriter().write("{\"code\":\"RATE_LIMIT_EXCEEDED\",\"message\":\"Too many requests\"}");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
private String resolveClientIp(HttpServletRequest request) {
|
||||||
|
// Only trust X-Forwarded-For when the direct connection comes from a known
|
||||||
|
// reverse proxy (loopback or Docker private network). Trusting it unconditionally
|
||||||
|
// allows any client to spoof a different IP and bypass per-IP rate limiting.
|
||||||
|
String remoteAddr = request.getRemoteAddr();
|
||||||
|
if (isTrustedProxy(remoteAddr)) {
|
||||||
|
String forwarded = request.getHeader("X-Forwarded-For");
|
||||||
|
if (forwarded != null && !forwarded.isBlank()) {
|
||||||
|
return forwarded.split(",")[0].trim();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return remoteAddr;
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean isTrustedProxy(String ip) {
|
||||||
|
if (ip.equals("127.0.0.1") || ip.equals("::1") || ip.startsWith("10.") || ip.startsWith("192.168.")) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
// Only RFC 1918 172.16.0.0/12 (172.16–172.31), not all of 172.x
|
||||||
|
if (ip.startsWith("172.")) {
|
||||||
|
String[] parts = ip.split("\\.");
|
||||||
|
if (parts.length >= 2) {
|
||||||
|
try {
|
||||||
|
int second = Integer.parseInt(parts[1]);
|
||||||
|
return second >= 16 && second <= 31;
|
||||||
|
} catch (NumberFormatException ignored) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -50,6 +50,8 @@ public class SecurityConfig {
|
|||||||
auth.requestMatchers("/actuator/health").permitAll();
|
auth.requestMatchers("/actuator/health").permitAll();
|
||||||
// Password reset endpoints are unauthenticated by nature
|
// Password reset endpoints are unauthenticated by nature
|
||||||
auth.requestMatchers("/api/auth/forgot-password", "/api/auth/reset-password").permitAll();
|
auth.requestMatchers("/api/auth/forgot-password", "/api/auth/reset-password").permitAll();
|
||||||
|
// Invite-based registration endpoints are public
|
||||||
|
auth.requestMatchers("/api/auth/invite/**", "/api/auth/register").permitAll();
|
||||||
// E2E test helper (only active under "e2e" profile)
|
// E2E test helper (only active under "e2e" profile)
|
||||||
auth.requestMatchers("/api/auth/reset-token-for-test").permitAll();
|
auth.requestMatchers("/api/auth/reset-token-for-test").permitAll();
|
||||||
// In dev, allow unauthenticated access to the OpenAPI spec and Swagger UI
|
// In dev, allow unauthenticated access to the OpenAPI spec and Swagger UI
|
||||||
@@ -67,7 +69,7 @@ public class SecurityConfig {
|
|||||||
.frameOptions(frameOptions -> frameOptions.sameOrigin()))
|
.frameOptions(frameOptions -> frameOptions.sameOrigin()))
|
||||||
// Erlaubt Login via Browser-Popup oder REST-Header (Authorization: Basic ...)
|
// Erlaubt Login via Browser-Popup oder REST-Header (Authorization: Basic ...)
|
||||||
.httpBasic(Customizer.withDefaults())
|
.httpBasic(Customizer.withDefaults())
|
||||||
.formLogin(Customizer.withDefaults());
|
.formLogin(form -> form.usernameParameter("email"));
|
||||||
|
|
||||||
return http.build();
|
return http.build();
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,15 @@
|
|||||||
|
package org.raddatz.familienarchiv.config;
|
||||||
|
|
||||||
|
import org.springframework.context.annotation.Configuration;
|
||||||
|
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
|
||||||
|
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
|
||||||
|
|
||||||
|
@Configuration
|
||||||
|
public class WebConfig implements WebMvcConfigurer {
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void addInterceptors(InterceptorRegistry registry) {
|
||||||
|
registry.addInterceptor(new RateLimitInterceptor())
|
||||||
|
.addPathPatterns("/api/auth/invite/**", "/api/auth/register");
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -72,7 +72,7 @@ public class AnnotationController {
|
|||||||
private UUID resolveUserId(Authentication authentication) {
|
private UUID resolveUserId(Authentication authentication) {
|
||||||
if (authentication == null || !authentication.isAuthenticated()) return null;
|
if (authentication == null || !authentication.isAuthenticated()) return null;
|
||||||
try {
|
try {
|
||||||
AppUser user = userService.findByUsername(authentication.getName());
|
AppUser user = userService.findByEmail(authentication.getName());
|
||||||
return user != null ? user.getId() : null;
|
return user != null ? user.getId() : null;
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
log.warn("Could not resolve user for annotation: {}", e.getMessage());
|
log.warn("Could not resolve user for annotation: {}", e.getMessage());
|
||||||
|
|||||||
@@ -1,14 +1,18 @@
|
|||||||
package org.raddatz.familienarchiv.controller;
|
package org.raddatz.familienarchiv.controller;
|
||||||
|
|
||||||
|
import jakarta.validation.Valid;
|
||||||
import org.raddatz.familienarchiv.dto.ForgotPasswordRequest;
|
import org.raddatz.familienarchiv.dto.ForgotPasswordRequest;
|
||||||
|
import org.raddatz.familienarchiv.dto.InvitePrefillDTO;
|
||||||
|
import org.raddatz.familienarchiv.dto.RegisterRequest;
|
||||||
import org.raddatz.familienarchiv.dto.ResetPasswordRequest;
|
import org.raddatz.familienarchiv.dto.ResetPasswordRequest;
|
||||||
|
import org.raddatz.familienarchiv.model.AppUser;
|
||||||
|
import org.raddatz.familienarchiv.model.InviteToken;
|
||||||
|
import org.raddatz.familienarchiv.service.InviteService;
|
||||||
import org.raddatz.familienarchiv.service.PasswordResetService;
|
import org.raddatz.familienarchiv.service.PasswordResetService;
|
||||||
import org.springframework.beans.factory.annotation.Value;
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.http.HttpStatus;
|
||||||
import org.springframework.http.ResponseEntity;
|
import org.springframework.http.ResponseEntity;
|
||||||
import org.springframework.web.bind.annotation.PostMapping;
|
import org.springframework.web.bind.annotation.*;
|
||||||
import org.springframework.web.bind.annotation.RequestBody;
|
|
||||||
import org.springframework.web.bind.annotation.RequestMapping;
|
|
||||||
import org.springframework.web.bind.annotation.RestController;
|
|
||||||
|
|
||||||
import lombok.RequiredArgsConstructor;
|
import lombok.RequiredArgsConstructor;
|
||||||
|
|
||||||
@@ -18,6 +22,7 @@ import lombok.RequiredArgsConstructor;
|
|||||||
public class AuthController {
|
public class AuthController {
|
||||||
|
|
||||||
private final PasswordResetService passwordResetService;
|
private final PasswordResetService passwordResetService;
|
||||||
|
private final InviteService inviteService;
|
||||||
|
|
||||||
@Value("${app.base-url:http://localhost:3000}")
|
@Value("${app.base-url:http://localhost:3000}")
|
||||||
private String appBaseUrl;
|
private String appBaseUrl;
|
||||||
@@ -34,4 +39,20 @@ public class AuthController {
|
|||||||
passwordResetService.resetPassword(request);
|
passwordResetService.resetPassword(request);
|
||||||
return ResponseEntity.noContent().build();
|
return ResponseEntity.noContent().build();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@GetMapping("/invite/{code}")
|
||||||
|
public InvitePrefillDTO getInvitePrefill(@PathVariable String code) {
|
||||||
|
InviteToken token = inviteService.validateCode(code);
|
||||||
|
return new InvitePrefillDTO(
|
||||||
|
token.getPrefillFirstName(),
|
||||||
|
token.getPrefillLastName(),
|
||||||
|
token.getPrefillEmail()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
@PostMapping("/register")
|
||||||
|
public ResponseEntity<AppUser> register(@Valid @RequestBody RegisterRequest request) {
|
||||||
|
AppUser user = inviteService.redeemInvite(request);
|
||||||
|
return ResponseEntity.status(HttpStatus.CREATED).body(user);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -144,7 +144,7 @@ public class CommentController {
|
|||||||
private AppUser resolveUser(Authentication authentication) {
|
private AppUser resolveUser(Authentication authentication) {
|
||||||
if (authentication == null || !authentication.isAuthenticated()) return null;
|
if (authentication == null || !authentication.isAuthenticated()) return null;
|
||||||
try {
|
try {
|
||||||
return userService.findByUsername(authentication.getName());
|
return userService.findByEmail(authentication.getName());
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
log.warn("Could not resolve user for comment: {}", e.getMessage());
|
log.warn("Could not resolve user for comment: {}", e.getMessage());
|
||||||
return null;
|
return null;
|
||||||
|
|||||||
@@ -15,8 +15,8 @@ import io.swagger.v3.oas.annotations.Parameter;
|
|||||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||||
import org.raddatz.familienarchiv.dto.DocumentSearchResult;
|
import org.raddatz.familienarchiv.dto.DocumentSearchResult;
|
||||||
import org.raddatz.familienarchiv.dto.DocumentUpdateDTO;
|
import org.raddatz.familienarchiv.dto.DocumentUpdateDTO;
|
||||||
|
import org.raddatz.familienarchiv.dto.TagOperator;
|
||||||
import org.raddatz.familienarchiv.dto.DocumentVersionSummary;
|
import org.raddatz.familienarchiv.dto.DocumentVersionSummary;
|
||||||
import org.raddatz.familienarchiv.dto.IncompleteDocumentDTO;
|
|
||||||
import org.raddatz.familienarchiv.exception.DomainException;
|
import org.raddatz.familienarchiv.exception.DomainException;
|
||||||
import org.raddatz.familienarchiv.exception.ErrorCode;
|
import org.raddatz.familienarchiv.exception.ErrorCode;
|
||||||
import org.raddatz.familienarchiv.model.Document;
|
import org.raddatz.familienarchiv.model.Document;
|
||||||
@@ -24,12 +24,16 @@ import org.raddatz.familienarchiv.dto.DocumentSort;
|
|||||||
import org.raddatz.familienarchiv.model.DocumentStatus;
|
import org.raddatz.familienarchiv.model.DocumentStatus;
|
||||||
import org.raddatz.familienarchiv.model.TrainingLabel;
|
import org.raddatz.familienarchiv.model.TrainingLabel;
|
||||||
import org.raddatz.familienarchiv.model.DocumentVersion;
|
import org.raddatz.familienarchiv.model.DocumentVersion;
|
||||||
|
import org.raddatz.familienarchiv.model.AppUser;
|
||||||
import org.raddatz.familienarchiv.security.Permission;
|
import org.raddatz.familienarchiv.security.Permission;
|
||||||
import org.raddatz.familienarchiv.security.RequirePermission;
|
import org.raddatz.familienarchiv.security.RequirePermission;
|
||||||
|
import org.raddatz.familienarchiv.security.SecurityUtils;
|
||||||
import org.raddatz.familienarchiv.service.DocumentService;
|
import org.raddatz.familienarchiv.service.DocumentService;
|
||||||
import org.raddatz.familienarchiv.service.DocumentVersionService;
|
import org.raddatz.familienarchiv.service.DocumentVersionService;
|
||||||
import org.raddatz.familienarchiv.service.FileService;
|
import org.raddatz.familienarchiv.service.FileService;
|
||||||
|
import org.raddatz.familienarchiv.service.UserService;
|
||||||
import org.springframework.data.domain.Sort;
|
import org.springframework.data.domain.Sort;
|
||||||
|
import org.springframework.security.core.Authentication;
|
||||||
import org.springframework.http.HttpHeaders;
|
import org.springframework.http.HttpHeaders;
|
||||||
import org.springframework.http.MediaType;
|
import org.springframework.http.MediaType;
|
||||||
import org.springframework.http.ResponseEntity;
|
import org.springframework.http.ResponseEntity;
|
||||||
@@ -62,6 +66,7 @@ public class DocumentController {
|
|||||||
private final DocumentService documentService;
|
private final DocumentService documentService;
|
||||||
private final DocumentVersionService documentVersionService;
|
private final DocumentVersionService documentVersionService;
|
||||||
private final FileService fileService;
|
private final FileService fileService;
|
||||||
|
private final UserService userService;
|
||||||
|
|
||||||
// --- DOWNLOAD ---
|
// --- DOWNLOAD ---
|
||||||
@GetMapping("/{id}/file")
|
@GetMapping("/{id}/file")
|
||||||
@@ -111,9 +116,10 @@ public class DocumentController {
|
|||||||
public Document updateDocument(
|
public Document updateDocument(
|
||||||
@PathVariable UUID id,
|
@PathVariable UUID id,
|
||||||
@ModelAttribute DocumentUpdateDTO dto,
|
@ModelAttribute DocumentUpdateDTO dto,
|
||||||
@RequestPart(value = "file", required = false) MultipartFile file) {
|
@RequestPart(value = "file", required = false) MultipartFile file,
|
||||||
|
Authentication authentication) {
|
||||||
try {
|
try {
|
||||||
return documentService.updateDocument(id, dto, file);
|
return documentService.updateDocument(id, dto, file, requireUserId(authentication));
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
throw DomainException.internal(ErrorCode.FILE_UPLOAD_FAILED, "Failed to upload file: " + e.getMessage());
|
throw DomainException.internal(ErrorCode.FILE_UPLOAD_FAILED, "Failed to upload file: " + e.getMessage());
|
||||||
}
|
}
|
||||||
@@ -128,18 +134,34 @@ public class DocumentController {
|
|||||||
return ResponseEntity.noContent().build();
|
return ResponseEntity.noContent().build();
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- QUICK UPLOAD ---
|
// --- ATTACH FILE ---
|
||||||
|
|
||||||
private static final Set<String> ALLOWED_CONTENT_TYPES = Set.of(
|
private static final Set<String> ALLOWED_CONTENT_TYPES = Set.of(
|
||||||
"application/pdf", "image/jpeg", "image/png", "image/tiff");
|
"application/pdf", "image/jpeg", "image/png", "image/tiff");
|
||||||
|
|
||||||
|
@PostMapping(value = "/{id}/file", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
|
||||||
|
@RequirePermission(Permission.WRITE_ALL)
|
||||||
|
public Document attachFile(
|
||||||
|
@PathVariable UUID id,
|
||||||
|
@RequestPart("file") MultipartFile file,
|
||||||
|
Authentication authentication) {
|
||||||
|
String contentType = file.getContentType();
|
||||||
|
if (contentType == null || !ALLOWED_CONTENT_TYPES.contains(contentType)) {
|
||||||
|
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "Unsupported file type: " + contentType);
|
||||||
|
}
|
||||||
|
return documentService.attachFile(id, file, requireUserId(authentication));
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- QUICK UPLOAD ---
|
||||||
|
|
||||||
public record UploadError(String filename, String code) {}
|
public record UploadError(String filename, String code) {}
|
||||||
public record QuickUploadResult(List<Document> created, List<Document> updated, List<UploadError> errors) {}
|
public record QuickUploadResult(List<Document> created, List<Document> updated, List<UploadError> errors) {}
|
||||||
|
|
||||||
@PostMapping(value = "/quick-upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
|
@PostMapping(value = "/quick-upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
|
||||||
@RequirePermission(Permission.WRITE_ALL)
|
@RequirePermission(Permission.WRITE_ALL)
|
||||||
public QuickUploadResult quickUpload(
|
public QuickUploadResult quickUpload(
|
||||||
@RequestPart(value = "files", required = false) List<MultipartFile> files) {
|
@RequestPart(value = "files", required = false) List<MultipartFile> files,
|
||||||
|
Authentication authentication) {
|
||||||
List<Document> created = new ArrayList<>();
|
List<Document> created = new ArrayList<>();
|
||||||
List<Document> updated = new ArrayList<>();
|
List<Document> updated = new ArrayList<>();
|
||||||
List<UploadError> errors = new ArrayList<>();
|
List<UploadError> errors = new ArrayList<>();
|
||||||
@@ -148,13 +170,14 @@ public class DocumentController {
|
|||||||
return new QuickUploadResult(created, updated, errors);
|
return new QuickUploadResult(created, updated, errors);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
UUID actorId = requireUserId(authentication);
|
||||||
for (MultipartFile file : files) {
|
for (MultipartFile file : files) {
|
||||||
if (!ALLOWED_CONTENT_TYPES.contains(file.getContentType())) {
|
if (!ALLOWED_CONTENT_TYPES.contains(file.getContentType())) {
|
||||||
errors.add(new UploadError(file.getOriginalFilename(), "UNSUPPORTED_FILE_TYPE"));
|
errors.add(new UploadError(file.getOriginalFilename(), "UNSUPPORTED_FILE_TYPE"));
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
DocumentService.StoreResult result = documentService.storeDocument(file);
|
DocumentService.StoreResult result = documentService.storeDocument(file, actorId);
|
||||||
if (result.isNew()) {
|
if (result.isNew()) {
|
||||||
created.add(result.document());
|
created.add(result.document());
|
||||||
} else {
|
} else {
|
||||||
@@ -174,12 +197,6 @@ public class DocumentController {
|
|||||||
return Map.of("count", documentService.getIncompleteCount());
|
return Map.of("count", documentService.getIncompleteCount());
|
||||||
}
|
}
|
||||||
|
|
||||||
@GetMapping("/incomplete")
|
|
||||||
public List<IncompleteDocumentDTO> getIncomplete(
|
|
||||||
@Parameter(description = "Maximum number of results") @RequestParam(defaultValue = "10") int size) {
|
|
||||||
return documentService.findIncompleteDocuments(size);
|
|
||||||
}
|
|
||||||
|
|
||||||
@GetMapping("/incomplete/next")
|
@GetMapping("/incomplete/next")
|
||||||
public ResponseEntity<Document> getNextIncomplete(@RequestParam UUID excludeId) {
|
public ResponseEntity<Document> getNextIncomplete(@RequestParam UUID excludeId) {
|
||||||
return documentService.findNextIncompleteDocument(excludeId)
|
return documentService.findNextIncompleteDocument(excludeId)
|
||||||
@@ -187,12 +204,6 @@ public class DocumentController {
|
|||||||
.orElse(ResponseEntity.noContent().build());
|
.orElse(ResponseEntity.noContent().build());
|
||||||
}
|
}
|
||||||
|
|
||||||
@GetMapping("/recent-activity")
|
|
||||||
public ResponseEntity<List<Document>> getRecentActivity(
|
|
||||||
@RequestParam(defaultValue = "5") int size) {
|
|
||||||
return ResponseEntity.ok(documentService.getRecentActivity(size));
|
|
||||||
}
|
|
||||||
|
|
||||||
@GetMapping("/search")
|
@GetMapping("/search")
|
||||||
public ResponseEntity<DocumentSearchResult> search(
|
public ResponseEntity<DocumentSearchResult> search(
|
||||||
@RequestParam(required = false) String q,
|
@RequestParam(required = false) String q,
|
||||||
@@ -204,12 +215,15 @@ public class DocumentController {
|
|||||||
@RequestParam(required = false) String tagQ,
|
@RequestParam(required = false) String tagQ,
|
||||||
@Parameter(description = "Filter by document status") @RequestParam(required = false) DocumentStatus status,
|
@Parameter(description = "Filter by document status") @RequestParam(required = false) DocumentStatus status,
|
||||||
@Parameter(description = "Sort field") @RequestParam(required = false) DocumentSort sort,
|
@Parameter(description = "Sort field") @RequestParam(required = false) DocumentSort sort,
|
||||||
@Parameter(description = "Sort direction: ASC or DESC") @RequestParam(required = false, defaultValue = "DESC") String dir) {
|
@Parameter(description = "Sort direction: ASC or DESC") @RequestParam(required = false, defaultValue = "DESC") String dir,
|
||||||
|
@Parameter(description = "Tag operator: AND (default) or OR") @RequestParam(required = false) String tagOp) {
|
||||||
if (!"ASC".equalsIgnoreCase(dir) && !"DESC".equalsIgnoreCase(dir)) {
|
if (!"ASC".equalsIgnoreCase(dir) && !"DESC".equalsIgnoreCase(dir)) {
|
||||||
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "dir must be ASC or DESC");
|
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "dir must be ASC or DESC");
|
||||||
}
|
}
|
||||||
List<Document> results = documentService.searchDocuments(q, from, to, senderId, receiverId, tags, tagQ, status, sort, dir);
|
// tagOp is a raw String at the HTTP boundary; any value other than "OR" (case-insensitive)
|
||||||
return ResponseEntity.ok(DocumentSearchResult.of(results));
|
// defaults to AND, which matches the frontend default and keeps old clients working.
|
||||||
|
TagOperator operator = "OR".equalsIgnoreCase(tagOp) ? TagOperator.OR : TagOperator.AND;
|
||||||
|
return ResponseEntity.ok(documentService.searchDocuments(q, from, to, senderId, receiverId, tags, tagQ, status, sort, dir, operator));
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- TRAINING LABELS ---
|
// --- TRAINING LABELS ---
|
||||||
@@ -258,4 +272,8 @@ public class DocumentController {
|
|||||||
Sort sort = Sort.by(Sort.Direction.fromString(dir.toUpperCase()), "documentDate");
|
Sort sort = Sort.by(Sort.Direction.fromString(dir.toUpperCase()), "documentDate");
|
||||||
return documentService.getConversationFiltered(senderId, receiverId, from, to, sort);
|
return documentService.getConversationFiltered(senderId, receiverId, from, to, sort);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private UUID requireUserId(Authentication authentication) {
|
||||||
|
return SecurityUtils.requireUserId(authentication, userService);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,57 @@
|
|||||||
|
package org.raddatz.familienarchiv.controller;
|
||||||
|
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import org.raddatz.familienarchiv.dto.CreateInviteRequest;
|
||||||
|
import org.raddatz.familienarchiv.dto.InviteListItemDTO;
|
||||||
|
import org.raddatz.familienarchiv.model.AppUser;
|
||||||
|
import org.raddatz.familienarchiv.security.Permission;
|
||||||
|
import org.raddatz.familienarchiv.security.RequirePermission;
|
||||||
|
import org.raddatz.familienarchiv.service.InviteService;
|
||||||
|
import org.raddatz.familienarchiv.service.UserService;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.http.HttpStatus;
|
||||||
|
import org.springframework.http.ResponseEntity;
|
||||||
|
import org.springframework.security.core.annotation.AuthenticationPrincipal;
|
||||||
|
import org.springframework.security.core.userdetails.UserDetails;
|
||||||
|
import org.springframework.web.bind.annotation.*;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
@RestController
|
||||||
|
@RequestMapping("/api/invites")
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
public class InviteController {
|
||||||
|
|
||||||
|
private final InviteService inviteService;
|
||||||
|
private final UserService userService;
|
||||||
|
|
||||||
|
@Value("${app.base-url:http://localhost:3000}")
|
||||||
|
private String appBaseUrl;
|
||||||
|
|
||||||
|
@GetMapping
|
||||||
|
@RequirePermission(Permission.ADMIN_USER)
|
||||||
|
public List<InviteListItemDTO> listInvites(
|
||||||
|
@RequestParam(value = "status", defaultValue = "active") String status) {
|
||||||
|
boolean activeOnly = !"all".equalsIgnoreCase(status);
|
||||||
|
return inviteService.listInvites(activeOnly, appBaseUrl);
|
||||||
|
}
|
||||||
|
|
||||||
|
@PostMapping
|
||||||
|
@RequirePermission(Permission.ADMIN_USER)
|
||||||
|
public ResponseEntity<InviteListItemDTO> createInvite(
|
||||||
|
@RequestBody CreateInviteRequest request,
|
||||||
|
@AuthenticationPrincipal UserDetails principal) {
|
||||||
|
AppUser creator = userService.findByEmail(principal.getUsername());
|
||||||
|
InviteListItemDTO created = inviteService.toListItemDTO(
|
||||||
|
inviteService.createInvite(request, creator), appBaseUrl);
|
||||||
|
return ResponseEntity.status(HttpStatus.CREATED).body(created);
|
||||||
|
}
|
||||||
|
|
||||||
|
@DeleteMapping("/{id}")
|
||||||
|
@RequirePermission(Permission.ADMIN_USER)
|
||||||
|
public ResponseEntity<Void> revokeInvite(@PathVariable UUID id) {
|
||||||
|
inviteService.revokeInvite(id);
|
||||||
|
return ResponseEntity.noContent().build();
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -100,6 +100,6 @@ public class NotificationController {
|
|||||||
// ─── private helpers ──────────────────────────────────────────────────────
|
// ─── private helpers ──────────────────────────────────────────────────────
|
||||||
|
|
||||||
private AppUser resolveUser(Authentication authentication) {
|
private AppUser resolveUser(Authentication authentication) {
|
||||||
return userService.findByUsername(authentication.getName());
|
return userService.findByEmail(authentication.getName());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,7 +4,10 @@ import lombok.RequiredArgsConstructor;
|
|||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.raddatz.familienarchiv.dto.BatchOcrDTO;
|
import org.raddatz.familienarchiv.dto.BatchOcrDTO;
|
||||||
import org.raddatz.familienarchiv.dto.OcrStatusDTO;
|
import org.raddatz.familienarchiv.dto.OcrStatusDTO;
|
||||||
|
import org.raddatz.familienarchiv.dto.TrainingHistoryResponse;
|
||||||
|
import org.raddatz.familienarchiv.dto.TrainingInfoResponse;
|
||||||
import org.raddatz.familienarchiv.dto.TriggerOcrDTO;
|
import org.raddatz.familienarchiv.dto.TriggerOcrDTO;
|
||||||
|
import org.raddatz.familienarchiv.dto.TriggerSenderTrainingDTO;
|
||||||
import org.raddatz.familienarchiv.model.AppUser;
|
import org.raddatz.familienarchiv.model.AppUser;
|
||||||
import org.raddatz.familienarchiv.model.OcrJob;
|
import org.raddatz.familienarchiv.model.OcrJob;
|
||||||
import org.raddatz.familienarchiv.model.OcrTrainingRun;
|
import org.raddatz.familienarchiv.model.OcrTrainingRun;
|
||||||
@@ -15,6 +18,7 @@ import org.raddatz.familienarchiv.service.OcrProgressService;
|
|||||||
import org.raddatz.familienarchiv.service.OcrService;
|
import org.raddatz.familienarchiv.service.OcrService;
|
||||||
import org.raddatz.familienarchiv.service.OcrTrainingService;
|
import org.raddatz.familienarchiv.service.OcrTrainingService;
|
||||||
import org.raddatz.familienarchiv.service.SegmentationTrainingExportService;
|
import org.raddatz.familienarchiv.service.SegmentationTrainingExportService;
|
||||||
|
import org.raddatz.familienarchiv.service.SenderModelService;
|
||||||
import org.raddatz.familienarchiv.service.TrainingDataExportService;
|
import org.raddatz.familienarchiv.service.TrainingDataExportService;
|
||||||
import org.raddatz.familienarchiv.service.UserService;
|
import org.raddatz.familienarchiv.service.UserService;
|
||||||
import org.springframework.http.HttpHeaders;
|
import org.springframework.http.HttpHeaders;
|
||||||
@@ -42,6 +46,7 @@ public class OcrController {
|
|||||||
private final TrainingDataExportService trainingDataExportService;
|
private final TrainingDataExportService trainingDataExportService;
|
||||||
private final SegmentationTrainingExportService segmentationTrainingExportService;
|
private final SegmentationTrainingExportService segmentationTrainingExportService;
|
||||||
private final OcrTrainingService ocrTrainingService;
|
private final OcrTrainingService ocrTrainingService;
|
||||||
|
private final SenderModelService senderModelService;
|
||||||
|
|
||||||
@PostMapping("/api/documents/{documentId}/ocr")
|
@PostMapping("/api/documents/{documentId}/ocr")
|
||||||
@ResponseStatus(HttpStatus.ACCEPTED)
|
@ResponseStatus(HttpStatus.ACCEPTED)
|
||||||
@@ -130,14 +135,33 @@ public class OcrController {
|
|||||||
|
|
||||||
@GetMapping("/api/ocr/training-info")
|
@GetMapping("/api/ocr/training-info")
|
||||||
@RequirePermission(Permission.ADMIN)
|
@RequirePermission(Permission.ADMIN)
|
||||||
public OcrTrainingService.TrainingInfoResponse getTrainingInfo() {
|
public TrainingInfoResponse getTrainingInfo() {
|
||||||
return ocrTrainingService.getTrainingInfo();
|
return ocrTrainingService.getTrainingInfo();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@GetMapping("/api/ocr/training-info/global")
|
||||||
|
@RequirePermission(Permission.ADMIN)
|
||||||
|
public TrainingHistoryResponse getGlobalTrainingHistory() {
|
||||||
|
return ocrTrainingService.getGlobalTrainingHistory();
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/api/ocr/training-info/{personId}")
|
||||||
|
@RequirePermission(Permission.ADMIN)
|
||||||
|
public TrainingHistoryResponse getSenderTrainingHistory(@PathVariable UUID personId) {
|
||||||
|
return ocrTrainingService.getSenderTrainingHistory(personId);
|
||||||
|
}
|
||||||
|
|
||||||
|
@PostMapping("/api/ocr/train-sender")
|
||||||
|
@ResponseStatus(HttpStatus.ACCEPTED)
|
||||||
|
@RequirePermission(Permission.ADMIN)
|
||||||
|
public OcrTrainingRun triggerSenderTraining(@Valid @RequestBody TriggerSenderTrainingDTO dto) {
|
||||||
|
return senderModelService.triggerManualSenderTraining(dto.personId());
|
||||||
|
}
|
||||||
|
|
||||||
private UUID resolveUserId(Authentication authentication) {
|
private UUID resolveUserId(Authentication authentication) {
|
||||||
if (authentication == null || !authentication.isAuthenticated()) return null;
|
if (authentication == null || !authentication.isAuthenticated()) return null;
|
||||||
try {
|
try {
|
||||||
AppUser user = userService.findByUsername(authentication.getName());
|
AppUser user = userService.findByEmail(authentication.getName());
|
||||||
return user != null ? user.getId() : null;
|
return user != null ? user.getId() : null;
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
log.warn("Failed to resolve user ID for authentication: {}", authentication.getName(), e);
|
log.warn("Failed to resolve user ID for authentication: {}", authentication.getName(), e);
|
||||||
|
|||||||
@@ -1,23 +1,29 @@
|
|||||||
package org.raddatz.familienarchiv.controller;
|
package org.raddatz.familienarchiv.controller;
|
||||||
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Map;
|
|
||||||
import java.util.UUID;
|
import java.util.UUID;
|
||||||
|
|
||||||
|
import org.raddatz.familienarchiv.dto.MergeTagDTO;
|
||||||
|
import org.raddatz.familienarchiv.dto.TagTreeNodeDTO;
|
||||||
|
import org.raddatz.familienarchiv.dto.TagUpdateDTO;
|
||||||
import org.raddatz.familienarchiv.model.Tag;
|
import org.raddatz.familienarchiv.model.Tag;
|
||||||
import org.raddatz.familienarchiv.security.Permission;
|
import org.raddatz.familienarchiv.security.Permission;
|
||||||
import org.raddatz.familienarchiv.security.RequirePermission;
|
import org.raddatz.familienarchiv.security.RequirePermission;
|
||||||
import org.raddatz.familienarchiv.service.DocumentService;
|
import org.raddatz.familienarchiv.service.DocumentService;
|
||||||
import org.raddatz.familienarchiv.service.TagService;
|
import org.raddatz.familienarchiv.service.TagService;
|
||||||
|
import org.springframework.http.HttpStatus;
|
||||||
import org.springframework.http.ResponseEntity;
|
import org.springframework.http.ResponseEntity;
|
||||||
import org.springframework.web.bind.annotation.DeleteMapping;
|
import org.springframework.web.bind.annotation.DeleteMapping;
|
||||||
import org.springframework.web.bind.annotation.GetMapping;
|
import org.springframework.web.bind.annotation.GetMapping;
|
||||||
import org.springframework.web.bind.annotation.PathVariable;
|
import org.springframework.web.bind.annotation.PathVariable;
|
||||||
|
import org.springframework.web.bind.annotation.PostMapping;
|
||||||
import org.springframework.web.bind.annotation.PutMapping;
|
import org.springframework.web.bind.annotation.PutMapping;
|
||||||
import org.springframework.web.bind.annotation.RequestBody;
|
import org.springframework.web.bind.annotation.RequestBody;
|
||||||
import org.springframework.web.bind.annotation.RequestMapping;
|
import org.springframework.web.bind.annotation.RequestMapping;
|
||||||
import org.springframework.web.bind.annotation.RequestParam;
|
import org.springframework.web.bind.annotation.RequestParam;
|
||||||
|
import org.springframework.web.bind.annotation.ResponseStatus;
|
||||||
import org.springframework.web.bind.annotation.RestController;
|
import org.springframework.web.bind.annotation.RestController;
|
||||||
|
import jakarta.validation.Valid;
|
||||||
|
|
||||||
import lombok.RequiredArgsConstructor;
|
import lombok.RequiredArgsConstructor;
|
||||||
|
|
||||||
@@ -31,8 +37,8 @@ public class TagController {
|
|||||||
|
|
||||||
@PutMapping("/{id}")
|
@PutMapping("/{id}")
|
||||||
@RequirePermission(Permission.ADMIN_TAG)
|
@RequirePermission(Permission.ADMIN_TAG)
|
||||||
public ResponseEntity<Tag> updateTag(@PathVariable UUID id, @RequestBody Map<String, String> payload) {
|
public ResponseEntity<Tag> updateTag(@PathVariable UUID id, @RequestBody TagUpdateDTO dto) {
|
||||||
return ResponseEntity.ok(tagService.update(id, payload.get("name")));
|
return ResponseEntity.ok(tagService.update(id, dto));
|
||||||
}
|
}
|
||||||
|
|
||||||
@DeleteMapping("/{id}")
|
@DeleteMapping("/{id}")
|
||||||
@@ -46,4 +52,22 @@ public class TagController {
|
|||||||
public List<Tag> searchTags(@RequestParam(defaultValue = "") String query) {
|
public List<Tag> searchTags(@RequestParam(defaultValue = "") String query) {
|
||||||
return tagService.search(query);
|
return tagService.search(query);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@GetMapping("/tree")
|
||||||
|
public List<TagTreeNodeDTO> getTagTree() {
|
||||||
|
return tagService.getTagTree();
|
||||||
|
}
|
||||||
|
|
||||||
|
@PostMapping("/{id}/merge")
|
||||||
|
@RequirePermission(Permission.ADMIN_TAG)
|
||||||
|
public ResponseEntity<Tag> mergeTag(@PathVariable UUID id, @Valid @RequestBody MergeTagDTO dto) {
|
||||||
|
return ResponseEntity.ok(tagService.mergeTags(id, dto.targetId()));
|
||||||
|
}
|
||||||
|
|
||||||
|
@DeleteMapping("/{id}/subtree")
|
||||||
|
@ResponseStatus(HttpStatus.NO_CONTENT)
|
||||||
|
@RequirePermission(Permission.ADMIN_TAG)
|
||||||
|
public void deleteSubtree(@PathVariable UUID id) {
|
||||||
|
tagService.deleteWithDescendants(id);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,12 +5,11 @@ import lombok.extern.slf4j.Slf4j;
|
|||||||
import org.raddatz.familienarchiv.dto.CreateTranscriptionBlockDTO;
|
import org.raddatz.familienarchiv.dto.CreateTranscriptionBlockDTO;
|
||||||
import org.raddatz.familienarchiv.dto.ReorderTranscriptionBlocksDTO;
|
import org.raddatz.familienarchiv.dto.ReorderTranscriptionBlocksDTO;
|
||||||
import org.raddatz.familienarchiv.dto.UpdateTranscriptionBlockDTO;
|
import org.raddatz.familienarchiv.dto.UpdateTranscriptionBlockDTO;
|
||||||
import org.raddatz.familienarchiv.exception.DomainException;
|
|
||||||
import org.raddatz.familienarchiv.model.AppUser;
|
|
||||||
import org.raddatz.familienarchiv.model.TranscriptionBlock;
|
import org.raddatz.familienarchiv.model.TranscriptionBlock;
|
||||||
import org.raddatz.familienarchiv.model.TranscriptionBlockVersion;
|
import org.raddatz.familienarchiv.model.TranscriptionBlockVersion;
|
||||||
import org.raddatz.familienarchiv.security.Permission;
|
import org.raddatz.familienarchiv.security.Permission;
|
||||||
import org.raddatz.familienarchiv.security.RequirePermission;
|
import org.raddatz.familienarchiv.security.RequirePermission;
|
||||||
|
import org.raddatz.familienarchiv.security.SecurityUtils;
|
||||||
import org.raddatz.familienarchiv.service.TranscriptionService;
|
import org.raddatz.familienarchiv.service.TranscriptionService;
|
||||||
import org.raddatz.familienarchiv.service.UserService;
|
import org.raddatz.familienarchiv.service.UserService;
|
||||||
import org.springframework.http.HttpStatus;
|
import org.springframework.http.HttpStatus;
|
||||||
@@ -85,8 +84,10 @@ public class TranscriptionBlockController {
|
|||||||
@RequirePermission(Permission.WRITE_ALL)
|
@RequirePermission(Permission.WRITE_ALL)
|
||||||
public TranscriptionBlock reviewBlock(
|
public TranscriptionBlock reviewBlock(
|
||||||
@PathVariable UUID documentId,
|
@PathVariable UUID documentId,
|
||||||
@PathVariable UUID blockId) {
|
@PathVariable UUID blockId,
|
||||||
return transcriptionService.reviewBlock(documentId, blockId);
|
Authentication authentication) {
|
||||||
|
UUID userId = requireUserId(authentication);
|
||||||
|
return transcriptionService.reviewBlock(documentId, blockId, userId);
|
||||||
}
|
}
|
||||||
|
|
||||||
@GetMapping("/{blockId}/history")
|
@GetMapping("/{blockId}/history")
|
||||||
@@ -98,13 +99,6 @@ public class TranscriptionBlockController {
|
|||||||
}
|
}
|
||||||
|
|
||||||
private UUID requireUserId(Authentication authentication) {
|
private UUID requireUserId(Authentication authentication) {
|
||||||
if (authentication == null || !authentication.isAuthenticated()) {
|
return SecurityUtils.requireUserId(authentication, userService);
|
||||||
throw DomainException.unauthorized("Authentication required");
|
|
||||||
}
|
|
||||||
AppUser user = userService.findByUsername(authentication.getName());
|
|
||||||
if (user == null) {
|
|
||||||
throw DomainException.unauthorized("User not found");
|
|
||||||
}
|
|
||||||
return user.getId();
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,47 @@
|
|||||||
|
package org.raddatz.familienarchiv.controller;
|
||||||
|
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import org.raddatz.familienarchiv.dto.TranscriptionQueueItemDTO;
|
||||||
|
import org.raddatz.familienarchiv.dto.TranscriptionWeeklyStatsDTO;
|
||||||
|
import org.raddatz.familienarchiv.security.Permission;
|
||||||
|
import org.raddatz.familienarchiv.security.RequirePermission;
|
||||||
|
import org.raddatz.familienarchiv.service.TranscriptionQueueService;
|
||||||
|
import org.springframework.http.ResponseEntity;
|
||||||
|
import org.springframework.web.bind.annotation.GetMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RequestMapping;
|
||||||
|
import org.springframework.web.bind.annotation.RestController;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Serves the three Mission Control Strip columns for the dashboard.
|
||||||
|
* All endpoints require READ_ALL — same guard as the rest of the archive.
|
||||||
|
*/
|
||||||
|
@RestController
|
||||||
|
@RequestMapping("/api/transcription")
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
@RequirePermission(Permission.READ_ALL)
|
||||||
|
public class TranscriptionQueueController {
|
||||||
|
|
||||||
|
private final TranscriptionQueueService transcriptionQueueService;
|
||||||
|
|
||||||
|
@GetMapping("/segmentation-queue")
|
||||||
|
public ResponseEntity<List<TranscriptionQueueItemDTO>> getSegmentationQueue() {
|
||||||
|
return ResponseEntity.ok(transcriptionQueueService.getSegmentationQueue());
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/transcription-queue")
|
||||||
|
public ResponseEntity<List<TranscriptionQueueItemDTO>> getTranscriptionQueue() {
|
||||||
|
return ResponseEntity.ok(transcriptionQueueService.getTranscriptionQueue());
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/ready-to-read")
|
||||||
|
public ResponseEntity<List<TranscriptionQueueItemDTO>> getReadyToRead() {
|
||||||
|
return ResponseEntity.ok(transcriptionQueueService.getReadyToReadQueue());
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/weekly-stats")
|
||||||
|
public ResponseEntity<TranscriptionWeeklyStatsDTO> getWeeklyStats() {
|
||||||
|
return ResponseEntity.ok(transcriptionQueueService.getWeeklyStats());
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -4,6 +4,7 @@ import java.util.List;
|
|||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.UUID;
|
import java.util.UUID;
|
||||||
|
|
||||||
|
import jakarta.validation.Valid;
|
||||||
import org.raddatz.familienarchiv.dto.AdminUpdateUserRequest;
|
import org.raddatz.familienarchiv.dto.AdminUpdateUserRequest;
|
||||||
import org.raddatz.familienarchiv.dto.ChangePasswordDTO;
|
import org.raddatz.familienarchiv.dto.ChangePasswordDTO;
|
||||||
import org.raddatz.familienarchiv.dto.CreateUserRequest;
|
import org.raddatz.familienarchiv.dto.CreateUserRequest;
|
||||||
@@ -38,7 +39,7 @@ public class UserController {
|
|||||||
if (authentication == null || !authentication.isAuthenticated()) {
|
if (authentication == null || !authentication.isAuthenticated()) {
|
||||||
return ResponseEntity.status(HttpStatus.UNAUTHORIZED).build();
|
return ResponseEntity.status(HttpStatus.UNAUTHORIZED).build();
|
||||||
}
|
}
|
||||||
AppUser user = userService.findByUsername(authentication.getName());
|
AppUser user = userService.findByEmail(authentication.getName());
|
||||||
user.setPassword(null);
|
user.setPassword(null);
|
||||||
return ResponseEntity.ok(user);
|
return ResponseEntity.ok(user);
|
||||||
}
|
}
|
||||||
@@ -46,7 +47,7 @@ public class UserController {
|
|||||||
@PutMapping("users/me")
|
@PutMapping("users/me")
|
||||||
public ResponseEntity<AppUser> updateProfile(Authentication authentication,
|
public ResponseEntity<AppUser> updateProfile(Authentication authentication,
|
||||||
@RequestBody UpdateProfileDTO dto) {
|
@RequestBody UpdateProfileDTO dto) {
|
||||||
AppUser current = userService.findByUsername(authentication.getName());
|
AppUser current = userService.findByEmail(authentication.getName());
|
||||||
AppUser updated = userService.updateProfile(current.getId(), dto);
|
AppUser updated = userService.updateProfile(current.getId(), dto);
|
||||||
updated.setPassword(null);
|
updated.setPassword(null);
|
||||||
return ResponseEntity.ok(updated);
|
return ResponseEntity.ok(updated);
|
||||||
@@ -56,7 +57,7 @@ public class UserController {
|
|||||||
@ResponseStatus(HttpStatus.NO_CONTENT)
|
@ResponseStatus(HttpStatus.NO_CONTENT)
|
||||||
public void changePassword(Authentication authentication,
|
public void changePassword(Authentication authentication,
|
||||||
@RequestBody ChangePasswordDTO dto) {
|
@RequestBody ChangePasswordDTO dto) {
|
||||||
AppUser current = userService.findByUsername(authentication.getName());
|
AppUser current = userService.findByEmail(authentication.getName());
|
||||||
userService.changePassword(current.getId(), dto);
|
userService.changePassword(current.getId(), dto);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -77,7 +78,7 @@ public class UserController {
|
|||||||
|
|
||||||
@PostMapping("/users")
|
@PostMapping("/users")
|
||||||
@RequirePermission(Permission.ADMIN_USER)
|
@RequirePermission(Permission.ADMIN_USER)
|
||||||
public ResponseEntity<AppUser> createUser(@RequestBody CreateUserRequest request) {
|
public ResponseEntity<AppUser> createUser(@Valid @RequestBody CreateUserRequest request) {
|
||||||
return ResponseEntity.ok(userService.createUserOrUpdate(request));
|
return ResponseEntity.ok(userService.createUserOrUpdate(request));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -0,0 +1,18 @@
|
|||||||
|
package org.raddatz.familienarchiv.dashboard;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
import jakarta.annotation.Nullable;
|
||||||
|
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
|
||||||
|
import org.raddatz.familienarchiv.audit.AuditKind;
|
||||||
|
|
||||||
|
import java.time.OffsetDateTime;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
public record ActivityFeedItemDTO(
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) AuditKind kind,
|
||||||
|
@Nullable ActivityActorDTO actor,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID documentId,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String documentTitle,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) OffsetDateTime happenedAt,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) boolean youMentioned
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,42 @@
|
|||||||
|
package org.raddatz.familienarchiv.dashboard;
|
||||||
|
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import org.raddatz.familienarchiv.security.Permission;
|
||||||
|
import org.raddatz.familienarchiv.security.RequirePermission;
|
||||||
|
import org.raddatz.familienarchiv.security.SecurityUtils;
|
||||||
|
import org.raddatz.familienarchiv.service.UserService;
|
||||||
|
import org.springframework.security.core.Authentication;
|
||||||
|
import org.springframework.web.bind.annotation.*;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
@RestController
|
||||||
|
@RequestMapping("/api/dashboard")
|
||||||
|
@RequirePermission(Permission.READ_ALL)
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
public class DashboardController {
|
||||||
|
|
||||||
|
private final DashboardService dashboardService;
|
||||||
|
private final UserService userService;
|
||||||
|
|
||||||
|
@GetMapping("/resume")
|
||||||
|
public DashboardResumeDTO getResume(Authentication authentication) {
|
||||||
|
UUID userId = SecurityUtils.requireUserId(authentication, userService);
|
||||||
|
return dashboardService.getResume(userId);
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/pulse")
|
||||||
|
public DashboardPulseDTO getPulse(Authentication authentication) {
|
||||||
|
UUID userId = SecurityUtils.requireUserId(authentication, userService);
|
||||||
|
return dashboardService.getPulse(userId);
|
||||||
|
}
|
||||||
|
|
||||||
|
@GetMapping("/activity")
|
||||||
|
public List<ActivityFeedItemDTO> getActivity(
|
||||||
|
Authentication authentication,
|
||||||
|
@RequestParam(defaultValue = "7") int limit) {
|
||||||
|
UUID userId = SecurityUtils.requireUserId(authentication, userService);
|
||||||
|
return dashboardService.getActivity(userId, Math.min(limit, 20));
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,15 @@
|
|||||||
|
package org.raddatz.familienarchiv.dashboard;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
public record DashboardPulseDTO(
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int pages,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int annotated,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int transcribed,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int uploaded,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int yourPages,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) List<ActivityActorDTO> contributors
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,19 @@
|
|||||||
|
package org.raddatz.familienarchiv.dashboard;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
import jakarta.annotation.Nullable;
|
||||||
|
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
public record DashboardResumeDTO(
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID documentId,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String title,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String caption,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String excerpt,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int totalBlocks,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int pct,
|
||||||
|
@Nullable String thumbnailUrl,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) List<ActivityActorDTO> collaborators
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,181 @@
|
|||||||
|
package org.raddatz.familienarchiv.dashboard;
|
||||||
|
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
|
||||||
|
import org.raddatz.familienarchiv.audit.ActivityFeedRow;
|
||||||
|
import org.raddatz.familienarchiv.audit.AuditLogQueryService;
|
||||||
|
import org.raddatz.familienarchiv.audit.PulseStatsRow;
|
||||||
|
import org.raddatz.familienarchiv.model.AppUser;
|
||||||
|
import org.raddatz.familienarchiv.model.Document;
|
||||||
|
import org.raddatz.familienarchiv.model.Person;
|
||||||
|
import org.raddatz.familienarchiv.model.TranscriptionBlock;
|
||||||
|
import org.raddatz.familienarchiv.service.DocumentService;
|
||||||
|
import org.raddatz.familienarchiv.service.TranscriptionService;
|
||||||
|
import org.raddatz.familienarchiv.service.UserService;
|
||||||
|
import org.springframework.stereotype.Service;
|
||||||
|
|
||||||
|
import java.time.DayOfWeek;
|
||||||
|
import java.time.OffsetDateTime;
|
||||||
|
import java.time.ZoneOffset;
|
||||||
|
import java.time.temporal.TemporalAdjusters;
|
||||||
|
import java.util.*;
|
||||||
|
import java.util.stream.Stream;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
@Service
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
@Slf4j
|
||||||
|
public class DashboardService {
|
||||||
|
|
||||||
|
private final AuditLogQueryService auditLogQueryService;
|
||||||
|
private final DocumentService documentService;
|
||||||
|
private final TranscriptionService transcriptionService;
|
||||||
|
private final UserService userService;
|
||||||
|
|
||||||
|
public DashboardResumeDTO getResume(UUID userId) {
|
||||||
|
Optional<UUID> docIdOpt = auditLogQueryService.findMostRecentDocumentForUser(userId);
|
||||||
|
if (docIdOpt.isEmpty()) return null;
|
||||||
|
|
||||||
|
UUID docId = docIdOpt.get();
|
||||||
|
Document doc;
|
||||||
|
try {
|
||||||
|
doc = documentService.getDocumentById(docId);
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.warn("Resume: document {} not found for user {}", docId, userId);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
List<TranscriptionBlock> blocks = transcriptionService.listBlocks(docId);
|
||||||
|
String excerpt = blocks.stream()
|
||||||
|
.filter(b -> b.getText() != null && !b.getText().isBlank())
|
||||||
|
.min(Comparator.comparingInt(TranscriptionBlock::getSortOrder))
|
||||||
|
.map(b -> b.getText().length() > 200 ? b.getText().substring(0, 200) + "…" : b.getText())
|
||||||
|
.orElse("");
|
||||||
|
|
||||||
|
int totalBlocks = blocks.size();
|
||||||
|
long reviewedBlocks = blocks.stream().filter(TranscriptionBlock::isReviewed).count();
|
||||||
|
int pct = totalBlocks > 0 ? (int) (reviewedBlocks * 100L / totalBlocks) : 0;
|
||||||
|
|
||||||
|
String caption = buildCaption(doc);
|
||||||
|
|
||||||
|
List<UUID> collaboratorIds = blocks.stream()
|
||||||
|
.map(TranscriptionBlock::getUpdatedBy)
|
||||||
|
.filter(Objects::nonNull)
|
||||||
|
.distinct()
|
||||||
|
.limit(5)
|
||||||
|
.toList();
|
||||||
|
|
||||||
|
List<ActivityActorDTO> collaborators = collaboratorIds.stream()
|
||||||
|
.map(uid -> {
|
||||||
|
try {
|
||||||
|
AppUser u = userService.getById(uid);
|
||||||
|
return toActorDTO(u);
|
||||||
|
} catch (Exception e) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.filter(Objects::nonNull)
|
||||||
|
.toList();
|
||||||
|
|
||||||
|
return new DashboardResumeDTO(docId, doc.getTitle(), caption, excerpt,
|
||||||
|
totalBlocks, pct, null, collaborators);
|
||||||
|
}
|
||||||
|
|
||||||
|
public DashboardPulseDTO getPulse(UUID userId) {
|
||||||
|
OffsetDateTime weekStart = OffsetDateTime.now(ZoneOffset.UTC)
|
||||||
|
.with(TemporalAdjusters.previousOrSame(DayOfWeek.MONDAY))
|
||||||
|
.withHour(0).withMinute(0).withSecond(0).withNano(0);
|
||||||
|
|
||||||
|
PulseStatsRow stats = auditLogQueryService.getPulseStats(weekStart, userId);
|
||||||
|
|
||||||
|
List<ActivityFeedRow> feed = auditLogQueryService.findActivityFeed(userId, 50);
|
||||||
|
List<ActivityActorDTO> contributors = feed.stream()
|
||||||
|
.filter(r -> r.getActorId() != null)
|
||||||
|
.map(r -> new ActivityActorDTO(r.getActorInitials(), r.getActorColor(), r.getActorName()))
|
||||||
|
.filter(a -> !a.initials().isBlank())
|
||||||
|
.distinct()
|
||||||
|
.limit(6)
|
||||||
|
.toList();
|
||||||
|
|
||||||
|
return new DashboardPulseDTO(
|
||||||
|
(int) stats.getPages(),
|
||||||
|
(int) stats.getAnnotated(),
|
||||||
|
(int) stats.getTranscribed(),
|
||||||
|
(int) stats.getUploaded(),
|
||||||
|
(int) stats.getYourPages(),
|
||||||
|
contributors
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<ActivityFeedItemDTO> getActivity(UUID currentUserId, int limit) {
|
||||||
|
List<ActivityFeedRow> rows = auditLogQueryService.findActivityFeed(currentUserId, limit);
|
||||||
|
|
||||||
|
List<UUID> docIds = rows.stream()
|
||||||
|
.map(ActivityFeedRow::getDocumentId)
|
||||||
|
.filter(Objects::nonNull)
|
||||||
|
.distinct()
|
||||||
|
.toList();
|
||||||
|
|
||||||
|
Map<UUID, String> titleCache = new HashMap<>();
|
||||||
|
try {
|
||||||
|
documentService.getDocumentsByIds(docIds)
|
||||||
|
.forEach(d -> titleCache.put(d.getId(), d.getTitle()));
|
||||||
|
} catch (Exception e) {
|
||||||
|
log.warn("Activity: failed to bulk-load document titles", e);
|
||||||
|
}
|
||||||
|
|
||||||
|
return rows.stream().map(row -> {
|
||||||
|
ActivityActorDTO actor = row.getActorId() != null
|
||||||
|
? new ActivityActorDTO(row.getActorInitials(), row.getActorColor(), row.getActorName())
|
||||||
|
: null;
|
||||||
|
String docTitle = titleCache.getOrDefault(row.getDocumentId(), "");
|
||||||
|
return new ActivityFeedItemDTO(
|
||||||
|
org.raddatz.familienarchiv.audit.AuditKind.valueOf(row.getKind()),
|
||||||
|
actor,
|
||||||
|
row.getDocumentId(),
|
||||||
|
docTitle,
|
||||||
|
row.getHappenedAt().atOffset(ZoneOffset.UTC),
|
||||||
|
row.isYouMentioned()
|
||||||
|
);
|
||||||
|
}).toList();
|
||||||
|
}
|
||||||
|
|
||||||
|
private String buildCaption(Document doc) {
|
||||||
|
StringBuilder sb = new StringBuilder();
|
||||||
|
if (doc.getSender() != null) sb.append(personName(doc.getSender()));
|
||||||
|
if (!doc.getReceivers().isEmpty()) {
|
||||||
|
String receivers = doc.getReceivers().stream()
|
||||||
|
.map(this::personName).collect(Collectors.joining(", "));
|
||||||
|
if (!sb.isEmpty()) sb.append(" an ");
|
||||||
|
sb.append(receivers);
|
||||||
|
}
|
||||||
|
if (doc.getDocumentDate() != null) {
|
||||||
|
if (!sb.isEmpty()) sb.append(" · ");
|
||||||
|
sb.append(doc.getDocumentDate());
|
||||||
|
}
|
||||||
|
return sb.toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
private String personName(Person p) {
|
||||||
|
if (p == null) return "";
|
||||||
|
if (p.getFirstName() != null && p.getLastName() != null) return p.getFirstName() + " " + p.getLastName();
|
||||||
|
if (p.getFirstName() != null) return p.getFirstName();
|
||||||
|
if (p.getLastName() != null) return p.getLastName();
|
||||||
|
return "";
|
||||||
|
}
|
||||||
|
|
||||||
|
private ActivityActorDTO toActorDTO(AppUser u) {
|
||||||
|
String initials = "";
|
||||||
|
if (u.getFirstName() != null && !u.getFirstName().isBlank())
|
||||||
|
initials += u.getFirstName().charAt(0);
|
||||||
|
if (u.getLastName() != null && !u.getLastName().isBlank())
|
||||||
|
initials += u.getLastName().charAt(0);
|
||||||
|
if (initials.isBlank() && u.getEmail() != null)
|
||||||
|
initials = u.getEmail().substring(0, 1).toUpperCase();
|
||||||
|
String fullName = Stream.of(u.getFirstName(), u.getLastName())
|
||||||
|
.filter(Objects::nonNull)
|
||||||
|
.collect(Collectors.joining(" "));
|
||||||
|
return new ActivityActorDTO(initials.toUpperCase(), u.getColor(), fullName);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,18 @@
|
|||||||
|
package org.raddatz.familienarchiv.dto;
|
||||||
|
|
||||||
|
import lombok.Data;
|
||||||
|
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
@Data
|
||||||
|
public class CreateInviteRequest {
|
||||||
|
private String label;
|
||||||
|
private Integer maxUses;
|
||||||
|
private String prefillFirstName;
|
||||||
|
private String prefillLastName;
|
||||||
|
private String prefillEmail;
|
||||||
|
private List<UUID> groupIds;
|
||||||
|
private LocalDateTime expiresAt;
|
||||||
|
}
|
||||||
@@ -1,6 +1,8 @@
|
|||||||
package org.raddatz.familienarchiv.dto;
|
package org.raddatz.familienarchiv.dto;
|
||||||
|
|
||||||
|
import jakarta.validation.constraints.Email;
|
||||||
|
import jakarta.validation.constraints.NotBlank;
|
||||||
|
import jakarta.validation.constraints.Pattern;
|
||||||
import lombok.Data;
|
import lombok.Data;
|
||||||
|
|
||||||
import java.time.LocalDate;
|
import java.time.LocalDate;
|
||||||
@@ -9,7 +11,9 @@ import java.util.UUID;
|
|||||||
|
|
||||||
@Data
|
@Data
|
||||||
public class CreateUserRequest {
|
public class CreateUserRequest {
|
||||||
private String username;
|
@NotBlank
|
||||||
|
@Email
|
||||||
|
@Pattern(regexp = "^[^:]+$", message = "Email must not contain a colon")
|
||||||
private String email;
|
private String email;
|
||||||
private String initialPassword;
|
private String initialPassword;
|
||||||
private List<UUID> groupIds;
|
private List<UUID> groupIds;
|
||||||
|
|||||||
@@ -1,16 +1,35 @@
|
|||||||
package org.raddatz.familienarchiv.dto;
|
package org.raddatz.familienarchiv.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
import org.raddatz.familienarchiv.model.Document;
|
import org.raddatz.familienarchiv.model.Document;
|
||||||
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
public record DocumentSearchResult(List<Document> documents, long total) {
|
public record DocumentSearchResult(
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
List<Document> documents,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
long total,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
Map<UUID, SearchMatchData> matchData
|
||||||
|
) {
|
||||||
/**
|
/**
|
||||||
* Creates a result where total equals the list size.
|
* Creates a fully-enriched result from documents and their match overlay data.
|
||||||
|
* Absent map entries (e.g. document deleted between FTS and enrichment) are safe —
|
||||||
|
* the frontend treats a missing entry as "no match data".
|
||||||
|
*/
|
||||||
|
public static DocumentSearchResult withMatchData(List<Document> documents, Map<UUID, SearchMatchData> matchData) {
|
||||||
|
return new DocumentSearchResult(documents, documents.size(), matchData);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a result without match data — used for filter-only searches (no text query).
|
||||||
* No pagination yet — the full matched set is always returned.
|
* No pagination yet — the full matched set is always returned.
|
||||||
* When pagination is added, total must come from a DB COUNT query, not list.size().
|
* When pagination is added, total must come from a DB COUNT query, not list.size().
|
||||||
*/
|
*/
|
||||||
public static DocumentSearchResult of(List<Document> documents) {
|
public static DocumentSearchResult of(List<Document> documents) {
|
||||||
return new DocumentSearchResult(documents, documents.size());
|
return withMatchData(documents, Map.of());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,35 @@
|
|||||||
|
package org.raddatz.familienarchiv.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
@Builder
|
||||||
|
public class InviteListItemDTO {
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private UUID id;
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private String code;
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private String displayCode;
|
||||||
|
private String label;
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private int useCount;
|
||||||
|
private Integer maxUses;
|
||||||
|
private LocalDateTime expiresAt;
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private boolean revoked;
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private String status;
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private LocalDateTime createdAt;
|
||||||
|
private String shareableUrl;
|
||||||
|
}
|
||||||
@@ -0,0 +1,18 @@
|
|||||||
|
package org.raddatz.familienarchiv.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
|
||||||
|
@Data
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
public class InvitePrefillDTO {
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private String firstName;
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private String lastName;
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private String email;
|
||||||
|
}
|
||||||
@@ -0,0 +1,14 @@
|
|||||||
|
package org.raddatz.familienarchiv.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Character-level offset of a highlighted term within a text field.
|
||||||
|
* Offsets are Java {@code String} character positions (UTF-16 code units),
|
||||||
|
* which are identical to JavaScript string positions — consistent end-to-end
|
||||||
|
* for all German BMP characters (ä, ö, ü, ß, etc.).
|
||||||
|
*/
|
||||||
|
public record MatchOffset(
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int start,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int length
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
package org.raddatz.familienarchiv.dto;
|
||||||
|
|
||||||
|
import jakarta.validation.constraints.NotNull;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
public record MergeTagDTO(@NotNull UUID targetId) {}
|
||||||
@@ -0,0 +1,19 @@
|
|||||||
|
package org.raddatz.familienarchiv.dto;
|
||||||
|
|
||||||
|
import jakarta.validation.constraints.Email;
|
||||||
|
import jakarta.validation.constraints.NotBlank;
|
||||||
|
import lombok.Data;
|
||||||
|
|
||||||
|
@Data
|
||||||
|
public class RegisterRequest {
|
||||||
|
@NotBlank
|
||||||
|
private String code;
|
||||||
|
@NotBlank
|
||||||
|
@Email
|
||||||
|
private String email;
|
||||||
|
@NotBlank
|
||||||
|
private String password;
|
||||||
|
private String firstName;
|
||||||
|
private String lastName;
|
||||||
|
private boolean notifyOnMention = true;
|
||||||
|
}
|
||||||
@@ -0,0 +1,67 @@
|
|||||||
|
package org.raddatz.familienarchiv.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Match signals for a single document in a full-text search result.
|
||||||
|
* All fields are non-null except {@code transcriptionSnippet} and {@code summarySnippet},
|
||||||
|
* which are null when the respective field did not match the query.
|
||||||
|
*/
|
||||||
|
public record SearchMatchData(
|
||||||
|
/**
|
||||||
|
* Best-ranked matching transcription line, or null if no block matched.
|
||||||
|
*/
|
||||||
|
String transcriptionSnippet,
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Character offsets of highlighted terms within the document title.
|
||||||
|
* Empty when the title did not contribute to the match.
|
||||||
|
*/
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
List<MatchOffset> titleOffsets,
|
||||||
|
|
||||||
|
/**
|
||||||
|
* True when the sender's name matched the query.
|
||||||
|
*/
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
boolean senderMatched,
|
||||||
|
|
||||||
|
/**
|
||||||
|
* IDs of receiver persons whose names matched the query.
|
||||||
|
*/
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
List<UUID> matchedReceiverIds,
|
||||||
|
|
||||||
|
/**
|
||||||
|
* IDs of tags whose names matched the query.
|
||||||
|
*/
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
List<UUID> matchedTagIds,
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Character offsets of highlighted terms within the transcription snippet.
|
||||||
|
* Empty when no transcription block matched or the snippet has no highlights.
|
||||||
|
*/
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
List<MatchOffset> snippetOffsets,
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Highlighted summary excerpt, or null if the summary did not match the query.
|
||||||
|
*/
|
||||||
|
String summarySnippet,
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Character offsets of highlighted terms within the summary snippet.
|
||||||
|
* Empty when the summary did not match or has no highlights.
|
||||||
|
*/
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
List<MatchOffset> summaryOffsets
|
||||||
|
) {
|
||||||
|
/** Canonical "no match data" value for a single document. */
|
||||||
|
public static SearchMatchData empty() {
|
||||||
|
return new SearchMatchData(null, List.of(), false, List.of(), List.of(), List.of(), null, List.of());
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,9 @@
|
|||||||
|
package org.raddatz.familienarchiv.dto;
|
||||||
|
|
||||||
|
/** Determines how multiple selected tag filters are combined in a document search. */
|
||||||
|
public enum TagOperator {
|
||||||
|
/** Every tag set must match (default). */
|
||||||
|
AND,
|
||||||
|
/** At least one tag set must match. */
|
||||||
|
OR
|
||||||
|
}
|
||||||
@@ -0,0 +1,14 @@
|
|||||||
|
package org.raddatz.familienarchiv.dto;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
|
||||||
|
public record TagTreeNodeDTO(
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID id,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String name,
|
||||||
|
String color,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int documentCount,
|
||||||
|
List<TagTreeNodeDTO> children,
|
||||||
|
@Schema(description = "Parent tag ID, null for root tags") UUID parentId) {}
|
||||||
@@ -0,0 +1,5 @@
|
|||||||
|
package org.raddatz.familienarchiv.dto;
|
||||||
|
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
public record TagUpdateDTO(String name, UUID parentId, String color) {}
|
||||||
@@ -0,0 +1,11 @@
|
|||||||
|
package org.raddatz.familienarchiv.dto;
|
||||||
|
|
||||||
|
import org.raddatz.familienarchiv.model.OcrTrainingRun;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
public record TrainingHistoryResponse(
|
||||||
|
List<OcrTrainingRun> runs,
|
||||||
|
Map<String, String> personNames
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,19 @@
|
|||||||
|
package org.raddatz.familienarchiv.dto;
|
||||||
|
|
||||||
|
import org.raddatz.familienarchiv.model.OcrTrainingRun;
|
||||||
|
import org.raddatz.familienarchiv.model.SenderModel;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
public record TrainingInfoResponse(
|
||||||
|
int availableBlocks,
|
||||||
|
int totalOcrBlocks,
|
||||||
|
int availableDocuments,
|
||||||
|
int availableSegBlocks,
|
||||||
|
boolean ocrServiceAvailable,
|
||||||
|
OcrTrainingRun lastRun,
|
||||||
|
List<OcrTrainingRun> runs,
|
||||||
|
Map<String, String> personNames,
|
||||||
|
List<SenderModel> senderModels
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,19 @@
|
|||||||
|
package org.raddatz.familienarchiv.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
|
||||||
|
|
||||||
|
import java.time.LocalDate;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
public record TranscriptionQueueItemDTO(
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) UUID id,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) String title,
|
||||||
|
LocalDate documentDate,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int annotationCount,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int textedBlockCount,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) int reviewedBlockCount,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) List<ActivityActorDTO> contributors,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) boolean hasMoreContributors
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,13 @@
|
|||||||
|
package org.raddatz.familienarchiv.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Weekly activity pulse for the Mission Control Strip column headers.
|
||||||
|
* Counts documents that received new work in each pipeline stage
|
||||||
|
* during the last 7 days.
|
||||||
|
*/
|
||||||
|
public record TranscriptionWeeklyStatsDTO(
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) long segmentationCount,
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED) long transcriptionCount
|
||||||
|
) {}
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
package org.raddatz.familienarchiv.dto;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
import jakarta.validation.constraints.NotNull;
|
||||||
|
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
public record TriggerSenderTrainingDTO(
|
||||||
|
@NotNull
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
UUID personId
|
||||||
|
) {}
|
||||||
@@ -38,6 +38,16 @@ public enum ErrorCode {
|
|||||||
/** A mass import is already in progress; only one can run at a time. 409 */
|
/** A mass import is already in progress; only one can run at a time. 409 */
|
||||||
IMPORT_ALREADY_RUNNING,
|
IMPORT_ALREADY_RUNNING,
|
||||||
|
|
||||||
|
// --- Invites ---
|
||||||
|
/** The invite code does not exist. 404 */
|
||||||
|
INVITE_NOT_FOUND,
|
||||||
|
/** The invite has already reached its use limit. 409 */
|
||||||
|
INVITE_EXHAUSTED,
|
||||||
|
/** The invite has been revoked by an admin. 409 */
|
||||||
|
INVITE_REVOKED,
|
||||||
|
/** The invite has passed its expiry date. 410 */
|
||||||
|
INVITE_EXPIRED,
|
||||||
|
|
||||||
// --- Auth ---
|
// --- Auth ---
|
||||||
/** The request is not authenticated. 401 */
|
/** The request is not authenticated. 401 */
|
||||||
UNAUTHORIZED,
|
UNAUTHORIZED,
|
||||||
@@ -77,6 +87,20 @@ public enum ErrorCode {
|
|||||||
OCR_PROCESSING_FAILED,
|
OCR_PROCESSING_FAILED,
|
||||||
/** A training run is already in progress. 409 */
|
/** A training run is already in progress. 409 */
|
||||||
TRAINING_ALREADY_RUNNING,
|
TRAINING_ALREADY_RUNNING,
|
||||||
|
/** Internal inconsistency: expected training run row was not found after creation. 500 */
|
||||||
|
OCR_TRAINING_CONFLICT,
|
||||||
|
|
||||||
|
// --- Tags ---
|
||||||
|
/** A tag with the given ID does not exist. 404 */
|
||||||
|
TAG_NOT_FOUND,
|
||||||
|
/** The supplied color token is not in the allowed palette. 400 */
|
||||||
|
INVALID_TAG_COLOR,
|
||||||
|
/** Setting this parent would create a cycle in the tag hierarchy. 400 */
|
||||||
|
TAG_CYCLE_DETECTED,
|
||||||
|
/** Merge source and target are the same tag. 400 */
|
||||||
|
TAG_MERGE_SELF,
|
||||||
|
/** The merge target is a descendant of the source tag. 400 */
|
||||||
|
TAG_MERGE_INVALID_TARGET,
|
||||||
|
|
||||||
// --- Generic ---
|
// --- Generic ---
|
||||||
/** Request validation failed (missing or malformed fields). 400 */
|
/** Request validation failed (missing or malformed fields). 400 */
|
||||||
|
|||||||
@@ -1,6 +1,9 @@
|
|||||||
package org.raddatz.familienarchiv.model;
|
package org.raddatz.familienarchiv.model;
|
||||||
|
|
||||||
import jakarta.persistence.*;
|
import jakarta.persistence.*;
|
||||||
|
import jakarta.validation.constraints.Email;
|
||||||
|
import jakarta.validation.constraints.NotBlank;
|
||||||
|
import jakarta.validation.constraints.Pattern;
|
||||||
import lombok.*;
|
import lombok.*;
|
||||||
|
|
||||||
import org.hibernate.annotations.CreationTimestamp;
|
import org.hibernate.annotations.CreationTimestamp;
|
||||||
@@ -16,8 +19,12 @@ import java.util.HashSet;
|
|||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
import java.util.UUID;
|
import java.util.UUID;
|
||||||
|
|
||||||
|
import jakarta.persistence.PostLoad;
|
||||||
|
import jakarta.persistence.PrePersist;
|
||||||
|
import jakarta.persistence.PreUpdate;
|
||||||
|
|
||||||
@Entity
|
@Entity
|
||||||
@Table(name = "users") // Tabellenname in Postgres
|
@Table(name = "users")
|
||||||
@Data
|
@Data
|
||||||
@NoArgsConstructor
|
@NoArgsConstructor
|
||||||
@AllArgsConstructor
|
@AllArgsConstructor
|
||||||
@@ -30,26 +37,26 @@ public class AppUser {
|
|||||||
private UUID id;
|
private UUID id;
|
||||||
|
|
||||||
@Column(unique = true, nullable = false)
|
@Column(unique = true, nullable = false)
|
||||||
|
@NotBlank
|
||||||
|
@Email
|
||||||
|
@Pattern(regexp = "^[^:]+$", message = "Email must not contain a colon")
|
||||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
private String username;
|
private String email;
|
||||||
|
|
||||||
@Column(nullable = false)
|
@Column(nullable = false)
|
||||||
@JsonProperty(access = JsonProperty.Access.WRITE_ONLY)
|
@JsonProperty(access = JsonProperty.Access.WRITE_ONLY)
|
||||||
private String password; // Wird verschlüsselt gespeichert (BCrypt)
|
private String password;
|
||||||
|
|
||||||
private String firstName;
|
private String firstName;
|
||||||
private String lastName;
|
private String lastName;
|
||||||
private LocalDate birthDate;
|
private LocalDate birthDate;
|
||||||
|
|
||||||
@Column(unique = true)
|
|
||||||
private String email;
|
|
||||||
|
|
||||||
@Column(columnDefinition = "TEXT")
|
@Column(columnDefinition = "TEXT")
|
||||||
private String contact;
|
private String contact;
|
||||||
|
|
||||||
@Builder.Default
|
@Builder.Default
|
||||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
private boolean enabled = true; // Um User zu sperren ohne sie zu löschen
|
private boolean enabled = true;
|
||||||
|
|
||||||
@Column(nullable = false)
|
@Column(nullable = false)
|
||||||
@Builder.Default
|
@Builder.Default
|
||||||
@@ -61,7 +68,6 @@ public class AppUser {
|
|||||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
private boolean notifyOnMention = false;
|
private boolean notifyOnMention = false;
|
||||||
|
|
||||||
// Ein User kann in mehreren Gruppen sein
|
|
||||||
@ManyToMany(fetch = FetchType.EAGER)
|
@ManyToMany(fetch = FetchType.EAGER)
|
||||||
@JoinTable(name = "users_groups", joinColumns = @JoinColumn(name = "user_id"), inverseJoinColumns = @JoinColumn(name = "group_id"))
|
@JoinTable(name = "users_groups", joinColumns = @JoinColumn(name = "user_id"), inverseJoinColumns = @JoinColumn(name = "group_id"))
|
||||||
@Builder.Default
|
@Builder.Default
|
||||||
@@ -72,19 +78,36 @@ public class AppUser {
|
|||||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
private LocalDateTime createdAt;
|
private LocalDateTime createdAt;
|
||||||
|
|
||||||
|
@Column(nullable = false)
|
||||||
|
@Builder.Default
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private String color = "";
|
||||||
|
|
||||||
|
private static final String[] PALETTE = {
|
||||||
|
"#7a4f9a", "#5a8a6a", "#3060b0", "#a0522d", "#c0446e", "#c17a00", "#0e7490", "#1d4ed8"
|
||||||
|
};
|
||||||
|
|
||||||
|
public static String computeColor(UUID id) {
|
||||||
|
return PALETTE[Math.abs(id.hashCode()) % PALETTE.length];
|
||||||
|
}
|
||||||
|
|
||||||
|
@PrePersist
|
||||||
|
@PreUpdate
|
||||||
|
@PostLoad
|
||||||
|
void deriveColor() {
|
||||||
|
if (id != null && (color == null || color.isEmpty())) {
|
||||||
|
this.color = computeColor(id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
public boolean hasPermission(String permission) {
|
public boolean hasPermission(String permission) {
|
||||||
if (groups == null || groups.isEmpty()) {
|
if (groups == null || groups.isEmpty()) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
return this.groups.stream().anyMatch(group -> group.getPermissions().contains(permission));
|
return this.groups.stream().anyMatch(group -> group.getPermissions().contains(permission));
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public AppUser updateFromRequest(CreateUserRequest request, PasswordEncoder passwordEncoder, Set<UserGroup> groups) {
|
public AppUser updateFromRequest(CreateUserRequest request, PasswordEncoder passwordEncoder, Set<UserGroup> groups) {
|
||||||
if (request.getUsername() != null && !request.getUsername().isBlank()) {
|
|
||||||
this.username = request.getUsername();
|
|
||||||
}
|
|
||||||
|
|
||||||
if (request.getEmail() != null && !request.getEmail().isBlank()) {
|
if (request.getEmail() != null && !request.getEmail().isBlank()) {
|
||||||
this.email = request.getEmail();
|
this.email = request.getEmail();
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,76 @@
|
|||||||
|
package org.raddatz.familienarchiv.model;
|
||||||
|
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
import jakarta.persistence.*;
|
||||||
|
import lombok.*;
|
||||||
|
import org.hibernate.annotations.CreationTimestamp;
|
||||||
|
|
||||||
|
import java.time.LocalDateTime;
|
||||||
|
import java.util.HashSet;
|
||||||
|
import java.util.Set;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
@Entity
|
||||||
|
@Table(name = "invite_tokens")
|
||||||
|
@Data
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
@Builder
|
||||||
|
public class InviteToken {
|
||||||
|
|
||||||
|
@Id
|
||||||
|
@GeneratedValue(strategy = GenerationType.UUID)
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private UUID id;
|
||||||
|
|
||||||
|
@Column(nullable = false, unique = true, length = 10)
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private String code;
|
||||||
|
|
||||||
|
private String label;
|
||||||
|
|
||||||
|
private Integer maxUses;
|
||||||
|
|
||||||
|
@Column(nullable = false)
|
||||||
|
@Builder.Default
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private int useCount = 0;
|
||||||
|
|
||||||
|
private String prefillFirstName;
|
||||||
|
private String prefillLastName;
|
||||||
|
private String prefillEmail;
|
||||||
|
|
||||||
|
@ElementCollection(fetch = FetchType.EAGER)
|
||||||
|
@CollectionTable(name = "invite_token_group_ids", joinColumns = @JoinColumn(name = "invite_token_id"))
|
||||||
|
@Column(name = "group_id")
|
||||||
|
@Builder.Default
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private Set<UUID> groupIds = new HashSet<>();
|
||||||
|
|
||||||
|
private LocalDateTime expiresAt;
|
||||||
|
|
||||||
|
@ManyToOne(fetch = FetchType.LAZY)
|
||||||
|
@JoinColumn(name = "created_by", nullable = false)
|
||||||
|
private AppUser createdBy;
|
||||||
|
|
||||||
|
@CreationTimestamp
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private LocalDateTime createdAt;
|
||||||
|
|
||||||
|
@Column(nullable = false)
|
||||||
|
@Builder.Default
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private boolean revoked = false;
|
||||||
|
|
||||||
|
public boolean isExhausted() {
|
||||||
|
return maxUses != null && useCount >= maxUses;
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean isExpired() {
|
||||||
|
return expiresAt != null && expiresAt.isBefore(LocalDateTime.now());
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean isActive() {
|
||||||
|
return !revoked && !isExhausted() && !isExpired();
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -59,6 +59,9 @@ public class OcrTrainingRun {
|
|||||||
@Column(name = "triggered_by")
|
@Column(name = "triggered_by")
|
||||||
private UUID triggeredBy;
|
private UUID triggeredBy;
|
||||||
|
|
||||||
|
@Column(name = "person_id")
|
||||||
|
private UUID personId;
|
||||||
|
|
||||||
@CreationTimestamp
|
@CreationTimestamp
|
||||||
@Column(name = "created_at", nullable = false, updatable = false)
|
@Column(name = "created_at", nullable = false, updatable = false)
|
||||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
|||||||
@@ -0,0 +1,56 @@
|
|||||||
|
package org.raddatz.familienarchiv.model;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||||
|
import io.swagger.v3.oas.annotations.media.Schema;
|
||||||
|
import jakarta.persistence.*;
|
||||||
|
import lombok.AllArgsConstructor;
|
||||||
|
import lombok.Builder;
|
||||||
|
import lombok.Data;
|
||||||
|
import lombok.NoArgsConstructor;
|
||||||
|
import org.hibernate.annotations.CreationTimestamp;
|
||||||
|
import org.hibernate.annotations.UpdateTimestamp;
|
||||||
|
|
||||||
|
import java.time.Instant;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
@Entity
|
||||||
|
@Table(name = "sender_models")
|
||||||
|
@Data
|
||||||
|
@NoArgsConstructor
|
||||||
|
@AllArgsConstructor
|
||||||
|
@Builder
|
||||||
|
public class SenderModel {
|
||||||
|
|
||||||
|
@Id
|
||||||
|
@GeneratedValue(strategy = GenerationType.UUID)
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private UUID id;
|
||||||
|
|
||||||
|
@Column(name = "person_id", nullable = false, unique = true)
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private UUID personId;
|
||||||
|
|
||||||
|
@JsonIgnore
|
||||||
|
@Column(name = "model_path", nullable = false)
|
||||||
|
private String modelPath;
|
||||||
|
|
||||||
|
@Column
|
||||||
|
private Double accuracy;
|
||||||
|
|
||||||
|
@Column
|
||||||
|
private Double cer;
|
||||||
|
|
||||||
|
@Column(name = "corrected_lines_at_training", nullable = false)
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private int correctedLinesAtTraining;
|
||||||
|
|
||||||
|
@CreationTimestamp
|
||||||
|
@Column(name = "created_at", nullable = false, updatable = false)
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private Instant createdAt;
|
||||||
|
|
||||||
|
@UpdateTimestamp
|
||||||
|
@Column(name = "updated_at", nullable = false)
|
||||||
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
|
private Instant updatedAt;
|
||||||
|
}
|
||||||
@@ -20,4 +20,11 @@ public class Tag {
|
|||||||
@Column(unique = true, nullable = false)
|
@Column(unique = true, nullable = false)
|
||||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||||
private String name;
|
private String name;
|
||||||
|
|
||||||
|
/** UUID of the parent tag, or null for root-level tags. */
|
||||||
|
@Column(name = "parent_id")
|
||||||
|
private UUID parentId;
|
||||||
|
|
||||||
|
/** Color token name (e.g. "sage"), only set on root-level tags. Null means no color. */
|
||||||
|
private String color;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
package org.raddatz.familienarchiv.model;
|
package org.raddatz.familienarchiv.model;
|
||||||
|
|
||||||
public enum TrainingStatus {
|
public enum TrainingStatus {
|
||||||
|
QUEUED,
|
||||||
RUNNING,
|
RUNNING,
|
||||||
DONE,
|
DONE,
|
||||||
FAILED
|
FAILED
|
||||||
|
|||||||
@@ -13,11 +13,10 @@ import java.util.UUID;
|
|||||||
|
|
||||||
@Repository
|
@Repository
|
||||||
public interface AppUserRepository extends JpaRepository<AppUser, UUID> {
|
public interface AppUserRepository extends JpaRepository<AppUser, UUID> {
|
||||||
Optional<AppUser> findByUsername(String username);
|
|
||||||
Optional<AppUser> findByEmail(String email);
|
Optional<AppUser> findByEmail(String email);
|
||||||
|
|
||||||
@Query("SELECT u FROM AppUser u WHERE " +
|
@Query("SELECT u FROM AppUser u WHERE " +
|
||||||
"LOWER(COALESCE(u.firstName, '') || ' ' || COALESCE(u.lastName, '')) LIKE LOWER(CONCAT('%', :q, '%')) " +
|
"LOWER(u.email) LIKE LOWER(CONCAT('%', :q, '%')) " +
|
||||||
"OR LOWER(u.username) LIKE LOWER(CONCAT('%', :q, '%'))")
|
"OR LOWER(COALESCE(u.firstName, '') || ' ' || COALESCE(u.lastName, '')) LIKE LOWER(CONCAT('%', :q, '%'))")
|
||||||
List<AppUser> searchByNameOrUsername(@Param("q") String q, Pageable pageable);
|
List<AppUser> searchByEmailOrName(@Param("q") String q, Pageable pageable);
|
||||||
}
|
}
|
||||||
@@ -83,10 +83,157 @@ public interface DocumentRepository extends JpaRepository<Document, UUID>, JpaSp
|
|||||||
|
|
||||||
@Query(nativeQuery = true, value = """
|
@Query(nativeQuery = true, value = """
|
||||||
SELECT d.id FROM documents d
|
SELECT d.id FROM documents d
|
||||||
WHERE d.search_vector @@ websearch_to_tsquery('german', :query)
|
CROSS JOIN LATERAL (
|
||||||
ORDER BY ts_rank(d.search_vector, websearch_to_tsquery('german', :query)) DESC,
|
SELECT CASE WHEN websearch_to_tsquery('german', :query)::text <> ''
|
||||||
|
THEN to_tsquery('german', regexp_replace(
|
||||||
|
websearch_to_tsquery('german', :query)::text,
|
||||||
|
'''([^'']+)''',
|
||||||
|
'''\\1'':*',
|
||||||
|
'g'))
|
||||||
|
END AS pq
|
||||||
|
) q
|
||||||
|
WHERE d.search_vector @@ q.pq
|
||||||
|
ORDER BY ts_rank(d.search_vector, q.pq) DESC,
|
||||||
d.meta_date DESC NULLS LAST
|
d.meta_date DESC NULLS LAST
|
||||||
""")
|
""")
|
||||||
List<UUID> findRankedIdsByFts(@Param("query") String query);
|
List<UUID> findRankedIdsByFts(@Param("query") String query);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns match-enrichment data for a set of documents identified by their IDs.
|
||||||
|
* Each row contains (in column order):
|
||||||
|
* <ol>
|
||||||
|
* <li>UUID — document id</li>
|
||||||
|
* <li>String — title headline with \x01/\x02 delimiters around matched terms</li>
|
||||||
|
* <li>String — best-ranked transcription snippet with \x01/\x02 delimiters, or null</li>
|
||||||
|
* <li>Boolean — whether the sender's name matched the query</li>
|
||||||
|
* <li>String — comma-separated matched receiver UUIDs, or null</li>
|
||||||
|
* <li>String — comma-separated matched tag UUIDs, or null</li>
|
||||||
|
* <li>String — summary snippet with \x01/\x02 delimiters, or null if summary didn't match</li>
|
||||||
|
* </ol>
|
||||||
|
* Short-circuit before calling this method when {@code ids} is empty or {@code query} is blank.
|
||||||
|
*/
|
||||||
|
@Query(nativeQuery = true, value = """
|
||||||
|
SELECT
|
||||||
|
d.id,
|
||||||
|
ts_headline('german', d.title, q.pq,
|
||||||
|
'StartSel=' || chr(1) || ',StopSel=' || chr(2) || ',HighlightAll=true')
|
||||||
|
AS title_headline,
|
||||||
|
CASE WHEN best_block.text IS NOT NULL THEN
|
||||||
|
ts_headline('german', best_block.text, q.pq,
|
||||||
|
'StartSel=' || chr(1) || ',StopSel=' || chr(2) || ',MaxWords=50,MinWords=20')
|
||||||
|
END AS transcription_snippet,
|
||||||
|
(s.id IS NOT NULL AND
|
||||||
|
to_tsvector('german', COALESCE(s.first_name, '') || ' ' || COALESCE(s.last_name, ''))
|
||||||
|
@@ q.pq)
|
||||||
|
AS sender_matched,
|
||||||
|
(SELECT string_agg(r.id::text, ',')
|
||||||
|
FROM document_receivers dr
|
||||||
|
JOIN persons r ON r.id = dr.person_id
|
||||||
|
WHERE dr.document_id = d.id
|
||||||
|
AND to_tsvector('german', COALESCE(r.first_name, '') || ' ' || r.last_name)
|
||||||
|
@@ q.pq
|
||||||
|
) AS matched_receiver_ids,
|
||||||
|
(SELECT string_agg(t.id::text, ',')
|
||||||
|
FROM document_tags dt
|
||||||
|
JOIN tag t ON t.id = dt.tag_id
|
||||||
|
WHERE dt.document_id = d.id
|
||||||
|
AND to_tsvector('german', t.name) @@ q.pq
|
||||||
|
) AS matched_tag_ids,
|
||||||
|
CASE WHEN d.summary IS NOT NULL AND d.summary <> ''
|
||||||
|
AND to_tsvector('german', d.summary) @@ q.pq
|
||||||
|
THEN ts_headline('german', d.summary, q.pq,
|
||||||
|
'StartSel=' || chr(1) || ',StopSel=' || chr(2) || ',MaxWords=50,MinWords=20')
|
||||||
|
END AS summary_snippet
|
||||||
|
FROM documents d
|
||||||
|
CROSS JOIN LATERAL (
|
||||||
|
SELECT CASE WHEN websearch_to_tsquery('german', :query)::text <> ''
|
||||||
|
THEN to_tsquery('german', regexp_replace(
|
||||||
|
websearch_to_tsquery('german', :query)::text,
|
||||||
|
'''([^'']+)''',
|
||||||
|
'''\\1'':*',
|
||||||
|
'g'))
|
||||||
|
END AS pq
|
||||||
|
) q
|
||||||
|
LEFT JOIN persons s ON s.id = d.sender_id
|
||||||
|
LEFT JOIN LATERAL (
|
||||||
|
SELECT tb.text
|
||||||
|
FROM transcription_blocks tb
|
||||||
|
WHERE tb.document_id = d.id
|
||||||
|
AND to_tsvector('german', tb.text) @@ q.pq
|
||||||
|
ORDER BY ts_rank(to_tsvector('german', tb.text), q.pq) DESC
|
||||||
|
LIMIT 1
|
||||||
|
) best_block ON true
|
||||||
|
WHERE d.id IN :ids
|
||||||
|
""")
|
||||||
|
List<Object[]> findEnrichmentData(@Param("ids") Collection<UUID> ids, @Param("query") String query);
|
||||||
|
|
||||||
|
// --- Mission Control Strip queues ---
|
||||||
|
|
||||||
|
/** Documents with no annotations — Segmentierung column. */
|
||||||
|
@Query(nativeQuery = true, value = """
|
||||||
|
SELECT d.id, d.title, d.meta_date AS documentDate,
|
||||||
|
0 AS annotationCount, 0 AS textedBlockCount, 0 AS reviewedBlockCount
|
||||||
|
FROM documents d
|
||||||
|
WHERE d.status NOT IN ('PLACEHOLDER')
|
||||||
|
AND NOT EXISTS (SELECT 1 FROM document_annotations da WHERE da.document_id = d.id)
|
||||||
|
ORDER BY HASHTEXT(d.id::text || EXTRACT(WEEK FROM NOW())::int::text)
|
||||||
|
LIMIT :limit
|
||||||
|
""")
|
||||||
|
List<TranscriptionQueueProjection> findSegmentationQueue(@Param("limit") int limit);
|
||||||
|
|
||||||
|
/** Documents with annotations but not yet fully reviewed — Transkription column. */
|
||||||
|
@Query(nativeQuery = true, value = """
|
||||||
|
SELECT d.id, d.title, d.meta_date AS documentDate,
|
||||||
|
COUNT(DISTINCT da.id) AS annotationCount,
|
||||||
|
COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END) AS textedBlockCount,
|
||||||
|
COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END) AS reviewedBlockCount
|
||||||
|
FROM documents d
|
||||||
|
JOIN document_annotations da ON da.document_id = d.id
|
||||||
|
LEFT JOIN transcription_blocks tb ON tb.document_id = d.id
|
||||||
|
GROUP BY d.id, d.title, d.meta_date
|
||||||
|
HAVING COUNT(DISTINCT da.id) > 0
|
||||||
|
AND (
|
||||||
|
COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END)::float /
|
||||||
|
COUNT(DISTINCT da.id)
|
||||||
|
) < 0.90
|
||||||
|
ORDER BY COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END) DESC,
|
||||||
|
HASHTEXT(d.id::text || EXTRACT(WEEK FROM NOW())::int::text)
|
||||||
|
LIMIT :limit
|
||||||
|
""")
|
||||||
|
List<TranscriptionQueueProjection> findTranscriptionQueue(@Param("limit") int limit);
|
||||||
|
|
||||||
|
/** Documents with reviewed_pct >= 90 % — Lesefertig column. */
|
||||||
|
@Query(nativeQuery = true, value = """
|
||||||
|
SELECT d.id, d.title, d.meta_date AS documentDate,
|
||||||
|
COUNT(DISTINCT da.id) AS annotationCount,
|
||||||
|
COUNT(DISTINCT CASE WHEN tb.text IS NOT NULL AND tb.text <> '' THEN tb.id END) AS textedBlockCount,
|
||||||
|
COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END) AS reviewedBlockCount
|
||||||
|
FROM documents d
|
||||||
|
JOIN document_annotations da ON da.document_id = d.id
|
||||||
|
LEFT JOIN transcription_blocks tb ON tb.document_id = d.id
|
||||||
|
GROUP BY d.id, d.title, d.meta_date
|
||||||
|
HAVING COUNT(DISTINCT da.id) > 0
|
||||||
|
AND (
|
||||||
|
COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END)::float /
|
||||||
|
COUNT(DISTINCT da.id)
|
||||||
|
) >= 0.90
|
||||||
|
ORDER BY (
|
||||||
|
COUNT(DISTINCT CASE WHEN tb.reviewed = true THEN tb.id END)::float /
|
||||||
|
COUNT(DISTINCT da.id)
|
||||||
|
) DESC
|
||||||
|
LIMIT :limit
|
||||||
|
""")
|
||||||
|
List<TranscriptionQueueProjection> findReadyToReadQueue(@Param("limit") int limit);
|
||||||
|
|
||||||
|
/** Weekly pulse: distinct documents that received new work in each pipeline stage. */
|
||||||
|
@Query(nativeQuery = true, value = """
|
||||||
|
SELECT
|
||||||
|
(SELECT COUNT(DISTINCT da.document_id) FROM document_annotations da
|
||||||
|
WHERE da.created_at >= NOW() - INTERVAL '7 days') AS segmentationCount,
|
||||||
|
(SELECT COUNT(DISTINCT tb.document_id) FROM transcription_blocks tb
|
||||||
|
WHERE tb.created_at >= NOW() - INTERVAL '7 days'
|
||||||
|
AND tb.text IS NOT NULL AND tb.text <> '') AS transcriptionCount
|
||||||
|
""")
|
||||||
|
TranscriptionWeeklyStatsProjection findWeeklyStats();
|
||||||
|
|
||||||
}
|
}
|
||||||
@@ -4,6 +4,7 @@ import jakarta.persistence.criteria.*;
|
|||||||
import java.time.LocalDate;
|
import java.time.LocalDate;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.Set;
|
||||||
import java.util.UUID;
|
import java.util.UUID;
|
||||||
|
|
||||||
import org.raddatz.familienarchiv.model.Document;
|
import org.raddatz.familienarchiv.model.Document;
|
||||||
@@ -54,32 +55,62 @@ public class DocumentSpecifications {
|
|||||||
return (root, query, cb) -> status == null ? null : cb.equal(root.get("status"), status);
|
return (root, query, cb) -> status == null ? null : cb.equal(root.get("status"), status);
|
||||||
}
|
}
|
||||||
|
|
||||||
    /**
     * Filters by pre-expanded tag-ID sets with AND or OR logic.
     *
     * <p>AND (useOr=false): the document must have at least one tag from <em>every</em> set.
     * <p>OR (useOr=true): the document must have at least one tag from the union of all sets.
     *
     * <p>Each set represents one selected tag including all of its descendants
     * (pre-expanded via {@code TagRepository.findDescendantIdsByName}).
     *
     * @param tagIdSets one ID set per selected tag; null/empty disables the filter
     * @param useOr     true for OR semantics, false for AND semantics
     * @return a specification, or null (= no restriction) when there is nothing to filter by
     */
    public static Specification<Document> hasTags(List<Set<UUID>> tagIdSets, boolean useOr) {
        return (root, query, cb) -> {
            // No tag filter requested at all — contribute no predicate.
            if (tagIdSets == null || tagIdSets.isEmpty())
                return null;

            if (!useOr) {
                // AND mode: an empty set means the tag resolved to no IDs (doesn't exist) —
                // no document can satisfy the condition, so return no results immediately.
                boolean hasEmptySet = tagIdSets.stream().anyMatch(s -> s == null || s.isEmpty());
                if (hasEmptySet) return cb.disjunction();
            }

            // Drop empty/null sets (in OR mode a non-resolving tag simply contributes nothing).
            List<Set<UUID>> nonEmpty = tagIdSets.stream()
                    .filter(s -> s != null && !s.isEmpty())
                    .toList();
            if (nonEmpty.isEmpty()) return null;

            if (useOr) {
                // OR: a single EXISTS subquery over the union of all requested tag IDs.
                Set<UUID> union = new java.util.HashSet<>();
                nonEmpty.forEach(union::addAll);
                return documentHasTagIn(root, query, cb, union);
            }

            // AND: one EXISTS subquery per set
            List<Predicate> predicates = new ArrayList<>();
            for (Set<UUID> ids : nonEmpty) {
                predicates.add(documentHasTagIn(root, query, cb, ids));
            }
            return cb.and(predicates.toArray(new Predicate[0]));
        };
    }
|
||||||
|
|
||||||
    /**
     * Builds an EXISTS predicate asserting that the current (outer-query) document
     * carries at least one tag whose ID is contained in {@code tagIds}.
     *
     * <p>The subquery is correlated to the outer root via the document ID, so the
     * predicate can be combined freely with other specifications.
     */
    private static Predicate documentHasTagIn(
            Root<Document> root,
            jakarta.persistence.criteria.CriteriaQuery<?> query,
            jakarta.persistence.criteria.CriteriaBuilder cb,
            Set<UUID> tagIds) {
        Subquery<UUID> subquery = query.subquery(UUID.class);
        Root<Document> subRoot = subquery.from(Document.class);
        Join<Document, Tag> subTags = subRoot.join("tags");

        subquery.select(subRoot.get("id"))
                .where(
                        // correlate subquery row with the outer document
                        cb.equal(subRoot.get("id"), root.get("id")),
                        subTags.get("id").in(tagIds)
                );
        return cb.exists(subquery);
    }
|
||||||
|
|
||||||
// Filtert nach partiellem Tag-Namen (ILIKE) — für Live-Tag-Suche
|
// Filtert nach partiellem Tag-Namen (ILIKE) — für Live-Tag-Suche
|
||||||
|
|||||||
@@ -0,0 +1,27 @@
|
|||||||
|
package org.raddatz.familienarchiv.repository;

import jakarta.persistence.LockModeType;
import org.raddatz.familienarchiv.model.InviteToken;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Lock;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

import java.util.List;
import java.util.Optional;
import java.util.UUID;

/**
 * Repository for invite tokens used to gate user registration.
 */
public interface InviteTokenRepository extends JpaRepository<InviteToken, UUID> {

    /** Looks up a token by its code without any locking. */
    Optional<InviteToken> findByCode(String code);

    /**
     * Looks up a token by code while holding a pessimistic write lock, so that
     * concurrent redemptions cannot both pass the use-count check.
     * Must be invoked inside an active transaction for the lock to take effect.
     */
    @Lock(LockModeType.PESSIMISTIC_WRITE)
    @Query("SELECT t FROM InviteToken t WHERE t.code = :code")
    Optional<InviteToken> findByCodeForUpdate(@Param("code") String code);

    /**
     * Returns all currently usable tokens, newest first: not revoked, not expired
     * (NULL expiresAt = never expires) and not exhausted (NULL maxUses = unlimited).
     */
    @Query("SELECT t FROM InviteToken t WHERE t.revoked = false AND (t.expiresAt IS NULL OR t.expiresAt > CURRENT_TIMESTAMP) AND (t.maxUses IS NULL OR t.useCount < t.maxUses) ORDER BY t.createdAt DESC")
    List<InviteToken> findActive();

    /** Returns every token regardless of state, newest first. */
    @Query("SELECT t FROM InviteToken t ORDER BY t.createdAt DESC")
    List<InviteToken> findAllOrderedByCreatedAt();
}
|
||||||
@@ -12,5 +12,15 @@ public interface OcrTrainingRunRepository extends JpaRepository<OcrTrainingRun,
|
|||||||
|
|
||||||
Optional<OcrTrainingRun> findFirstByStatus(TrainingStatus status);
|
Optional<OcrTrainingRun> findFirstByStatus(TrainingStatus status);
|
||||||
|
|
||||||
List<OcrTrainingRun> findTop10ByOrderByCreatedAtDesc();
|
Optional<OcrTrainingRun> findFirstByStatusOrderByCreatedAtAsc(TrainingStatus status);
|
||||||
|
|
||||||
|
Optional<OcrTrainingRun> findFirstByPersonIdAndStatus(UUID personId, TrainingStatus status);
|
||||||
|
|
||||||
|
boolean existsByPersonIdAndStatus(UUID personId, TrainingStatus status);
|
||||||
|
|
||||||
|
List<OcrTrainingRun> findTop20ByOrderByCreatedAtDesc();
|
||||||
|
|
||||||
|
List<OcrTrainingRun> findByPersonIdIsNullOrderByCreatedAtDesc();
|
||||||
|
|
||||||
|
List<OcrTrainingRun> findByPersonIdOrderByCreatedAtDesc(UUID personId);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,12 @@
|
|||||||
|
package org.raddatz.familienarchiv.repository;

import org.raddatz.familienarchiv.model.SenderModel;
import org.springframework.data.jpa.repository.JpaRepository;

import java.util.Optional;
import java.util.UUID;

/**
 * Repository for per-person sender models.
 */
public interface SenderModelRepository extends JpaRepository<SenderModel, UUID> {

    // Lookup by person; Optional return implies at most one model per person —
    // NOTE(review): confirm a unique constraint on person_id backs this assumption.
    Optional<SenderModel> findByPersonId(UUID personId);
}
|
||||||
@@ -1,13 +1,126 @@
|
|||||||
package org.raddatz.familienarchiv.repository;

import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.UUID;

import org.raddatz.familienarchiv.model.Tag;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

/**
 * Repository for hierarchical tags, including recursive-CTE traversal helpers
 * and native bulk operations on the document_tags join table.
 */
public interface TagRepository extends JpaRepository<Tag, UUID> {

    /** Typed projection for document-count aggregation results. */
    interface TagCount {
        UUID getTagId();
        Long getCount();
    }

    /** Case-insensitive exact-name lookup. */
    Optional<Tag> findByNameIgnoreCase(String name);

    /** Case-insensitive substring search on the tag name. */
    List<Tag> findByNameContainingIgnoreCase(String name);

    /**
     * Returns the IDs of all ancestors of the given tag (parent, grandparent, …)
     * via a recursive CTE. Used for cycle detection before assigning a new parent.
     * Includes a depth guard of 50 levels to prevent runaway queries.
     */
    @Query(value = """
            WITH RECURSIVE ancestors AS (
              SELECT parent_id, 0 AS depth
              FROM tag
              WHERE id = :tagId AND parent_id IS NOT NULL
              UNION ALL
              SELECT t.parent_id, a.depth + 1
              FROM tag t
              JOIN ancestors a ON t.id = a.parent_id
              WHERE t.parent_id IS NOT NULL AND a.depth < 50
            )
            SELECT parent_id FROM ancestors
            """, nativeQuery = true)
    List<UUID> findAncestorIds(@Param("tagId") UUID tagId);

    /**
     * Returns the IDs of the tag with the given name AND all of its descendants
     * via a recursive CTE. Used to expand a selected tag to inclusive hierarchy results.
     * Includes a depth guard of 50 levels to prevent runaway queries.
     */
    @Query(value = """
            WITH RECURSIVE descendants AS (
              SELECT id, 0 AS depth FROM tag WHERE LOWER(name) = LOWER(:name)
              UNION ALL
              SELECT t.id, d.depth + 1 FROM tag t
              JOIN descendants d ON t.parent_id = d.id
              WHERE d.depth < 50
            )
            SELECT id FROM descendants
            """, nativeQuery = true)
    List<UUID> findDescendantIdsByName(@Param("name") String name);

    /**
     * Returns the IDs of the tag with the given ID AND all of its descendants
     * via a recursive CTE. Used for merge validation and subtree delete.
     * Includes a depth guard of 50 levels to prevent runaway queries.
     */
    @Query(value = """
            WITH RECURSIVE descendants AS (
              SELECT id, 0 AS depth FROM tag WHERE id = :tagId
              UNION ALL
              SELECT t.id, d.depth + 1 FROM tag t
              JOIN descendants d ON t.parent_id = d.id
              WHERE d.depth < 50
            )
            SELECT id FROM descendants
            """, nativeQuery = true)
    List<UUID> findDescendantIds(@Param("tagId") UUID tagId);

    /**
     * Reassigns document_tags rows from source to target, skipping rows where
     * the target tag is already present (to avoid PK conflicts).
     */
    @Modifying(clearAutomatically = true)
    @Query(value = """
            UPDATE document_tags
            SET tag_id = :targetId
            WHERE tag_id = :sourceId
              AND NOT EXISTS (
                SELECT 1 FROM document_tags d2
                WHERE d2.document_id = document_tags.document_id
                  AND d2.tag_id = :targetId
              )
            """, nativeQuery = true)
    void reassignDocumentTags(@Param("sourceId") UUID sourceId, @Param("targetId") UUID targetId);

    /**
     * Removes all document_tags rows for the given tag.
     */
    @Modifying(clearAutomatically = true)
    @Query(value = "DELETE FROM document_tags WHERE tag_id = :tagId", nativeQuery = true)
    void deleteDocumentTagsByTagId(@Param("tagId") UUID tagId);

    /**
     * Removes all document_tags rows for the given collection of tag IDs.
     * Caller must guard against an empty collection — PostgreSQL rejects IN ().
     */
    @Modifying(clearAutomatically = true)
    @Query(value = "DELETE FROM document_tags WHERE tag_id IN :ids", nativeQuery = true)
    void deleteDocumentTagsByTagIds(@Param("ids") Collection<UUID> ids);

    /**
     * Re-parents all direct children of sourceId to targetId.
     */
    @Modifying(clearAutomatically = true)
    @Query(value = "UPDATE tag SET parent_id = :targetId WHERE parent_id = :sourceId", nativeQuery = true)
    void reparentChildren(@Param("sourceId") UUID sourceId, @Param("targetId") UUID targetId);

    /**
     * Returns (tagId, count) pairs for all tags that appear in document_tags.
     * Used to populate documentCount in the tag tree without N+1 queries.
     */
    @Query(value = "SELECT tag_id AS tagId, COUNT(*) AS count FROM document_tags GROUP BY tag_id", nativeQuery = true)
    List<TagCount> findDocumentCountsPerTag();
}
|
||||||
@@ -3,6 +3,7 @@ package org.raddatz.familienarchiv.repository;
|
|||||||
import org.raddatz.familienarchiv.model.TranscriptionBlock;
|
import org.raddatz.familienarchiv.model.TranscriptionBlock;
|
||||||
import org.springframework.data.jpa.repository.JpaRepository;
|
import org.springframework.data.jpa.repository.JpaRepository;
|
||||||
import org.springframework.data.jpa.repository.Query;
|
import org.springframework.data.jpa.repository.Query;
|
||||||
|
import org.springframework.data.repository.query.Param;
|
||||||
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Optional;
|
import java.util.Optional;
|
||||||
@@ -37,4 +38,22 @@ public interface TranscriptionBlockRepository extends JpaRepository<Transcriptio
|
|||||||
AND 'KURRENT_SEGMENTATION' MEMBER OF d.trainingLabels
|
AND 'KURRENT_SEGMENTATION' MEMBER OF d.trainingLabels
|
||||||
""")
|
""")
|
||||||
List<TranscriptionBlock> findSegmentationBlocks();
|
List<TranscriptionBlock> findSegmentationBlocks();
|
||||||
|
|
||||||
|
@Query("""
|
||||||
|
SELECT COUNT(b) FROM TranscriptionBlock b
|
||||||
|
JOIN Document d ON d.id = b.documentId
|
||||||
|
WHERE b.source = 'MANUAL'
|
||||||
|
AND d.sender.id = :personId
|
||||||
|
AND d.scriptType = 'HANDWRITING_KURRENT'
|
||||||
|
""")
|
||||||
|
long countManualKurrentBlocksByPerson(@Param("personId") UUID personId);
|
||||||
|
|
||||||
|
@Query("""
|
||||||
|
SELECT b FROM TranscriptionBlock b
|
||||||
|
JOIN Document d ON d.id = b.documentId
|
||||||
|
WHERE b.source = 'MANUAL'
|
||||||
|
AND d.sender.id = :personId
|
||||||
|
AND d.scriptType = 'HANDWRITING_KURRENT'
|
||||||
|
""")
|
||||||
|
List<TranscriptionBlock> findManualKurrentBlocksByPerson(@Param("personId") UUID personId);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,17 @@
|
|||||||
|
package org.raddatz.familienarchiv.repository;

import java.time.LocalDate;
import java.util.UUID;

/**
 * Spring Data projection for a single row in one of the three Mission Control Strip queues.
 * Column aliases in the native SQL queries must match these getter names exactly.
 */
public interface TranscriptionQueueProjection {
    // Document identity and display fields.
    UUID getId();
    String getTitle();
    // NOTE(review): presumably null for undated documents — confirm against the SQL.
    LocalDate getDocumentDate();
    // Per-document progress counters used to rank/filter the queues.
    int getAnnotationCount();
    int getTextedBlockCount();
    int getReviewedBlockCount();
}
|
||||||
@@ -0,0 +1,10 @@
|
|||||||
|
package org.raddatz.familienarchiv.repository;

/**
 * Spring Data projection for the weekly activity pulse stats.
 * Column aliases in the native SQL query must match these getter names exactly.
 */
public interface TranscriptionWeeklyStatsProjection {
    /** Distinct documents that received new annotations within the last 7 days. */
    long getSegmentationCount();

    /** Distinct documents that received new, non-empty transcription block text within the last 7 days. */
    long getTranscriptionCount();
}
|
||||||
@@ -0,0 +1,24 @@
|
|||||||
|
package org.raddatz.familienarchiv.security;

import org.raddatz.familienarchiv.exception.DomainException;
import org.raddatz.familienarchiv.model.AppUser;
import org.raddatz.familienarchiv.service.UserService;
import org.springframework.security.core.Authentication;

import java.util.UUID;

/**
 * Static helpers for resolving the current user from a Spring Security context.
 */
public final class SecurityUtils {

    private SecurityUtils() {
        // static utility holder — never instantiated
    }

    /**
     * Resolves the application user ID for the given authentication.
     *
     * @param authentication the current Spring Security authentication (may be null)
     * @param userService    used to look up the AppUser by the principal's name (email)
     * @return the resolved user's ID
     * @throws DomainException (unauthorized) when unauthenticated or no matching user exists
     */
    public static UUID requireUserId(Authentication authentication, UserService userService) {
        boolean isAuthenticated = authentication != null && authentication.isAuthenticated();
        if (!isAuthenticated) {
            throw DomainException.unauthorized("Authentication required");
        }

        // Principal name carries the email; a missing row means a stale/deleted account.
        AppUser principal = userService.findByEmail(authentication.getName());
        if (principal == null) {
            throw DomainException.unauthorized("User not found");
        }
        return principal.getId();
    }
}
|
||||||
@@ -2,6 +2,8 @@ package org.raddatz.familienarchiv.service;
|
|||||||
|
|
||||||
import lombok.RequiredArgsConstructor;
|
import lombok.RequiredArgsConstructor;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.raddatz.familienarchiv.audit.AuditKind;
|
||||||
|
import org.raddatz.familienarchiv.audit.AuditService;
|
||||||
import org.raddatz.familienarchiv.dto.CreateAnnotationDTO;
|
import org.raddatz.familienarchiv.dto.CreateAnnotationDTO;
|
||||||
import org.raddatz.familienarchiv.dto.UpdateAnnotationDTO;
|
import org.raddatz.familienarchiv.dto.UpdateAnnotationDTO;
|
||||||
import org.raddatz.familienarchiv.exception.DomainException;
|
import org.raddatz.familienarchiv.exception.DomainException;
|
||||||
@@ -14,6 +16,7 @@ import org.springframework.stereotype.Service;
|
|||||||
import org.springframework.transaction.annotation.Transactional;
|
import org.springframework.transaction.annotation.Transactional;
|
||||||
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
import java.util.UUID;
|
import java.util.UUID;
|
||||||
|
|
||||||
@Slf4j
|
@Slf4j
|
||||||
@@ -23,6 +26,7 @@ public class AnnotationService {
|
|||||||
|
|
||||||
private final AnnotationRepository annotationRepository;
|
private final AnnotationRepository annotationRepository;
|
||||||
private final TranscriptionBlockRepository blockRepository;
|
private final TranscriptionBlockRepository blockRepository;
|
||||||
|
private final AuditService auditService;
|
||||||
|
|
||||||
public List<DocumentAnnotation> listAnnotations(UUID documentId) {
|
public List<DocumentAnnotation> listAnnotations(UUID documentId) {
|
||||||
return annotationRepository.findByDocumentId(documentId);
|
return annotationRepository.findByDocumentId(documentId);
|
||||||
@@ -42,7 +46,10 @@ public class AnnotationService {
|
|||||||
.createdBy(userId)
|
.createdBy(userId)
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
return annotationRepository.save(annotation);
|
DocumentAnnotation saved = annotationRepository.save(annotation);
|
||||||
|
auditService.logAfterCommit(AuditKind.ANNOTATION_CREATED, userId, saved.getDocumentId(),
|
||||||
|
Map.of("pageNumber", saved.getPageNumber()));
|
||||||
|
return saved;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
|
|||||||
@@ -1,6 +1,8 @@
|
|||||||
package org.raddatz.familienarchiv.service;
|
package org.raddatz.familienarchiv.service;
|
||||||
|
|
||||||
import lombok.RequiredArgsConstructor;
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import org.raddatz.familienarchiv.audit.AuditKind;
|
||||||
|
import org.raddatz.familienarchiv.audit.AuditService;
|
||||||
import org.raddatz.familienarchiv.dto.MentionDTO;
|
import org.raddatz.familienarchiv.dto.MentionDTO;
|
||||||
import org.raddatz.familienarchiv.exception.DomainException;
|
import org.raddatz.familienarchiv.exception.DomainException;
|
||||||
import org.raddatz.familienarchiv.exception.ErrorCode;
|
import org.raddatz.familienarchiv.exception.ErrorCode;
|
||||||
@@ -12,6 +14,7 @@ import org.springframework.transaction.annotation.Transactional;
|
|||||||
|
|
||||||
import java.util.LinkedHashSet;
|
import java.util.LinkedHashSet;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
import java.util.UUID;
|
import java.util.UUID;
|
||||||
|
|
||||||
@@ -22,6 +25,7 @@ public class CommentService {
|
|||||||
private final CommentRepository commentRepository;
|
private final CommentRepository commentRepository;
|
||||||
private final UserService userService;
|
private final UserService userService;
|
||||||
private final NotificationService notificationService;
|
private final NotificationService notificationService;
|
||||||
|
private final AuditService auditService;
|
||||||
|
|
||||||
public List<DocumentComment> getCommentsForDocument(UUID documentId) {
|
public List<DocumentComment> getCommentsForDocument(UUID documentId) {
|
||||||
List<DocumentComment> roots =
|
List<DocumentComment> roots =
|
||||||
@@ -53,6 +57,7 @@ public class CommentService {
|
|||||||
DocumentComment saved = commentRepository.save(comment);
|
DocumentComment saved = commentRepository.save(comment);
|
||||||
withMentionDTOs(saved);
|
withMentionDTOs(saved);
|
||||||
notificationService.notifyMentions(mentionedUserIds, saved);
|
notificationService.notifyMentions(mentionedUserIds, saved);
|
||||||
|
logCommentPosted(author, documentId, saved, mentionedUserIds);
|
||||||
return saved;
|
return saved;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -70,6 +75,7 @@ public class CommentService {
|
|||||||
DocumentComment saved = commentRepository.save(comment);
|
DocumentComment saved = commentRepository.save(comment);
|
||||||
withMentionDTOs(saved);
|
withMentionDTOs(saved);
|
||||||
notificationService.notifyMentions(mentionedUserIds, saved);
|
notificationService.notifyMentions(mentionedUserIds, saved);
|
||||||
|
logCommentPosted(author, documentId, saved, mentionedUserIds);
|
||||||
return saved;
|
return saved;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -101,6 +107,7 @@ public class CommentService {
|
|||||||
participantIds.remove(author.getId());
|
participantIds.remove(author.getId());
|
||||||
notificationService.notifyReply(saved, participantIds);
|
notificationService.notifyReply(saved, participantIds);
|
||||||
notificationService.notifyMentions(mentionedUserIds, saved);
|
notificationService.notifyMentions(mentionedUserIds, saved);
|
||||||
|
logCommentPosted(author, documentId, saved, mentionedUserIds);
|
||||||
return saved;
|
return saved;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -171,11 +178,22 @@ public class CommentService {
|
|||||||
ErrorCode.COMMENT_NOT_FOUND, "Comment not found: " + commentId));
|
ErrorCode.COMMENT_NOT_FOUND, "Comment not found: " + commentId));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private void logCommentPosted(AppUser author, UUID documentId, DocumentComment saved, List<UUID> mentionedUserIds) {
|
||||||
|
UUID actorId = author != null ? author.getId() : null;
|
||||||
|
String commentId = saved.getId().toString();
|
||||||
|
auditService.logAfterCommit(AuditKind.COMMENT_ADDED, actorId, documentId, Map.of("commentId", commentId));
|
||||||
|
if (mentionedUserIds != null) {
|
||||||
|
mentionedUserIds.forEach(mentionedUserId ->
|
||||||
|
auditService.logAfterCommit(AuditKind.MENTION_CREATED, actorId, documentId,
|
||||||
|
Map.of("commentId", commentId, "mentionedUserId", mentionedUserId.toString())));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
private String resolveAuthorName(AppUser author) {
|
private String resolveAuthorName(AppUser author) {
|
||||||
String first = author.getFirstName();
|
String first = author.getFirstName();
|
||||||
String last = author.getLastName();
|
String last = author.getLastName();
|
||||||
if ((first == null || first.isBlank()) && (last == null || last.isBlank())) {
|
if ((first == null || first.isBlank()) && (last == null || last.isBlank())) {
|
||||||
return author.getUsername();
|
return author.getEmail();
|
||||||
}
|
}
|
||||||
return ((first != null ? first : "") + " " + (last != null ? last : "")).strip();
|
return ((first != null ? first : "") + " " + (last != null ? last : "")).strip();
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -29,24 +29,22 @@ public class CustomUserDetailsService implements UserDetailsService {
|
|||||||
private final AppUserRepository userRepository;
|
private final AppUserRepository userRepository;
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
|
public UserDetails loadUserByUsername(String email) throws UsernameNotFoundException {
|
||||||
AppUser appUser = userRepository.findByUsername(username)
|
AppUser appUser = userRepository.findByEmail(email)
|
||||||
.orElseThrow(() -> new UsernameNotFoundException("User nicht gefunden: " + username));
|
.orElseThrow(() -> new UsernameNotFoundException("User nicht gefunden: " + email));
|
||||||
|
|
||||||
// Collect all permissions from all groups; warn about any that don't match a known Permission enum value
|
|
||||||
var authorities = appUser.getGroups().stream()
|
var authorities = appUser.getGroups().stream()
|
||||||
.flatMap(group -> group.getPermissions().stream())
|
.flatMap(group -> group.getPermissions().stream())
|
||||||
.peek(p -> {
|
.peek(p -> {
|
||||||
if (!KNOWN_PERMISSIONS.contains(p)) {
|
if (!KNOWN_PERMISSIONS.contains(p)) {
|
||||||
log.warn("Unknown permission '{}' found in database for user '{}' — it will be granted but never matched by @RequirePermission", p, appUser.getUsername());
|
log.warn("Unknown permission '{}' found in database for user '{}' — it will be granted but never matched by @RequirePermission", p, appUser.getEmail());
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
.map(SimpleGrantedAuthority::new)
|
.map(SimpleGrantedAuthority::new)
|
||||||
.collect(Collectors.toSet());
|
.collect(Collectors.toSet());
|
||||||
|
|
||||||
// Rückgabe des Standard Spring Security User Objekts
|
|
||||||
return new User(
|
return new User(
|
||||||
appUser.getUsername(),
|
appUser.getEmail(),
|
||||||
appUser.getPassword(),
|
appUser.getPassword(),
|
||||||
appUser.isEnabled(),
|
appUser.isEnabled(),
|
||||||
true, true, true,
|
true, true, true,
|
||||||
|
|||||||
@@ -3,10 +3,16 @@ package org.raddatz.familienarchiv.service;
|
|||||||
import lombok.RequiredArgsConstructor;
|
import lombok.RequiredArgsConstructor;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
|
||||||
|
import org.raddatz.familienarchiv.audit.AuditKind;
|
||||||
|
import org.raddatz.familienarchiv.audit.AuditService;
|
||||||
|
import org.raddatz.familienarchiv.dto.DocumentSearchResult;
|
||||||
|
import org.raddatz.familienarchiv.dto.DocumentSort;
|
||||||
import org.raddatz.familienarchiv.dto.DocumentUpdateDTO;
|
import org.raddatz.familienarchiv.dto.DocumentUpdateDTO;
|
||||||
import org.raddatz.familienarchiv.dto.IncompleteDocumentDTO;
|
import org.raddatz.familienarchiv.dto.IncompleteDocumentDTO;
|
||||||
|
import org.raddatz.familienarchiv.dto.MatchOffset;
|
||||||
|
import org.raddatz.familienarchiv.dto.SearchMatchData;
|
||||||
|
import org.raddatz.familienarchiv.dto.TagOperator;
|
||||||
import org.raddatz.familienarchiv.model.Document;
|
import org.raddatz.familienarchiv.model.Document;
|
||||||
import org.raddatz.familienarchiv.dto.DocumentSort;
|
|
||||||
import org.raddatz.familienarchiv.model.DocumentStatus;
|
import org.raddatz.familienarchiv.model.DocumentStatus;
|
||||||
import org.raddatz.familienarchiv.model.ScriptType;
|
import org.raddatz.familienarchiv.model.ScriptType;
|
||||||
import org.raddatz.familienarchiv.model.TrainingLabel;
|
import org.raddatz.familienarchiv.model.TrainingLabel;
|
||||||
@@ -52,6 +58,7 @@ public class DocumentService {
|
|||||||
private final TagService tagService;
|
private final TagService tagService;
|
||||||
private final DocumentVersionService documentVersionService;
|
private final DocumentVersionService documentVersionService;
|
||||||
private final AnnotationService annotationService;
|
private final AnnotationService annotationService;
|
||||||
|
private final AuditService auditService;
|
||||||
|
|
||||||
public record StoreResult(Document document, boolean isNew) {}
|
public record StoreResult(Document document, boolean isNew) {}
|
||||||
|
|
||||||
@@ -71,7 +78,7 @@ public class DocumentService {
|
|||||||
* - Wenn NEIN: Erstellt neuen Eintrag — isNew = true.
|
* - Wenn NEIN: Erstellt neuen Eintrag — isNew = true.
|
||||||
*/
|
*/
|
||||||
@Transactional
|
@Transactional
|
||||||
public StoreResult storeDocument(MultipartFile file) throws IOException {
|
public StoreResult storeDocument(MultipartFile file, UUID actorId) throws IOException {
|
||||||
String originalFilename = file.getOriginalFilename();
|
String originalFilename = file.getOriginalFilename();
|
||||||
|
|
||||||
// 1. Check for existing record (findFirst to survive duplicate filenames in the DB)
|
// 1. Check for existing record (findFirst to survive duplicate filenames in the DB)
|
||||||
@@ -104,11 +111,16 @@ public class DocumentService {
|
|||||||
document.setFilePath(upload.s3Key());
|
document.setFilePath(upload.s3Key());
|
||||||
document.setFileHash(upload.fileHash());
|
document.setFileHash(upload.fileHash());
|
||||||
document.setContentType(file.getContentType());
|
document.setContentType(file.getContentType());
|
||||||
if (document.getStatus() == DocumentStatus.PLACEHOLDER) {
|
boolean wasPlaceholder = document.getStatus() == DocumentStatus.PLACEHOLDER;
|
||||||
|
if (wasPlaceholder) {
|
||||||
document.setStatus(DocumentStatus.UPLOADED);
|
document.setStatus(DocumentStatus.UPLOADED);
|
||||||
}
|
}
|
||||||
|
|
||||||
return new StoreResult(documentRepository.save(document), isNew);
|
Document saved = documentRepository.save(document);
|
||||||
|
if (wasPlaceholder) {
|
||||||
|
auditService.logAfterCommit(AuditKind.FILE_UPLOADED, actorId, saved.getId(), null);
|
||||||
|
}
|
||||||
|
return new StoreResult(saved, isNew);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
@@ -184,10 +196,12 @@ public class DocumentService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
public Document updateDocument(UUID id, DocumentUpdateDTO dto, MultipartFile newFile) throws IOException {
|
public Document updateDocument(UUID id, DocumentUpdateDTO dto, MultipartFile newFile, UUID actorId) throws IOException {
|
||||||
Document doc = documentRepository.findById(id)
|
Document doc = documentRepository.findById(id)
|
||||||
.orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Document not found: " + id));
|
.orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Document not found: " + id));
|
||||||
|
|
||||||
|
DocumentStatus statusBefore = doc.getStatus();
|
||||||
|
|
||||||
// 1. Einfache Felder Update
|
// 1. Einfache Felder Update
|
||||||
doc.setTitle(dto.getTitle());
|
doc.setTitle(dto.getTitle());
|
||||||
doc.setDocumentDate(dto.getDocumentDate());
|
doc.setDocumentDate(dto.getDocumentDate());
|
||||||
@@ -241,6 +255,14 @@ public class DocumentService {
|
|||||||
|
|
||||||
Document saved = documentRepository.save(doc);
|
Document saved = documentRepository.save(doc);
|
||||||
documentVersionService.recordVersion(saved);
|
documentVersionService.recordVersion(saved);
|
||||||
|
|
||||||
|
if (saved.getStatus() != statusBefore) {
|
||||||
|
auditService.logAfterCommit(AuditKind.STATUS_CHANGED, actorId, saved.getId(),
|
||||||
|
Map.of("oldStatus", statusBefore.name(), "newStatus", saved.getStatus().name()));
|
||||||
|
} else {
|
||||||
|
auditService.logAfterCommit(AuditKind.METADATA_UPDATED, actorId, saved.getId(), null);
|
||||||
|
}
|
||||||
|
|
||||||
return saved;
|
return saved;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -282,6 +304,32 @@ public class DocumentService {
|
|||||||
return documentRepository.save(doc);
|
return documentRepository.save(doc);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Transactional
|
||||||
|
public Document attachFile(UUID id, MultipartFile file, UUID actorId) {
|
||||||
|
Document doc = documentRepository.findById(id)
|
||||||
|
.orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Document not found: " + id));
|
||||||
|
FileService.UploadResult upload;
|
||||||
|
try {
|
||||||
|
upload = fileService.uploadFile(file, file.getOriginalFilename());
|
||||||
|
} catch (IOException e) {
|
||||||
|
throw DomainException.internal(ErrorCode.FILE_UPLOAD_FAILED, "Failed to upload file: " + e.getMessage());
|
||||||
|
}
|
||||||
|
doc.setFilePath(upload.s3Key());
|
||||||
|
doc.setFileHash(upload.fileHash());
|
||||||
|
doc.setOriginalFilename(file.getOriginalFilename());
|
||||||
|
doc.setContentType(file.getContentType());
|
||||||
|
boolean wasPlaceholder = doc.getStatus() == DocumentStatus.PLACEHOLDER;
|
||||||
|
if (wasPlaceholder) {
|
||||||
|
doc.setStatus(DocumentStatus.UPLOADED);
|
||||||
|
}
|
||||||
|
Document saved = documentRepository.save(doc);
|
||||||
|
documentVersionService.recordVersion(saved);
|
||||||
|
if (wasPlaceholder) {
|
||||||
|
auditService.logAfterCommit(AuditKind.FILE_UPLOADED, actorId, saved.getId(), null);
|
||||||
|
}
|
||||||
|
return saved;
|
||||||
|
}
|
||||||
|
|
||||||
// 0. Zuletzt aktive Dokumente (sortiert nach updatedAt DESC)
|
// 0. Zuletzt aktive Dokumente (sortiert nach updatedAt DESC)
|
||||||
public List<Document> getRecentActivity(int size) {
|
public List<Document> getRecentActivity(int size) {
|
||||||
return documentRepository.findAll(
|
return documentRepository.findAll(
|
||||||
@@ -290,21 +338,24 @@ public class DocumentService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// 1. Allgemeine Suche (für das Suchfeld im Frontend)
|
// 1. Allgemeine Suche (für das Suchfeld im Frontend)
|
||||||
public List<Document> searchDocuments(String text, LocalDate from, LocalDate to, UUID sender, UUID receiver, List<String> tags, String tagQ, DocumentStatus status, DocumentSort sort, String dir) {
|
public DocumentSearchResult searchDocuments(String text, LocalDate from, LocalDate to, UUID sender, UUID receiver, List<String> tags, String tagQ, DocumentStatus status, DocumentSort sort, String dir, TagOperator tagOperator) {
|
||||||
boolean hasText = StringUtils.hasText(text);
|
boolean hasText = StringUtils.hasText(text);
|
||||||
List<UUID> rankedIds = null;
|
List<UUID> rankedIds = null;
|
||||||
|
|
||||||
if (hasText) {
|
if (hasText) {
|
||||||
rankedIds = documentRepository.findRankedIdsByFts(text);
|
rankedIds = documentRepository.findRankedIdsByFts(text);
|
||||||
if (rankedIds.isEmpty()) return List.of();
|
if (rankedIds.isEmpty()) return DocumentSearchResult.withMatchData(List.of(), Map.of());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
boolean useOrLogic = tagOperator == TagOperator.OR;
|
||||||
|
List<Set<UUID>> expandedTagSets = tagService.expandTagNamesToDescendantIdSets(tags);
|
||||||
|
|
||||||
Specification<Document> textSpec = hasText ? hasIds(rankedIds) : (root, query, cb) -> null;
|
Specification<Document> textSpec = hasText ? hasIds(rankedIds) : (root, query, cb) -> null;
|
||||||
Specification<Document> spec = Specification.where(textSpec)
|
Specification<Document> spec = Specification.where(textSpec)
|
||||||
.and(isBetween(from, to))
|
.and(isBetween(from, to))
|
||||||
.and(hasSender(sender))
|
.and(hasSender(sender))
|
||||||
.and(hasReceiver(receiver))
|
.and(hasReceiver(receiver))
|
||||||
.and(hasTags(tags))
|
.and(hasTags(expandedTagSets, useOrLogic))
|
||||||
.and(hasTagPartial(tagQ))
|
.and(hasTagPartial(tagQ))
|
||||||
.and(hasStatus(status));
|
.and(hasStatus(status));
|
||||||
|
|
||||||
@@ -312,11 +363,13 @@ public class DocumentService {
|
|||||||
// generates an INNER JOIN that silently drops documents with null sender/receivers.
|
// generates an INNER JOIN that silently drops documents with null sender/receivers.
|
||||||
if (sort == DocumentSort.RECEIVER) {
|
if (sort == DocumentSort.RECEIVER) {
|
||||||
List<Document> results = documentRepository.findAll(spec);
|
List<Document> results = documentRepository.findAll(spec);
|
||||||
return sortByFirstReceiver(results, dir);
|
List<Document> sorted = sortByFirstReceiver(results, dir);
|
||||||
|
return DocumentSearchResult.withMatchData(resolveDocumentTagColors(sorted), enrichWithMatchData(sorted, text));
|
||||||
}
|
}
|
||||||
if (sort == DocumentSort.SENDER) {
|
if (sort == DocumentSort.SENDER) {
|
||||||
List<Document> results = documentRepository.findAll(spec);
|
List<Document> results = documentRepository.findAll(spec);
|
||||||
return sortBySender(results, dir);
|
List<Document> sorted = sortBySender(results, dir);
|
||||||
|
return DocumentSearchResult.withMatchData(resolveDocumentTagColors(sorted), enrichWithMatchData(sorted, text));
|
||||||
}
|
}
|
||||||
|
|
||||||
// RELEVANCE: default when text present and no explicit sort given
|
// RELEVANCE: default when text present and no explicit sort given
|
||||||
@@ -325,14 +378,16 @@ public class DocumentService {
|
|||||||
List<Document> results = documentRepository.findAll(spec);
|
List<Document> results = documentRepository.findAll(spec);
|
||||||
Map<UUID, Integer> rankMap = new HashMap<>();
|
Map<UUID, Integer> rankMap = new HashMap<>();
|
||||||
for (int i = 0; i < rankedIds.size(); i++) rankMap.put(rankedIds.get(i), i);
|
for (int i = 0; i < rankedIds.size(); i++) rankMap.put(rankedIds.get(i), i);
|
||||||
return results.stream()
|
List<Document> sorted = results.stream()
|
||||||
.sorted(Comparator.comparingInt(
|
.sorted(Comparator.comparingInt(
|
||||||
doc -> rankMap.getOrDefault(doc.getId(), Integer.MAX_VALUE)))
|
doc -> rankMap.getOrDefault(doc.getId(), Integer.MAX_VALUE)))
|
||||||
.toList();
|
.toList();
|
||||||
|
return DocumentSearchResult.withMatchData(resolveDocumentTagColors(sorted), enrichWithMatchData(sorted, text));
|
||||||
}
|
}
|
||||||
|
|
||||||
Sort springSort = resolveSort(sort, dir);
|
Sort springSort = resolveSort(sort, dir);
|
||||||
return documentRepository.findAll(spec, springSort);
|
List<Document> results = documentRepository.findAll(spec, springSort);
|
||||||
|
return DocumentSearchResult.withMatchData(resolveDocumentTagColors(results), enrichWithMatchData(results, text));
|
||||||
}
|
}
|
||||||
|
|
||||||
private Sort resolveSort(DocumentSort sort, String dir) {
|
private Sort resolveSort(DocumentSort sort, String dir) {
|
||||||
@@ -423,8 +478,14 @@ public class DocumentService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public Document getDocumentById(UUID id) {
|
public Document getDocumentById(UUID id) {
|
||||||
return documentRepository.findById(id)
|
Document doc = documentRepository.findById(id)
|
||||||
.orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Document not found: " + id));
|
.orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Document not found: " + id));
|
||||||
|
tagService.resolveEffectiveColors(doc.getTags());
|
||||||
|
return doc;
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<Document> getDocumentsByIds(List<UUID> ids) {
|
||||||
|
return documentRepository.findAllById(ids);
|
||||||
}
|
}
|
||||||
|
|
||||||
public List<Document> getDocumentsWithoutVersions() {
|
public List<Document> getDocumentsWithoutVersions() {
|
||||||
@@ -503,6 +564,12 @@ public class DocumentService {
|
|||||||
|
|
||||||
// ─── private helpers ──────────────────────────────────────────────────────
|
// ─── private helpers ──────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
private List<Document> resolveDocumentTagColors(List<Document> docs) {
|
||||||
|
List<Tag> allTags = docs.stream().flatMap(d -> d.getTags().stream()).toList();
|
||||||
|
tagService.resolveEffectiveColors(allTags);
|
||||||
|
return docs;
|
||||||
|
}
|
||||||
|
|
||||||
private static String stripExtension(String filename) {
|
private static String stripExtension(String filename) {
|
||||||
if (filename == null) return null;
|
if (filename == null) return null;
|
||||||
int dot = filename.lastIndexOf('.');
|
int dot = filename.lastIndexOf('.');
|
||||||
@@ -584,6 +651,93 @@ public class DocumentService {
|
|||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calls {@code findEnrichmentData} and converts the raw Object[] rows into a
|
||||||
|
* {@link SearchMatchData} per document. Short-circuits when the list is empty or
|
||||||
|
* the query is blank (no text search active).
|
||||||
|
*/
|
||||||
|
private Map<UUID, SearchMatchData> enrichWithMatchData(List<Document> docs, String query) {
|
||||||
|
if (docs.isEmpty() || !StringUtils.hasText(query)) return Map.of();
|
||||||
|
List<UUID> ids = docs.stream().map(Document::getId).toList();
|
||||||
|
Map<UUID, SearchMatchData> result = new HashMap<>();
|
||||||
|
for (Object[] row : documentRepository.findEnrichmentData(ids, query)) {
|
||||||
|
UUID docId = (UUID) row[0];
|
||||||
|
String titleHeadline = (String) row[1];
|
||||||
|
String snippetHeadline = (String) row[2];
|
||||||
|
Boolean senderMatched = (Boolean) row[3];
|
||||||
|
String receiverIdsStr = (String) row[4];
|
||||||
|
String tagIdsStr = (String) row[5];
|
||||||
|
String summaryHeadline = (String) row[6];
|
||||||
|
ParsedHighlight snippet = parseHighlight(snippetHeadline);
|
||||||
|
ParsedHighlight summary = parseHighlight(summaryHeadline);
|
||||||
|
result.put(docId, new SearchMatchData(
|
||||||
|
snippet != null ? snippet.cleanText() : null,
|
||||||
|
parseTitleOffsets(titleHeadline),
|
||||||
|
senderMatched != null && senderMatched,
|
||||||
|
parseUUIDs(receiverIdsStr),
|
||||||
|
parseUUIDs(tagIdsStr),
|
||||||
|
snippet != null ? snippet.offsets() : List.of(),
|
||||||
|
summary != null ? summary.cleanText() : null,
|
||||||
|
summary != null ? summary.offsets() : List.of()
|
||||||
|
));
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Clean text + highlight offsets parsed from a {@code ts_headline} sentinel-delimited string. */
|
||||||
|
public record ParsedHighlight(String cleanText, List<MatchOffset> offsets) {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parses a {@code ts_headline} result that uses {@code chr(1)}/{@code chr(2)} as
|
||||||
|
* start/stop delimiters. Returns the clean text (delimiters stripped) together with
|
||||||
|
* the character offsets of each highlighted span. Returns {@code null} when
|
||||||
|
* {@code headline} is {@code null}.
|
||||||
|
*/
|
||||||
|
public static ParsedHighlight parseHighlight(String headline) {
|
||||||
|
if (headline == null) return null;
|
||||||
|
StringBuilder clean = new StringBuilder(headline.length());
|
||||||
|
List<MatchOffset> offsets = new ArrayList<>();
|
||||||
|
int i = 0;
|
||||||
|
int pos = 0; // position in the clean string (no delimiters)
|
||||||
|
while (i < headline.length()) {
|
||||||
|
char c = headline.charAt(i);
|
||||||
|
if (c == '\u0001') {
|
||||||
|
int start = pos;
|
||||||
|
i++;
|
||||||
|
while (i < headline.length() && headline.charAt(i) != '\u0002') {
|
||||||
|
clean.append(headline.charAt(i));
|
||||||
|
i++;
|
||||||
|
pos++;
|
||||||
|
}
|
||||||
|
offsets.add(new MatchOffset(start, pos - start));
|
||||||
|
i++; // skip \u0002
|
||||||
|
} else {
|
||||||
|
clean.append(c);
|
||||||
|
i++;
|
||||||
|
pos++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return new ParsedHighlight(clean.toString(), offsets);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extracts only the {@link MatchOffset} list from a title headline.
|
||||||
|
* The clean title text comes from the {@link Document} entity itself.
|
||||||
|
*/
|
||||||
|
private static List<MatchOffset> parseTitleOffsets(String headline) {
|
||||||
|
ParsedHighlight parsed = parseHighlight(headline);
|
||||||
|
return parsed != null ? parsed.offsets() : List.of();
|
||||||
|
}
|
||||||
|
|
||||||
|
private static List<UUID> parseUUIDs(String csv) {
|
||||||
|
if (csv == null || csv.isBlank()) return List.of();
|
||||||
|
return Arrays.stream(csv.split(","))
|
||||||
|
.map(String::trim)
|
||||||
|
.filter(s -> !s.isEmpty())
|
||||||
|
.map(UUID::fromString)
|
||||||
|
.toList();
|
||||||
|
}
|
||||||
|
|
||||||
private static String sha256Hex(byte[] bytes) {
|
private static String sha256Hex(byte[] bytes) {
|
||||||
try {
|
try {
|
||||||
MessageDigest digest = MessageDigest.getInstance("SHA-256");
|
MessageDigest digest = MessageDigest.getInstance("SHA-256");
|
||||||
@@ -597,4 +751,5 @@ public class DocumentService {
|
|||||||
throw new IllegalStateException("SHA-256 not available", e);
|
throw new IllegalStateException("SHA-256 not available", e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -100,7 +100,7 @@ public class DocumentVersionService {
|
|||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
return userService.findByUsername(auth.getName());
|
return userService.findByEmail(auth.getName());
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
log.warn("Could not resolve editor for version snapshot: {}", e.getMessage());
|
log.warn("Could not resolve editor for version snapshot: {}", e.getMessage());
|
||||||
return null;
|
return null;
|
||||||
@@ -114,7 +114,7 @@ public class DocumentVersionService {
|
|||||||
if (first != null && !first.isBlank() && last != null && !last.isBlank()) {
|
if (first != null && !first.isBlank() && last != null && !last.isBlank()) {
|
||||||
return first + " " + last;
|
return first + " " + last;
|
||||||
}
|
}
|
||||||
return user.getUsername();
|
return user.getEmail();
|
||||||
}
|
}
|
||||||
|
|
||||||
private String serializeSnapshot(Document doc) {
|
private String serializeSnapshot(Document doc) {
|
||||||
|
|||||||
@@ -0,0 +1,165 @@
|
|||||||
|
package org.raddatz.familienarchiv.service;
|
||||||
|
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.raddatz.familienarchiv.dto.CreateInviteRequest;
|
||||||
|
import org.raddatz.familienarchiv.dto.InviteListItemDTO;
|
||||||
|
import org.raddatz.familienarchiv.dto.RegisterRequest;
|
||||||
|
import org.raddatz.familienarchiv.exception.DomainException;
|
||||||
|
import org.raddatz.familienarchiv.exception.ErrorCode;
|
||||||
|
import org.raddatz.familienarchiv.model.AppUser;
|
||||||
|
import org.raddatz.familienarchiv.model.InviteToken;
|
||||||
|
import org.raddatz.familienarchiv.model.UserGroup;
|
||||||
|
import org.raddatz.familienarchiv.repository.InviteTokenRepository;
|
||||||
|
import org.springframework.stereotype.Service;
|
||||||
|
import org.springframework.transaction.annotation.Transactional;
|
||||||
|
|
||||||
|
import java.security.SecureRandom;
|
||||||
|
import java.util.*;
|
||||||
|
|
||||||
|
@Service
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
@Slf4j
|
||||||
|
public class InviteService {
|
||||||
|
|
||||||
|
static final int MIN_PASSWORD_LENGTH = 8;
|
||||||
|
private static final String CODE_ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
|
||||||
|
private static final int CODE_LENGTH = 10;
|
||||||
|
private static final int MAX_CODE_ATTEMPTS = 10;
|
||||||
|
private static final SecureRandom SECURE_RANDOM = new SecureRandom();
|
||||||
|
|
||||||
|
private final InviteTokenRepository inviteTokenRepository;
|
||||||
|
private final UserService userService;
|
||||||
|
|
||||||
|
public String generateCode() {
|
||||||
|
for (int attempt = 0; attempt < MAX_CODE_ATTEMPTS; attempt++) {
|
||||||
|
String code = buildRandomCode();
|
||||||
|
if (inviteTokenRepository.findByCode(code).isEmpty()) {
|
||||||
|
return code;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
throw DomainException.internal(ErrorCode.INTERNAL_ERROR, "Failed to generate unique invite code after " + MAX_CODE_ATTEMPTS + " attempts");
|
||||||
|
}
|
||||||
|
|
||||||
|
public InviteToken validateCode(String code) {
|
||||||
|
InviteToken token = inviteTokenRepository.findByCode(code)
|
||||||
|
.orElseThrow(() -> DomainException.notFound(ErrorCode.INVITE_NOT_FOUND, "Invite not found: " + code));
|
||||||
|
checkTokenState(token);
|
||||||
|
return token;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Transactional
|
||||||
|
public InviteToken createInvite(CreateInviteRequest dto, AppUser creator) {
|
||||||
|
Set<UUID> groupIds = new HashSet<>();
|
||||||
|
if (dto.getGroupIds() != null && !dto.getGroupIds().isEmpty()) {
|
||||||
|
List<UserGroup> groups = userService.findGroupsByIds(dto.getGroupIds());
|
||||||
|
groups.forEach(g -> groupIds.add(g.getId()));
|
||||||
|
}
|
||||||
|
|
||||||
|
InviteToken token = InviteToken.builder()
|
||||||
|
.code(generateCode())
|
||||||
|
.label(dto.getLabel())
|
||||||
|
.maxUses(dto.getMaxUses())
|
||||||
|
.prefillFirstName(dto.getPrefillFirstName())
|
||||||
|
.prefillLastName(dto.getPrefillLastName())
|
||||||
|
.prefillEmail(dto.getPrefillEmail())
|
||||||
|
.groupIds(groupIds)
|
||||||
|
.expiresAt(dto.getExpiresAt())
|
||||||
|
.createdBy(creator)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
return inviteTokenRepository.save(token);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Transactional
|
||||||
|
public AppUser redeemInvite(RegisterRequest dto) {
|
||||||
|
InviteToken token = inviteTokenRepository.findByCodeForUpdate(dto.getCode())
|
||||||
|
.orElseThrow(() -> DomainException.notFound(ErrorCode.INVITE_NOT_FOUND, "Invite not found: " + dto.getCode()));
|
||||||
|
|
||||||
|
checkTokenState(token);
|
||||||
|
|
||||||
|
if (dto.getPassword() == null || dto.getPassword().length() < MIN_PASSWORD_LENGTH) {
|
||||||
|
throw DomainException.badRequest(ErrorCode.VALIDATION_ERROR,
|
||||||
|
"Password must be at least " + MIN_PASSWORD_LENGTH + " characters");
|
||||||
|
}
|
||||||
|
|
||||||
|
AppUser user = userService.createUser(
|
||||||
|
dto.getEmail(),
|
||||||
|
dto.getPassword(),
|
||||||
|
dto.getFirstName(),
|
||||||
|
dto.getLastName(),
|
||||||
|
token.getGroupIds()
|
||||||
|
);
|
||||||
|
|
||||||
|
userService.updateNotificationPreferences(user.getId(), dto.isNotifyOnMention(), dto.isNotifyOnMention());
|
||||||
|
|
||||||
|
token.setUseCount(token.getUseCount() + 1);
|
||||||
|
inviteTokenRepository.save(token);
|
||||||
|
|
||||||
|
log.info("User {} registered via invite code {}", dto.getEmail(), dto.getCode());
|
||||||
|
return user;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Transactional
|
||||||
|
public void revokeInvite(UUID id) {
|
||||||
|
InviteToken token = inviteTokenRepository.findById(id)
|
||||||
|
.orElseThrow(() -> DomainException.notFound(ErrorCode.INVITE_NOT_FOUND, "Invite not found: " + id));
|
||||||
|
token.setRevoked(true);
|
||||||
|
inviteTokenRepository.save(token);
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<InviteListItemDTO> listInvites(boolean activeOnly, String appBaseUrl) {
|
||||||
|
List<InviteToken> tokens = activeOnly
|
||||||
|
? inviteTokenRepository.findActive()
|
||||||
|
: inviteTokenRepository.findAllOrderedByCreatedAt();
|
||||||
|
return tokens.stream().map(t -> toListItemDTO(t, appBaseUrl)).toList();
|
||||||
|
}
|
||||||
|
|
||||||
|
public InviteListItemDTO toListItemDTO(InviteToken token, String appBaseUrl) {
|
||||||
|
String status;
|
||||||
|
if (token.isRevoked()) status = "revoked";
|
||||||
|
else if (token.isExpired()) status = "expired";
|
||||||
|
else if (token.isExhausted()) status = "exhausted";
|
||||||
|
else status = "active";
|
||||||
|
|
||||||
|
return InviteListItemDTO.builder()
|
||||||
|
.id(token.getId())
|
||||||
|
.code(token.getCode())
|
||||||
|
.displayCode(formatDisplayCode(token.getCode()))
|
||||||
|
.label(token.getLabel())
|
||||||
|
.useCount(token.getUseCount())
|
||||||
|
.maxUses(token.getMaxUses())
|
||||||
|
.expiresAt(token.getExpiresAt())
|
||||||
|
.revoked(token.isRevoked())
|
||||||
|
.status(status)
|
||||||
|
.createdAt(token.getCreatedAt())
|
||||||
|
.shareableUrl(appBaseUrl + "/register?code=" + token.getCode())
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
private void checkTokenState(InviteToken token) {
|
||||||
|
if (token.isRevoked()) {
|
||||||
|
throw DomainException.conflict(ErrorCode.INVITE_REVOKED, "Invite has been revoked");
|
||||||
|
}
|
||||||
|
if (token.isExpired()) {
|
||||||
|
throw new DomainException(ErrorCode.INVITE_EXPIRED, org.springframework.http.HttpStatus.GONE,
|
||||||
|
"Invite has expired");
|
||||||
|
}
|
||||||
|
if (token.isExhausted()) {
|
||||||
|
throw DomainException.conflict(ErrorCode.INVITE_EXHAUSTED, "Invite use limit reached");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private String buildRandomCode() {
|
||||||
|
StringBuilder sb = new StringBuilder(CODE_LENGTH);
|
||||||
|
for (int i = 0; i < CODE_LENGTH; i++) {
|
||||||
|
sb.append(CODE_ALPHABET.charAt(SECURE_RANDOM.nextInt(CODE_ALPHABET.length())));
|
||||||
|
}
|
||||||
|
return sb.toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
public static String formatDisplayCode(String code) {
|
||||||
|
if (code == null || code.length() != CODE_LENGTH) return code;
|
||||||
|
return code.substring(0, 5) + "-" + code.substring(5);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -9,10 +9,12 @@ import org.raddatz.familienarchiv.repository.OcrJobRepository;
|
|||||||
import org.springframework.scheduling.annotation.Async;
|
import org.springframework.scheduling.annotation.Async;
|
||||||
import org.springframework.stereotype.Component;
|
import org.springframework.stereotype.Component;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.UUID;
|
import java.util.UUID;
|
||||||
import java.util.concurrent.atomic.AtomicInteger;
|
import java.util.concurrent.atomic.AtomicInteger;
|
||||||
|
import java.util.concurrent.atomic.AtomicReference;
|
||||||
|
|
||||||
@Component
|
@Component
|
||||||
@RequiredArgsConstructor
|
@RequiredArgsConstructor
|
||||||
@@ -29,6 +31,7 @@ public class OcrAsyncRunner {
|
|||||||
private final OcrJobRepository ocrJobRepository;
|
private final OcrJobRepository ocrJobRepository;
|
||||||
private final OcrJobDocumentRepository ocrJobDocumentRepository;
|
private final OcrJobDocumentRepository ocrJobDocumentRepository;
|
||||||
private final OcrProgressService ocrProgressService;
|
private final OcrProgressService ocrProgressService;
|
||||||
|
private final SenderModelService senderModelService;
|
||||||
|
|
||||||
@Async
|
@Async
|
||||||
public void runSingleDocument(UUID jobId, UUID documentId, UUID userId) {
|
public void runSingleDocument(UUID jobId, UUID documentId, UUID userId) {
|
||||||
@@ -68,12 +71,18 @@ public class OcrAsyncRunner {
|
|||||||
|
|
||||||
String pdfUrl = fileService.generatePresignedUrl(doc.getFilePath());
|
String pdfUrl = fileService.generatePresignedUrl(doc.getFilePath());
|
||||||
|
|
||||||
|
String senderModelPath = null;
|
||||||
|
if (doc.getSender() != null && doc.getScriptType() == ScriptType.HANDWRITING_KURRENT) {
|
||||||
|
senderModelPath = senderModelService.maybeGetModelPath(doc.getSender().getId()).orElse(null);
|
||||||
|
}
|
||||||
|
|
||||||
AtomicInteger blockCounter = new AtomicInteger(0);
|
AtomicInteger blockCounter = new AtomicInteger(0);
|
||||||
AtomicInteger currentPage = new AtomicInteger(0);
|
AtomicInteger currentPage = new AtomicInteger(0);
|
||||||
AtomicInteger skippedPages = new AtomicInteger(0);
|
AtomicInteger skippedPages = new AtomicInteger(0);
|
||||||
AtomicInteger totalPages = new AtomicInteger(0);
|
AtomicInteger totalPages = new AtomicInteger(0);
|
||||||
|
|
||||||
ocrClient.streamBlocks(pdfUrl, doc.getScriptType(), regions, event -> {
|
final String finalSenderModelPath = senderModelPath;
|
||||||
|
ocrClient.streamBlocks(pdfUrl, doc.getScriptType(), regions, finalSenderModelPath, event -> {
|
||||||
switch (event) {
|
switch (event) {
|
||||||
case OcrStreamEvent.Start start -> {
|
case OcrStreamEvent.Start start -> {
|
||||||
totalPages.set(start.totalPages());
|
totalPages.set(start.totalPages());
|
||||||
@@ -82,6 +91,10 @@ public class OcrAsyncRunner {
|
|||||||
ocrJobDocumentRepository.save(jobDoc);
|
ocrJobDocumentRepository.save(jobDoc);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
case OcrStreamEvent.Preprocessing preprocessing -> {
|
||||||
|
updateProgress(job, "PREPROCESSING_PAGE:" + preprocessing.pageNumber()
|
||||||
|
+ ":" + totalPages.get());
|
||||||
|
}
|
||||||
case OcrStreamEvent.Page page -> {
|
case OcrStreamEvent.Page page -> {
|
||||||
for (OcrBlockResult block : page.blocks()) {
|
for (OcrBlockResult block : page.blocks()) {
|
||||||
createSingleBlock(documentId, block, userId,
|
createSingleBlock(documentId, block, userId,
|
||||||
@@ -203,7 +216,25 @@ public class OcrAsyncRunner {
|
|||||||
clearExistingBlocks(documentId);
|
clearExistingBlocks(documentId);
|
||||||
|
|
||||||
String pdfUrl = fileService.generatePresignedUrl(doc.getFilePath());
|
String pdfUrl = fileService.generatePresignedUrl(doc.getFilePath());
|
||||||
List<OcrBlockResult> blocks = ocrClient.extractBlocks(pdfUrl, doc.getScriptType());
|
|
||||||
|
String senderModelPath = null;
|
||||||
|
if (doc.getSender() != null && doc.getScriptType() == ScriptType.HANDWRITING_KURRENT) {
|
||||||
|
senderModelPath = senderModelService.maybeGetModelPath(doc.getSender().getId()).orElse(null);
|
||||||
|
}
|
||||||
|
|
||||||
|
final AtomicReference<List<OcrBlockResult>> blocksRef = new AtomicReference<>();
|
||||||
|
final String finalSenderModelPath = senderModelPath;
|
||||||
|
ocrClient.streamBlocks(pdfUrl, doc.getScriptType(), null, finalSenderModelPath, event -> {
|
||||||
|
switch (event) {
|
||||||
|
case OcrStreamEvent.Page page -> {
|
||||||
|
blocksRef.compareAndSet(null, new ArrayList<>());
|
||||||
|
blocksRef.get().addAll(page.blocks());
|
||||||
|
}
|
||||||
|
default -> {}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
List<OcrBlockResult> blocks = blocksRef.get() != null ? blocksRef.get() : List.of();
|
||||||
createTranscriptionBlocks(documentId, blocks, userId, doc.getFileHash());
|
createTranscriptionBlocks(documentId, blocks, userId, doc.getFileHash());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
package org.raddatz.familienarchiv.service;
|
package org.raddatz.familienarchiv.service;
|
||||||
|
|
||||||
import org.raddatz.familienarchiv.model.ScriptType;
|
import org.raddatz.familienarchiv.model.ScriptType;
|
||||||
|
import org.springframework.lang.Nullable;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.LinkedHashMap;
|
import java.util.LinkedHashMap;
|
||||||
@@ -37,15 +38,27 @@ public interface OcrClient {
|
|||||||
TrainingResult segtrainModel(byte[] trainingDataZip);
|
TrainingResult segtrainModel(byte[] trainingDataZip);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Stream OCR results page-by-page via NDJSON. Implementations should override
|
* Fine-tune the Kurrent model for a specific sender.
|
||||||
* this method. The default exists only for backward compatibility during migration
|
*
|
||||||
* — it calls extractBlocks() and synthesizes events from the collected result.
|
* @param trainingDataZip raw ZIP bytes produced by TrainingDataExportService.exportForSender()
|
||||||
|
* @param outputModelPath where to save the trained model (e.g. /app/models/sender_{uuid}.mlmodel)
|
||||||
|
* @return training result metrics
|
||||||
|
*/
|
||||||
|
TrainingResult trainSenderModel(byte[] trainingDataZip, String outputModelPath);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stream OCR results page-by-page via NDJSON, optionally using a sender-specific model.
|
||||||
|
* The default implementation synthesizes events from extractBlocks() for backward compatibility.
|
||||||
|
* Implementations that support real streaming (e.g. RestClientOcrClient) override this.
|
||||||
*
|
*
|
||||||
* @param regions optional list of pre-drawn annotation regions; when non-null,
|
* @param regions optional list of pre-drawn annotation regions; when non-null,
|
||||||
* the OCR service runs in guided mode (crop + recognize per region)
|
* the OCR service runs in guided mode (crop + recognize per region)
|
||||||
|
* @param senderModelPath optional path to a per-sender model file; null means use base model
|
||||||
*/
|
*/
|
||||||
default void streamBlocks(String pdfUrl, ScriptType scriptType,
|
default void streamBlocks(String pdfUrl, ScriptType scriptType,
|
||||||
List<OcrRegion> regions, Consumer<OcrStreamEvent> handler) {
|
List<OcrRegion> regions,
|
||||||
|
@Nullable String senderModelPath,
|
||||||
|
Consumer<OcrStreamEvent> handler) {
|
||||||
List<OcrBlockResult> allBlocks = extractBlocks(pdfUrl, scriptType);
|
List<OcrBlockResult> allBlocks = extractBlocks(pdfUrl, scriptType);
|
||||||
|
|
||||||
LinkedHashMap<Integer, List<OcrBlockResult>> byPage = new LinkedHashMap<>();
|
LinkedHashMap<Integer, List<OcrBlockResult>> byPage = new LinkedHashMap<>();
|
||||||
@@ -62,4 +75,9 @@ public interface OcrClient {
|
|||||||
|
|
||||||
handler.accept(new OcrStreamEvent.Done(allBlocks.size(), 0));
|
handler.accept(new OcrStreamEvent.Done(allBlocks.size(), 0));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
default void streamBlocks(String pdfUrl, ScriptType scriptType,
|
||||||
|
List<OcrRegion> regions, Consumer<OcrStreamEvent> handler) {
|
||||||
|
streamBlocks(pdfUrl, scriptType, regions, null, handler);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,6 +6,8 @@ public sealed interface OcrStreamEvent {
|
|||||||
|
|
||||||
record Start(int totalPages) implements OcrStreamEvent {}
|
record Start(int totalPages) implements OcrStreamEvent {}
|
||||||
|
|
||||||
|
record Preprocessing(int pageNumber) implements OcrStreamEvent {}
|
||||||
|
|
||||||
record Page(int pageNumber, List<OcrBlockResult> blocks) implements OcrStreamEvent {}
|
record Page(int pageNumber, List<OcrBlockResult> blocks) implements OcrStreamEvent {}
|
||||||
|
|
||||||
record Error(int pageNumber, String message) implements OcrStreamEvent {}
|
record Error(int pageNumber, String message) implements OcrStreamEvent {}
|
||||||
|
|||||||
@@ -2,9 +2,12 @@ package org.raddatz.familienarchiv.service;
|
|||||||
|
|
||||||
import lombok.RequiredArgsConstructor;
|
import lombok.RequiredArgsConstructor;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.raddatz.familienarchiv.dto.TrainingHistoryResponse;
|
||||||
|
import org.raddatz.familienarchiv.dto.TrainingInfoResponse;
|
||||||
import org.raddatz.familienarchiv.exception.DomainException;
|
import org.raddatz.familienarchiv.exception.DomainException;
|
||||||
import org.raddatz.familienarchiv.exception.ErrorCode;
|
import org.raddatz.familienarchiv.exception.ErrorCode;
|
||||||
import org.raddatz.familienarchiv.model.OcrTrainingRun;
|
import org.raddatz.familienarchiv.model.OcrTrainingRun;
|
||||||
|
import org.raddatz.familienarchiv.model.SenderModel;
|
||||||
import org.raddatz.familienarchiv.model.TrainingStatus;
|
import org.raddatz.familienarchiv.model.TrainingStatus;
|
||||||
import org.raddatz.familienarchiv.repository.OcrTrainingRunRepository;
|
import org.raddatz.familienarchiv.repository.OcrTrainingRunRepository;
|
||||||
import org.raddatz.familienarchiv.repository.TranscriptionBlockRepository;
|
import org.raddatz.familienarchiv.repository.TranscriptionBlockRepository;
|
||||||
@@ -17,9 +20,11 @@ import org.springframework.transaction.support.TransactionTemplate;
|
|||||||
|
|
||||||
import java.io.ByteArrayOutputStream;
|
import java.io.ByteArrayOutputStream;
|
||||||
import java.time.Instant;
|
import java.time.Instant;
|
||||||
|
import java.util.HashMap;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.Objects;
|
import java.util.Objects;
|
||||||
|
import java.util.Optional;
|
||||||
import java.util.UUID;
|
import java.util.UUID;
|
||||||
|
|
||||||
@Service
|
@Service
|
||||||
@@ -34,16 +39,8 @@ public class OcrTrainingService {
|
|||||||
private final OcrHealthClient ocrHealthClient;
|
private final OcrHealthClient ocrHealthClient;
|
||||||
private final TranscriptionBlockRepository blockRepository;
|
private final TranscriptionBlockRepository blockRepository;
|
||||||
private final TransactionTemplate txTemplate;
|
private final TransactionTemplate txTemplate;
|
||||||
|
private final PersonService personService;
|
||||||
public record TrainingInfoResponse(
|
private final SenderModelService senderModelService;
|
||||||
int availableBlocks,
|
|
||||||
int totalOcrBlocks,
|
|
||||||
int availableDocuments,
|
|
||||||
int availableSegBlocks,
|
|
||||||
boolean ocrServiceAvailable,
|
|
||||||
OcrTrainingRun lastRun,
|
|
||||||
List<OcrTrainingRun> runs
|
|
||||||
) {}
|
|
||||||
|
|
||||||
private void assertNoRunningTraining() {
|
private void assertNoRunningTraining() {
|
||||||
if (trainingRunRepository.findFirstByStatus(TrainingStatus.RUNNING).isPresent()) {
|
if (trainingRunRepository.findFirstByStatus(TrainingStatus.RUNNING).isPresent()) {
|
||||||
@@ -195,9 +192,21 @@ public class OcrTrainingService {
|
|||||||
int totalOcrBlocks = (int) blockRepository.count();
|
int totalOcrBlocks = (int) blockRepository.count();
|
||||||
int availableSegBlocks = segmentationTrainingExportService.querySegmentationBlocks().size();
|
int availableSegBlocks = segmentationTrainingExportService.querySegmentationBlocks().size();
|
||||||
|
|
||||||
List<OcrTrainingRun> recentRuns = trainingRunRepository.findTop10ByOrderByCreatedAtDesc();
|
List<OcrTrainingRun> recentRuns = trainingRunRepository.findTop20ByOrderByCreatedAtDesc();
|
||||||
OcrTrainingRun lastRun = recentRuns.isEmpty() ? null : recentRuns.get(0);
|
OcrTrainingRun lastRun = recentRuns.isEmpty() ? null : recentRuns.get(0);
|
||||||
|
|
||||||
|
List<SenderModel> senderModels = senderModelService.getAllSenderModels();
|
||||||
|
|
||||||
|
List<UUID> allPersonIds = senderModels.stream()
|
||||||
|
.map(SenderModel::getPersonId)
|
||||||
|
.distinct()
|
||||||
|
.toList();
|
||||||
|
Map<String, String> personNames = new HashMap<>();
|
||||||
|
if (!allPersonIds.isEmpty()) {
|
||||||
|
personService.getAllById(allPersonIds)
|
||||||
|
.forEach(p -> personNames.put(p.getId().toString(), p.getDisplayName()));
|
||||||
|
}
|
||||||
|
|
||||||
return new TrainingInfoResponse(
|
return new TrainingInfoResponse(
|
||||||
eligibleBlocks.size(),
|
eligibleBlocks.size(),
|
||||||
totalOcrBlocks,
|
totalOcrBlocks,
|
||||||
@@ -205,10 +214,23 @@ public class OcrTrainingService {
|
|||||||
availableSegBlocks,
|
availableSegBlocks,
|
||||||
ocrHealthClient.isHealthy(),
|
ocrHealthClient.isHealthy(),
|
||||||
lastRun,
|
lastRun,
|
||||||
recentRuns
|
recentRuns,
|
||||||
|
personNames,
|
||||||
|
senderModels
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public TrainingHistoryResponse getGlobalTrainingHistory() {
|
||||||
|
List<OcrTrainingRun> runs = trainingRunRepository.findByPersonIdIsNullOrderByCreatedAtDesc();
|
||||||
|
return new TrainingHistoryResponse(runs, Map.of());
|
||||||
|
}
|
||||||
|
|
||||||
|
public TrainingHistoryResponse getSenderTrainingHistory(UUID personId) {
|
||||||
|
String personName = personService.getById(personId).getDisplayName();
|
||||||
|
List<OcrTrainingRun> runs = trainingRunRepository.findByPersonIdOrderByCreatedAtDesc(personId);
|
||||||
|
return new TrainingHistoryResponse(runs, Map.of(personId.toString(), personName));
|
||||||
|
}
|
||||||
|
|
||||||
@EventListener(ApplicationReadyEvent.class)
|
@EventListener(ApplicationReadyEvent.class)
|
||||||
@Transactional
|
@Transactional
|
||||||
public void recoverOrphanedRuns() {
|
public void recoverOrphanedRuns() {
|
||||||
@@ -224,15 +246,4 @@ public class OcrTrainingService {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
public Map<String, Object> buildTrainingInfoMap(TrainingInfoResponse info) {
|
|
||||||
return Map.of(
|
|
||||||
"availableBlocks", info.availableBlocks(),
|
|
||||||
"totalOcrBlocks", info.totalOcrBlocks(),
|
|
||||||
"availableDocuments", info.availableDocuments(),
|
|
||||||
"availableSegBlocks", info.availableSegBlocks(),
|
|
||||||
"ocrServiceAvailable", info.ocrServiceAvailable(),
|
|
||||||
"lastRun", info.lastRun() != null ? info.lastRun() : Map.of(),
|
|
||||||
"runs", info.runs()
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -14,6 +14,7 @@ import org.springframework.http.HttpEntity;
|
|||||||
import org.springframework.http.HttpHeaders;
|
import org.springframework.http.HttpHeaders;
|
||||||
import org.springframework.http.MediaType;
|
import org.springframework.http.MediaType;
|
||||||
import org.springframework.http.client.JdkClientHttpRequestFactory;
|
import org.springframework.http.client.JdkClientHttpRequestFactory;
|
||||||
|
import org.springframework.lang.Nullable;
|
||||||
import org.springframework.stereotype.Component;
|
import org.springframework.stereotype.Component;
|
||||||
import org.springframework.util.LinkedMultiValueMap;
|
import org.springframework.util.LinkedMultiValueMap;
|
||||||
import org.springframework.util.MultiValueMap;
|
import org.springframework.util.MultiValueMap;
|
||||||
@@ -102,6 +103,13 @@ public class RestClientOcrClient implements OcrClient, OcrHealthClient {
|
|||||||
.toList();
|
.toList();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private RestClient.RequestBodySpec addTrainingAuth(RestClient.RequestBodySpec spec) {
|
||||||
|
if (trainingToken != null && !trainingToken.isBlank()) {
|
||||||
|
return spec.header("X-Training-Token", trainingToken);
|
||||||
|
}
|
||||||
|
return spec;
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public OcrClient.TrainingResult trainModel(byte[] trainingDataZip) {
|
public OcrClient.TrainingResult trainModel(byte[] trainingDataZip) {
|
||||||
ByteArrayResource zipResource = new ByteArrayResource(trainingDataZip) {
|
ByteArrayResource zipResource = new ByteArrayResource(trainingDataZip) {
|
||||||
@@ -114,15 +122,10 @@ public class RestClientOcrClient implements OcrClient, OcrHealthClient {
|
|||||||
partHeaders.setContentType(MediaType.parseMediaType("application/zip"));
|
partHeaders.setContentType(MediaType.parseMediaType("application/zip"));
|
||||||
body.add("file", new HttpEntity<>(zipResource, partHeaders));
|
body.add("file", new HttpEntity<>(zipResource, partHeaders));
|
||||||
|
|
||||||
var spec = trainingRestClient.post()
|
TrainingResultJson result = addTrainingAuth(
|
||||||
|
trainingRestClient.post()
|
||||||
.uri("/train")
|
.uri("/train")
|
||||||
.contentType(MediaType.MULTIPART_FORM_DATA);
|
.contentType(MediaType.MULTIPART_FORM_DATA))
|
||||||
|
|
||||||
if (trainingToken != null && !trainingToken.isBlank()) {
|
|
||||||
spec = spec.header("X-Training-Token", trainingToken);
|
|
||||||
}
|
|
||||||
|
|
||||||
TrainingResultJson result = spec
|
|
||||||
.body(body)
|
.body(body)
|
||||||
.retrieve()
|
.retrieve()
|
||||||
.body(TrainingResultJson.class);
|
.body(TrainingResultJson.class);
|
||||||
@@ -143,15 +146,35 @@ public class RestClientOcrClient implements OcrClient, OcrHealthClient {
|
|||||||
partHeaders.setContentType(MediaType.parseMediaType("application/zip"));
|
partHeaders.setContentType(MediaType.parseMediaType("application/zip"));
|
||||||
body.add("file", new HttpEntity<>(zipResource, partHeaders));
|
body.add("file", new HttpEntity<>(zipResource, partHeaders));
|
||||||
|
|
||||||
var spec = trainingRestClient.post()
|
TrainingResultJson result = addTrainingAuth(
|
||||||
|
trainingRestClient.post()
|
||||||
.uri("/segtrain")
|
.uri("/segtrain")
|
||||||
.contentType(MediaType.MULTIPART_FORM_DATA);
|
.contentType(MediaType.MULTIPART_FORM_DATA))
|
||||||
|
.body(body)
|
||||||
|
.retrieve()
|
||||||
|
.body(TrainingResultJson.class);
|
||||||
|
|
||||||
if (trainingToken != null && !trainingToken.isBlank()) {
|
if (result == null) return new OcrClient.TrainingResult(null, null, null, null);
|
||||||
spec = spec.header("X-Training-Token", trainingToken);
|
return new OcrClient.TrainingResult(result.loss(), result.accuracy(), result.cer(), result.epochs());
|
||||||
}
|
}
|
||||||
|
|
||||||
TrainingResultJson result = spec
|
@Override
|
||||||
|
public OcrClient.TrainingResult trainSenderModel(byte[] trainingDataZip, String outputModelPath) {
|
||||||
|
ByteArrayResource zipResource = new ByteArrayResource(trainingDataZip) {
|
||||||
|
@Override
|
||||||
|
public String getFilename() { return "sender-training-data.zip"; }
|
||||||
|
};
|
||||||
|
|
||||||
|
MultiValueMap<String, Object> body = new LinkedMultiValueMap<>();
|
||||||
|
HttpHeaders partHeaders = new HttpHeaders();
|
||||||
|
partHeaders.setContentType(MediaType.parseMediaType("application/zip"));
|
||||||
|
body.add("file", new HttpEntity<>(zipResource, partHeaders));
|
||||||
|
body.add("output_model_path", outputModelPath);
|
||||||
|
|
||||||
|
TrainingResultJson result = addTrainingAuth(
|
||||||
|
trainingRestClient.post()
|
||||||
|
.uri("/train-sender")
|
||||||
|
.contentType(MediaType.MULTIPART_FORM_DATA))
|
||||||
.body(body)
|
.body(body)
|
||||||
.retrieve()
|
.retrieve()
|
||||||
.body(TrainingResultJson.class);
|
.body(TrainingResultJson.class);
|
||||||
@@ -176,7 +199,8 @@ public class RestClientOcrClient implements OcrClient, OcrHealthClient {
|
|||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void streamBlocks(String pdfUrl, ScriptType scriptType,
|
public void streamBlocks(String pdfUrl, ScriptType scriptType,
|
||||||
List<OcrRegion> regions, Consumer<OcrStreamEvent> handler) {
|
List<OcrRegion> regions, @Nullable String senderModelPath,
|
||||||
|
Consumer<OcrStreamEvent> handler) {
|
||||||
String body;
|
String body;
|
||||||
try {
|
try {
|
||||||
var requestMap = new java.util.LinkedHashMap<String, Object>();
|
var requestMap = new java.util.LinkedHashMap<String, Object>();
|
||||||
@@ -186,6 +210,9 @@ public class RestClientOcrClient implements OcrClient, OcrHealthClient {
|
|||||||
if (regions != null && !regions.isEmpty()) {
|
if (regions != null && !regions.isEmpty()) {
|
||||||
requestMap.put("regions", regions);
|
requestMap.put("regions", regions);
|
||||||
}
|
}
|
||||||
|
if (senderModelPath != null) {
|
||||||
|
requestMap.put("senderModelPath", senderModelPath);
|
||||||
|
}
|
||||||
body = NDJSON_MAPPER.writeValueAsString(requestMap);
|
body = NDJSON_MAPPER.writeValueAsString(requestMap);
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
throw new RuntimeException("Failed to serialize OCR request", e);
|
throw new RuntimeException("Failed to serialize OCR request", e);
|
||||||
@@ -204,7 +231,12 @@ public class RestClientOcrClient implements OcrClient, OcrHealthClient {
|
|||||||
|
|
||||||
if (response.statusCode() == 404) {
|
if (response.statusCode() == 404) {
|
||||||
log.info("OCR service does not support /ocr/stream (404), falling back to /ocr");
|
log.info("OCR service does not support /ocr/stream (404), falling back to /ocr");
|
||||||
OcrClient.super.streamBlocks(pdfUrl, scriptType, regions, handler);
|
List<OcrBlockResult> allBlocks = extractBlocks(pdfUrl, scriptType);
|
||||||
|
handler.accept(new OcrStreamEvent.Start(0));
|
||||||
|
for (OcrBlockResult block : allBlocks) {
|
||||||
|
handler.accept(new OcrStreamEvent.Page(block.pageNumber(), List.of(block)));
|
||||||
|
}
|
||||||
|
handler.accept(new OcrStreamEvent.Done(allBlocks.size(), 0));
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -232,6 +264,8 @@ public class RestClientOcrClient implements OcrClient, OcrHealthClient {
|
|||||||
switch (type) {
|
switch (type) {
|
||||||
case "start" -> handler.accept(
|
case "start" -> handler.accept(
|
||||||
new OcrStreamEvent.Start(node.path("totalPages").asInt()));
|
new OcrStreamEvent.Start(node.path("totalPages").asInt()));
|
||||||
|
case "preprocessing" -> handler.accept(
|
||||||
|
new OcrStreamEvent.Preprocessing(node.path("pageNumber").asInt()));
|
||||||
case "page" -> {
|
case "page" -> {
|
||||||
int pageNumber = node.path("pageNumber").asInt();
|
int pageNumber = node.path("pageNumber").asInt();
|
||||||
List<OcrBlockResult> blocks = NDJSON_MAPPER.convertValue(
|
List<OcrBlockResult> blocks = NDJSON_MAPPER.convertValue(
|
||||||
|
|||||||
@@ -0,0 +1,234 @@
|
|||||||
|
package org.raddatz.familienarchiv.service;
|
||||||
|
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.raddatz.familienarchiv.exception.DomainException;
|
||||||
|
import org.raddatz.familienarchiv.exception.ErrorCode;
|
||||||
|
import org.raddatz.familienarchiv.model.OcrTrainingRun;
|
||||||
|
import org.raddatz.familienarchiv.model.SenderModel;
|
||||||
|
import org.raddatz.familienarchiv.model.TrainingStatus;
|
||||||
|
import org.raddatz.familienarchiv.repository.OcrTrainingRunRepository;
|
||||||
|
import org.raddatz.familienarchiv.repository.SenderModelRepository;
|
||||||
|
import org.raddatz.familienarchiv.repository.TranscriptionBlockRepository;
|
||||||
|
import org.slf4j.MDC;
|
||||||
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
import org.springframework.beans.factory.annotation.Value;
|
||||||
|
import org.springframework.context.annotation.Lazy;
|
||||||
|
import org.springframework.scheduling.annotation.Async;
|
||||||
|
import org.springframework.stereotype.Service;
|
||||||
|
import org.springframework.transaction.annotation.Transactional;
|
||||||
|
import org.springframework.transaction.support.TransactionTemplate;
|
||||||
|
|
||||||
|
import java.io.ByteArrayOutputStream;
|
||||||
|
import java.time.Instant;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Objects;
|
||||||
|
import java.util.Optional;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
@Service
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
@Slf4j
|
||||||
|
public class SenderModelService {
|
||||||
|
|
||||||
|
private final SenderModelRepository senderModelRepository;
|
||||||
|
private final TranscriptionBlockRepository blockRepository;
|
||||||
|
private final OcrTrainingRunRepository trainingRunRepository;
|
||||||
|
private final OcrClient ocrClient;
|
||||||
|
private final TransactionTemplate txTemplate;
|
||||||
|
private final TrainingDataExportService trainingDataExportService;
|
||||||
|
private final PersonService personService;
|
||||||
|
|
||||||
|
// Self-reference through the Spring proxy so @Async is honoured on self-calls.
|
||||||
|
@Lazy
|
||||||
|
@Autowired
|
||||||
|
private SenderModelService self;
|
||||||
|
|
||||||
|
@Value("${ocr.sender-model.activation-threshold:100}")
|
||||||
|
private int activationThreshold;
|
||||||
|
|
||||||
|
@Value("${ocr.sender-model.retrain-delta:50}")
|
||||||
|
private int retrainDelta;
|
||||||
|
|
||||||
|
/** Returns the model path if a trained sender model exists for this person. */
|
||||||
|
public Optional<String> maybeGetModelPath(UUID personId) {
|
||||||
|
return senderModelRepository.findByPersonId(personId)
|
||||||
|
.map(SenderModel::getModelPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<SenderModel> getAllSenderModels() {
|
||||||
|
return senderModelRepository.findAll();
|
||||||
|
}
|
||||||
|
|
||||||
|
public OcrTrainingRun triggerManualSenderTraining(UUID personId) {
|
||||||
|
personService.getById(personId);
|
||||||
|
long correctedLines = blockRepository.countManualKurrentBlocksByPerson(personId);
|
||||||
|
boolean runNow = runOrQueueSenderTraining(personId, (int) correctedLines);
|
||||||
|
TrainingStatus targetStatus = runNow ? TrainingStatus.RUNNING : TrainingStatus.QUEUED;
|
||||||
|
OcrTrainingRun run = trainingRunRepository.findFirstByPersonIdAndStatus(personId, targetStatus)
|
||||||
|
.orElseThrow(() -> DomainException.internal(
|
||||||
|
ErrorCode.OCR_TRAINING_CONFLICT,
|
||||||
|
"Expected " + targetStatus + " run for person " + personId));
|
||||||
|
if (runNow) {
|
||||||
|
self.runSenderTraining(personId);
|
||||||
|
}
|
||||||
|
return run;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Async
|
||||||
|
public void runSenderTraining(UUID personId) {
|
||||||
|
long correctedLines = blockRepository.countManualKurrentBlocksByPerson(personId);
|
||||||
|
triggerSenderTraining(personId, (int) correctedLines);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Called after every MANUAL block save for HANDWRITING_KURRENT documents.
|
||||||
|
* Checks activation and retrain thresholds; enqueues or starts sender training when met.
|
||||||
|
*/
|
||||||
|
@Async
|
||||||
|
public void checkAndTriggerTraining(UUID personId) {
|
||||||
|
long correctedLines = blockRepository.countManualKurrentBlocksByPerson(personId);
|
||||||
|
Optional<SenderModel> existing = senderModelRepository.findByPersonId(personId);
|
||||||
|
|
||||||
|
boolean shouldActivate = existing.isEmpty() && correctedLines >= activationThreshold;
|
||||||
|
boolean shouldRetrain = existing.isPresent()
|
||||||
|
&& (correctedLines - existing.get().getCorrectedLinesAtTraining()) >= retrainDelta;
|
||||||
|
|
||||||
|
if (!shouldActivate && !shouldRetrain) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info("Sender training threshold met for person {} (correctedLines={}, activate={}, retrain={})",
|
||||||
|
personId, correctedLines, shouldActivate, shouldRetrain);
|
||||||
|
|
||||||
|
boolean runNow = runOrQueueSenderTraining(personId, (int) correctedLines);
|
||||||
|
if (runNow) {
|
||||||
|
triggerSenderTraining(personId, (int) correctedLines);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Atomically checks the queue state and either creates a RUNNING row (returns true) or a
|
||||||
|
* QUEUED row (returns false). All three operations — idle check, duplicate-queue guard, and
|
||||||
|
* RUNNING row creation — happen in one transaction, eliminating the race window that would
|
||||||
|
* otherwise exist between the check and a separate RUNNING row creation.
|
||||||
|
*/
|
||||||
|
@Transactional
|
||||||
|
public boolean runOrQueueSenderTraining(UUID personId, int correctedLines) {
|
||||||
|
if (trainingRunRepository.existsByPersonIdAndStatus(personId, TrainingStatus.QUEUED)) {
|
||||||
|
log.info("Sender training already queued for person {} — skipping duplicate trigger", personId);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (trainingRunRepository.findFirstByStatus(TrainingStatus.RUNNING).isPresent()) {
|
||||||
|
int blockCount = (int) blockRepository.countManualKurrentBlocksByPerson(personId);
|
||||||
|
trainingRunRepository.save(OcrTrainingRun.builder()
|
||||||
|
.status(TrainingStatus.QUEUED)
|
||||||
|
.personId(personId)
|
||||||
|
.blockCount(blockCount)
|
||||||
|
.documentCount(0)
|
||||||
|
.modelName("sender_" + personId)
|
||||||
|
.build());
|
||||||
|
log.info("Queued sender training for person {} — training already running", personId);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
long blockCount = blockRepository.countManualKurrentBlocksByPerson(personId);
|
||||||
|
trainingRunRepository.save(OcrTrainingRun.builder()
|
||||||
|
.status(TrainingStatus.RUNNING)
|
||||||
|
.personId(personId)
|
||||||
|
.blockCount((int) blockCount)
|
||||||
|
.documentCount(0)
|
||||||
|
.modelName("sender_" + personId)
|
||||||
|
.build());
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Executes sender training synchronously. Caller must run this on a background thread.
|
||||||
|
* The RUNNING row is expected to already exist — created atomically by
|
||||||
|
* runOrQueueSenderTraining (for new runs) or by promoteNextQueuedRun (for promoted runs).
|
||||||
|
*/
|
||||||
|
public void triggerSenderTraining(UUID personId, int correctedLines) {
|
||||||
|
String outputModelPath = "/app/models/sender_" + personId + ".mlmodel";
|
||||||
|
|
||||||
|
OcrTrainingRun run = Objects.requireNonNull(txTemplate.execute(status ->
|
||||||
|
trainingRunRepository.findFirstByPersonIdAndStatus(personId, TrainingStatus.RUNNING)
|
||||||
|
.orElseThrow(() -> DomainException.internal(
|
||||||
|
ErrorCode.OCR_TRAINING_CONFLICT,
|
||||||
|
"Expected RUNNING row for person " + personId + " but none found"))));
|
||||||
|
|
||||||
|
String runId = run.getId().toString();
|
||||||
|
MDC.put("trainingRunId", runId);
|
||||||
|
log.info("Started sender training run {} for person {}", runId, personId);
|
||||||
|
|
||||||
|
try {
|
||||||
|
byte[] zipBytes = exportSenderData(personId);
|
||||||
|
log.info("[trainingRun={}] Sending {} bytes to OCR service for sender training", runId, zipBytes.length);
|
||||||
|
OcrClient.TrainingResult result = ocrClient.trainSenderModel(zipBytes, outputModelPath);
|
||||||
|
|
||||||
|
txTemplate.execute(status -> {
|
||||||
|
SenderModel model = senderModelRepository.findByPersonId(personId)
|
||||||
|
.orElseGet(() -> SenderModel.builder().personId(personId).build());
|
||||||
|
model.setModelPath(outputModelPath);
|
||||||
|
model.setCer(result.cer());
|
||||||
|
model.setAccuracy(result.accuracy());
|
||||||
|
model.setCorrectedLinesAtTraining(correctedLines);
|
||||||
|
senderModelRepository.save(model);
|
||||||
|
|
||||||
|
run.setStatus(TrainingStatus.DONE);
|
||||||
|
run.setCompletedAt(Instant.now());
|
||||||
|
run.setCer(result.cer());
|
||||||
|
run.setAccuracy(result.accuracy());
|
||||||
|
run.setEpochs(result.epochs());
|
||||||
|
trainingRunRepository.save(run);
|
||||||
|
log.info("[trainingRun={}] Sender training completed — cer={}", runId, result.cer());
|
||||||
|
return null;
|
||||||
|
});
|
||||||
|
} catch (Exception e) {
|
||||||
|
txTemplate.execute(status -> {
|
||||||
|
run.setStatus(TrainingStatus.FAILED);
|
||||||
|
run.setErrorMessage(e.getMessage());
|
||||||
|
run.setCompletedAt(Instant.now());
|
||||||
|
trainingRunRepository.save(run);
|
||||||
|
log.error("[trainingRun={}] Sender training failed: {}", runId, e.getMessage(), e);
|
||||||
|
return null;
|
||||||
|
});
|
||||||
|
} finally {
|
||||||
|
MDC.remove("trainingRunId");
|
||||||
|
promoteNextQueuedRun();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private byte[] exportSenderData(UUID personId) throws java.io.IOException {
|
||||||
|
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||||
|
trainingDataExportService.exportForSender(personId).writeTo(baos);
|
||||||
|
return baos.toByteArray();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Promotes the oldest QUEUED sender run to RUNNING and triggers its training.
|
||||||
|
* Called in the finally block of triggerSenderTraining, creating a sequential chain:
|
||||||
|
* each run promotes the next only after it fully completes (success or failure).
|
||||||
|
*
|
||||||
|
* This is intentionally tail-recursive via the @Async thread: the same thread holds the
|
||||||
|
* full queue drain, serialising all sender training runs naturally without an external
|
||||||
|
* scheduler. With N queued runs the thread stays occupied for N sequential trainings —
|
||||||
|
* acceptable because the @Async executor is dedicated to long-running background work.
|
||||||
|
*/
|
||||||
|
private void promoteNextQueuedRun() {
|
||||||
|
Optional<OcrTrainingRun> queuedOpt = txTemplate.execute(status ->
|
||||||
|
trainingRunRepository.findFirstByStatusOrderByCreatedAtAsc(TrainingStatus.QUEUED)
|
||||||
|
.map(queued -> {
|
||||||
|
queued.setStatus(TrainingStatus.RUNNING);
|
||||||
|
return trainingRunRepository.save(queued);
|
||||||
|
}));
|
||||||
|
|
||||||
|
if (queuedOpt != null && queuedOpt.isPresent()) {
|
||||||
|
OcrTrainingRun promoted = queuedOpt.get();
|
||||||
|
log.info("Promoting queued sender training run {} for person {}", promoted.getId(), promoted.getPersonId());
|
||||||
|
long freshCount = blockRepository.countManualKurrentBlocksByPerson(promoted.getPersonId());
|
||||||
|
triggerSenderTraining(promoted.getPersonId(), (int) freshCount);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,30 +1,52 @@
|
|||||||
package org.raddatz.familienarchiv.service;
|
package org.raddatz.familienarchiv.service;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.Collection;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.HashSet;
|
||||||
|
import java.util.LinkedHashMap;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.Set;
|
||||||
import java.util.UUID;
|
import java.util.UUID;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
import org.raddatz.familienarchiv.dto.TagTreeNodeDTO;
|
||||||
|
import org.raddatz.familienarchiv.dto.TagUpdateDTO;
|
||||||
|
import org.raddatz.familienarchiv.exception.DomainException;
|
||||||
|
import org.raddatz.familienarchiv.exception.ErrorCode;
|
||||||
import org.raddatz.familienarchiv.model.Tag;
|
import org.raddatz.familienarchiv.model.Tag;
|
||||||
import org.raddatz.familienarchiv.repository.TagRepository;
|
import org.raddatz.familienarchiv.repository.TagRepository;
|
||||||
import org.springframework.http.HttpStatus;
|
|
||||||
import org.springframework.stereotype.Service;
|
import org.springframework.stereotype.Service;
|
||||||
import org.springframework.transaction.annotation.Transactional;
|
import org.springframework.transaction.annotation.Transactional;
|
||||||
import org.springframework.web.server.ResponseStatusException;
|
import org.springframework.util.StringUtils;
|
||||||
|
|
||||||
import lombok.RequiredArgsConstructor;
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
|
||||||
@Service
|
@Service
|
||||||
@RequiredArgsConstructor
|
@RequiredArgsConstructor
|
||||||
|
@Slf4j
|
||||||
public class TagService {
|
public class TagService {
|
||||||
|
|
||||||
|
// These 10 color tokens are the fixed palette.
|
||||||
|
// Keep in sync with the --c-tag-* tokens defined in frontend/src/routes/layout.css.
|
||||||
|
static final Set<String> ALLOWED_TAG_COLORS = Set.of(
|
||||||
|
"sage", "sienna", "amber", "slate", "violet",
|
||||||
|
"rose", "cobalt", "moss", "sand", "coral"
|
||||||
|
);
|
||||||
|
|
||||||
private final TagRepository tagRepository;
|
private final TagRepository tagRepository;
|
||||||
|
|
||||||
public List<Tag> search(String query) {
|
public List<Tag> search(String query) {
|
||||||
return tagRepository.findByNameContainingIgnoreCase(query);
|
List<Tag> matched = tagRepository.findByNameContainingIgnoreCase(query);
|
||||||
|
if (matched.isEmpty()) return matched;
|
||||||
|
return enrichWithRelatives(matched);
|
||||||
}
|
}
|
||||||
|
|
||||||
public Tag getById(UUID id) {
|
public Tag getById(UUID id) {
|
||||||
return tagRepository.findById(id)
|
return tagRepository.findById(id)
|
||||||
.orElseThrow(() -> new ResponseStatusException(HttpStatus.NOT_FOUND, "Tag nicht gefunden"));
|
.orElseThrow(() -> DomainException.notFound(ErrorCode.TAG_NOT_FOUND, "Tag not found: " + id));
|
||||||
}
|
}
|
||||||
|
|
||||||
public Tag findOrCreate(String name) {
|
public Tag findOrCreate(String name) {
|
||||||
@@ -34,9 +56,22 @@ public class TagService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
public Tag update(UUID id, String newName) {
|
public Tag update(UUID id, TagUpdateDTO dto) {
|
||||||
Tag tag = getById(id);
|
Tag tag = getById(id);
|
||||||
tag.setName(newName);
|
|
||||||
|
if (dto.parentId() != null) {
|
||||||
|
validateNoSelfReference(id, dto.parentId());
|
||||||
|
validateNoAncestorCycle(id, dto.parentId());
|
||||||
|
getById(dto.parentId()); // ensure parent exists
|
||||||
|
}
|
||||||
|
|
||||||
|
if (dto.color() != null) {
|
||||||
|
validateColor(dto.color());
|
||||||
|
}
|
||||||
|
|
||||||
|
tag.setName(dto.name());
|
||||||
|
tag.setParentId(dto.parentId());
|
||||||
|
tag.setColor(dto.color());
|
||||||
return tagRepository.save(tag);
|
return tagRepository.save(tag);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -44,4 +79,175 @@ public class TagService {
|
|||||||
public void delete(UUID id) {
|
public void delete(UUID id) {
|
||||||
tagRepository.delete(getById(id));
|
tagRepository.delete(getById(id));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Transactional
|
||||||
|
public Tag mergeTags(UUID sourceId, UUID targetId) {
|
||||||
|
validateNotSelf(sourceId, targetId);
|
||||||
|
Tag source = getById(sourceId);
|
||||||
|
Tag target = getById(targetId);
|
||||||
|
log.info("Merging tag '{}' ({}) into '{}' ({})", source.getName(), sourceId, target.getName(), targetId);
|
||||||
|
validateNotDescendant(sourceId, targetId);
|
||||||
|
transferDocuments(sourceId, targetId);
|
||||||
|
tagRepository.reparentChildren(sourceId, targetId);
|
||||||
|
tagRepository.deleteById(sourceId);
|
||||||
|
return target;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Transactional
|
||||||
|
public void deleteWithDescendants(UUID id) {
|
||||||
|
log.info("Deleting subtree rooted at {}", id);
|
||||||
|
getById(id);
|
||||||
|
List<UUID> ids = tagRepository.findDescendantIds(id);
|
||||||
|
if (!ids.isEmpty()) tagRepository.deleteDocumentTagsByTagIds(ids);
|
||||||
|
tagRepository.deleteAllById(ids);
|
||||||
|
log.info("Deleted subtree rooted at {}, {} nodes", id, ids.size());
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sets the effective (inherited) color on child tags that have no color of their own.
|
||||||
|
* Colors are stored only on root-level tags; children inherit the parent's color.
|
||||||
|
* Parent tags are batch-loaded in a single query. Safe to call on detached entities.
|
||||||
|
*/
|
||||||
|
public void resolveEffectiveColors(Collection<Tag> tags) {
|
||||||
|
if (tags == null || tags.isEmpty()) return;
|
||||||
|
|
||||||
|
Set<UUID> parentIdsNeeded = tags.stream()
|
||||||
|
.filter(t -> t.getColor() == null && t.getParentId() != null)
|
||||||
|
.map(Tag::getParentId)
|
||||||
|
.collect(Collectors.toSet());
|
||||||
|
|
||||||
|
if (parentIdsNeeded.isEmpty()) return;
|
||||||
|
|
||||||
|
Map<UUID, String> parentColors = tagRepository.findAllById(parentIdsNeeded)
|
||||||
|
.stream()
|
||||||
|
.filter(p -> p.getColor() != null)
|
||||||
|
.collect(Collectors.toMap(Tag::getId, Tag::getColor));
|
||||||
|
|
||||||
|
tags.forEach(tag -> {
|
||||||
|
if (tag.getColor() == null && tag.getParentId() != null) {
|
||||||
|
String resolved = parentColors.get(tag.getParentId());
|
||||||
|
if (resolved != null) {
|
||||||
|
tag.setColor(resolved);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* For each tag name, returns the set of that tag's ID plus all descendant IDs.
|
||||||
|
* Used by DocumentService to expand selected filter tags before applying AND/OR logic.
|
||||||
|
*/
|
||||||
|
public List<Set<UUID>> expandTagNamesToDescendantIdSets(List<String> tagNames) {
|
||||||
|
if (tagNames == null || tagNames.isEmpty()) return List.of();
|
||||||
|
return tagNames.stream()
|
||||||
|
.filter(StringUtils::hasText)
|
||||||
|
.map(name -> (Set<UUID>) new HashSet<>(tagRepository.findDescendantIdsByName(name.trim())))
|
||||||
|
.toList();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns all tags assembled into a tree with document counts per node.
|
||||||
|
* Uses a single aggregate query to avoid N+1 behaviour.
|
||||||
|
* NOTE: document counts are global per tag, not scoped to any search filter.
|
||||||
|
* The tree endpoint is only used for the admin sidebar, so this is intentional.
|
||||||
|
*/
|
||||||
|
public List<TagTreeNodeDTO> getTagTree() {
|
||||||
|
List<Tag> all = tagRepository.findAll();
|
||||||
|
Map<UUID, Long> counts = tagRepository.findDocumentCountsPerTag().stream()
|
||||||
|
.collect(Collectors.toMap(
|
||||||
|
TagRepository.TagCount::getTagId,
|
||||||
|
TagRepository.TagCount::getCount
|
||||||
|
));
|
||||||
|
return buildTree(all, counts);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── private helpers ─────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// Each matched tag issues 1 CTE query (findDescendantIds or findAncestorIds) + 1 batch
|
||||||
|
// fetch for extras. Typical queries match 1–3 tags at depth ≤ 4, so 3–5 queries total.
|
||||||
|
private List<Tag> enrichWithRelatives(List<Tag> matched) {
|
||||||
|
Set<UUID> matchedIds = matched.stream().map(Tag::getId).collect(Collectors.toSet());
|
||||||
|
Set<UUID> extraIds = new HashSet<>();
|
||||||
|
|
||||||
|
for (Tag tag : matched) {
|
||||||
|
if (tag.getParentId() == null) {
|
||||||
|
extraIds.addAll(tagRepository.findDescendantIds(tag.getId()));
|
||||||
|
} else {
|
||||||
|
extraIds.addAll(tagRepository.findAncestorIds(tag.getId()));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
extraIds.removeAll(matchedIds);
|
||||||
|
|
||||||
|
List<Tag> result = new ArrayList<>(matched);
|
||||||
|
if (!extraIds.isEmpty()) {
|
||||||
|
result.addAll(tagRepository.findAllById(extraIds));
|
||||||
|
}
|
||||||
|
resolveEffectiveColors(result);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
private void validateNotSelf(UUID sourceId, UUID targetId) {
|
||||||
|
if (sourceId.equals(targetId)) {
|
||||||
|
throw DomainException.badRequest(ErrorCode.TAG_MERGE_SELF,
|
||||||
|
"Source and target must not be the same tag: " + sourceId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void validateNotDescendant(UUID sourceId, UUID targetId) {
|
||||||
|
List<UUID> descendants = tagRepository.findDescendantIds(sourceId);
|
||||||
|
if (descendants.contains(targetId)) {
|
||||||
|
throw DomainException.badRequest(ErrorCode.TAG_MERGE_INVALID_TARGET,
|
||||||
|
"Target " + targetId + " is a descendant of source " + sourceId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Moves all document links from the source tag to the target tag, then deletes
// any document-tag rows still pointing at the source.
// NOTE(review): presumably the delete cleans up rows reassignDocumentTags could
// not move (e.g. documents already tagged with the target) — confirm against
// the repository queries.
private void transferDocuments(UUID sourceId, UUID targetId) {
    tagRepository.reassignDocumentTags(sourceId, targetId);
    tagRepository.deleteDocumentTagsByTagId(sourceId);
}
|
||||||
|
|
||||||
|
private void validateNoSelfReference(UUID tagId, UUID proposedParentId) {
|
||||||
|
if (tagId.equals(proposedParentId)) {
|
||||||
|
throw DomainException.badRequest(ErrorCode.TAG_CYCLE_DETECTED,
|
||||||
|
"A tag cannot be its own parent: " + tagId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void validateNoAncestorCycle(UUID tagId, UUID proposedParentId) {
|
||||||
|
// TOCTOU note: concurrent admin writes could both pass this check and create a
|
||||||
|
// multi-node cycle. This is intentionally not locked because: (a) the endpoint
|
||||||
|
// requires ADMIN_TAG permission so concurrency is rare, (b) the DB-level
|
||||||
|
// CHECK (parent_id != id) prevents infinite self-loops as a hard backstop,
|
||||||
|
// and (c) the window is microseconds. Do NOT add a pessimistic lock here.
|
||||||
|
List<UUID> ancestors = tagRepository.findAncestorIds(proposedParentId);
|
||||||
|
if (ancestors.contains(tagId)) {
|
||||||
|
throw DomainException.badRequest(ErrorCode.TAG_CYCLE_DETECTED,
|
||||||
|
"Assigning parent " + proposedParentId + " to tag " + tagId + " would create a cycle");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void validateColor(String color) {
|
||||||
|
if (!ALLOWED_TAG_COLORS.contains(color)) {
|
||||||
|
throw DomainException.badRequest(ErrorCode.INVALID_TAG_COLOR,
|
||||||
|
"Color '" + color + "' is not in the allowed palette");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private List<TagTreeNodeDTO> buildTree(List<Tag> tags, Map<UUID, Long> counts) {
|
||||||
|
Map<UUID, TagTreeNodeDTO> nodeById = new LinkedHashMap<>();
|
||||||
|
for (Tag tag : tags) {
|
||||||
|
int documentCount = counts.getOrDefault(tag.getId(), 0L).intValue();
|
||||||
|
nodeById.put(tag.getId(), new TagTreeNodeDTO(
|
||||||
|
tag.getId(), tag.getName(), tag.getColor(), documentCount,
|
||||||
|
new ArrayList<>(), tag.getParentId()
|
||||||
|
));
|
||||||
|
}
|
||||||
|
for (TagTreeNodeDTO node : nodeById.values()) {
|
||||||
|
if (node.parentId() != null) {
|
||||||
|
TagTreeNodeDTO parent = nodeById.get(node.parentId());
|
||||||
|
if (parent != null) parent.children().add(node);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nodeById.values().stream().filter(n -> n.parentId() == null).toList();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -38,10 +38,20 @@ public class TrainingDataExportService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public StreamingResponseBody exportToZip() {
|
public StreamingResponseBody exportToZip() {
|
||||||
// Collect all data before entering the lambda — no open DB txn during streaming
|
return exportBlocksToZip(queryEligibleBlocks());
|
||||||
List<TranscriptionBlock> blocks = queryEligibleBlocks();
|
}
|
||||||
|
|
||||||
|
public List<TranscriptionBlock> queryBlocksForSender(UUID personId) {
|
||||||
|
return blockRepository.findManualKurrentBlocksByPerson(personId);
|
||||||
|
}
|
||||||
|
|
||||||
|
public StreamingResponseBody exportForSender(UUID personId) {
|
||||||
|
return exportBlocksToZip(queryBlocksForSender(personId));
|
||||||
|
}
|
||||||
|
|
||||||
|
private StreamingResponseBody exportBlocksToZip(List<TranscriptionBlock> blocks) {
|
||||||
if (blocks.isEmpty()) {
|
if (blocks.isEmpty()) {
|
||||||
return out -> {}; // caller checks isEmpty() for 204 response
|
return out -> {};
|
||||||
}
|
}
|
||||||
|
|
||||||
// Group blocks by documentId so we only download each PDF once
|
// Group blocks by documentId so we only download each PDF once
|
||||||
|
|||||||
@@ -0,0 +1,69 @@
|
|||||||
|
package org.raddatz.familienarchiv.service;
|
||||||
|
|
||||||
|
import lombok.RequiredArgsConstructor;
|
||||||
|
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
|
||||||
|
import org.raddatz.familienarchiv.audit.AuditLogQueryService;
|
||||||
|
import org.raddatz.familienarchiv.dto.TranscriptionQueueItemDTO;
|
||||||
|
import org.raddatz.familienarchiv.dto.TranscriptionWeeklyStatsDTO;
|
||||||
|
import org.raddatz.familienarchiv.repository.DocumentRepository;
|
||||||
|
import org.raddatz.familienarchiv.repository.TranscriptionQueueProjection;
|
||||||
|
import org.springframework.stereotype.Service;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
@Service
|
||||||
|
@RequiredArgsConstructor
|
||||||
|
public class TranscriptionQueueService {
|
||||||
|
|
||||||
|
private static final int DEFAULT_QUEUE_SIZE = 5;
|
||||||
|
private static final int MAX_CONTRIBUTORS = 5;
|
||||||
|
|
||||||
|
private final DocumentRepository documentRepository;
|
||||||
|
private final AuditLogQueryService auditLogQueryService;
|
||||||
|
|
||||||
|
public List<TranscriptionQueueItemDTO> getSegmentationQueue() {
|
||||||
|
return enrichWithContributors(documentRepository.findSegmentationQueue(DEFAULT_QUEUE_SIZE));
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<TranscriptionQueueItemDTO> getTranscriptionQueue() {
|
||||||
|
return enrichWithContributors(documentRepository.findTranscriptionQueue(DEFAULT_QUEUE_SIZE));
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<TranscriptionQueueItemDTO> getReadyToReadQueue() {
|
||||||
|
return enrichWithContributors(documentRepository.findReadyToReadQueue(DEFAULT_QUEUE_SIZE));
|
||||||
|
}
|
||||||
|
|
||||||
|
public TranscriptionWeeklyStatsDTO getWeeklyStats() {
|
||||||
|
var stats = documentRepository.findWeeklyStats();
|
||||||
|
return new TranscriptionWeeklyStatsDTO(
|
||||||
|
stats.getSegmentationCount(),
|
||||||
|
stats.getTranscriptionCount()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
private List<TranscriptionQueueItemDTO> enrichWithContributors(List<TranscriptionQueueProjection> projections) {
|
||||||
|
if (projections.isEmpty()) return List.of();
|
||||||
|
List<UUID> ids = projections.stream().map(TranscriptionQueueProjection::getId).toList();
|
||||||
|
Map<UUID, List<ActivityActorDTO>> contributorMap = auditLogQueryService.findContributorsPerDocument(ids);
|
||||||
|
return projections.stream()
|
||||||
|
.map(p -> toDTO(p, contributorMap.getOrDefault(p.getId(), List.of())))
|
||||||
|
.toList();
|
||||||
|
}
|
||||||
|
|
||||||
|
private TranscriptionQueueItemDTO toDTO(TranscriptionQueueProjection p, List<ActivityActorDTO> allContributors) {
|
||||||
|
boolean hasMore = allContributors.size() > MAX_CONTRIBUTORS;
|
||||||
|
List<ActivityActorDTO> capped = hasMore ? allContributors.subList(0, MAX_CONTRIBUTORS) : allContributors;
|
||||||
|
return new TranscriptionQueueItemDTO(
|
||||||
|
p.getId(),
|
||||||
|
p.getTitle(),
|
||||||
|
p.getDocumentDate(),
|
||||||
|
p.getAnnotationCount(),
|
||||||
|
p.getTextedBlockCount(),
|
||||||
|
p.getReviewedBlockCount(),
|
||||||
|
capped,
|
||||||
|
hasMore
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -2,6 +2,8 @@ package org.raddatz.familienarchiv.service;
|
|||||||
|
|
||||||
import lombok.RequiredArgsConstructor;
|
import lombok.RequiredArgsConstructor;
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
|
import org.raddatz.familienarchiv.audit.AuditKind;
|
||||||
|
import org.raddatz.familienarchiv.audit.AuditService;
|
||||||
import org.raddatz.familienarchiv.dto.CreateAnnotationDTO;
|
import org.raddatz.familienarchiv.dto.CreateAnnotationDTO;
|
||||||
import org.raddatz.familienarchiv.dto.CreateTranscriptionBlockDTO;
|
import org.raddatz.familienarchiv.dto.CreateTranscriptionBlockDTO;
|
||||||
import org.raddatz.familienarchiv.dto.ReorderTranscriptionBlocksDTO;
|
import org.raddatz.familienarchiv.dto.ReorderTranscriptionBlocksDTO;
|
||||||
@@ -11,6 +13,7 @@ import org.raddatz.familienarchiv.exception.ErrorCode;
|
|||||||
import org.raddatz.familienarchiv.model.BlockSource;
|
import org.raddatz.familienarchiv.model.BlockSource;
|
||||||
import org.raddatz.familienarchiv.model.Document;
|
import org.raddatz.familienarchiv.model.Document;
|
||||||
import org.raddatz.familienarchiv.model.DocumentAnnotation;
|
import org.raddatz.familienarchiv.model.DocumentAnnotation;
|
||||||
|
import org.raddatz.familienarchiv.model.ScriptType;
|
||||||
import org.raddatz.familienarchiv.model.TranscriptionBlock;
|
import org.raddatz.familienarchiv.model.TranscriptionBlock;
|
||||||
import org.raddatz.familienarchiv.model.TranscriptionBlockVersion;
|
import org.raddatz.familienarchiv.model.TranscriptionBlockVersion;
|
||||||
import org.raddatz.familienarchiv.repository.AnnotationRepository;
|
import org.raddatz.familienarchiv.repository.AnnotationRepository;
|
||||||
@@ -20,6 +23,8 @@ import org.springframework.stereotype.Service;
|
|||||||
import org.springframework.transaction.annotation.Transactional;
|
import org.springframework.transaction.annotation.Transactional;
|
||||||
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.Optional;
|
||||||
import java.util.UUID;
|
import java.util.UUID;
|
||||||
|
|
||||||
@Service
|
@Service
|
||||||
@@ -35,6 +40,8 @@ public class TranscriptionService {
|
|||||||
private final AnnotationRepository annotationRepository;
|
private final AnnotationRepository annotationRepository;
|
||||||
private final AnnotationService annotationService;
|
private final AnnotationService annotationService;
|
||||||
private final DocumentService documentService;
|
private final DocumentService documentService;
|
||||||
|
private final SenderModelService senderModelService;
|
||||||
|
private final AuditService auditService;
|
||||||
|
|
||||||
public List<TranscriptionBlock> listBlocks(UUID documentId) {
|
public List<TranscriptionBlock> listBlocks(UUID documentId) {
|
||||||
return blockRepository.findByDocumentIdOrderBySortOrderAsc(documentId);
|
return blockRepository.findByDocumentIdOrderBySortOrderAsc(documentId);
|
||||||
@@ -120,8 +127,10 @@ public class TranscriptionService {
|
|||||||
UpdateTranscriptionBlockDTO dto, UUID userId) {
|
UpdateTranscriptionBlockDTO dto, UUID userId) {
|
||||||
TranscriptionBlock block = getBlock(documentId, blockId);
|
TranscriptionBlock block = getBlock(documentId, blockId);
|
||||||
|
|
||||||
|
String previousText = block.getText();
|
||||||
String text = sanitizeText(dto.getText());
|
String text = sanitizeText(dto.getText());
|
||||||
block.setText(text);
|
block.setText(text);
|
||||||
|
block.setSource(BlockSource.MANUAL);
|
||||||
if (dto.getLabel() != null) {
|
if (dto.getLabel() != null) {
|
||||||
block.setLabel(dto.getLabel());
|
block.setLabel(dto.getLabel());
|
||||||
}
|
}
|
||||||
@@ -129,6 +138,19 @@ public class TranscriptionService {
|
|||||||
|
|
||||||
TranscriptionBlock saved = blockRepository.save(block);
|
TranscriptionBlock saved = blockRepository.save(block);
|
||||||
saveVersion(saved, userId);
|
saveVersion(saved, userId);
|
||||||
|
|
||||||
|
if (!text.equals(previousText)) {
|
||||||
|
Optional<DocumentAnnotation> annotation = annotationRepository.findById(block.getAnnotationId());
|
||||||
|
int pageNumber = annotation.map(DocumentAnnotation::getPageNumber).orElse(0);
|
||||||
|
auditService.logAfterCommit(AuditKind.TEXT_SAVED, userId, documentId,
|
||||||
|
Map.of("pageNumber", pageNumber, "blockId", saved.getId().toString()));
|
||||||
|
}
|
||||||
|
|
||||||
|
Document doc = documentService.getDocumentById(documentId);
|
||||||
|
if (doc.getSender() != null && doc.getScriptType() == ScriptType.HANDWRITING_KURRENT) {
|
||||||
|
senderModelService.checkAndTriggerTraining(doc.getSender().getId());
|
||||||
|
}
|
||||||
|
|
||||||
return saved;
|
return saved;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -172,10 +194,15 @@ public class TranscriptionService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
public TranscriptionBlock reviewBlock(UUID documentId, UUID blockId) {
|
public TranscriptionBlock reviewBlock(UUID documentId, UUID blockId, UUID userId) {
|
||||||
TranscriptionBlock block = getBlock(documentId, blockId);
|
TranscriptionBlock block = getBlock(documentId, blockId);
|
||||||
block.setReviewed(!block.isReviewed());
|
boolean wasReviewed = block.isReviewed();
|
||||||
return blockRepository.save(block);
|
block.setReviewed(!wasReviewed);
|
||||||
|
TranscriptionBlock saved = blockRepository.save(block);
|
||||||
|
if (!wasReviewed && saved.isReviewed()) {
|
||||||
|
auditService.logAfterCommit(AuditKind.BLOCK_REVIEWED, userId, documentId, null);
|
||||||
|
}
|
||||||
|
return saved;
|
||||||
}
|
}
|
||||||
|
|
||||||
public List<TranscriptionBlockVersion> getBlockHistory(UUID documentId, UUID blockId) {
|
public List<TranscriptionBlockVersion> getBlockHistory(UUID documentId, UUID blockId) {
|
||||||
@@ -199,4 +226,5 @@ public class TranscriptionService {
|
|||||||
}
|
}
|
||||||
return text;
|
return text;
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -18,6 +18,6 @@ public class UserSearchService {
|
|||||||
|
|
||||||
public List<AppUser> search(String query) {
|
public List<AppUser> search(String query) {
|
||||||
if (query == null || query.isBlank()) return List.of();
|
if (query == null || query.isBlank()) return List.of();
|
||||||
return userRepository.searchByNameOrUsername(query.trim(), PageRequest.of(0, MAX_RESULTS));
|
return userRepository.searchByEmailOrName(query.trim(), PageRequest.of(0, MAX_RESULTS));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -36,23 +36,22 @@ public class UserService {
|
|||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
public AppUser createUserOrUpdate(CreateUserRequest request) {
|
public AppUser createUserOrUpdate(CreateUserRequest request) {
|
||||||
log.info("Creating or updating user: {}", request.getUsername());
|
log.info("Creating or updating user: {}", request.getEmail());
|
||||||
|
|
||||||
Set<UserGroup> groups = new HashSet<>();
|
Set<UserGroup> groups = new HashSet<>();
|
||||||
if (request.getGroupIds() != null && !request.getGroupIds().isEmpty()) {
|
if (request.getGroupIds() != null && !request.getGroupIds().isEmpty()) {
|
||||||
groups.addAll(groupRepository.findAllById(request.getGroupIds()));
|
groups.addAll(groupRepository.findAllById(request.getGroupIds()));
|
||||||
}
|
}
|
||||||
|
|
||||||
Optional<AppUser> existingUser = userRepository.findByUsername(request.getUsername());
|
Optional<AppUser> existingUser = userRepository.findByEmail(request.getEmail());
|
||||||
AppUser user;
|
AppUser user;
|
||||||
|
|
||||||
if (existingUser.isPresent()) {
|
if (existingUser.isPresent()) {
|
||||||
log.info("User exists, updating: {}", request.getUsername());
|
log.info("User exists, updating: {}", request.getEmail());
|
||||||
user = existingUser.get().updateFromRequest(request, passwordEncoder, groups);
|
user = existingUser.get().updateFromRequest(request, passwordEncoder, groups);
|
||||||
} else {
|
} else {
|
||||||
log.info("Creating new user: {}", request.getUsername());
|
log.info("Creating new user: {}", request.getEmail());
|
||||||
user = AppUser.builder()
|
user = AppUser.builder()
|
||||||
.username(request.getUsername())
|
|
||||||
.email(request.getEmail())
|
.email(request.getEmail())
|
||||||
.password(passwordEncoder.encode(request.getInitialPassword()))
|
.password(passwordEncoder.encode(request.getInitialPassword()))
|
||||||
.groups(groups)
|
.groups(groups)
|
||||||
@@ -67,6 +66,33 @@ public class UserService {
|
|||||||
return userRepository.save(user);
|
return userRepository.save(user);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Transactional
|
||||||
|
public AppUser createUser(String email, String rawPassword, String firstName, String lastName, Set<UUID> groupIds) {
|
||||||
|
userRepository.findByEmail(email).ifPresent(existing -> {
|
||||||
|
throw DomainException.conflict(ErrorCode.EMAIL_ALREADY_IN_USE, "Email already registered: " + email);
|
||||||
|
});
|
||||||
|
|
||||||
|
Set<UserGroup> groups = new HashSet<>();
|
||||||
|
if (groupIds != null && !groupIds.isEmpty()) {
|
||||||
|
groups.addAll(groupRepository.findAllById(groupIds));
|
||||||
|
}
|
||||||
|
|
||||||
|
AppUser user = AppUser.builder()
|
||||||
|
.email(email)
|
||||||
|
.password(passwordEncoder.encode(rawPassword))
|
||||||
|
.firstName(firstName)
|
||||||
|
.lastName(lastName)
|
||||||
|
.groups(groups)
|
||||||
|
.enabled(true)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
return userRepository.save(user);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Resolves group ids to entities. Per Spring Data {@code findAllById}
 * semantics, ids with no matching row are silently omitted from the result.
 */
public List<UserGroup> findGroupsByIds(Collection<UUID> ids) {
    return groupRepository.findAllById(ids);
}
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
public void deleteUser(UUID userId) {
|
public void deleteUser(UUID userId) {
|
||||||
AppUser user = userRepository.findById(userId)
|
AppUser user = userRepository.findById(userId)
|
||||||
@@ -103,8 +129,8 @@ public class UserService {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
user.setEmail(dto.getEmail().trim());
|
user.setEmail(dto.getEmail().trim());
|
||||||
} else if (dto.getEmail() != null && dto.getEmail().isBlank()) {
|
} else if (dto.getEmail() != null) {
|
||||||
user.setEmail(null);
|
throw DomainException.badRequest(ErrorCode.VALIDATION_ERROR, "Email must not be blank");
|
||||||
}
|
}
|
||||||
|
|
||||||
user.setFirstName(dto.getFirstName());
|
user.setFirstName(dto.getFirstName());
|
||||||
@@ -126,8 +152,8 @@ public class UserService {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
user.setEmail(dto.getEmail().trim());
|
user.setEmail(dto.getEmail().trim());
|
||||||
} else if (dto.getEmail() != null && dto.getEmail().isBlank()) {
|
} else if (dto.getEmail() != null) {
|
||||||
user.setEmail(null);
|
throw DomainException.badRequest(ErrorCode.VALIDATION_ERROR, "Email must not be blank");
|
||||||
}
|
}
|
||||||
|
|
||||||
user.setFirstName(dto.getFirstName());
|
user.setFirstName(dto.getFirstName());
|
||||||
@@ -158,9 +184,9 @@ public class UserService {
|
|||||||
userRepository.save(user);
|
userRepository.save(user);
|
||||||
}
|
}
|
||||||
|
|
||||||
public AppUser findByUsername(String username) {
|
public AppUser findByEmail(String email) {
|
||||||
return userRepository.findByUsername(username)
|
return userRepository.findByEmail(email)
|
||||||
.orElseThrow(() -> DomainException.notFound(ErrorCode.USER_NOT_FOUND, "No user found for username: " + username));
|
.orElseThrow(() -> DomainException.notFound(ErrorCode.USER_NOT_FOUND, "No user found for email: " + email));
|
||||||
}
|
}
|
||||||
|
|
||||||
public List<AppUser> getAllUsers() {
|
public List<AppUser> getAllUsers() {
|
||||||
|
|||||||
@@ -77,3 +77,8 @@ app:
|
|||||||
tags: 10
|
tags: 10
|
||||||
summary: 11
|
summary: 11
|
||||||
transcription: 13
|
transcription: 13
|
||||||
|
|
||||||
|
ocr:
|
||||||
|
sender-model:
|
||||||
|
activation-threshold: 100
|
||||||
|
retrain-delta: 50
|
||||||
|
|||||||
@@ -0,0 +1,4 @@
|
|||||||
|
-- Index on transcription_blocks.document_id to speed up the LATERAL join
|
||||||
|
-- used in DocumentService.findEnrichmentData (FTS match enrichment).
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_transcription_blocks_document_id
|
||||||
|
ON transcription_blocks (document_id);
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
-- Indexes to support the weekly stats correlated subqueries in findWeeklyStats().
-- Without these, COUNT(DISTINCT ...) with a date range filter performs a full table scan
-- on every dashboard load.
-- IF NOT EXISTS keeps re-runs harmless.
CREATE INDEX IF NOT EXISTS idx_document_annotations_created_at ON document_annotations(created_at);
CREATE INDEX IF NOT EXISTS idx_transcription_blocks_created_at ON transcription_blocks(created_at);
CREATE INDEX IF NOT EXISTS idx_transcription_blocks_updated_at ON transcription_blocks(updated_at);
|
||||||
@@ -0,0 +1,9 @@
|
|||||||
|
-- Add self-referencing parent FK for tag hierarchy (adjacency list model).
-- ON DELETE SET NULL: deleting a parent promotes its children to root level.
ALTER TABLE tag ADD COLUMN parent_id UUID REFERENCES tag(id) ON DELETE SET NULL;
-- NULL parent_id passes this CHECK (NULL != id evaluates to NULL, which is not
-- a violation in SQL), so root tags are unaffected.
ALTER TABLE tag ADD CONSTRAINT chk_tag_no_self_reference CHECK (parent_id != id);
CREATE INDEX idx_tag_parent_id ON tag(parent_id);

-- Optional color token (e.g. "sage", "teal") for root-level tags.
-- Validated against the allowed palette in TagService before save.
ALTER TABLE tag ADD COLUMN color VARCHAR(20);
|
||||||
@@ -0,0 +1,10 @@
|
|||||||
|
-- One trained OCR model per sender. UNIQUE on person_id enforces the 1:1
-- relationship; ON DELETE CASCADE removes the model row with the person.
CREATE TABLE sender_models (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    person_id UUID NOT NULL UNIQUE REFERENCES persons(id) ON DELETE CASCADE,
    model_path TEXT NOT NULL,
    -- Evaluation metrics; nullable — presumably only populated after the model
    -- has been scored (confirm against the training pipeline).
    accuracy DOUBLE PRECISION,
    cer DOUBLE PRECISION,
    -- Corrected-line count captured at training time; compared against the
    -- configured retrain-delta to decide when a retrain is due.
    corrected_lines_at_training INT NOT NULL DEFAULT 0,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
-- Attribute training runs to a sender (person). ON DELETE SET NULL keeps the
-- run history when the person is deleted, only dropping the attribution.
ALTER TABLE ocr_training_runs
    ADD COLUMN person_id UUID REFERENCES persons(id) ON DELETE SET NULL;
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
-- At most one QUEUED training run per person at any time.
-- Partial unique index: rows in other statuses are unconstrained, so finished
-- run history can accumulate freely. Rows with NULL person_id are also exempt
-- (NULLs never conflict in a unique index).
CREATE UNIQUE INDEX idx_training_runs_queued_per_person
    ON ocr_training_runs(person_id)
    WHERE status = 'QUEUED';
|
||||||
@@ -0,0 +1,11 @@
|
|||||||
|
-- Abort if any user has no email address set.
-- All users must have an email before this migration can run.
-- The DO block surfaces a human-readable migration error instead of letting
-- the ALTER fail mid-way with a bare NOT NULL violation.
DO $$
BEGIN
    IF EXISTS (SELECT 1 FROM users WHERE email IS NULL) THEN
        RAISE EXCEPTION 'Migration aborted: some users have no email address. Set emails for all users before running this migration.';
    END IF;
END $$;

-- Email becomes the login identifier; the legacy username column is dropped.
ALTER TABLE users ALTER COLUMN email SET NOT NULL;
ALTER TABLE users DROP COLUMN username;
|
||||||
@@ -0,0 +1,22 @@
|
|||||||
|
-- Invite tokens for onboarding new users, plus the groups granted on redemption.
CREATE TABLE invite_tokens (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    -- Short shareable code. The UNIQUE constraint already creates a backing
    -- b-tree index, so the previously separate CREATE INDEX on this column was
    -- redundant and has been removed.
    code VARCHAR(10) UNIQUE NOT NULL,
    label VARCHAR(255),
    -- NULL max_uses presumably means unlimited — confirm in application logic.
    max_uses INTEGER,
    use_count INTEGER NOT NULL DEFAULT 0,
    prefill_first_name VARCHAR(255),
    prefill_last_name VARCHAR(255),
    prefill_email VARCHAR(255),
    -- NOTE(review): plain TIMESTAMP while newer tables (audit_log,
    -- sender_models) use TIMESTAMPTZ — consider aligning before this ships.
    expires_at TIMESTAMP,
    created_by UUID NOT NULL REFERENCES users(id),
    created_at TIMESTAMP NOT NULL DEFAULT NOW(),
    revoked BOOLEAN NOT NULL DEFAULT FALSE
);

-- Join table: groups a redeemed token grants to the new user.
CREATE TABLE invite_token_group_ids (
    invite_token_id UUID NOT NULL REFERENCES invite_tokens(id),
    group_id UUID NOT NULL REFERENCES user_groups(id),
    PRIMARY KEY (invite_token_id, group_id)
);
|
||||||
@@ -0,0 +1,25 @@
|
|||||||
|
-- Append-only audit trail for domain-level archive activity.
-- Enables dashboard queries (Family Pulse, activity feed, resume card) in #271.

CREATE TABLE audit_log (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    happened_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    -- ON DELETE SET NULL is by design: GDPR right-to-erasure. Deleted users' events
    -- retain their timestamp and kind but lose actor attribution.
    actor_id UUID REFERENCES users(id) ON DELETE SET NULL,
    kind VARCHAR(50) NOT NULL,
    document_id UUID REFERENCES documents(id) ON DELETE CASCADE,
    payload JSONB
);

-- DESC on happened_at matches "latest first" activity-feed ordering.
CREATE INDEX idx_audit_log_happened_at ON audit_log (happened_at DESC);
CREATE INDEX idx_audit_log_document_id ON audit_log (document_id);
CREATE INDEX idx_audit_log_actor_id ON audit_log (actor_id);
CREATE INDEX idx_audit_log_kind ON audit_log (kind);

-- Enforce append-only at the database layer: the application role may INSERT
-- but must not UPDATE or DELETE audit rows.
-- NOTE: This REVOKE is a no-op when the current user is the table owner.
-- PostgreSQL owners retain all privileges regardless of REVOKE. The append-only
-- guarantee is enforced at the application layer only.
REVOKE UPDATE, DELETE ON audit_log FROM CURRENT_USER;
|
||||||
@@ -0,0 +1,8 @@
|
|||||||
|
-- Add deterministic avatar color to app_users.
-- Assigned at application layer (AppUser.java) from a fixed 8-colour palette.
-- Also corrects V46's REVOKE which hardcoded 'app_user' instead of CURRENT_USER.

-- DEFAULT '' keeps existing rows valid; the application assigns real colors.
ALTER TABLE users ADD COLUMN color VARCHAR(20) NOT NULL DEFAULT '';

-- Fix V46 append-only enforcement for the actual application role.
-- NOTE(review): the header above says V46 hardcoded 'app_user', yet this
-- statement also targets CURRENT_USER — and per V46's own note, REVOKE ...
-- FROM CURRENT_USER is a no-op when the migration role owns the table.
-- Confirm which role is actually intended; as written this likely changes
-- nothing.
REVOKE UPDATE, DELETE ON audit_log FROM CURRENT_USER;
|
||||||
@@ -0,0 +1,53 @@
|
|||||||
|
package org.raddatz.familienarchiv.audit;
|
||||||
|
|
||||||
|
import org.junit.jupiter.api.Test;
|
||||||
|
import org.raddatz.familienarchiv.PostgresContainerConfig;
|
||||||
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
import org.springframework.boot.test.context.SpringBootTest;
|
||||||
|
import org.springframework.context.annotation.Import;
|
||||||
|
import org.springframework.test.annotation.DirtiesContext;
|
||||||
|
import org.springframework.test.context.ActiveProfiles;
|
||||||
|
import org.springframework.test.context.bean.override.mockito.MockitoBean;
|
||||||
|
import org.springframework.transaction.support.TransactionTemplate;
|
||||||
|
import software.amazon.awssdk.services.s3.S3Client;
|
||||||
|
|
||||||
|
import static java.util.concurrent.TimeUnit.SECONDS;
|
||||||
|
import static org.assertj.core.api.Assertions.assertThat;
|
||||||
|
import static org.awaitility.Awaitility.await;
|
||||||
|
|
||||||
|
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE)
@ActiveProfiles("test")
@Import(PostgresContainerConfig.class)
// Audit writes happen after commit (and asynchronously — see the await below);
// a fresh context per test keeps rows from leaking between tests.
@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_EACH_TEST_METHOD)
class AuditServiceIntegrationTest {

    // S3 is not exercised here; mocked so the Spring context can start without
    // a real S3 client.
    @MockitoBean S3Client s3Client;
    @Autowired AuditService auditService;
    @Autowired AuditLogRepository auditLogRepository;
    @Autowired TransactionTemplate transactionTemplate;

    // Happy path: logging inside a committed transaction eventually persists
    // exactly one row of the requested kind. The write is asynchronous, hence
    // the Awaitility poll rather than an immediate assertion.
    @Test
    void logAfterCommit_writes_ANNOTATION_CREATED_row_after_transaction_commits() {
        transactionTemplate.execute(status -> {
            auditService.logAfterCommit(AuditKind.ANNOTATION_CREATED, null, null, null);
            return null;
        });

        await().atMost(5, SECONDS).until(() -> auditLogRepository.count() > 0);
        assertThat(auditLogRepository.findAll())
                .extracting(AuditLog::getKind)
                .containsExactly(AuditKind.ANNOTATION_CREATED);
    }

    // Rollback path: the afterCommit callback must never fire, so no audit row
    // may exist after the forced rollback.
    @Test
    void logAfterCommit_writes_no_row_when_transaction_rolls_back() {
        try {
            transactionTemplate.execute(status -> {
                auditService.logAfterCommit(AuditKind.ANNOTATION_CREATED, null, null, null);
                throw new RuntimeException("force rollback");
            });
        } catch (RuntimeException ignored) {} // rollback is the point of this test

        assertThat(auditLogRepository.count()).isZero();
    }
}
|
||||||
@@ -0,0 +1,121 @@
|
|||||||
|
package org.raddatz.familienarchiv.audit;
|
||||||
|
|
||||||
|
import org.junit.jupiter.api.Test;
|
||||||
|
import org.junit.jupiter.api.extension.ExtendWith;
|
||||||
|
import org.mockito.ArgumentCaptor;
|
||||||
|
import org.mockito.InjectMocks;
|
||||||
|
import org.mockito.Mock;
|
||||||
|
import org.mockito.MockedStatic;
|
||||||
|
import org.mockito.junit.jupiter.MockitoExtension;
|
||||||
|
import org.springframework.core.task.TaskExecutor;
|
||||||
|
import org.springframework.transaction.support.TransactionSynchronization;
|
||||||
|
import org.springframework.transaction.support.TransactionSynchronizationManager;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
import static org.assertj.core.api.Assertions.assertThat;
|
||||||
|
import static org.assertj.core.api.Assertions.assertThatCode;
|
||||||
|
import static org.mockito.ArgumentMatchers.any;
|
||||||
|
import static org.mockito.Mockito.*;
|
||||||
|
|
||||||
|
@ExtendWith(MockitoExtension.class)
class AuditServiceTest {

    @Mock AuditLogRepository auditLogRepository;
    @Mock TaskExecutor auditExecutor;
    @InjectMocks AuditService auditService;

    // log(...) must persist a row carrying exactly the kind/actor/document/payload
    // it was given.
    @Test
    void log_savesAuditRowWithCorrectFields() {
        UUID actorId = UUID.randomUUID();
        UUID documentId = UUID.randomUUID();
        Map<String, Object> payload = Map.of("pageNumber", 3);

        when(auditLogRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));

        auditService.log(AuditKind.TEXT_SAVED, actorId, documentId, payload);

        ArgumentCaptor<AuditLog> captor = ArgumentCaptor.forClass(AuditLog.class);
        verify(auditLogRepository).save(captor.capture());
        AuditLog saved = captor.getValue();

        assertThat(saved.getKind()).isEqualTo(AuditKind.TEXT_SAVED);
        assertThat(saved.getActorId()).isEqualTo(actorId);
        assertThat(saved.getDocumentId()).isEqualTo(documentId);
        assertThat(saved.getPayload()).isEqualTo(payload);
    }

    // Auditing is best-effort: a repository failure must never propagate into
    // the business operation being audited.
    @Test
    void log_doesNotPropagateException_whenRepoThrows() {
        when(auditLogRepository.save(any())).thenThrow(new RuntimeException("DB down"));

        assertThatCode(() ->
                auditService.log(AuditKind.METADATA_UPDATED, UUID.randomUUID(), UUID.randomUUID(), null)
        ).doesNotThrowAnyException();
    }

    // payload is optional — null must be stored as-is, not rejected.
    @Test
    void log_acceptsNullPayload() {
        when(auditLogRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));

        auditService.log(AuditKind.FILE_UPLOADED, UUID.randomUUID(), UUID.randomUUID(), null);

        ArgumentCaptor<AuditLog> captor = ArgumentCaptor.forClass(AuditLog.class);
        verify(auditLogRepository).save(captor.capture());
        assertThat(captor.getValue().getPayload()).isNull();
    }

    // actorId is optional too (e.g. system-triggered events).
    @Test
    void log_acceptsNullActorId() {
        when(auditLogRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));

        auditService.log(AuditKind.BLOCK_REVIEWED, null, UUID.randomUUID(), null);

        ArgumentCaptor<AuditLog> captor = ArgumentCaptor.forClass(AuditLog.class);
        verify(auditLogRepository).save(captor.capture());
        assertThat(captor.getValue().getActorId()).isNull();
    }

    // ─── logAfterCommit ───────────────────────────────────────────────────────

    // Without an active transaction there is nothing to defer to: the row is
    // written immediately.
    @Test
    void logAfterCommit_savesDirectly_whenNoTransactionIsActive() {
        when(auditLogRepository.save(any())).thenAnswer(inv -> inv.getArgument(0));

        try (MockedStatic<TransactionSynchronizationManager> mocked =
                     mockStatic(TransactionSynchronizationManager.class)) {
            mocked.when(TransactionSynchronizationManager::isActualTransactionActive).thenReturn(false);

            auditService.logAfterCommit(AuditKind.METADATA_UPDATED, null, null, null);

            verify(auditLogRepository).save(any());
        }
    }

    // With an active transaction, a synchronization is registered; after the
    // (simulated) commit the write is handed to the executor rather than run
    // inline on the committing thread.
    @Test
    void logAfterCommit_registersCallback_andSubmitsToExecutor_afterCommit() {
        try (MockedStatic<TransactionSynchronizationManager> mocked =
                     mockStatic(TransactionSynchronizationManager.class)) {
            mocked.when(TransactionSynchronizationManager::isActualTransactionActive).thenReturn(true);
            List<TransactionSynchronization> captured = new ArrayList<>();
            mocked.when(() -> TransactionSynchronizationManager.registerSynchronization(any()))
                    .thenAnswer(inv -> { captured.add(inv.getArgument(0)); return null; });

            auditService.logAfterCommit(AuditKind.TEXT_SAVED, null, null, null);

            // Callback registered but executor not yet invoked
            assertThat(captured).hasSize(1);
            verify(auditExecutor, never()).execute(any());

            // Simulate transaction commit
            captured.get(0).afterCommit();

            // Write submitted to executor — not called inline
            verify(auditExecutor).execute(any());
            verify(auditLogRepository, never()).save(any());
        }
    }
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user