feat(documents): paginate /documents search so first paint isn't 1500 rows #316
@@ -13,6 +13,11 @@ import java.util.UUID;
|
||||
|
||||
import io.swagger.v3.oas.annotations.Parameter;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
import jakarta.validation.constraints.Max;
|
||||
import jakarta.validation.constraints.Min;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.validation.annotation.Validated;
|
||||
import org.raddatz.familienarchiv.dto.DocumentSearchResult;
|
||||
import org.raddatz.familienarchiv.dto.DocumentUpdateDTO;
|
||||
import org.raddatz.familienarchiv.dto.TagOperator;
|
||||
@@ -62,6 +67,7 @@ import lombok.extern.slf4j.Slf4j;
|
||||
@RequestMapping("/api/documents")
|
||||
@RequiredArgsConstructor
|
||||
@Slf4j
|
||||
@Validated
|
||||
public class DocumentController {
|
||||
|
||||
private final DocumentService documentService;
|
||||
@@ -252,14 +258,20 @@ public class DocumentController {
|
||||
@Parameter(description = "Filter by document status") @RequestParam(required = false) DocumentStatus status,
|
||||
@Parameter(description = "Sort field") @RequestParam(required = false) DocumentSort sort,
|
||||
@Parameter(description = "Sort direction: ASC or DESC") @RequestParam(required = false, defaultValue = "DESC") String dir,
|
||||
@Parameter(description = "Tag operator: AND (default) or OR") @RequestParam(required = false) String tagOp) {
|
||||
@Parameter(description = "Tag operator: AND (default) or OR") @RequestParam(required = false) String tagOp,
|
||||
// @Max on page bounds the SQL OFFSET (offset = page * size). NOTE(review):
|
||||
// Spring's AbstractPageRequest computes getOffset() as (long) page * (long) size,
|
||||
// so int inputs don't actually wrap negative — the cap's real job is keeping an
absurdly large OFFSET from ever reaching the database.
|
||||
@Parameter(description = "Page number (0-indexed)") @RequestParam(defaultValue = "0") @Min(0) @Max(100_000) int page,
|
||||
@Parameter(description = "Page size (max 100)") @RequestParam(defaultValue = "50") @Min(1) @Max(100) int size) {
|
||||
if (!"ASC".equalsIgnoreCase(dir) && !"DESC".equalsIgnoreCase(dir)) {
|
||||
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "dir must be ASC or DESC");
|
||||
}
|
||||
// tagOp is a raw String at the HTTP boundary; any value other than "OR" (case-insensitive)
|
||||
// defaults to AND, which matches the frontend default and keeps old clients working.
|
||||
TagOperator operator = "OR".equalsIgnoreCase(tagOp) ? TagOperator.OR : TagOperator.AND;
|
||||
return ResponseEntity.ok(documentService.searchDocuments(q, from, to, senderId, receiverId, tags, tagQ, status, sort, dir, operator));
|
||||
Pageable pageable = PageRequest.of(page, size);
|
||||
return ResponseEntity.ok(documentService.searchDocuments(q, from, to, senderId, receiverId, tags, tagQ, status, sort, dir, operator, pageable));
|
||||
}
|
||||
|
||||
// --- TRAINING LABELS ---
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package org.raddatz.familienarchiv.dto;
|
||||
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
@@ -8,9 +9,30 @@ public record DocumentSearchResult(
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||
List<DocumentSearchItem> items,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||
long total
|
||||
long totalElements,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||
int pageNumber,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||
int pageSize,
|
||||
@Schema(requiredMode = Schema.RequiredMode.REQUIRED)
|
||||
int totalPages
|
||||
) {
|
||||
/**
|
||||
* Single-page convenience factory used by empty-result shortcuts and by tests that
|
||||
* don't care about paging. Treats the whole list as page 0 of itself.
|
||||
*/
|
||||
public static DocumentSearchResult of(List<DocumentSearchItem> items) {
|
||||
return new DocumentSearchResult(items, items.size());
|
||||
int size = items.size();
|
||||
return new DocumentSearchResult(items, size, 0, size, size == 0 ? 0 : 1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Paged factory used by the service when it has a real Pageable + full match count
|
||||
* (e.g. from Spring's Page<T> or from an in-memory sort-then-slice).
|
||||
*/
|
||||
public static DocumentSearchResult paged(List<DocumentSearchItem> slice, Pageable pageable, long totalElements) {
|
||||
int pageSize = pageable.getPageSize();
|
||||
int totalPages = pageSize == 0 ? 0 : (int) ((totalElements + pageSize - 1) / pageSize);
|
||||
return new DocumentSearchResult(slice, totalElements, pageable.getPageNumber(), pageSize, totalPages);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,7 +22,9 @@ import org.raddatz.familienarchiv.model.TrainingLabel;
|
||||
import org.raddatz.familienarchiv.model.Person;
|
||||
import org.raddatz.familienarchiv.model.Tag;
|
||||
import org.raddatz.familienarchiv.repository.DocumentRepository;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.jpa.domain.Specification;
|
||||
import org.raddatz.familienarchiv.exception.DomainException;
|
||||
@@ -355,7 +357,7 @@ public class DocumentService {
|
||||
}
|
||||
|
||||
// 1. Allgemeine Suche (für das Suchfeld im Frontend)
|
||||
public DocumentSearchResult searchDocuments(String text, LocalDate from, LocalDate to, UUID sender, UUID receiver, List<String> tags, String tagQ, DocumentStatus status, DocumentSort sort, String dir, TagOperator tagOperator) {
|
||||
public DocumentSearchResult searchDocuments(String text, LocalDate from, LocalDate to, UUID sender, UUID receiver, List<String> tags, String tagQ, DocumentStatus status, DocumentSort sort, String dir, TagOperator tagOperator, Pageable pageable) {
|
||||
boolean hasText = StringUtils.hasText(text);
|
||||
List<UUID> rankedIds = null;
|
||||
|
||||
@@ -376,15 +378,18 @@ public class DocumentService {
|
||||
.and(hasTagPartial(tagQ))
|
||||
.and(hasStatus(status));
|
||||
|
||||
// SENDER and RECEIVER are sorted in-memory because JPA's Sort.by("sender.lastName")
|
||||
// generates an INNER JOIN that silently drops documents with null sender/receivers.
|
||||
// SENDER, RECEIVER and RELEVANCE sorts load the full match set and slice in memory.
|
||||
// JPA's Sort.by("sender.lastName") generates an INNER JOIN that silently drops
|
||||
// documents with null sender/receivers; RELEVANCE maps a DB order to an external
|
||||
// rank list. Cost scales linearly with match count — acceptable while documents
|
||||
// stays under ~10k rows. Past that, replace with SQL-level LEFT JOIN sort.
|
||||
if (sort == DocumentSort.RECEIVER) {
|
||||
List<Document> results = documentRepository.findAll(spec);
|
||||
return buildResult(sortByFirstReceiver(results, dir), text);
|
||||
List<Document> sorted = sortByFirstReceiver(documentRepository.findAll(spec), dir);
|
||||
return buildResultPaged(pageSlice(sorted, pageable), text, pageable, sorted.size());
|
||||
}
|
||||
if (sort == DocumentSort.SENDER) {
|
||||
List<Document> results = documentRepository.findAll(spec);
|
||||
return buildResult(sortBySender(results, dir), text);
|
||||
List<Document> sorted = sortBySender(documentRepository.findAll(spec), dir);
|
||||
return buildResultPaged(pageSlice(sorted, pageable), text, pageable, sorted.size());
|
||||
}
|
||||
|
||||
// RELEVANCE: default when text present and no explicit sort given
|
||||
@@ -397,15 +402,26 @@ public class DocumentService {
|
||||
.sorted(Comparator.comparingInt(
|
||||
doc -> rankMap.getOrDefault(doc.getId(), Integer.MAX_VALUE)))
|
||||
.toList();
|
||||
return buildResult(sorted, text);
|
||||
return buildResultPaged(pageSlice(sorted, pageable), text, pageable, sorted.size());
|
||||
}
|
||||
|
||||
Sort springSort = resolveSort(sort, dir);
|
||||
List<Document> results = documentRepository.findAll(spec, springSort);
|
||||
return buildResult(results, text);
|
||||
// Fast path — push sort + paging into the DB and enrich only the returned slice.
|
||||
PageRequest pageRequest = PageRequest.of(pageable.getPageNumber(), pageable.getPageSize(), resolveSort(sort, dir));
|
||||
Page<Document> page = documentRepository.findAll(spec, pageRequest);
|
||||
return buildResultPaged(page.getContent(), text, pageable, page.getTotalElements());
|
||||
}
|
||||
|
||||
private DocumentSearchResult buildResult(List<Document> documents, String text) {
|
||||
private static <T> List<T> pageSlice(List<T> sorted, Pageable pageable) {
|
||||
int from = Math.min((int) pageable.getOffset(), sorted.size());
|
||||
int to = Math.min(from + pageable.getPageSize(), sorted.size());
|
||||
return sorted.subList(from, to);
|
||||
}
|
||||
|
||||
private DocumentSearchResult buildResultPaged(List<Document> slice, String text, Pageable pageable, long totalElements) {
|
||||
return DocumentSearchResult.paged(enrichItems(slice, text), pageable, totalElements);
|
||||
}
|
||||
|
||||
private List<DocumentSearchItem> enrichItems(List<Document> documents, String text) {
|
||||
List<Document> colorResolved = resolveDocumentTagColors(documents);
|
||||
Map<UUID, SearchMatchData> matchData = enrichWithMatchData(colorResolved, text);
|
||||
|
||||
@@ -413,14 +429,12 @@ public class DocumentService {
|
||||
Map<UUID, Integer> completionByDoc = fetchCompletionPercentages(docIds);
|
||||
Map<UUID, List<ActivityActorDTO>> contributorsByDoc = auditLogQueryService.findRecentContributorsPerDocument(docIds);
|
||||
|
||||
List<DocumentSearchItem> items = colorResolved.stream().map(doc -> new DocumentSearchItem(
|
||||
return colorResolved.stream().map(doc -> new DocumentSearchItem(
|
||||
doc,
|
||||
matchData.getOrDefault(doc.getId(), SearchMatchData.empty()),
|
||||
completionByDoc.getOrDefault(doc.getId(), 0),
|
||||
contributorsByDoc.getOrDefault(doc.getId(), List.of())
|
||||
)).toList();
|
||||
|
||||
return DocumentSearchResult.of(items);
|
||||
}
|
||||
|
||||
private Map<UUID, Integer> fetchCompletionPercentages(List<UUID> docIds) {
|
||||
|
||||
@@ -69,7 +69,7 @@ class DocumentControllerTest {
|
||||
@Test
|
||||
@WithMockUser
|
||||
void search_returns200_whenAuthenticated() throws Exception {
|
||||
when(documentService.searchDocuments(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any()))
|
||||
when(documentService.searchDocuments(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any()))
|
||||
.thenReturn(DocumentSearchResult.of(List.of()));
|
||||
|
||||
mockMvc.perform(get("/api/documents/search"))
|
||||
@@ -79,13 +79,13 @@ class DocumentControllerTest {
|
||||
@Test
|
||||
@WithMockUser
|
||||
void search_withStatusParam_passesItToService() throws Exception {
|
||||
when(documentService.searchDocuments(any(), any(), any(), any(), any(), any(), any(), eq(DocumentStatus.REVIEWED), any(), any(), any()))
|
||||
when(documentService.searchDocuments(any(), any(), any(), any(), any(), any(), any(), eq(DocumentStatus.REVIEWED), any(), any(), any(), any()))
|
||||
.thenReturn(DocumentSearchResult.of(List.of()));
|
||||
|
||||
mockMvc.perform(get("/api/documents/search").param("status", "REVIEWED"))
|
||||
.andExpect(status().isOk());
|
||||
|
||||
verify(documentService).searchDocuments(any(), any(), any(), any(), any(), any(), any(), eq(DocumentStatus.REVIEWED), any(), any(), any());
|
||||
verify(documentService).searchDocuments(any(), any(), any(), any(), any(), any(), any(), eq(DocumentStatus.REVIEWED), any(), any(), any(), any());
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -112,12 +112,12 @@ class DocumentControllerTest {
|
||||
@Test
|
||||
@WithMockUser
|
||||
void search_responseContainsTotalCount() throws Exception {
|
||||
when(documentService.searchDocuments(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any()))
|
||||
when(documentService.searchDocuments(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any()))
|
||||
.thenReturn(DocumentSearchResult.of(List.of()));
|
||||
|
||||
mockMvc.perform(get("/api/documents/search"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.total").value(0))
|
||||
.andExpect(jsonPath("$.totalElements").value(0))
|
||||
.andExpect(jsonPath("$.items").isArray());
|
||||
}
|
||||
|
||||
@@ -133,7 +133,7 @@ class DocumentControllerTest {
|
||||
.build();
|
||||
var matchData = new SearchMatchData(
|
||||
"Er schrieb einen langen Brief", List.of(), false, List.of(), List.of(), List.of(), null, List.of());
|
||||
when(documentService.searchDocuments(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any()))
|
||||
when(documentService.searchDocuments(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any()))
|
||||
.thenReturn(DocumentSearchResult.of(List.of(new DocumentSearchItem(doc, matchData, 0, List.of()))));
|
||||
|
||||
mockMvc.perform(get("/api/documents/search").param("q", "Brief"))
|
||||
@@ -143,6 +143,70 @@ class DocumentControllerTest {
|
||||
.value("Er schrieb einen langen Brief"));
|
||||
}
|
||||
|
||||
// ─── /api/documents/search pagination ─────────────────────────────────────
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void search_responseExposesPagingFields() throws Exception {
|
||||
when(documentService.searchDocuments(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any()))
|
||||
.thenReturn(DocumentSearchResult.of(List.of()));
|
||||
|
||||
mockMvc.perform(get("/api/documents/search"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.pageNumber").exists())
|
||||
.andExpect(jsonPath("$.pageSize").exists())
|
||||
.andExpect(jsonPath("$.totalPages").exists())
|
||||
.andExpect(jsonPath("$.totalElements").exists());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void search_returns400_whenSizeExceedsMax() throws Exception {
|
||||
// Locks @Validated on the controller — removing it silently reopens the
|
||||
// DoS window where a client could request all 1500 docs + enrichment.
|
||||
mockMvc.perform(get("/api/documents/search").param("size", "101"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void search_returns400_whenSizeBelowMin() throws Exception {
|
||||
mockMvc.perform(get("/api/documents/search").param("size", "0"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void search_returns400_whenPageNegative() throws Exception {
|
||||
mockMvc.perform(get("/api/documents/search").param("page", "-1"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void search_returns400_whenPageAboveMax() throws Exception {
|
||||
// Guards against absurdly large page * size offsets reaching the database
|
||||
mockMvc.perform(get("/api/documents/search").param("page", "200000"))
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@Test
|
||||
@WithMockUser
|
||||
void search_passesPageRequestToService() throws Exception {
|
||||
when(documentService.searchDocuments(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any()))
|
||||
.thenReturn(DocumentSearchResult.of(List.of()));
|
||||
|
||||
mockMvc.perform(get("/api/documents/search").param("page", "2").param("size", "25"))
|
||||
.andExpect(status().isOk());
|
||||
|
||||
org.mockito.ArgumentCaptor<org.springframework.data.domain.Pageable> captor =
|
||||
org.mockito.ArgumentCaptor.forClass(org.springframework.data.domain.Pageable.class);
|
||||
verify(documentService).searchDocuments(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), captor.capture());
|
||||
org.springframework.data.domain.Pageable pageable = captor.getValue();
|
||||
org.assertj.core.api.Assertions.assertThat(pageable.getPageNumber()).isEqualTo(2);
|
||||
org.assertj.core.api.Assertions.assertThat(pageable.getPageSize()).isEqualTo(25);
|
||||
}
|
||||
|
||||
// ─── POST /api/documents ─────────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
|
||||
@@ -5,6 +5,7 @@ import org.junit.jupiter.api.Test;
|
||||
import org.raddatz.familienarchiv.audit.ActivityActorDTO;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.model.DocumentStatus;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
@@ -24,10 +25,43 @@ class DocumentSearchResultTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
void of_total_equals_list_size() {
|
||||
DocumentSearchResult result = DocumentSearchResult.of(List.of(item(UUID.randomUUID()), item(UUID.randomUUID())));
|
||||
void of_totalElements_equals_list_size_for_unpaged_shortcut() {
|
||||
DocumentSearchResult result = DocumentSearchResult.of(
|
||||
List.of(item(UUID.randomUUID()), item(UUID.randomUUID())));
|
||||
|
||||
assertThat(result.total()).isEqualTo(2L);
|
||||
assertThat(result.totalElements()).isEqualTo(2L);
|
||||
assertThat(result.pageNumber()).isZero();
|
||||
assertThat(result.pageSize()).isEqualTo(2);
|
||||
assertThat(result.totalPages()).isEqualTo(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
void of_empty_shortcut_has_zero_totalPages() {
|
||||
DocumentSearchResult result = DocumentSearchResult.of(List.of());
|
||||
|
||||
assertThat(result.totalElements()).isZero();
|
||||
assertThat(result.totalPages()).isZero();
|
||||
}
|
||||
|
||||
@Test
|
||||
void paged_factory_populates_paging_fields_from_pageable_and_total() {
|
||||
List<DocumentSearchItem> slice = List.of(item(UUID.randomUUID()), item(UUID.randomUUID()));
|
||||
|
||||
DocumentSearchResult result = DocumentSearchResult.paged(slice, PageRequest.of(1, 50), 120L);
|
||||
|
||||
assertThat(result.items()).hasSize(2);
|
||||
assertThat(result.totalElements()).isEqualTo(120L);
|
||||
assertThat(result.pageNumber()).isEqualTo(1);
|
||||
assertThat(result.pageSize()).isEqualTo(50);
|
||||
assertThat(result.totalPages()).isEqualTo(3); // ceil(120 / 50)
|
||||
}
|
||||
|
||||
@Test
|
||||
void paged_factory_totalPages_rounds_up_on_remainder() {
|
||||
DocumentSearchResult result =
|
||||
DocumentSearchResult.paged(List.of(), PageRequest.of(0, 7), 30L);
|
||||
|
||||
assertThat(result.totalPages()).isEqualTo(5); // ceil(30 / 7)
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -53,9 +87,18 @@ class DocumentSearchResultTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
void total_component_is_annotated_as_required_in_openapi_schema() throws NoSuchFieldException {
|
||||
Schema schema = DocumentSearchResult.class.getDeclaredField("total").getAnnotation(Schema.class);
|
||||
void totalElements_component_is_annotated_as_required_in_openapi_schema() throws NoSuchFieldException {
|
||||
Schema schema = DocumentSearchResult.class.getDeclaredField("totalElements").getAnnotation(Schema.class);
|
||||
assertThat(schema).isNotNull();
|
||||
assertThat(schema.requiredMode()).isEqualTo(Schema.RequiredMode.REQUIRED);
|
||||
}
|
||||
|
||||
@Test
|
||||
void paging_components_are_annotated_as_required_in_openapi_schema() throws NoSuchFieldException {
|
||||
for (String name : List.of("pageNumber", "pageSize", "totalPages")) {
|
||||
Schema schema = DocumentSearchResult.class.getDeclaredField(name).getAnnotation(Schema.class);
|
||||
assertThat(schema).as(name + " must have @Schema").isNotNull();
|
||||
assertThat(schema.requiredMode()).isEqualTo(Schema.RequiredMode.REQUIRED);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,137 @@
|
||||
package org.raddatz.familienarchiv.service;
|
||||
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.raddatz.familienarchiv.PostgresContainerConfig;
|
||||
import org.raddatz.familienarchiv.dto.DocumentSearchResult;
|
||||
import org.raddatz.familienarchiv.dto.DocumentSort;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.model.DocumentStatus;
|
||||
import org.raddatz.familienarchiv.repository.DocumentRepository;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
import org.springframework.context.annotation.Import;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.test.annotation.DirtiesContext;
|
||||
import org.springframework.test.context.ActiveProfiles;
|
||||
import org.springframework.test.context.bean.override.mockito.MockitoBean;
|
||||
import software.amazon.awssdk.services.s3.S3Client;
|
||||
|
||||
import java.time.LocalDate;
|
||||
import java.util.UUID;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
/**
|
||||
* End-to-end paged search test with real PostgreSQL (Testcontainers). Covers the
|
||||
* Specification→Pageable→Page→DTO path that unit tests mock around. Seeds 120
|
||||
* UPLOADED documents and asserts the slice/total/totalPages arithmetic holds
|
||||
* against the actual JPA query.
|
||||
*
|
||||
* <p>Closes the integration-coverage gap Sara flagged on PR #316.
|
||||
*/
|
||||
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE)
|
||||
@ActiveProfiles("test")
|
||||
@Import(PostgresContainerConfig.class)
|
||||
@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_EACH_TEST_METHOD)
|
||||
class DocumentSearchPagedIntegrationTest {
|
||||
|
||||
private static final int FIXTURE_SIZE = 120;
|
||||
|
||||
@MockitoBean S3Client s3Client;
|
||||
@Autowired DocumentService documentService;
|
||||
@Autowired DocumentRepository documentRepository;
|
||||
|
||||
@BeforeEach
|
||||
void seed() {
|
||||
// Deterministic date spread so DATE-DESC order is predictable:
|
||||
// document #0 has the oldest date, document #119 has the newest.
|
||||
for (int i = 0; i < FIXTURE_SIZE; i++) {
|
||||
Document doc = Document.builder()
|
||||
.title("Dok-" + String.format("%03d", i))
|
||||
.originalFilename("dok-" + i + ".pdf")
|
||||
.status(DocumentStatus.UPLOADED)
|
||||
.documentDate(LocalDate.of(1900, 1, 1).plusDays(i))
|
||||
.build();
|
||||
documentRepository.save(doc);
|
||||
}
|
||||
assertThat(documentRepository.count()).isEqualTo(FIXTURE_SIZE);
|
||||
}
|
||||
|
||||
@Test
|
||||
void search_firstPage_returnsExactlyPageSizeItems_andCorrectTotalElements() {
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
null, null, null, null, null, null, null, null,
|
||||
DocumentSort.DATE, "DESC", null,
|
||||
PageRequest.of(0, 50));
|
||||
|
||||
assertThat(result.items()).hasSize(50);
|
||||
assertThat(result.totalElements()).isEqualTo(FIXTURE_SIZE);
|
||||
assertThat(result.pageNumber()).isZero();
|
||||
assertThat(result.pageSize()).isEqualTo(50);
|
||||
assertThat(result.totalPages()).isEqualTo(3); // ceil(120 / 50)
|
||||
}
|
||||
|
||||
@Test
|
||||
void search_lastPartialPage_returnsRemainingItems() {
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
null, null, null, null, null, null, null, null,
|
||||
DocumentSort.DATE, "DESC", null,
|
||||
PageRequest.of(2, 50));
|
||||
|
||||
// Page 2 (offset 100) of 120 docs → exactly 20 items on the tail.
|
||||
assertThat(result.items()).hasSize(20);
|
||||
assertThat(result.totalElements()).isEqualTo(FIXTURE_SIZE);
|
||||
assertThat(result.pageNumber()).isEqualTo(2);
|
||||
}
|
||||
|
||||
@Test
|
||||
void search_pageBeyondLast_returnsEmptyContent_totalElementsStillCorrect() {
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
null, null, null, null, null, null, null, null,
|
||||
DocumentSort.DATE, "DESC", null,
|
||||
PageRequest.of(99, 50));
|
||||
|
||||
assertThat(result.items()).isEmpty();
|
||||
assertThat(result.totalElements()).isEqualTo(FIXTURE_SIZE);
|
||||
}
|
||||
|
||||
@Test
|
||||
void search_senderSort_pageOne_slicesInMemory_withCorrectTotal() {
|
||||
// SENDER sort path fetches all + sorts + slices in-memory (see scaling
|
||||
// comment in DocumentService). Proves that the in-memory slice path
|
||||
// returns the correct total from a real repository fetch.
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
null, null, null, null, null, null, null, null,
|
||||
DocumentSort.SENDER, "asc", null,
|
||||
PageRequest.of(1, 50));
|
||||
|
||||
assertThat(result.items()).hasSize(50);
|
||||
assertThat(result.totalElements()).isEqualTo(FIXTURE_SIZE);
|
||||
assertThat(result.pageNumber()).isEqualTo(1);
|
||||
assertThat(result.totalPages()).isEqualTo(3);
|
||||
}
|
||||
|
||||
@Test
|
||||
void search_differentPagesReturnDisjointSlices() {
|
||||
DocumentSearchResult page0 = documentService.searchDocuments(
|
||||
null, null, null, null, null, null, null, null,
|
||||
DocumentSort.DATE, "DESC", null,
|
||||
PageRequest.of(0, 50));
|
||||
DocumentSearchResult page1 = documentService.searchDocuments(
|
||||
null, null, null, null, null, null, null, null,
|
||||
DocumentSort.DATE, "DESC", null,
|
||||
PageRequest.of(1, 50));
|
||||
|
||||
// No document id should appear on both pages — slicing must be exclusive.
|
||||
var idsOnPage0 = page0.items().stream()
|
||||
.map(item -> item.document().getId())
|
||||
.toList();
|
||||
var idsOnPage1 = page1.items().stream()
|
||||
.map(item -> item.document().getId())
|
||||
.toList();
|
||||
for (UUID id : idsOnPage0) {
|
||||
assertThat(idsOnPage1).doesNotContain(id);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -11,7 +11,8 @@ import org.raddatz.familienarchiv.dto.DocumentSort;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.model.DocumentStatus;
|
||||
import org.raddatz.familienarchiv.repository.DocumentRepository;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.domain.PageImpl;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.jpa.domain.Specification;
|
||||
|
||||
import java.time.LocalDate;
|
||||
@@ -25,6 +26,8 @@ import static org.mockito.Mockito.when;
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class DocumentServiceSortTest {
|
||||
|
||||
private static final Pageable UNPAGED = org.springframework.data.domain.PageRequest.of(0, 10_000);
|
||||
|
||||
@Mock DocumentRepository documentRepository;
|
||||
@Mock PersonService personService;
|
||||
@Mock FileService fileService;
|
||||
@@ -51,12 +54,12 @@ class DocumentServiceSortTest {
|
||||
|
||||
// FTS returns id1 first (higher rank), id2 second
|
||||
when(documentRepository.findRankedIdsByFts("Brief")).thenReturn(List.of(id1, id2));
|
||||
// findAll(spec, sort) — the correct date path — returns date-DESC order
|
||||
when(documentRepository.findAll(any(Specification.class), any(Sort.class)))
|
||||
.thenReturn(List.of(newer, older));
|
||||
// findAll(spec, pageable) — the correct date path — returns date-DESC order
|
||||
when(documentRepository.findAll(any(Specification.class), any(Pageable.class)))
|
||||
.thenReturn(new PageImpl<>(List.of(newer, older)));
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
"Brief", null, null, null, null, null, null, null, DocumentSort.DATE, "DESC", null);
|
||||
"Brief", null, null, null, null, null, null, null, DocumentSort.DATE, "DESC", null, UNPAGED);
|
||||
|
||||
// Expect: date order (newer 1960 first), NOT rank order (older 1940 first)
|
||||
assertThat(result.items()).hasSize(2);
|
||||
@@ -78,7 +81,7 @@ class DocumentServiceSortTest {
|
||||
.thenReturn(List.of(doc2, doc1)); // unordered from DB
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
"Brief", null, null, null, null, null, null, null, DocumentSort.RELEVANCE, null, null);
|
||||
"Brief", null, null, null, null, null, null, null, DocumentSort.RELEVANCE, null, null, UNPAGED);
|
||||
|
||||
// Expect: rank order restored (id1 first)
|
||||
assertThat(result.items().get(0).document().getId()).isEqualTo(id1);
|
||||
@@ -97,7 +100,7 @@ class DocumentServiceSortTest {
|
||||
.thenReturn(List.of(doc2, doc1));
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
"Brief", null, null, null, null, null, null, null, null, null, null);
|
||||
"Brief", null, null, null, null, null, null, null, null, null, null, UNPAGED);
|
||||
|
||||
assertThat(result.items().get(0).document().getId()).isEqualTo(id1);
|
||||
}
|
||||
|
||||
@@ -24,6 +24,7 @@ import org.raddatz.familienarchiv.model.Tag;
|
||||
import org.raddatz.familienarchiv.repository.DocumentRepository;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.PageImpl;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.mock.web.MockMultipartFile;
|
||||
@@ -46,6 +47,12 @@ import static org.mockito.Mockito.*;
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class DocumentServiceTest {
|
||||
|
||||
// Used by tests that don't care about paging. 10 000 is chosen large enough
|
||||
// to hold any fixture in this file but small enough that totalPages math
|
||||
// stays in int range. Writing a literal `PageRequest.of(0, 10_000)` elsewhere
|
||||
// is a red flag — use this constant instead.
|
||||
private static final Pageable UNPAGED = PageRequest.of(0, 10_000);
|
||||
|
||||
@Mock DocumentRepository documentRepository;
|
||||
@Mock PersonService personService;
|
||||
@Mock FileService fileService;
|
||||
@@ -1323,26 +1330,124 @@ class DocumentServiceTest {
|
||||
assertThat(result).isNull();
|
||||
}
|
||||
|
||||
// ─── searchDocuments — pagination ────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
void searchDocuments_fastPath_usesFindAllWithPageable_notWithSort() {
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Pageable.class)))
|
||||
.thenReturn(new PageImpl<>(List.of()));
|
||||
|
||||
documentService.searchDocuments(null, null, null, null, null, null, null, null,
|
||||
org.raddatz.familienarchiv.dto.DocumentSort.DATE, "DESC", null,
|
||||
org.springframework.data.domain.PageRequest.of(1, 50));
|
||||
|
||||
verify(documentRepository).findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Pageable.class));
|
||||
verify(documentRepository, never()).findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Sort.class));
|
||||
}
|
||||
|
||||
@Test
|
||||
void searchDocuments_fastPath_propagatesPageableToDatabase() {
|
||||
ArgumentCaptor<Pageable> captor = ArgumentCaptor.forClass(Pageable.class);
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Pageable.class)))
|
||||
.thenReturn(new PageImpl<>(List.of()));
|
||||
|
||||
documentService.searchDocuments(null, null, null, null, null, null, null, null,
|
||||
org.raddatz.familienarchiv.dto.DocumentSort.DATE, "DESC", null,
|
||||
org.springframework.data.domain.PageRequest.of(3, 25));
|
||||
|
||||
verify(documentRepository).findAll(any(org.springframework.data.jpa.domain.Specification.class), captor.capture());
|
||||
assertThat(captor.getValue().getPageNumber()).isEqualTo(3);
|
||||
assertThat(captor.getValue().getPageSize()).isEqualTo(25);
|
||||
}
|
||||
|
||||
@Test
|
||||
void searchDocuments_fastPath_returnsPageableTotalsOnResult() {
|
||||
// The service MUST report the full match count from Page.getTotalElements(),
|
||||
// not the slice size — otherwise the frontend's "N Briefe gefunden" label is wrong.
|
||||
Document d = Document.builder().id(UUID.randomUUID()).title("T").build();
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Pageable.class)))
|
||||
.thenReturn(new PageImpl<>(List.of(d), org.springframework.data.domain.PageRequest.of(0, 50), 120L));
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(null, null, null, null, null, null, null, null,
|
||||
org.raddatz.familienarchiv.dto.DocumentSort.DATE, "DESC", null,
|
||||
org.springframework.data.domain.PageRequest.of(0, 50));
|
||||
|
||||
assertThat(result.totalElements()).isEqualTo(120L);
|
||||
assertThat(result.pageNumber()).isZero();
|
||||
assertThat(result.pageSize()).isEqualTo(50);
|
||||
assertThat(result.totalPages()).isEqualTo(3); // ceil(120/50)
|
||||
assertThat(result.items()).hasSize(1); // only the slice is enriched
|
||||
}
|
||||
|
||||
@Test
|
||||
void searchDocuments_senderSort_slicesInMemoryAndReportsFullTotal() {
|
||||
// Fixture: 120 docs with senders; request page 1, size 50 → expect 50 items
|
||||
// back with totalElements = 120.
|
||||
List<Document> all = new java.util.ArrayList<>();
|
||||
for (int i = 0; i < 120; i++) {
|
||||
Person p = Person.builder()
|
||||
.id(UUID.randomUUID())
|
||||
.firstName("F" + i)
|
||||
.lastName(String.format("L%03d", i))
|
||||
.build();
|
||||
all.add(Document.builder().id(UUID.randomUUID()).title("D" + i).sender(p).build());
|
||||
}
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class)))
|
||||
.thenReturn(all);
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(null, null, null, null, null, null, null, null,
|
||||
org.raddatz.familienarchiv.dto.DocumentSort.SENDER, "asc", null,
|
||||
org.springframework.data.domain.PageRequest.of(1, 50));
|
||||
|
||||
assertThat(result.totalElements()).isEqualTo(120L);
|
||||
assertThat(result.pageNumber()).isEqualTo(1);
|
||||
assertThat(result.pageSize()).isEqualTo(50);
|
||||
assertThat(result.totalPages()).isEqualTo(3);
|
||||
assertThat(result.items()).hasSize(50);
|
||||
// Page 1 (offset 50) under ascending sender sort should start at L050
|
||||
assertThat(result.items().get(0).document().getSender().getLastName()).isEqualTo("L050");
|
||||
}
|
||||
|
||||
@Test
|
||||
void searchDocuments_pageBeyondLast_returnsEmptyContentAndCorrectTotal() {
|
||||
// Guards the JPA edge case where page * size > totalElements.
|
||||
// Must not throw, must return empty content + correct totalElements.
|
||||
List<Document> all = new java.util.ArrayList<>();
|
||||
for (int i = 0; i < 30; i++) {
|
||||
Person p = Person.builder().id(UUID.randomUUID()).lastName(String.format("L%02d", i)).build();
|
||||
all.add(Document.builder().id(UUID.randomUUID()).title("D" + i).sender(p).build());
|
||||
}
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class)))
|
||||
.thenReturn(all);
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(null, null, null, null, null, null, null, null,
|
||||
org.raddatz.familienarchiv.dto.DocumentSort.SENDER, "asc", null,
|
||||
org.springframework.data.domain.PageRequest.of(10, 50));
|
||||
|
||||
assertThat(result.items()).isEmpty();
|
||||
assertThat(result.totalElements()).isEqualTo(30L);
|
||||
}
|
||||
|
||||
// ─── searchDocuments — status filter ─────────────────────────────────────
|
||||
|
||||
@Test
|
||||
void searchDocuments_passesStatusSpecificationToRepository() {
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Sort.class)))
|
||||
.thenReturn(List.of());
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Pageable.class)))
|
||||
.thenReturn(new PageImpl<>(List.of()));
|
||||
|
||||
documentService.searchDocuments(null, null, null, null, null, null, null, DocumentStatus.REVIEWED, null, null, null);
|
||||
documentService.searchDocuments(null, null, null, null, null, null, null, DocumentStatus.REVIEWED, null, null, null, UNPAGED);
|
||||
|
||||
verify(documentRepository).findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Sort.class));
|
||||
verify(documentRepository).findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Pageable.class));
|
||||
}
|
||||
|
||||
@Test
|
||||
void searchDocuments_withNullStatus_doesNotFilterByStatus() {
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Sort.class)))
|
||||
.thenReturn(List.of());
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Pageable.class)))
|
||||
.thenReturn(new PageImpl<>(List.of()));
|
||||
|
||||
documentService.searchDocuments(null, null, null, null, null, null, null, null, null, null, null);
|
||||
documentService.searchDocuments(null, null, null, null, null, null, null, null, null, null, null, UNPAGED);
|
||||
|
||||
verify(documentRepository).findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Sort.class));
|
||||
verify(documentRepository).findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Pageable.class));
|
||||
}
|
||||
|
||||
// ─── getRecentActivity ────────────────────────────────────────────────────
|
||||
@@ -1418,7 +1523,7 @@ class DocumentServiceTest {
|
||||
.thenReturn(List.of(withSender, noSender));
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
null, null, null, null, null, null, null, null, DocumentSort.SENDER, "asc", null);
|
||||
null, null, null, null, null, null, null, null, DocumentSort.SENDER, "asc", null, UNPAGED);
|
||||
|
||||
assertThat(result.items()).hasSize(2);
|
||||
assertThat(result.items()).extracting(item -> item.document().getTitle()).containsExactly("Has Sender", "No Sender");
|
||||
@@ -1438,7 +1543,7 @@ class DocumentServiceTest {
|
||||
.thenReturn(List.of(noReceivers, withReceiver));
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
null, null, null, null, null, null, null, null, DocumentSort.RECEIVER, "asc", null);
|
||||
null, null, null, null, null, null, null, null, DocumentSort.RECEIVER, "asc", null, UNPAGED);
|
||||
|
||||
assertThat(result.items()).extracting(item -> item.document().getTitle())
|
||||
.containsExactly("Has Receiver", "No Receivers");
|
||||
@@ -1460,7 +1565,7 @@ class DocumentServiceTest {
|
||||
.thenReturn(List.of(docNullName, docSmith));
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
null, null, null, null, null, null, null, null, DocumentSort.SENDER, "asc", null);
|
||||
null, null, null, null, null, null, null, null, DocumentSort.SENDER, "asc", null, UNPAGED);
|
||||
|
||||
// null lastName should sort to end (treated as empty), not before "smith" (as "null")
|
||||
assertThat(result.items()).extracting(item -> item.document().getTitle())
|
||||
@@ -1482,7 +1587,7 @@ class DocumentServiceTest {
|
||||
when(documentRepository.findEnrichmentData(any(), eq("Brief"))).thenReturn(rows);
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
"Brief", null, null, null, null, null, null, null, DocumentSort.RELEVANCE, null, null);
|
||||
"Brief", null, null, null, null, null, null, null, DocumentSort.RELEVANCE, null, null, UNPAGED);
|
||||
|
||||
assertThat(result.items()).hasSize(1);
|
||||
SearchMatchData md = result.items().get(0).matchData();
|
||||
@@ -1492,11 +1597,12 @@ class DocumentServiceTest {
|
||||
|
||||
@Test
|
||||
void searchDocuments_withoutTextQuery_returnsEmptyMatchData() {
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Sort.class)))
|
||||
.thenReturn(List.of());
|
||||
when(documentRepository.findAll(any(org.springframework.data.jpa.domain.Specification.class), any(Pageable.class)))
|
||||
.thenReturn(new PageImpl<>(List.of()));
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
null, null, null, null, null, null, null, null, null, null, null);
|
||||
null, null, null, null, null, null, null, null, null, null, null,
|
||||
UNPAGED);
|
||||
|
||||
assertThat(result.items()).isEmpty();
|
||||
}
|
||||
@@ -1515,7 +1621,7 @@ class DocumentServiceTest {
|
||||
when(documentRepository.findEnrichmentData(any(), eq("Brief"))).thenReturn(rows);
|
||||
|
||||
DocumentSearchResult result = documentService.searchDocuments(
|
||||
"Brief", null, null, null, null, null, null, null, DocumentSort.RELEVANCE, null, null);
|
||||
"Brief", null, null, null, null, null, null, null, DocumentSort.RELEVANCE, null, null, UNPAGED);
|
||||
|
||||
SearchMatchData md = result.items().get(0).matchData();
|
||||
assertThat(md.transcriptionSnippet()).isEqualTo("Hier ist der Brief aus Berlin");
|
||||
|
||||
@@ -806,5 +806,9 @@
|
||||
"chronik_load_more": "Mehr laden",
|
||||
"chronik_loading": "Lädt …",
|
||||
"chronik_load_more_announcement": "{count} weitere Einträge geladen",
|
||||
"chronik_view_all": "Alle Aktivitäten →"
|
||||
"chronik_view_all": "Alle Aktivitäten →",
|
||||
"pagination_prev": "Zurück",
|
||||
"pagination_next": "Weiter",
|
||||
"pagination_page_of": "Seite {page} von {total}",
|
||||
"pagination_nav_label": "Seitennavigation"
|
||||
}
|
||||
|
||||
@@ -806,5 +806,9 @@
|
||||
"chronik_load_more": "Load more",
|
||||
"chronik_loading": "Loading …",
|
||||
"chronik_load_more_announcement": "{count} more entries loaded",
|
||||
"chronik_view_all": "All activity →"
|
||||
"chronik_view_all": "All activity →",
|
||||
"pagination_prev": "Previous",
|
||||
"pagination_next": "Next",
|
||||
"pagination_page_of": "Page {page} of {total}",
|
||||
"pagination_nav_label": "Pagination"
|
||||
}
|
||||
|
||||
@@ -806,5 +806,9 @@
|
||||
"chronik_load_more": "Cargar más",
|
||||
"chronik_loading": "Cargando …",
|
||||
"chronik_load_more_announcement": "{count} entradas más cargadas",
|
||||
"chronik_view_all": "Todas las actividades →"
|
||||
"chronik_view_all": "Todas las actividades →",
|
||||
"pagination_prev": "Anterior",
|
||||
"pagination_next": "Siguiente",
|
||||
"pagination_page_of": "Página {page} de {total}",
|
||||
"pagination_nav_label": "Paginación"
|
||||
}
|
||||
|
||||
80
frontend/src/lib/components/Pagination.svelte
Normal file
80
frontend/src/lib/components/Pagination.svelte
Normal file
@@ -0,0 +1,80 @@
|
||||
<script lang="ts">
|
||||
import * as m from '$lib/paraglide/messages.js';
|
||||
|
||||
interface Props {
|
||||
/** 0-indexed current page. */
|
||||
page: number;
|
||||
/** Total number of pages. `0` or `1` hides the control as trivially there's nothing to navigate. */
|
||||
totalPages: number;
|
||||
/** Given a 0-indexed page number, returns the href the link should point at. */
|
||||
makeHref: (page: number) => string;
|
||||
/** Optional override for the outer `<nav>`'s aria-label. */
|
||||
ariaLabel?: string;
|
||||
}
|
||||
|
||||
const { page, totalPages, makeHref, ariaLabel }: Props = $props();
|
||||
|
||||
const hasPrev = $derived(page > 0);
|
||||
const hasNext = $derived(page < totalPages - 1);
|
||||
const controlBase =
|
||||
'inline-flex min-h-[44px] min-w-[44px] items-center justify-center gap-1.5 rounded-sm border border-line bg-white px-4 py-2 font-sans text-sm font-bold text-ink';
|
||||
const linkBase = `${controlBase} transition-colors hover:bg-surface focus-visible:ring-2 focus-visible:ring-brand-navy focus-visible:ring-offset-2 focus-visible:outline-none`;
|
||||
const disabledBase = `${controlBase} cursor-not-allowed opacity-40`;
|
||||
</script>
|
||||
|
||||
{#if totalPages > 1}
|
||||
<nav
|
||||
aria-label={ariaLabel ?? m.pagination_nav_label()}
|
||||
class="mt-6 flex flex-col items-center gap-3 sm:flex-row sm:justify-between"
|
||||
>
|
||||
<!--
|
||||
At the bounds we render a <span aria-hidden="true"> instead of an
|
||||
<a aria-disabled>. aria-disabled on a link is the documented pattern
|
||||
but screen readers still announce "Previous, link, disabled" — which
|
||||
is confusing on a pagination control where the disabled state is
|
||||
purely visual. Hiding the element from the AT tree entirely is the
|
||||
cleaner semantic.
|
||||
-->
|
||||
{#if hasPrev}
|
||||
<a
|
||||
data-testid="pagination-prev"
|
||||
aria-label={m.pagination_prev()}
|
||||
href={makeHref(page - 1)}
|
||||
class={linkBase}
|
||||
>
|
||||
<span aria-hidden="true">«</span>
|
||||
{m.pagination_prev()}
|
||||
</a>
|
||||
{:else}
|
||||
<span data-testid="pagination-prev" aria-hidden="true" class={disabledBase}>
|
||||
<span aria-hidden="true">«</span>
|
||||
{m.pagination_prev()}
|
||||
</span>
|
||||
{/if}
|
||||
|
||||
<span
|
||||
data-testid="pagination-page-label"
|
||||
aria-current="page"
|
||||
class="font-sans text-sm text-ink-2"
|
||||
>
|
||||
{m.pagination_page_of({ page: page + 1, total: totalPages })}
|
||||
</span>
|
||||
|
||||
{#if hasNext}
|
||||
<a
|
||||
data-testid="pagination-next"
|
||||
aria-label={m.pagination_next()}
|
||||
href={makeHref(page + 1)}
|
||||
class={linkBase}
|
||||
>
|
||||
{m.pagination_next()}
|
||||
<span aria-hidden="true">»</span>
|
||||
</a>
|
||||
{:else}
|
||||
<span data-testid="pagination-next" aria-hidden="true" class={disabledBase}>
|
||||
{m.pagination_next()}
|
||||
<span aria-hidden="true">»</span>
|
||||
</span>
|
||||
{/if}
|
||||
</nav>
|
||||
{/if}
|
||||
86
frontend/src/lib/components/Pagination.svelte.spec.ts
Normal file
86
frontend/src/lib/components/Pagination.svelte.spec.ts
Normal file
@@ -0,0 +1,86 @@
|
||||
import { describe, it, expect, afterEach, vi } from 'vitest';
|
||||
import { cleanup, render } from 'vitest-browser-svelte';
|
||||
import { page } from 'vitest/browser';
|
||||
|
||||
import Pagination from './Pagination.svelte';
|
||||
|
||||
afterEach(() => {
|
||||
cleanup();
|
||||
});
|
||||
|
||||
const makeHref = (p: number) => `/documents?page=${p}`;
|
||||
|
||||
describe('Pagination', () => {
|
||||
it('renders the page-of-total label for the current page', async () => {
|
||||
render(Pagination, { page: 2, totalPages: 10, makeHref });
|
||||
|
||||
const label = page.getByTestId('pagination-page-label');
|
||||
await expect.element(label).toHaveTextContent(/3/); // page is 0-indexed, label is 1-indexed
|
||||
await expect.element(label).toHaveTextContent(/10/);
|
||||
});
|
||||
|
||||
it('marks the current page label with aria-current="page"', async () => {
|
||||
render(Pagination, { page: 0, totalPages: 3, makeHref });
|
||||
|
||||
const label = page.getByTestId('pagination-page-label');
|
||||
await expect.element(label).toHaveAttribute('aria-current', 'page');
|
||||
});
|
||||
|
||||
it('renders prev as a link pointing at page - 1 when not on first page', async () => {
|
||||
render(Pagination, { page: 4, totalPages: 10, makeHref });
|
||||
|
||||
const prev = page.getByTestId('pagination-prev');
|
||||
await expect.element(prev).toHaveAttribute('href', '/documents?page=3');
|
||||
});
|
||||
|
||||
it('renders disabled prev as an aria-hidden non-link so screen readers skip it', async () => {
|
||||
render(Pagination, { page: 0, totalPages: 3, makeHref });
|
||||
|
||||
const prev = page.getByTestId('pagination-prev');
|
||||
// Not a link — no href, no role=link
|
||||
await expect.element(prev).not.toHaveAttribute('href');
|
||||
// Hidden from assistive tech — AT shouldn't read "Previous, link, disabled"
|
||||
await expect.element(prev).toHaveAttribute('aria-hidden', 'true');
|
||||
});
|
||||
|
||||
it('renders next as a link pointing at page + 1 when not on last page', async () => {
|
||||
render(Pagination, { page: 0, totalPages: 3, makeHref });
|
||||
|
||||
const next = page.getByTestId('pagination-next');
|
||||
await expect.element(next).toHaveAttribute('href', '/documents?page=1');
|
||||
});
|
||||
|
||||
it('renders disabled next as an aria-hidden non-link on the last page', async () => {
|
||||
render(Pagination, { page: 2, totalPages: 3, makeHref });
|
||||
|
||||
const next = page.getByTestId('pagination-next');
|
||||
await expect.element(next).not.toHaveAttribute('href');
|
||||
await expect.element(next).toHaveAttribute('aria-hidden', 'true');
|
||||
});
|
||||
|
||||
it('calls makeHref with p-1 and p+1', async () => {
|
||||
const spy = vi.fn(makeHref);
|
||||
render(Pagination, { page: 3, totalPages: 10, makeHref: spy });
|
||||
|
||||
const calls = spy.mock.calls.map((c) => c[0]).sort((a, b) => a - b);
|
||||
expect(calls).toContain(2);
|
||||
expect(calls).toContain(4);
|
||||
});
|
||||
|
||||
it('renders decorative chevron inside aria-hidden span so screen readers skip it', async () => {
|
||||
render(Pagination, { page: 1, totalPages: 3, makeHref });
|
||||
|
||||
const prev = page.getByTestId('pagination-prev');
|
||||
await expect
|
||||
.element(prev.getByText('«', { exact: true }))
|
||||
.toHaveAttribute('aria-hidden', 'true');
|
||||
});
|
||||
|
||||
it('prev and next have min 44px touch targets', async () => {
|
||||
render(Pagination, { page: 1, totalPages: 3, makeHref });
|
||||
|
||||
const prev = page.getByTestId('pagination-prev');
|
||||
await expect.element(prev).toHaveClass(/min-h-\[44px\]/);
|
||||
await expect.element(prev).toHaveClass(/min-w-\[44px\]/);
|
||||
});
|
||||
});
|
||||
@@ -1921,7 +1921,13 @@ export interface components {
|
||||
DocumentSearchResult: {
|
||||
items: components["schemas"]["DocumentSearchItem"][];
|
||||
/** Format: int64 */
|
||||
total: number;
|
||||
totalElements: number;
|
||||
/** Format: int32 */
|
||||
pageNumber: number;
|
||||
/** Format: int32 */
|
||||
pageSize: number;
|
||||
/** Format: int32 */
|
||||
totalPages: number;
|
||||
};
|
||||
MatchOffset: {
|
||||
/** Format: int32 */
|
||||
@@ -4032,6 +4038,16 @@ export interface operations {
|
||||
dir?: string;
|
||||
/** @description Tag operator: AND (default) or OR */
|
||||
tagOp?: string;
|
||||
/**
|
||||
* @description Page number (0-indexed)
|
||||
* @default 0
|
||||
*/
|
||||
page?: number;
|
||||
/**
|
||||
* @description Page size (max 100)
|
||||
* @default 50
|
||||
*/
|
||||
size?: number;
|
||||
};
|
||||
header?: never;
|
||||
path?: never;
|
||||
|
||||
@@ -10,6 +10,8 @@ type ValidSort = (typeof VALID_SORTS)[number];
|
||||
const VALID_DIRS = ['asc', 'desc'] as const;
|
||||
type ValidDir = (typeof VALID_DIRS)[number];
|
||||
|
||||
const PAGE_SIZE = 50;
|
||||
|
||||
export async function load({ url, fetch }) {
|
||||
const q = url.searchParams.get('q') || '';
|
||||
const from = url.searchParams.get('from') || '';
|
||||
@@ -27,6 +29,7 @@ export async function load({ url, fetch }) {
|
||||
: 'desc';
|
||||
const tagQ = url.searchParams.get('tagQ') || '';
|
||||
const tagOp = url.searchParams.get('tagOp') === 'OR' ? 'OR' : 'AND';
|
||||
const page = Math.max(0, Number(url.searchParams.get('page') ?? '0') || 0);
|
||||
|
||||
const api = createApiClient(fetch);
|
||||
|
||||
@@ -44,14 +47,19 @@ export async function load({ url, fetch }) {
|
||||
tagQ: tagQ && !tags.length ? tagQ : undefined,
|
||||
tagOp: tagOp === 'OR' ? 'OR' : undefined,
|
||||
sort,
|
||||
dir: dir || undefined
|
||||
dir: dir || undefined,
|
||||
page,
|
||||
size: PAGE_SIZE
|
||||
}
|
||||
}
|
||||
});
|
||||
} catch {
|
||||
return {
|
||||
items: [] as DocumentSearchItem[],
|
||||
total: 0,
|
||||
totalElements: 0,
|
||||
pageNumber: 0,
|
||||
pageSize: PAGE_SIZE,
|
||||
totalPages: 0,
|
||||
q,
|
||||
from,
|
||||
to,
|
||||
@@ -77,7 +85,10 @@ export async function load({ url, fetch }) {
|
||||
|
||||
return {
|
||||
items: (result.data?.items ?? []) as DocumentSearchItem[],
|
||||
total: result.data?.total ?? 0,
|
||||
totalElements: result.data?.totalElements ?? 0,
|
||||
pageNumber: result.data?.pageNumber ?? page,
|
||||
pageSize: result.data?.pageSize ?? PAGE_SIZE,
|
||||
totalPages: result.data?.totalPages ?? 0,
|
||||
q,
|
||||
from,
|
||||
to,
|
||||
|
||||
@@ -5,6 +5,7 @@ import { untrack } from 'svelte';
|
||||
import { SvelteURLSearchParams } from 'svelte/reactivity';
|
||||
import SearchFilterBar from '../SearchFilterBar.svelte';
|
||||
import DocumentList from '../DocumentList.svelte';
|
||||
import Pagination from '$lib/components/Pagination.svelte';
|
||||
import * as m from '$lib/paraglide/messages.js';
|
||||
|
||||
let { data } = $props();
|
||||
@@ -35,21 +36,88 @@ let showAdvanced = $state(untrack(hasAdvancedFilters));
|
||||
|
||||
let searchTimer: ReturnType<typeof setTimeout>;
|
||||
|
||||
function triggerSearch() {
|
||||
type FilterSnapshot = {
|
||||
q: string;
|
||||
from: string;
|
||||
to: string;
|
||||
senderId: string;
|
||||
receiverId: string;
|
||||
tags: string[];
|
||||
sort: string;
|
||||
dir: string;
|
||||
tagQ: string;
|
||||
tagOp: 'AND' | 'OR';
|
||||
};
|
||||
|
||||
/**
|
||||
* Builds a URLSearchParams from a filter snapshot. Single source of truth for
|
||||
* which params the `/documents` URL understands — add a filter here and both
|
||||
* filter-change nav (triggerSearch) and page nav (buildPageHref) will pick it
|
||||
* up. `page` is appended only when > 0 so the default page 0 stays out of the
|
||||
* URL, keeping the filter-change-resets-to-page-0 behaviour implicit.
|
||||
*/
|
||||
function buildSearchParams(filters: FilterSnapshot, targetPage?: number): SvelteURLSearchParams {
|
||||
const params = new SvelteURLSearchParams();
|
||||
if (q) params.set('q', q);
|
||||
if (from) params.set('from', from);
|
||||
if (to) params.set('to', to);
|
||||
if (senderId) params.set('senderId', senderId);
|
||||
if (receiverId) params.set('receiverId', receiverId);
|
||||
tagNames.forEach((tag) => params.append('tag', tag.name));
|
||||
if (sort) params.set('sort', sort);
|
||||
if (dir) params.set('dir', dir);
|
||||
if (tagQ) params.set('tagQ', tagQ);
|
||||
if (tagOperator === 'OR') params.set('tagOp', 'OR');
|
||||
if (filters.q) params.set('q', filters.q);
|
||||
if (filters.from) params.set('from', filters.from);
|
||||
if (filters.to) params.set('to', filters.to);
|
||||
if (filters.senderId) params.set('senderId', filters.senderId);
|
||||
if (filters.receiverId) params.set('receiverId', filters.receiverId);
|
||||
filters.tags.forEach((tag) => params.append('tag', tag));
|
||||
if (filters.sort) params.set('sort', filters.sort);
|
||||
if (filters.dir) params.set('dir', filters.dir);
|
||||
if (filters.tagQ) params.set('tagQ', filters.tagQ);
|
||||
if (filters.tagOp === 'OR') params.set('tagOp', 'OR');
|
||||
if (targetPage !== undefined && targetPage > 0) params.set('page', String(targetPage));
|
||||
return params;
|
||||
}
|
||||
|
||||
/**
|
||||
* Rebuilds the URL from the CURRENT local filter state. `page` is intentionally
|
||||
* not carried over — any filter change implicitly resets back to page 0.
|
||||
*/
|
||||
function triggerSearch() {
|
||||
const params = buildSearchParams({
|
||||
q,
|
||||
from,
|
||||
to,
|
||||
senderId,
|
||||
receiverId,
|
||||
tags: tagNames.map((t) => t.name),
|
||||
sort,
|
||||
dir,
|
||||
tagQ,
|
||||
tagOp: tagOperator
|
||||
});
|
||||
goto(`/documents?${params.toString()}`, { keepFocus: true, noScroll: true });
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds the href for a Pagination prev/next link. Preserves every filter
|
||||
* param from server `data` and updates `page`. Uses a normal <a href> (not
|
||||
* goto) so SvelteKit's default scroll restoration brings the user to the top
|
||||
* of the new slice — the expected behaviour for page navigation.
|
||||
*/
|
||||
function buildPageHref(targetPage: number): string {
|
||||
const params = buildSearchParams(
|
||||
{
|
||||
q: data.q || '',
|
||||
from: data.from || '',
|
||||
to: data.to || '',
|
||||
senderId: data.senderId || '',
|
||||
receiverId: data.receiverId || '',
|
||||
tags: data.tags || [],
|
||||
sort: data.sort || '',
|
||||
dir: data.dir || '',
|
||||
tagQ: data.tagQ || '',
|
||||
tagOp: (data.tagOp as 'AND' | 'OR') || 'AND'
|
||||
},
|
||||
targetPage
|
||||
);
|
||||
const qs = params.toString();
|
||||
return qs ? `/documents?${qs}` : '/documents';
|
||||
}
|
||||
|
||||
function handleTextSearch() {
|
||||
clearTimeout(searchTimer);
|
||||
searchTimer = setTimeout(() => triggerSearch(), 500);
|
||||
@@ -115,10 +183,12 @@ $effect(() => {
|
||||
|
||||
<DocumentList
|
||||
items={data.items}
|
||||
total={data.total}
|
||||
total={data.totalElements}
|
||||
q={data.q}
|
||||
canWrite={data.canWrite}
|
||||
error={data.error}
|
||||
sort={sort}
|
||||
/>
|
||||
|
||||
<Pagination page={data.pageNumber} totalPages={data.totalPages} makeHref={buildPageHref} />
|
||||
</main>
|
||||
|
||||
@@ -25,7 +25,7 @@ describe('documents page load — search params', () => {
|
||||
it('passes q, from, to to the search API', async () => {
|
||||
const mockGet = vi.fn().mockResolvedValue({
|
||||
response: { ok: true, status: 200 },
|
||||
data: { items: [], total: 0 }
|
||||
data: { items: [], totalElements: 0, pageNumber: 0, pageSize: 50, totalPages: 0 }
|
||||
});
|
||||
vi.mocked(createApiClient).mockReturnValue({ GET: mockGet } as ReturnType<
|
||||
typeof createApiClient
|
||||
@@ -49,7 +49,7 @@ describe('documents page load — search params', () => {
|
||||
it('passes senderId and receiverId to the search API', async () => {
|
||||
const mockGet = vi.fn().mockResolvedValue({
|
||||
response: { ok: true, status: 200 },
|
||||
data: { items: [], total: 0 }
|
||||
data: { items: [], totalElements: 0, pageNumber: 0, pageSize: 50, totalPages: 0 }
|
||||
});
|
||||
vi.mocked(createApiClient).mockReturnValue({ GET: mockGet } as ReturnType<
|
||||
typeof createApiClient
|
||||
@@ -73,7 +73,7 @@ describe('documents page load — search params', () => {
|
||||
it('passes sort, dir, tagQ to the search API', async () => {
|
||||
const mockGet = vi.fn().mockResolvedValue({
|
||||
response: { ok: true, status: 200 },
|
||||
data: { items: [], total: 0 }
|
||||
data: { items: [], totalElements: 0, pageNumber: 0, pageSize: 50, totalPages: 0 }
|
||||
});
|
||||
vi.mocked(createApiClient).mockReturnValue({ GET: mockGet } as ReturnType<
|
||||
typeof createApiClient
|
||||
@@ -103,7 +103,7 @@ describe('documents page load — search params', () => {
|
||||
};
|
||||
const mockGet = vi.fn().mockResolvedValue({
|
||||
response: { ok: true, status: 200 },
|
||||
data: { items: [item], total: 42 }
|
||||
data: { items: [item], totalElements: 42, pageNumber: 0, pageSize: 50, totalPages: 1 }
|
||||
});
|
||||
vi.mocked(createApiClient).mockReturnValue({ GET: mockGet } as ReturnType<
|
||||
typeof createApiClient
|
||||
@@ -115,13 +115,13 @@ describe('documents page load — search params', () => {
|
||||
});
|
||||
|
||||
expect(result.items).toHaveLength(1);
|
||||
expect(result.total).toBe(42);
|
||||
expect(result.totalElements).toBe(42);
|
||||
});
|
||||
|
||||
it('returns filter values in the result for pre-filling the UI', async () => {
|
||||
const mockGet = vi.fn().mockResolvedValue({
|
||||
response: { ok: true, status: 200 },
|
||||
data: { items: [], total: 0 }
|
||||
data: { items: [], totalElements: 0, pageNumber: 0, pageSize: 50, totalPages: 0 }
|
||||
});
|
||||
vi.mocked(createApiClient).mockReturnValue({ GET: mockGet } as ReturnType<
|
||||
typeof createApiClient
|
||||
|
||||
@@ -118,4 +118,20 @@ describe('documents page — URL building', () => {
|
||||
expect.objectContaining({ keepFocus: true, noScroll: true })
|
||||
);
|
||||
});
|
||||
|
||||
it('filter change does not carry the current page — goto URL drops page param', async () => {
|
||||
const { goto } = await import('$app/navigation');
|
||||
vi.mocked(goto).mockClear();
|
||||
|
||||
// User is mid-way through results at page 5; change the search text.
|
||||
render(Page, { data: makeData({ q: 'old', pageNumber: 5 }) });
|
||||
|
||||
const input = page.getByRole('textbox', { name: SEARCH_LABEL });
|
||||
await input.fill('Brief');
|
||||
vi.advanceTimersByTime(500);
|
||||
|
||||
const [url] = vi.mocked(goto).mock.calls[0];
|
||||
expect(url).toContain('q=Brief');
|
||||
expect(url).not.toContain('page=');
|
||||
});
|
||||
});
|
||||
|
||||
Reference in New Issue
Block a user