feat(documents): extend quick-upload with optional batch metadata part

- Add DocumentBatchMetadataDTO (titles, senderId, receiverIds, documentDate, location, tags, metadataComplete)
- Add BATCH_TOO_LARGE to ErrorCode
- Extend quickUpload to accept optional @RequestPart("metadata"); dispatches to storeDocumentWithBatchMetadata when present
- Cap batch at 50 files/request; reject with 400 when titles.size > files.size
- Add DocumentService.storeDocumentWithBatchMetadata applying shared fields + index-based titles to both created and updated docs
- Raise max-request-size to 500MB (allows a 10-file upload chunk at the 50MB per-file maximum)
- Add structured SLF4J logging for every quickUpload call

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Marcel
2026-04-24 16:01:21 +02:00
parent 6e021fb23a
commit cffdb1fb4b
6 changed files with 212 additions and 4 deletions

View File

@@ -18,6 +18,7 @@ import jakarta.validation.constraints.Min;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.validation.annotation.Validated;
import org.raddatz.familienarchiv.dto.DocumentBatchMetadataDTO;
import org.raddatz.familienarchiv.dto.DocumentSearchResult;
import org.raddatz.familienarchiv.dto.DocumentUpdateDTO;
import org.raddatz.familienarchiv.dto.TagOperator;
@@ -193,6 +194,7 @@ public class DocumentController {
@RequirePermission(Permission.WRITE_ALL)
public QuickUploadResult quickUpload(
@RequestPart(value = "files", required = false) List<MultipartFile> files,
@RequestPart(value = "metadata", required = false) DocumentBatchMetadataDTO metadata,
Authentication authentication) {
List<Document> created = new ArrayList<>();
List<Document> updated = new ArrayList<>();
@@ -202,14 +204,26 @@ public class DocumentController {
return new QuickUploadResult(created, updated, errors);
}
if (files.size() > 50) {
throw DomainException.badRequest(ErrorCode.BATCH_TOO_LARGE, "Batch exceeds maximum of 50 files per request");
}
if (metadata != null && metadata.getTitles() != null && metadata.getTitles().size() > files.size()) {
throw DomainException.badRequest(ErrorCode.VALIDATION_ERROR, "titles count must not exceed files count");
}
UUID actorId = requireUserId(authentication);
for (MultipartFile file : files) {
long totalBytes = files.stream().mapToLong(MultipartFile::getSize).sum();
for (int i = 0; i < files.size(); i++) {
MultipartFile file = files.get(i);
if (!ALLOWED_CONTENT_TYPES.contains(file.getContentType())) {
errors.add(new UploadError(file.getOriginalFilename(), "UNSUPPORTED_FILE_TYPE"));
continue;
}
try {
DocumentService.StoreResult result = documentService.storeDocument(file, actorId);
DocumentService.StoreResult result = metadata != null
? documentService.storeDocumentWithBatchMetadata(file, metadata, i, actorId)
: documentService.storeDocument(file, actorId);
if (result.isNew()) {
created.add(result.document());
} else {
@@ -221,6 +235,10 @@ public class DocumentController {
}
}
log.info("quickUpload actor={} files={} totalBytes={} withMetadata={} created={} updated={} errors={}",
actorId, files.size(), totalBytes, metadata != null,
created.size(), updated.size(), errors.size());
return new QuickUploadResult(created, updated, errors);
}

View File

@@ -0,0 +1,18 @@
package org.raddatz.familienarchiv.dto;
import lombok.Data;
import java.time.LocalDate;
import java.util.List;
import java.util.UUID;
/**
 * Optional metadata part for the quick-upload batch endpoint.
 *
 * <p>All fields are nullable; a {@code null} field means "leave the value produced by the
 * plain upload path untouched". Shared fields (sender, receivers, date, location, tags,
 * metadataComplete) are applied to every file in the batch, while {@code titles} is matched
 * to files by position.
 *
 * <p>Lombok {@code @Data} generates getters/setters plus equals/hashCode/toString.
 */
@Data
public class DocumentBatchMetadataDTO {
    /** Per-file titles, matched to files by index; may be shorter than the file list. */
    private List<String> titles;
    /** Shared sender, applied to every document in the batch. */
    private UUID senderId;
    /** Shared receivers, applied to every document in the batch. */
    private List<UUID> receiverIds;
    /** Shared document date. */
    private LocalDate documentDate;
    /** Shared free-text location. */
    private String location;
    /** Comma-separated tag names; split and trimmed by the service layer. */
    private String tags;
    /** Shared metadata-complete flag; boxed so "not provided" is distinguishable from false. */
    private Boolean metadataComplete;
}

View File

@@ -109,6 +109,8 @@ public enum ErrorCode {
// --- Generic ---
/** Request validation failed (missing or malformed fields). 400 */
VALIDATION_ERROR,
/** Batch upload exceeds the maximum allowed file count per request. 400 */
BATCH_TOO_LARGE,
/** An unexpected server-side error occurred. 500 */
INTERNAL_ERROR,
}

View File

@@ -7,6 +7,7 @@ import org.raddatz.familienarchiv.audit.ActivityActorDTO;
import org.raddatz.familienarchiv.audit.AuditKind;
import org.raddatz.familienarchiv.audit.AuditLogQueryService;
import org.raddatz.familienarchiv.audit.AuditService;
import org.raddatz.familienarchiv.dto.DocumentBatchMetadataDTO;
import org.raddatz.familienarchiv.dto.DocumentSearchItem;
import org.raddatz.familienarchiv.dto.DocumentSearchResult;
import org.raddatz.familienarchiv.dto.DocumentSort;
@@ -132,6 +133,43 @@ public class DocumentService {
return new StoreResult(saved, isNew);
}
/**
 * Stores a single file of a quick-upload batch and applies the optional batch metadata.
 *
 * <p>Delegates to {@link #storeDocument} first, so new/updated detection behaves exactly
 * like a plain quick upload, then overlays the shared fields and the index-based title
 * onto the resulting document. {@code null} metadata fields leave the stored values as-is.
 *
 * @param file      the uploaded file
 * @param metadata  batch metadata; caller guarantees non-null on this path
 * @param fileIndex position of {@code file} within the batch, used to pick its title
 * @param actorId   id of the authenticated user performing the upload
 * @return the persisted document plus the new-vs-updated flag from the base store
 * @throws IOException propagated from {@link #storeDocument}
 */
@Transactional
public StoreResult storeDocumentWithBatchMetadata(
        MultipartFile file, DocumentBatchMetadataDTO metadata, int fileIndex, UUID actorId) throws IOException {
    // Reuse the plain upload path; it decides whether this is a create or an update.
    StoreResult base = storeDocument(file, actorId);
    Document doc = base.document();
    // Index-based title: titles may be shorter than the file list, in which case files
    // beyond the list keep whatever title storeDocument assigned. No blank/null check
    // on the list element here — an explicit null entry would overwrite the title;
    // NOTE(review): confirm that is intended.
    if (metadata.getTitles() != null && fileIndex < metadata.getTitles().size()) {
        doc.setTitle(metadata.getTitles().get(fileIndex));
    }
    if (metadata.getSenderId() != null) {
        doc.setSender(personService.getById(metadata.getSenderId()));
    }
    if (metadata.getReceiverIds() != null && !metadata.getReceiverIds().isEmpty()) {
        doc.setReceivers(new HashSet<>(personService.getAllById(metadata.getReceiverIds())));
    }
    if (metadata.getDocumentDate() != null) {
        doc.setDocumentDate(metadata.getDocumentDate());
    }
    if (metadata.getLocation() != null) {
        doc.setLocation(metadata.getLocation());
    }
    if (metadata.getMetadataComplete() != null) {
        doc.setMetadataComplete(metadata.getMetadataComplete());
    }
    if (metadata.getTags() != null && !metadata.getTags().isBlank()) {
        // Tags arrive as a single comma-separated string; empty segments are dropped.
        List<String> tagNames = Arrays.stream(metadata.getTags().split(","))
                .map(String::trim).filter(s -> !s.isEmpty()).toList();
        UUID docId = doc.getId();
        updateDocumentTags(docId, tagNames);
        // Re-fetch so the returned entity carries the tag changes. NOTE(review): the field
        // mutations above are only preserved here because, inside this transaction, findById
        // should return the same managed instance from the persistence context — if
        // updateDocumentTags detaches/clears the context, those changes would be lost.
        // Confirm updateDocumentTags' transaction/flush behavior.
        doc = documentRepository.findById(docId)
                .orElseThrow(() -> DomainException.notFound(ErrorCode.DOCUMENT_NOT_FOUND, "Not found after batch metadata: " + docId));
    }
    // Explicit save so the metadata overlay is persisted even outside a dirty-checking flush.
    Document saved = documentRepository.save(doc);
    return new StoreResult(saved, base.isNew());
}
@Transactional
public Document createDocument(DocumentUpdateDTO dto, MultipartFile file) throws IOException {
String filename = (file != null && !file.isEmpty())

View File

@@ -23,7 +23,8 @@ spring:
servlet:
multipart:
max-file-size: 50MB
max-request-size: 50MB
max-request-size: 500MB # supports 10-file chunk at max per-file size; see #317
file-size-threshold: 2KB
mail:
host: ${MAIL_HOST:}