init
This commit is contained in:
38
.devcontainer/devcontainer.json
Normal file
38
.devcontainer/devcontainer.json
Normal file
@@ -0,0 +1,38 @@
|
||||
{
|
||||
"name": "Java Backend",
|
||||
"dockerComposeFile": [
|
||||
"../../docker-compose.yml"
|
||||
],
|
||||
"service": "backend",
|
||||
"workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
|
||||
|
||||
// WICHTIG: Features, die im Container installiert werden sollen
|
||||
"features": {
|
||||
"ghcr.io/devcontainers/features/java:1": {
|
||||
"version": "21",
|
||||
"installMaven": "true",
|
||||
"installGradle": "false"
|
||||
},
|
||||
"ghcr.io/devcontainers/features/node:1": {
|
||||
"version": "24"
|
||||
}
|
||||
},
|
||||
|
||||
// VS Code Extensions, die automatisch im Container installiert werden
|
||||
"customizations": {
|
||||
"vscode": {
|
||||
"extensions": [
|
||||
"vscjava.vscode-java-pack", // Java Extension Pack (Pflicht!)
|
||||
"vmware.vscode-spring-boot", // Spring Boot Tools
|
||||
"gabrielbb.vscode-lombok", // Lombok Support (falls genutzt)
|
||||
"humao.rest-client" // Um API Calls direkt zu testen
|
||||
]
|
||||
}
|
||||
},
|
||||
|
||||
// Port 8080 weiterleiten, damit Sie http://localhost:8080 im Browser aufrufen können
|
||||
"forwardPorts": [8080],
|
||||
|
||||
// Nach dem Erstellen: User auf 'vscode' setzen (nicht root, für Sicherheit)
|
||||
"remoteUser": "vscode"
|
||||
}
|
||||
15
.vscode/launch.json
vendored
Normal file
15
.vscode/launch.json
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "java",
|
||||
"name": "FamilienarchivApplication",
|
||||
"request": "launch",
|
||||
"mainClass": "org.raddatz.familienarchiv.FamilienarchivApplication",
|
||||
"projectName": "familienarchiv"
|
||||
}
|
||||
]
|
||||
}
|
||||
4
.vscode/settings.json
vendored
Normal file
4
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"java.configuration.updateBuildConfiguration": "interactive",
|
||||
"java.compile.nullAnalysis.mode": "automatic"
|
||||
}
|
||||
8
workspaces/backend/Dockerfile
Normal file
8
workspaces/backend/Dockerfile
Normal file
@@ -0,0 +1,8 @@
|
||||
# Wir nutzen Java 21 (LTS), da Spring Boot 3 das empfiehlt
|
||||
FROM mcr.microsoft.com/devcontainers/java:1-21-bullseye
|
||||
|
||||
# Optional: Zusätzliche OS-Pakete installieren
|
||||
# RUN apt-get update && apt-get install -y <package-name>
|
||||
|
||||
# Port für Spring Boot
|
||||
EXPOSE 8080
|
||||
49
workspaces/backend/HELP.md
Normal file
49
workspaces/backend/HELP.md
Normal file
@@ -0,0 +1,49 @@
|
||||
# Getting Started
|
||||
|
||||
### Reference Documentation
|
||||
For further reference, please consider the following sections:
|
||||
|
||||
* [Official Apache Maven documentation](https://maven.apache.org/guides/index.html)
|
||||
* [Spring Boot Maven Plugin Reference Guide](https://docs.spring.io/spring-boot/4.0.0/maven-plugin)
|
||||
* [Create an OCI image](https://docs.spring.io/spring-boot/4.0.0/maven-plugin/build-image.html)
|
||||
* [Docker Compose Support](https://docs.spring.io/spring-boot/4.0.0/reference/features/dev-services.html#features.dev-services.docker-compose)
|
||||
* [Spring Web](https://docs.spring.io/spring-boot/4.0.0/reference/web/servlet.html)
|
||||
* [Rest Repositories](https://docs.spring.io/spring-boot/4.0.0/how-to/data-access.html#howto.data-access.exposing-spring-data-repositories-as-rest)
|
||||
* [Spring Session for JDBC](https://docs.spring.io/spring-session/reference/)
|
||||
* [Spring Security](https://docs.spring.io/spring-boot/4.0.0/reference/web/spring-security.html)
|
||||
* [JDBC API](https://docs.spring.io/spring-boot/4.0.0/reference/data/sql.html)
|
||||
* [Spring Data JPA](https://docs.spring.io/spring-boot/4.0.0/reference/data/sql.html#data.sql.jpa-and-spring-data)
|
||||
* [Spring Boot Actuator](https://docs.spring.io/spring-boot/4.0.0/reference/actuator/index.html)
|
||||
|
||||
### Guides
|
||||
The following guides illustrate how to use some features concretely:
|
||||
|
||||
* [Building a RESTful Web Service](https://spring.io/guides/gs/rest-service/)
|
||||
* [Serving Web Content with Spring MVC](https://spring.io/guides/gs/serving-web-content/)
|
||||
* [Building REST services with Spring](https://spring.io/guides/tutorials/rest/)
|
||||
* [Accessing JPA Data with REST](https://spring.io/guides/gs/accessing-data-rest/)
|
||||
* [Accessing Neo4j Data with REST](https://spring.io/guides/gs/accessing-neo4j-data-rest/)
|
||||
* [Accessing MongoDB Data with REST](https://spring.io/guides/gs/accessing-mongodb-data-rest/)
|
||||
* [Securing a Web Application](https://spring.io/guides/gs/securing-web/)
|
||||
* [Spring Boot and OAuth2](https://spring.io/guides/tutorials/spring-boot-oauth2/)
|
||||
* [Authenticating a User with LDAP](https://spring.io/guides/gs/authenticating-ldap/)
|
||||
* [Accessing Relational Data using JDBC with Spring](https://spring.io/guides/gs/relational-data-access/)
|
||||
* [Managing Transactions](https://spring.io/guides/gs/managing-transactions/)
|
||||
* [Accessing Data with JPA](https://spring.io/guides/gs/accessing-data-jpa/)
|
||||
* [Building a RESTful Web Service with Spring Boot Actuator](https://spring.io/guides/gs/actuator-service/)
|
||||
|
||||
### Docker Compose support
|
||||
This project contains a Docker Compose file named `compose.yaml`.
|
||||
In this file, the following services have been defined:
|
||||
|
||||
* postgres: [`postgres:latest`](https://hub.docker.com/_/postgres)
|
||||
|
||||
Please review the tags of the used images and set them to the same as you're running in production.
|
||||
|
||||
### Maven Parent overrides
|
||||
|
||||
Due to Maven's design, elements are inherited from the parent POM to the project POM.
|
||||
While most of the inheritance is fine, it also inherits unwanted elements like `<license>` and `<developers>` from the parent.
|
||||
To prevent this, the project POM contains empty overrides for these elements.
|
||||
If you manually switch to a different parent and actually want the inheritance, you need to remove those overrides.
|
||||
|
||||
31
workspaces/backend/api_tests/Admin-Auth.http
Normal file
31
workspaces/backend/api_tests/Admin-Auth.http
Normal file
@@ -0,0 +1,31 @@
|
||||
### 1. Einen "Gast" User erstellen (Muss vom Admin gemacht werden)
|
||||
# Wir lassen groupIds leer -> Der User hat KEINE Rechte
|
||||
POST http://localhost:8080/api/admin/users
|
||||
Authorization: Basic admin admin123
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"username": "Gast_User",
|
||||
"email": "gast@test.de",
|
||||
"initialPassword": "gast",
|
||||
"groupIds": []
|
||||
}
|
||||
|
||||
### 2. POSITIV-TEST: Admin darf Admin-Endpunkt aufrufen -> Sollte 200 OK sein
|
||||
GET http://localhost:8080/api/admin/users
|
||||
Authorization: Basic admin admin123
|
||||
|
||||
### 3. NEGATIV-TEST: Gast darf KEINEN Admin-Endpunkt aufrufen -> Sollte 403 Forbidden sein
|
||||
GET http://localhost:8080/api/admin/users
|
||||
Authorization: Basic Gast_User gast
|
||||
|
||||
### 4. ABER: Gast darf normale Dokumenten-API nutzen (sofern er eingeloggt ist) -> 200 OK
|
||||
GET http://localhost:8080/api/documents/upload
|
||||
Authorization: Basic Gast_User gast
|
||||
# (Gibt 405 Method Not Allowed zurück, weil es ein POST Endpunkt ist,
|
||||
# aber das beweist, dass Auth geklappt hat. Bei Auth-Fehler käme 401/403)
|
||||
|
||||
###Groups
|
||||
#GET
|
||||
GET http://localhost:8080/api/admin/tags
|
||||
Authorization: Basic admin admin123
|
||||
23
workspaces/backend/api_tests/Document.http
Normal file
23
workspaces/backend/api_tests/Document.http
Normal file
@@ -0,0 +1,23 @@
|
||||
### 1. Upload Test (MIT AUTH)
|
||||
POST http://localhost:8080/api/documents/upload
|
||||
Authorization: Basic admin admin123
|
||||
Content-Type: multipart/form-data; boundary=boundary
|
||||
|
||||
--boundary
|
||||
Content-Disposition: form-data; name="file"; filename="scan_79.pdf"
|
||||
Content-Type: application/pdf
|
||||
|
||||
< ./demo.pdf
|
||||
--boundary--
|
||||
|
||||
### 2. Excel Import Test (MIT AUTH)
|
||||
POST http://localhost:8080/api/documents/import-excel?overwrite=true
|
||||
Authorization: Basic admin admin123
|
||||
Content-Type: multipart/form-data; boundary=boundary2
|
||||
|
||||
--boundary2
|
||||
Content-Disposition: form-data; name="file"; filename="metadata.xlsx"
|
||||
Content-Type: application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
|
||||
|
||||
< ./metadata.xlsx
|
||||
--boundary2--
|
||||
17
workspaces/backend/api_tests/User.http
Normal file
17
workspaces/backend/api_tests/User.http
Normal file
@@ -0,0 +1,17 @@
|
||||
### Schritt 1: Gruppen IDs herausfinden (als Admin)
|
||||
GET http://localhost:8080/api/admin/groups
|
||||
Authorization: Basic admin admin123
|
||||
|
||||
### Schritt 2: Einen neuen User "Onkel_Heinz" anlegen
|
||||
POST http://localhost:8080/api/admin/users
|
||||
Authorization: Basic admin admin123
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"username": "Onkel_Heinz",
|
||||
"email": "heinz@familie.de",
|
||||
"initialPassword": "geheim",
|
||||
"groupIds": [
|
||||
"26f33481-b68a-410d-852b-a081ca9ae930"
|
||||
]
|
||||
}
|
||||
BIN
workspaces/backend/api_tests/demo.pdf
Normal file
BIN
workspaces/backend/api_tests/demo.pdf
Normal file
Binary file not shown.
BIN
workspaces/backend/api_tests/metadata.xlsx
Normal file
BIN
workspaces/backend/api_tests/metadata.xlsx
Normal file
Binary file not shown.
295
workspaces/backend/mvnw
vendored
Executable file
295
workspaces/backend/mvnw
vendored
Executable file
@@ -0,0 +1,295 @@
|
||||
#!/bin/sh
|
||||
# ----------------------------------------------------------------------------
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Apache Maven Wrapper startup batch script, version 3.3.4
|
||||
#
|
||||
# Optional ENV vars
|
||||
# -----------------
|
||||
# JAVA_HOME - location of a JDK home dir, required when download maven via java source
|
||||
# MVNW_REPOURL - repo url base for downloading maven distribution
|
||||
# MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven
|
||||
# MVNW_VERBOSE - true: enable verbose log; debug: trace the mvnw script; others: silence the output
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
set -euf
|
||||
[ "${MVNW_VERBOSE-}" != debug ] || set -x
|
||||
|
||||
# OS specific support.
|
||||
native_path() { printf %s\\n "$1"; }
|
||||
case "$(uname)" in
|
||||
CYGWIN* | MINGW*)
|
||||
[ -z "${JAVA_HOME-}" ] || JAVA_HOME="$(cygpath --unix "$JAVA_HOME")"
|
||||
native_path() { cygpath --path --windows "$1"; }
|
||||
;;
|
||||
esac
|
||||
|
||||
# set JAVACMD and JAVACCMD
|
||||
set_java_home() {
|
||||
# For Cygwin and MinGW, ensure paths are in Unix format before anything is touched
|
||||
if [ -n "${JAVA_HOME-}" ]; then
|
||||
if [ -x "$JAVA_HOME/jre/sh/java" ]; then
|
||||
# IBM's JDK on AIX uses strange locations for the executables
|
||||
JAVACMD="$JAVA_HOME/jre/sh/java"
|
||||
JAVACCMD="$JAVA_HOME/jre/sh/javac"
|
||||
else
|
||||
JAVACMD="$JAVA_HOME/bin/java"
|
||||
JAVACCMD="$JAVA_HOME/bin/javac"
|
||||
|
||||
if [ ! -x "$JAVACMD" ] || [ ! -x "$JAVACCMD" ]; then
|
||||
echo "The JAVA_HOME environment variable is not defined correctly, so mvnw cannot run." >&2
|
||||
echo "JAVA_HOME is set to \"$JAVA_HOME\", but \"\$JAVA_HOME/bin/java\" or \"\$JAVA_HOME/bin/javac\" does not exist." >&2
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
else
|
||||
JAVACMD="$(
|
||||
'set' +e
|
||||
'unset' -f command 2>/dev/null
|
||||
'command' -v java
|
||||
)" || :
|
||||
JAVACCMD="$(
|
||||
'set' +e
|
||||
'unset' -f command 2>/dev/null
|
||||
'command' -v javac
|
||||
)" || :
|
||||
|
||||
if [ ! -x "${JAVACMD-}" ] || [ ! -x "${JAVACCMD-}" ]; then
|
||||
echo "The java/javac command does not exist in PATH nor is JAVA_HOME set, so mvnw cannot run." >&2
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
# hash string like Java String::hashCode
|
||||
hash_string() {
|
||||
str="${1:-}" h=0
|
||||
while [ -n "$str" ]; do
|
||||
char="${str%"${str#?}"}"
|
||||
h=$(((h * 31 + $(LC_CTYPE=C printf %d "'$char")) % 4294967296))
|
||||
str="${str#?}"
|
||||
done
|
||||
printf %x\\n $h
|
||||
}
|
||||
|
||||
verbose() { :; }
|
||||
[ "${MVNW_VERBOSE-}" != true ] || verbose() { printf %s\\n "${1-}"; }
|
||||
|
||||
die() {
|
||||
printf %s\\n "$1" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
trim() {
|
||||
# MWRAPPER-139:
|
||||
# Trims trailing and leading whitespace, carriage returns, tabs, and linefeeds.
|
||||
# Needed for removing poorly interpreted newline sequences when running in more
|
||||
# exotic environments such as mingw bash on Windows.
|
||||
printf "%s" "${1}" | tr -d '[:space:]'
|
||||
}
|
||||
|
||||
scriptDir="$(dirname "$0")"
|
||||
scriptName="$(basename "$0")"
|
||||
|
||||
# parse distributionUrl and optional distributionSha256Sum, requires .mvn/wrapper/maven-wrapper.properties
|
||||
while IFS="=" read -r key value; do
|
||||
case "${key-}" in
|
||||
distributionUrl) distributionUrl=$(trim "${value-}") ;;
|
||||
distributionSha256Sum) distributionSha256Sum=$(trim "${value-}") ;;
|
||||
esac
|
||||
done <"$scriptDir/.mvn/wrapper/maven-wrapper.properties"
|
||||
[ -n "${distributionUrl-}" ] || die "cannot read distributionUrl property in $scriptDir/.mvn/wrapper/maven-wrapper.properties"
|
||||
|
||||
case "${distributionUrl##*/}" in
|
||||
maven-mvnd-*bin.*)
|
||||
MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/
|
||||
case "${PROCESSOR_ARCHITECTURE-}${PROCESSOR_ARCHITEW6432-}:$(uname -a)" in
|
||||
*AMD64:CYGWIN* | *AMD64:MINGW*) distributionPlatform=windows-amd64 ;;
|
||||
:Darwin*x86_64) distributionPlatform=darwin-amd64 ;;
|
||||
:Darwin*arm64) distributionPlatform=darwin-aarch64 ;;
|
||||
:Linux*x86_64*) distributionPlatform=linux-amd64 ;;
|
||||
*)
|
||||
echo "Cannot detect native platform for mvnd on $(uname)-$(uname -m), use pure java version" >&2
|
||||
distributionPlatform=linux-amd64
|
||||
;;
|
||||
esac
|
||||
distributionUrl="${distributionUrl%-bin.*}-$distributionPlatform.zip"
|
||||
;;
|
||||
maven-mvnd-*) MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ ;;
|
||||
*) MVN_CMD="mvn${scriptName#mvnw}" _MVNW_REPO_PATTERN=/org/apache/maven/ ;;
|
||||
esac
|
||||
|
||||
# apply MVNW_REPOURL and calculate MAVEN_HOME
|
||||
# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-<version>,maven-mvnd-<version>-<platform>}/<hash>
|
||||
[ -z "${MVNW_REPOURL-}" ] || distributionUrl="$MVNW_REPOURL$_MVNW_REPO_PATTERN${distributionUrl#*"$_MVNW_REPO_PATTERN"}"
|
||||
distributionUrlName="${distributionUrl##*/}"
|
||||
distributionUrlNameMain="${distributionUrlName%.*}"
|
||||
distributionUrlNameMain="${distributionUrlNameMain%-bin}"
|
||||
MAVEN_USER_HOME="${MAVEN_USER_HOME:-${HOME}/.m2}"
|
||||
MAVEN_HOME="${MAVEN_USER_HOME}/wrapper/dists/${distributionUrlNameMain-}/$(hash_string "$distributionUrl")"
|
||||
|
||||
exec_maven() {
|
||||
unset MVNW_VERBOSE MVNW_USERNAME MVNW_PASSWORD MVNW_REPOURL || :
|
||||
exec "$MAVEN_HOME/bin/$MVN_CMD" "$@" || die "cannot exec $MAVEN_HOME/bin/$MVN_CMD"
|
||||
}
|
||||
|
||||
if [ -d "$MAVEN_HOME" ]; then
|
||||
verbose "found existing MAVEN_HOME at $MAVEN_HOME"
|
||||
exec_maven "$@"
|
||||
fi
|
||||
|
||||
case "${distributionUrl-}" in
|
||||
*?-bin.zip | *?maven-mvnd-?*-?*.zip) ;;
|
||||
*) die "distributionUrl is not valid, must match *-bin.zip or maven-mvnd-*.zip, but found '${distributionUrl-}'" ;;
|
||||
esac
|
||||
|
||||
# prepare tmp dir
|
||||
if TMP_DOWNLOAD_DIR="$(mktemp -d)" && [ -d "$TMP_DOWNLOAD_DIR" ]; then
|
||||
clean() { rm -rf -- "$TMP_DOWNLOAD_DIR"; }
|
||||
trap clean HUP INT TERM EXIT
|
||||
else
|
||||
die "cannot create temp dir"
|
||||
fi
|
||||
|
||||
mkdir -p -- "${MAVEN_HOME%/*}"
|
||||
|
||||
# Download and Install Apache Maven
|
||||
verbose "Couldn't find MAVEN_HOME, downloading and installing it ..."
|
||||
verbose "Downloading from: $distributionUrl"
|
||||
verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName"
|
||||
|
||||
# select .zip or .tar.gz
|
||||
if ! command -v unzip >/dev/null; then
|
||||
distributionUrl="${distributionUrl%.zip}.tar.gz"
|
||||
distributionUrlName="${distributionUrl##*/}"
|
||||
fi
|
||||
|
||||
# verbose opt
|
||||
__MVNW_QUIET_WGET=--quiet __MVNW_QUIET_CURL=--silent __MVNW_QUIET_UNZIP=-q __MVNW_QUIET_TAR=''
|
||||
[ "${MVNW_VERBOSE-}" != true ] || __MVNW_QUIET_WGET='' __MVNW_QUIET_CURL='' __MVNW_QUIET_UNZIP='' __MVNW_QUIET_TAR=v
|
||||
|
||||
# normalize http auth
|
||||
case "${MVNW_PASSWORD:+has-password}" in
|
||||
'') MVNW_USERNAME='' MVNW_PASSWORD='' ;;
|
||||
has-password) [ -n "${MVNW_USERNAME-}" ] || MVNW_USERNAME='' MVNW_PASSWORD='' ;;
|
||||
esac
|
||||
|
||||
if [ -z "${MVNW_USERNAME-}" ] && command -v wget >/dev/null; then
|
||||
verbose "Found wget ... using wget"
|
||||
wget ${__MVNW_QUIET_WGET:+"$__MVNW_QUIET_WGET"} "$distributionUrl" -O "$TMP_DOWNLOAD_DIR/$distributionUrlName" || die "wget: Failed to fetch $distributionUrl"
|
||||
elif [ -z "${MVNW_USERNAME-}" ] && command -v curl >/dev/null; then
|
||||
verbose "Found curl ... using curl"
|
||||
curl ${__MVNW_QUIET_CURL:+"$__MVNW_QUIET_CURL"} -f -L -o "$TMP_DOWNLOAD_DIR/$distributionUrlName" "$distributionUrl" || die "curl: Failed to fetch $distributionUrl"
|
||||
elif set_java_home; then
|
||||
verbose "Falling back to use Java to download"
|
||||
javaSource="$TMP_DOWNLOAD_DIR/Downloader.java"
|
||||
targetZip="$TMP_DOWNLOAD_DIR/$distributionUrlName"
|
||||
cat >"$javaSource" <<-END
|
||||
public class Downloader extends java.net.Authenticator
|
||||
{
|
||||
protected java.net.PasswordAuthentication getPasswordAuthentication()
|
||||
{
|
||||
return new java.net.PasswordAuthentication( System.getenv( "MVNW_USERNAME" ), System.getenv( "MVNW_PASSWORD" ).toCharArray() );
|
||||
}
|
||||
public static void main( String[] args ) throws Exception
|
||||
{
|
||||
setDefault( new Downloader() );
|
||||
java.nio.file.Files.copy( java.net.URI.create( args[0] ).toURL().openStream(), java.nio.file.Paths.get( args[1] ).toAbsolutePath().normalize() );
|
||||
}
|
||||
}
|
||||
END
|
||||
# For Cygwin/MinGW, switch paths to Windows format before running javac and java
|
||||
verbose " - Compiling Downloader.java ..."
|
||||
"$(native_path "$JAVACCMD")" "$(native_path "$javaSource")" || die "Failed to compile Downloader.java"
|
||||
verbose " - Running Downloader.java ..."
|
||||
"$(native_path "$JAVACMD")" -cp "$(native_path "$TMP_DOWNLOAD_DIR")" Downloader "$distributionUrl" "$(native_path "$targetZip")"
|
||||
fi
|
||||
|
||||
# If specified, validate the SHA-256 sum of the Maven distribution zip file
|
||||
if [ -n "${distributionSha256Sum-}" ]; then
|
||||
distributionSha256Result=false
|
||||
if [ "$MVN_CMD" = mvnd.sh ]; then
|
||||
echo "Checksum validation is not supported for maven-mvnd." >&2
|
||||
echo "Please disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2
|
||||
exit 1
|
||||
elif command -v sha256sum >/dev/null; then
|
||||
if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | sha256sum -c - >/dev/null 2>&1; then
|
||||
distributionSha256Result=true
|
||||
fi
|
||||
elif command -v shasum >/dev/null; then
|
||||
if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | shasum -a 256 -c >/dev/null 2>&1; then
|
||||
distributionSha256Result=true
|
||||
fi
|
||||
else
|
||||
echo "Checksum validation was requested but neither 'sha256sum' or 'shasum' are available." >&2
|
||||
echo "Please install either command, or disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2
|
||||
exit 1
|
||||
fi
|
||||
if [ $distributionSha256Result = false ]; then
|
||||
echo "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised." >&2
|
||||
echo "If you updated your Maven version, you need to update the specified distributionSha256Sum property." >&2
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# unzip and move
|
||||
if command -v unzip >/dev/null; then
|
||||
unzip ${__MVNW_QUIET_UNZIP:+"$__MVNW_QUIET_UNZIP"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -d "$TMP_DOWNLOAD_DIR" || die "failed to unzip"
|
||||
else
|
||||
tar xzf${__MVNW_QUIET_TAR:+"$__MVNW_QUIET_TAR"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -C "$TMP_DOWNLOAD_DIR" || die "failed to untar"
|
||||
fi
|
||||
|
||||
# Find the actual extracted directory name (handles snapshots where filename != directory name)
|
||||
actualDistributionDir=""
|
||||
|
||||
# First try the expected directory name (for regular distributions)
|
||||
if [ -d "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" ]; then
|
||||
if [ -f "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain/bin/$MVN_CMD" ]; then
|
||||
actualDistributionDir="$distributionUrlNameMain"
|
||||
fi
|
||||
fi
|
||||
|
||||
# If not found, search for any directory with the Maven executable (for snapshots)
|
||||
if [ -z "$actualDistributionDir" ]; then
|
||||
# enable globbing to iterate over items
|
||||
set +f
|
||||
for dir in "$TMP_DOWNLOAD_DIR"/*; do
|
||||
if [ -d "$dir" ]; then
|
||||
if [ -f "$dir/bin/$MVN_CMD" ]; then
|
||||
actualDistributionDir="$(basename "$dir")"
|
||||
break
|
||||
fi
|
||||
fi
|
||||
done
|
||||
set -f
|
||||
fi
|
||||
|
||||
if [ -z "$actualDistributionDir" ]; then
|
||||
verbose "Contents of $TMP_DOWNLOAD_DIR:"
|
||||
verbose "$(ls -la "$TMP_DOWNLOAD_DIR")"
|
||||
die "Could not find Maven distribution directory in extracted archive"
|
||||
fi
|
||||
|
||||
verbose "Found extracted Maven distribution directory: $actualDistributionDir"
|
||||
printf %s\\n "$distributionUrl" >"$TMP_DOWNLOAD_DIR/$actualDistributionDir/mvnw.url"
|
||||
mv -- "$TMP_DOWNLOAD_DIR/$actualDistributionDir" "$MAVEN_HOME" || [ -d "$MAVEN_HOME" ] || die "fail to move MAVEN_HOME"
|
||||
|
||||
clean || :
|
||||
exec_maven "$@"
|
||||
189
workspaces/backend/mvnw.cmd
vendored
Normal file
189
workspaces/backend/mvnw.cmd
vendored
Normal file
@@ -0,0 +1,189 @@
|
||||
<# : batch portion
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Licensed to the Apache Software Foundation (ASF) under one
|
||||
@REM or more contributor license agreements. See the NOTICE file
|
||||
@REM distributed with this work for additional information
|
||||
@REM regarding copyright ownership. The ASF licenses this file
|
||||
@REM to you under the Apache License, Version 2.0 (the
|
||||
@REM "License"); you may not use this file except in compliance
|
||||
@REM with the License. You may obtain a copy of the License at
|
||||
@REM
|
||||
@REM http://www.apache.org/licenses/LICENSE-2.0
|
||||
@REM
|
||||
@REM Unless required by applicable law or agreed to in writing,
|
||||
@REM software distributed under the License is distributed on an
|
||||
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
@REM KIND, either express or implied. See the License for the
|
||||
@REM specific language governing permissions and limitations
|
||||
@REM under the License.
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Apache Maven Wrapper startup batch script, version 3.3.4
|
||||
@REM
|
||||
@REM Optional ENV vars
|
||||
@REM MVNW_REPOURL - repo url base for downloading maven distribution
|
||||
@REM MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven
|
||||
@REM MVNW_VERBOSE - true: enable verbose log; others: silence the output
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@IF "%__MVNW_ARG0_NAME__%"=="" (SET __MVNW_ARG0_NAME__=%~nx0)
|
||||
@SET __MVNW_CMD__=
|
||||
@SET __MVNW_ERROR__=
|
||||
@SET __MVNW_PSMODULEP_SAVE=%PSModulePath%
|
||||
@SET PSModulePath=
|
||||
@FOR /F "usebackq tokens=1* delims==" %%A IN (`powershell -noprofile "& {$scriptDir='%~dp0'; $script='%__MVNW_ARG0_NAME__%'; icm -ScriptBlock ([Scriptblock]::Create((Get-Content -Raw '%~f0'))) -NoNewScope}"`) DO @(
|
||||
IF "%%A"=="MVN_CMD" (set __MVNW_CMD__=%%B) ELSE IF "%%B"=="" (echo %%A) ELSE (echo %%A=%%B)
|
||||
)
|
||||
@SET PSModulePath=%__MVNW_PSMODULEP_SAVE%
|
||||
@SET __MVNW_PSMODULEP_SAVE=
|
||||
@SET __MVNW_ARG0_NAME__=
|
||||
@SET MVNW_USERNAME=
|
||||
@SET MVNW_PASSWORD=
|
||||
@IF NOT "%__MVNW_CMD__%"=="" ("%__MVNW_CMD__%" %*)
|
||||
@echo Cannot start maven from wrapper >&2 && exit /b 1
|
||||
@GOTO :EOF
|
||||
: end batch / begin powershell #>
|
||||
|
||||
$ErrorActionPreference = "Stop"
|
||||
if ($env:MVNW_VERBOSE -eq "true") {
|
||||
$VerbosePreference = "Continue"
|
||||
}
|
||||
|
||||
# calculate distributionUrl, requires .mvn/wrapper/maven-wrapper.properties
|
||||
$distributionUrl = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionUrl
|
||||
if (!$distributionUrl) {
|
||||
Write-Error "cannot read distributionUrl property in $scriptDir/.mvn/wrapper/maven-wrapper.properties"
|
||||
}
|
||||
|
||||
switch -wildcard -casesensitive ( $($distributionUrl -replace '^.*/','') ) {
|
||||
"maven-mvnd-*" {
|
||||
$USE_MVND = $true
|
||||
$distributionUrl = $distributionUrl -replace '-bin\.[^.]*$',"-windows-amd64.zip"
|
||||
$MVN_CMD = "mvnd.cmd"
|
||||
break
|
||||
}
|
||||
default {
|
||||
$USE_MVND = $false
|
||||
$MVN_CMD = $script -replace '^mvnw','mvn'
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
# apply MVNW_REPOURL and calculate MAVEN_HOME
|
||||
# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-<version>,maven-mvnd-<version>-<platform>}/<hash>
|
||||
if ($env:MVNW_REPOURL) {
|
||||
$MVNW_REPO_PATTERN = if ($USE_MVND -eq $False) { "/org/apache/maven/" } else { "/maven/mvnd/" }
|
||||
$distributionUrl = "$env:MVNW_REPOURL$MVNW_REPO_PATTERN$($distributionUrl -replace "^.*$MVNW_REPO_PATTERN",'')"
|
||||
}
|
||||
$distributionUrlName = $distributionUrl -replace '^.*/',''
|
||||
$distributionUrlNameMain = $distributionUrlName -replace '\.[^.]*$','' -replace '-bin$',''
|
||||
|
||||
$MAVEN_M2_PATH = "$HOME/.m2"
|
||||
if ($env:MAVEN_USER_HOME) {
|
||||
$MAVEN_M2_PATH = "$env:MAVEN_USER_HOME"
|
||||
}
|
||||
|
||||
if (-not (Test-Path -Path $MAVEN_M2_PATH)) {
|
||||
New-Item -Path $MAVEN_M2_PATH -ItemType Directory | Out-Null
|
||||
}
|
||||
|
||||
$MAVEN_WRAPPER_DISTS = $null
|
||||
if ((Get-Item $MAVEN_M2_PATH).Target[0] -eq $null) {
|
||||
$MAVEN_WRAPPER_DISTS = "$MAVEN_M2_PATH/wrapper/dists"
|
||||
} else {
|
||||
$MAVEN_WRAPPER_DISTS = (Get-Item $MAVEN_M2_PATH).Target[0] + "/wrapper/dists"
|
||||
}
|
||||
|
||||
$MAVEN_HOME_PARENT = "$MAVEN_WRAPPER_DISTS/$distributionUrlNameMain"
|
||||
$MAVEN_HOME_NAME = ([System.Security.Cryptography.SHA256]::Create().ComputeHash([byte[]][char[]]$distributionUrl) | ForEach-Object {$_.ToString("x2")}) -join ''
|
||||
$MAVEN_HOME = "$MAVEN_HOME_PARENT/$MAVEN_HOME_NAME"
|
||||
|
||||
if (Test-Path -Path "$MAVEN_HOME" -PathType Container) {
|
||||
Write-Verbose "found existing MAVEN_HOME at $MAVEN_HOME"
|
||||
Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD"
|
||||
exit $?
|
||||
}
|
||||
|
||||
if (! $distributionUrlNameMain -or ($distributionUrlName -eq $distributionUrlNameMain)) {
|
||||
Write-Error "distributionUrl is not valid, must end with *-bin.zip, but found $distributionUrl"
|
||||
}
|
||||
|
||||
# prepare tmp dir
|
||||
$TMP_DOWNLOAD_DIR_HOLDER = New-TemporaryFile
|
||||
$TMP_DOWNLOAD_DIR = New-Item -Itemtype Directory -Path "$TMP_DOWNLOAD_DIR_HOLDER.dir"
|
||||
$TMP_DOWNLOAD_DIR_HOLDER.Delete() | Out-Null
|
||||
trap {
|
||||
if ($TMP_DOWNLOAD_DIR.Exists) {
|
||||
try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null }
|
||||
catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" }
|
||||
}
|
||||
}
|
||||
|
||||
New-Item -Itemtype Directory -Path "$MAVEN_HOME_PARENT" -Force | Out-Null
|
||||
|
||||
# Download and Install Apache Maven
|
||||
Write-Verbose "Couldn't find MAVEN_HOME, downloading and installing it ..."
|
||||
Write-Verbose "Downloading from: $distributionUrl"
|
||||
Write-Verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName"
|
||||
|
||||
$webclient = New-Object System.Net.WebClient
|
||||
if ($env:MVNW_USERNAME -and $env:MVNW_PASSWORD) {
|
||||
$webclient.Credentials = New-Object System.Net.NetworkCredential($env:MVNW_USERNAME, $env:MVNW_PASSWORD)
|
||||
}
|
||||
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
|
||||
$webclient.DownloadFile($distributionUrl, "$TMP_DOWNLOAD_DIR/$distributionUrlName") | Out-Null
|
||||
|
||||
# If specified, validate the SHA-256 sum of the Maven distribution zip file
|
||||
$distributionSha256Sum = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionSha256Sum
|
||||
if ($distributionSha256Sum) {
|
||||
if ($USE_MVND) {
|
||||
Write-Error "Checksum validation is not supported for maven-mvnd. `nPlease disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties."
|
||||
}
|
||||
Import-Module $PSHOME\Modules\Microsoft.PowerShell.Utility -Function Get-FileHash
|
||||
if ((Get-FileHash "$TMP_DOWNLOAD_DIR/$distributionUrlName" -Algorithm SHA256).Hash.ToLower() -ne $distributionSha256Sum) {
|
||||
Write-Error "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised. If you updated your Maven version, you need to update the specified distributionSha256Sum property."
|
||||
}
|
||||
}
|
||||
|
||||
# unzip and move
|
||||
Expand-Archive "$TMP_DOWNLOAD_DIR/$distributionUrlName" -DestinationPath "$TMP_DOWNLOAD_DIR" | Out-Null
|
||||
|
||||
# Find the actual extracted directory name (handles snapshots where filename != directory name)
|
||||
$actualDistributionDir = ""
|
||||
|
||||
# First try the expected directory name (for regular distributions)
|
||||
$expectedPath = Join-Path "$TMP_DOWNLOAD_DIR" "$distributionUrlNameMain"
|
||||
$expectedMvnPath = Join-Path "$expectedPath" "bin/$MVN_CMD"
|
||||
if ((Test-Path -Path $expectedPath -PathType Container) -and (Test-Path -Path $expectedMvnPath -PathType Leaf)) {
|
||||
$actualDistributionDir = $distributionUrlNameMain
|
||||
}
|
||||
|
||||
# If not found, search for any directory with the Maven executable (for snapshots)
|
||||
if (!$actualDistributionDir) {
|
||||
Get-ChildItem -Path "$TMP_DOWNLOAD_DIR" -Directory | ForEach-Object {
|
||||
$testPath = Join-Path $_.FullName "bin/$MVN_CMD"
|
||||
if (Test-Path -Path $testPath -PathType Leaf) {
|
||||
$actualDistributionDir = $_.Name
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!$actualDistributionDir) {
|
||||
Write-Error "Could not find Maven distribution directory in extracted archive"
|
||||
}
|
||||
|
||||
Write-Verbose "Found extracted Maven distribution directory: $actualDistributionDir"
|
||||
Rename-Item -Path "$TMP_DOWNLOAD_DIR/$actualDistributionDir" -NewName $MAVEN_HOME_NAME | Out-Null
|
||||
try {
|
||||
Move-Item -Path "$TMP_DOWNLOAD_DIR/$MAVEN_HOME_NAME" -Destination $MAVEN_HOME_PARENT | Out-Null
|
||||
} catch {
|
||||
if (! (Test-Path -Path "$MAVEN_HOME" -PathType Container)) {
|
||||
Write-Error "fail to move MAVEN_HOME"
|
||||
}
|
||||
} finally {
|
||||
try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null }
|
||||
catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" }
|
||||
}
|
||||
|
||||
Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD"
|
||||
152
workspaces/backend/pom.xml
Normal file
152
workspaces/backend/pom.xml
Normal file
<?xml version="1.0" encoding="UTF-8"?>
<!-- Maven build for the Familienarchiv backend (Spring Boot 4, Java 21). -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
	<modelVersion>4.0.0</modelVersion>
	<parent>
		<groupId>org.springframework.boot</groupId>
		<artifactId>spring-boot-starter-parent</artifactId>
		<version>4.0.0</version>
		<relativePath/> <!-- lookup parent from repository -->
	</parent>
	<groupId>org.raddatz</groupId>
	<artifactId>familienarchiv</artifactId>
	<version>0.0.1-SNAPSHOT</version>
	<name>Familienarchiv</name>
	<description>Backend für das Familenarchiv</description>
	<url/>
	<licenses>
		<license/>
	</licenses>
	<developers>
		<developer/>
	</developers>
	<scm>
		<connection/>
		<developerConnection/>
		<tag/>
		<url/>
	</scm>
	<properties>
		<java.version>21</java.version>
		<!-- Third-party versions centralized here so an upgrade touches one line
		     instead of every duplicated <version> element. -->
		<poi.version>5.5.0</poi.version>
		<awssdk.version>2.29.0</awssdk.version>
	</properties>
	<dependencies>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-actuator</artifactId>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-data-jpa</artifactId>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-data-rest</artifactId>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-jdbc</artifactId>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-security</artifactId>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-session-jdbc</artifactId>
		</dependency>
		<!-- Web MVC with Jetty instead of the default Tomcat (see exclusion below
		     and the explicit jetty starter further down). -->
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-webmvc</artifactId>
			<exclusions>
				<exclusion>
					<groupId>org.springframework.boot</groupId>
					<artifactId>spring-boot-starter-tomcat</artifactId>
				</exclusion>
			</exclusions>
		</dependency>
		<dependency>
			<groupId>org.postgresql</groupId>
			<artifactId>postgresql</artifactId>
			<scope>runtime</scope>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-jetty</artifactId>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-actuator-test</artifactId>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-data-jpa-test</artifactId>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-data-rest-test</artifactId>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-jdbc-test</artifactId>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-security-test</artifactId>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-session-jdbc-test</artifactId>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-webmvc-test</artifactId>
			<scope>test</scope>
		</dependency>
		<!-- Excel handling (Apache POI) -->
		<dependency>
			<groupId>org.apache.poi</groupId>
			<artifactId>poi</artifactId>
			<version>${poi.version}</version>
		</dependency>
		<dependency>
			<groupId>org.apache.poi</groupId>
			<artifactId>poi-ooxml</artifactId>
			<version>${poi.version}</version>
		</dependency>

		<!-- S3 support for MinIO (AWS SDK v2) -->
		<dependency>
			<groupId>software.amazon.awssdk</groupId>
			<artifactId>s3</artifactId>
			<version>${awssdk.version}</version>
		</dependency>
		<!-- Required for AWS SDK v2 authentication -->
		<dependency>
			<groupId>software.amazon.awssdk</groupId>
			<artifactId>auth</artifactId>
			<version>${awssdk.version}</version>
		</dependency>
		<dependency>
			<groupId>org.projectlombok</groupId>
			<artifactId>lombok</artifactId>
			<optional>true</optional>
		</dependency>
	</dependencies>


	<build>
		<plugins>
			<plugin>
				<groupId>org.springframework.boot</groupId>
				<artifactId>spring-boot-maven-plugin</artifactId>
			</plugin>
		</plugins>
	</build>

</project>
|
||||
@@ -0,0 +1,13 @@
|
||||
package org.raddatz.familienarchiv;
|
||||
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication;
|
||||
|
||||
@SpringBootApplication
|
||||
public class FamilienarchivApplication {
|
||||
|
||||
public static void main(String[] args) {
|
||||
SpringApplication.run(FamilienarchivApplication.class, args);
|
||||
}
|
||||
|
||||
}
|
||||
package org.raddatz.familienarchiv.config;

import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;

import org.raddatz.familienarchiv.model.AppUser;
import org.raddatz.familienarchiv.model.Document;
import org.raddatz.familienarchiv.model.DocumentStatus;
import org.raddatz.familienarchiv.model.Person;
import org.raddatz.familienarchiv.model.UserGroup;
import org.raddatz.familienarchiv.repository.AppUserRepository;
import org.raddatz.familienarchiv.repository.DocumentRepository;
import org.raddatz.familienarchiv.repository.PersonRepository;
import org.raddatz.familienarchiv.repository.UserGroupRepository;
import org.springframework.boot.CommandLineRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.crypto.password.PasswordEncoder;

import java.time.LocalDate;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ThreadLocalRandom;

/**
 * Seeds the database on application startup: a default admin account and a
 * set of randomly generated demo persons/documents. Both runners are no-ops
 * when the corresponding tables already contain rows.
 */
@Configuration
@RequiredArgsConstructor
@Slf4j
public class DataInitializer {

	// Number of demo persons / documents generated by initData. Extracted to
	// constants so the counts and the completion log message cannot drift
	// apart again (the previous message claimed 50 persons while only 4 were
	// created).
	private static final int PERSON_COUNT = 4;
	private static final int DOCUMENT_COUNT = 500;

	private final AppUserRepository userRepository;
	private final UserGroupRepository groupRepository;

	/**
	 * Creates a default admin account if no users exist yet.
	 * SECURITY NOTE: the hard-coded credentials admin/admin123 are intended
	 * for development only and are logged on creation — change them before
	 * any real deployment.
	 *
	 * @param passwordEncoder used to BCrypt-hash the seeded password
	 * @return runner executed once after the context is ready
	 */
	@Bean
	public CommandLineRunner initAdminUser(PasswordEncoder passwordEncoder) {
		return args -> {
			if (userRepository.count() == 0) {
				log.info("Keine User gefunden. Erstelle Default-Admin...");

				// 1. Create the admin group
				UserGroup adminGroup = UserGroup.builder()
						.name("Administrators")
						.permissions(Set.of("ADMIN", "READ_ALL", "WRITE_ALL"))
						.build();
				groupRepository.save(adminGroup);

				// 2. Create the admin user (password is hashed, never stored in plain text)
				AppUser admin = AppUser.builder()
						.username("admin")
						.password(passwordEncoder.encode("admin123"))
						.email("admin@familyarchive.local")
						.groups(Set.of(adminGroup))
						.build();
				userRepository.save(admin);

				log.info("Default Admin erstellt: User='admin', Pass='admin123'");
			}
		};
	}

	/**
	 * Populates the database with random demo persons, a demo admin group and
	 * demo documents. Skipped entirely when persons already exist.
	 *
	 * @param personRepo repository for demo persons
	 * @param docRepo    repository for demo documents
	 * @param groupRepo  repository for the demo group
	 * @return runner executed once after the context is ready
	 */
	@Bean
	public CommandLineRunner initData(PersonRepository personRepo,
			DocumentRepository docRepo,
			UserGroupRepository groupRepo) {
		return args -> {
			// Only run against an empty database.
			if (personRepo.count() > 0) {
				log.info("Datenbank enthält bereits Daten. Überspringe Initialisierung.");
				return;
			}

			log.info("Generiere Testdaten...");

			// 1. Create persons with random name combinations. save() returns
			// the managed entity with its generated ID, so we keep the return
			// values for linking documents below.
			List<Person> persons = new ArrayList<>();
			String[] firstNames = { "Hans", "Helga", "Thomas", "Maria", "Otto", "Anna", "Paul", "Lisa" };
			String[] lastNames = { "Müller", "Schmidt", "Schneider", "Fischer", "Weber", "Meyer" };

			for (int i = 0; i < PERSON_COUNT; i++) {
				String fn = firstNames[ThreadLocalRandom.current().nextInt(firstNames.length)];
				String ln = lastNames[ThreadLocalRandom.current().nextInt(lastNames.length)];

				persons.add(personRepo.save(Person.builder()
						.firstName(fn)
						.lastName(ln)
						// Every fifth person gets an alias (with the current
						// PERSON_COUNT of 4 that is only index 0).
						.alias(i % 5 == 0 ? "Alias " + i : null)
						.build()));
			}

			// 2. Create an admin demo group
			UserGroup admins = UserGroup.builder()
					.name("Admins")
					.permissions(Set.of("READ_ALL", "WRITE_ALL", "ADMIN"))
					.build();
			groupRepo.save(admins);

			// 3. Create documents with random sender, receivers, date and city
			List<Document> documents = new ArrayList<>();
			String[] cities = { "Berlin", "München", "Hamburg", "Köln" };

			for (int i = 0; i < DOCUMENT_COUNT; i++) {
				Person sender = persons.get(ThreadLocalRandom.current().nextInt(persons.size()));

				// Random receivers (0 to 3); the Set deduplicates repeats.
				Set<Person> receivers = new HashSet<>();
				int numReceivers = ThreadLocalRandom.current().nextInt(4);
				for (int j = 0; j < numReceivers; j++) {
					receivers.add(persons.get(ThreadLocalRandom.current().nextInt(persons.size())));
				}

				Document doc = Document.builder()
						.title("Dokument " + i)
						.originalFilename("scan_" + i + ".pdf")
						// Alternate between transcribed documents (with text)
						// and placeholders (without).
						.status(i % 2 == 0 ? DocumentStatus.TRANSCRIBED : DocumentStatus.PLACEHOLDER)
						// Random date up to 50 years in the past.
						.documentDate(LocalDate.now().minusDays(ThreadLocalRandom.current().nextInt(365 * 50)))
						.location(cities[ThreadLocalRandom.current().nextInt(cities.length)])
						.transcription(i % 2 == 0 ? LOREM_IPSUM_LANG : null)
						.sender(sender)
						.receivers(receivers)
						.build();

				documents.add(doc);
			}

			// Batch save is faster than saving one by one.
			docRepo.saveAll(documents);

			// Report the actual counts instead of hard-coded numbers.
			log.info("Initialisierung abgeschlossen: {} Personen und {} Dokumente erstellt.",
					persons.size(), documents.size());
		};
	}

	// Long dummy transcription text used for the demo documents. Static: the
	// text is identical for every instance of this configuration class.
	private static final String LOREM_IPSUM_LANG = "Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. \n" +
			"\n" +
			"Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit augue duis dolore te feugait nulla facilisi. Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat. \n" +
			"\n" +
			"Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo consequat. Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit augue duis dolore te feugait nulla facilisi. \n" +
			"\n" +
			"Nam liber tempor cum soluta nobis eleifend option congue nihil imperdiet doming id quod mazim placerat facer possim assum. Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat. Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo consequat. \n" +
			"\n" +
			"Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis. \n" +
			"\n" +
			"At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, At accusam aliquyam diam diam dolore dolores duo eirmod eos erat, et nonumy sed tempor et et invidunt justo labore Stet clita ea et gubergren, kasd magna no rebum. sanctus sea sed takimata ut vero voluptua. est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat. \n" +
			"\n" +
			"Consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus. \n" +
			"\n" +
			"Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. \n" +
			"\n" +
			"Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit augue duis dolore te feugait nulla facilisi. Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat. \n" +
			"\n" +
			"Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo consequat. Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit augue duis dolore te feugait nulla facilisi. \n" +
			"\n" +
			"Nam liber tempor cum soluta nobis eleifend option congue nihil imperdiet doming id quod mazim placerat facer possim assum. Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat. Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo consequat. \n" +
			"\n" +
			"Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis. \n" +
			"\n" +
			"At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, At accusam aliquyam diam diam dolore dolores duo eirmod eos erat, et nonumy sed tempor et et invidunt justo labore Stet clita ea et gubergren, kasd magna no rebum. sanctus sea sed takimata ut vero voluptua. est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat. \n" +
			"\n" +
			"Consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus. \n" +
			"\n" +
			"Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. \n" +
			"\n" +
			"Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit augue duis dolore te feugait nulla facilisi. Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat. \n" +
			"\n" +
			"Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo consequat. Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit augue duis dolore te feugait nulla facilisi. \n" +
			"\n" +
			"Nam liber tempor cum soluta nobis eleifend option congue nihil imperdiet doming id quod mazim placerat facer possim assum. Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat. Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo consequat. \n" +
			"\n" +
			"Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat nulla facilisis. \n" +
			"\n" +
			"At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, At accusam aliquyam diam diam dolore dolores duo eirmod eos erat, et nonumy sed tempor et et invidunt justo labore Stet clita ea et gubergren, kasd magna no rebum. sanctus sea sed takimata ut vero voluptua. est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat. ";
}
|
||||
package org.raddatz.familienarchiv.config;

import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.S3Configuration;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.CommandLineRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.net.URI;

/**
 * Wires up an AWS-SDK-v2 {@link S3Client} pointed at a MinIO endpoint and
 * runs a simple connectivity check on startup.
 */
@Configuration
public class MinioConfig {

	@Value("${app.s3.endpoint}")
	private String endpoint;

	@Value("${app.s3.access-key}")
	private String accessKey;

	@Value("${app.s3.secret-key}")
	private String secretKey;

	@Value("${app.s3.region}")
	private String region;

	/**
	 * Builds the S3 client used for all object-storage access.
	 * Path-style access is forced because MinIO serves buckets as
	 * http://host/bucket/file rather than virtual-host style.
	 */
	@Bean
	public S3Client s3Client() {
		AwsBasicCredentials basicCredentials = AwsBasicCredentials.create(accessKey, secretKey);
		StaticCredentialsProvider credentialsProvider = StaticCredentialsProvider.create(basicCredentials);
		S3Configuration pathStyleConfig = S3Configuration.builder()
				.pathStyleAccessEnabled(true)
				.build();

		return S3Client.builder()
				.endpointOverride(URI.create(endpoint))
				.serviceConfiguration(pathStyleConfig)
				.region(Region.of(region))
				.credentialsProvider(credentialsProvider)
				.build();
	}

	/**
	 * Startup smoke test: lists the buckets once so a misconfigured endpoint
	 * or credentials show up immediately in the log. Best-effort only — a
	 * failure is printed but does not abort application startup.
	 */
	@Bean
	public CommandLineRunner testS3Connection(S3Client s3Client) {
		return args -> {
			System.out.println("--- S3 VERBINDUNGSTEST START ---");
			try {
				for (var bucket : s3Client.listBuckets().buckets()) {
					System.out.println("Gefundener Bucket: " + bucket.name());
				}
				System.out.println("--- S3 VERBINDUNGSTEST ERFOLGREICH ---");
			} catch (Exception e) {
				System.err.println("--- S3 FEHLER: " + e.getMessage());
			}
		};
	}
}
|
||||
package org.raddatz.familienarchiv.config;

import lombok.RequiredArgsConstructor;

import org.raddatz.familienarchiv.service.CustomUserDetailsService;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.authentication.dao.DaoAuthenticationProvider;
import org.springframework.security.config.Customizer;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.web.SecurityFilterChain;

/**
 * Central Spring Security configuration: password hashing, DB-backed
 * authentication via {@link CustomUserDetailsService}, and the HTTP
 * authorization rules for the whole application.
 */
@Configuration
@EnableWebSecurity
@RequiredArgsConstructor
public class SecurityConfig {

	private final CustomUserDetailsService userDetailsService;

	/** BCrypt hashing for stored user passwords. */
	@Bean
	public PasswordEncoder passwordEncoder() {
		return new BCryptPasswordEncoder();
	}

	/**
	 * DAO-based authentication provider: loads users through the injected
	 * {@link CustomUserDetailsService} and verifies passwords with BCrypt.
	 */
	@Bean
	public DaoAuthenticationProvider authenticationProvider() {
		DaoAuthenticationProvider authProvider = new DaoAuthenticationProvider(userDetailsService);
		authProvider.setPasswordEncoder(passwordEncoder());
		return authProvider;
	}

	/**
	 * HTTP security rules: every request requires authentication; login is
	 * possible via HTTP Basic or the default form login.
	 */
	@Bean
	public SecurityFilterChain securityFilterChain(HttpSecurity http) throws Exception {
		http
			// CSRF disabled for development so Postman/REST clients can POST.
			// SECURITY: re-enable CSRF protection before any public deployment.
			.csrf(csrf -> csrf.disable())

			.authorizeHttpRequests(auth -> auth
				// Lock down everything: only logged-in users may access any endpoint.
				.anyRequest().authenticated()

			)
			// Allow same-origin framing so PDFs can be rendered in an <iframe>.
			.headers(headers -> headers
				.frameOptions(frameOptions -> frameOptions.sameOrigin()))
			// Allows login via browser popup or REST header (Authorization: Basic ...)
			.httpBasic(Customizer.withDefaults())
			.formLogin(Customizer.withDefaults());

		return http.build();
	}
}
|
||||
package org.raddatz.familienarchiv.controller;

import java.util.concurrent.atomic.AtomicBoolean;

import org.raddatz.familienarchiv.security.Permission;
import org.raddatz.familienarchiv.security.RequirePermission;
import org.raddatz.familienarchiv.service.MassImportService;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import lombok.RequiredArgsConstructor;

/** Admin-only endpoints; every route requires the ADMIN permission. */
@RestController
@RequestMapping("/api/admin")
@RequirePermission(Permission.ADMIN)
@RequiredArgsConstructor
public class AdminController {

	private final MassImportService massImportService;

	// Guard so that repeated POSTs cannot spawn several concurrent import
	// threads (the previous version started a new raw Thread per request).
	private final AtomicBoolean importRunning = new AtomicBoolean(false);

	/**
	 * Starts the mass import asynchronously (fire-and-forget) and returns
	 * immediately. A second request while an import is still running is
	 * rejected with 409 Conflict instead of starting a parallel import.
	 *
	 * @return 200 when the import was started, 409 when one is already running
	 */
	@PostMapping("/trigger-import")
	public ResponseEntity<String> triggerMassImport() {
		if (!importRunning.compareAndSet(false, true)) {
			return ResponseEntity.status(409).body("Massenimport läuft bereits.");
		}
		// Named thread so the import is identifiable in thread dumps.
		Thread importThread = new Thread(() -> {
			try {
				massImportService.runImport();
			} finally {
				importRunning.set(false); // always release the guard
			}
		}, "mass-import");
		importThread.start();
		return ResponseEntity.ok("Massenimport gestartet.");
	}
}
|
||||
package org.raddatz.familienarchiv.controller;

import java.io.IOException;
import java.time.LocalDate;
import java.util.List;
import java.util.UUID;

import org.raddatz.familienarchiv.dto.DocumentUpdateDTO;
import org.raddatz.familienarchiv.model.Document;
import org.raddatz.familienarchiv.repository.DocumentRepository;
import org.raddatz.familienarchiv.security.Permission;
import org.raddatz.familienarchiv.security.RequirePermission;
import org.raddatz.familienarchiv.service.DocumentService;
import org.raddatz.familienarchiv.service.FileService;
import org.springframework.core.io.InputStreamResource;
import org.springframework.data.domain.Sort;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RequestPart;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.server.ResponseStatusException;

import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;

/**
 * REST endpoints for document metadata, file download, search and the
 * sender/receiver conversation view.
 */
@RestController
@RequestMapping("/api/documents")
@RequiredArgsConstructor
@Slf4j
public class DocumentController {

	private final DocumentRepository documentRepository;
	private final DocumentService documentService;
	private final FileService fileService;

	// --- DOWNLOAD ---

	/**
	 * Streams the stored file of a document inline (e.g. for an iframe PDF
	 * viewer).
	 *
	 * @param id document ID
	 * @return the file contents with the stored content type
	 * @throws ResponseStatusException 404 when the document, its file path,
	 *                                 or the object in storage is missing
	 */
	@GetMapping("/{id}/file")
	public ResponseEntity<InputStreamResource> getDocumentFile(@PathVariable UUID id) {
		// 1. Look up path in DB
		Document doc = documentRepository.findById(id)
				.orElseThrow(() -> new ResponseStatusException(HttpStatus.NOT_FOUND));

		if (doc.getFilePath() == null) {
			throw new ResponseStatusException(HttpStatus.NOT_FOUND, "No file attached");
		}

		// 2. Delegate retrieval to FileService
		try {
			FileService.S3FileDownload download = fileService.downloadFile(doc.getFilePath());

			return ResponseEntity.ok()
					.contentType(MediaType.parseMediaType(download.contentType()))
					// Sanitize the filename: a quote or CR/LF in it would break the
					// header or allow response-header injection.
					.header(HttpHeaders.CONTENT_DISPOSITION,
							"inline; filename=\"" + sanitizeFilename(doc.getOriginalFilename()) + "\"")
					.body(download.resource());
		} catch (FileService.StorageFileNotFoundException e) {
			throw new ResponseStatusException(HttpStatus.NOT_FOUND, "File missing in storage");
		}
	}

	/**
	 * Replaces characters that are unsafe inside a quoted Content-Disposition
	 * filename (double quote, CR, LF) with underscores.
	 */
	private static String sanitizeFilename(String filename) {
		if (filename == null) {
			return "download";
		}
		return filename.replaceAll("[\"\\r\\n]", "_");
	}

	// --- METADATA ---

	/**
	 * Returns the document metadata, or 404 if the ID is unknown.
	 */
	@GetMapping("/{id}")
	public Document getDocument(@PathVariable UUID id) {
		return documentRepository.findById(id)
				.orElseThrow(() -> new ResponseStatusException(HttpStatus.NOT_FOUND));
	}

	/**
	 * Updates a document's metadata and optionally replaces its file.
	 * Requires WRITE_ALL permission.
	 *
	 * @param id   document ID
	 * @param dto  form fields bound automatically via {@code @ModelAttribute}
	 * @param file optional replacement file
	 * @return the updated document
	 */
	@PutMapping(value = "/{id}", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
	@RequirePermission(Permission.WRITE_ALL)
	public Document updateDocument(
			@PathVariable UUID id,
			@ModelAttribute DocumentUpdateDTO dto,
			@RequestPart(value = "file", required = false) MultipartFile file) {
		try {
			return documentService.updateDocument(id, dto, file);
		} catch (IOException e) {
			// Log the root cause before mapping to 500 — previously the
			// exception was discarded and impossible to diagnose.
			log.error("Datei-Upload für Dokument {} fehlgeschlagen", id, e);
			throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, "Fehler beim Upload");
		}
	}

	/**
	 * Full-text / filtered document search; all parameters are optional.
	 */
	@GetMapping("/search")
	public ResponseEntity<List<Document>> search(
			@RequestParam(required = false) String q,
			@RequestParam(required = false) LocalDate from,
			@RequestParam(required = false) LocalDate to,
			@RequestParam(required = false) UUID senderId,
			@RequestParam(required = false) UUID receiverId,
			@RequestParam(required = false, name = "tag") List<String> tags) {
		return ResponseEntity.ok(documentService.searchDocuments(q, from, to, senderId, receiverId, tags));
	}

	/**
	 * Returns all documents exchanged between two persons, optionally limited
	 * to a date range.
	 *
	 * @param dir sort direction for the document date: "ASC" or "DESC"
	 *            (case-insensitive); any other value yields 400 Bad Request
	 */
	@GetMapping("/conversation")
	public List<Document> getConversation(
			@RequestParam UUID senderId,
			@RequestParam UUID receiverId,
			@RequestParam(required = false) LocalDate from,
			@RequestParam(required = false) LocalDate to,
			@RequestParam(defaultValue = "DESC") String dir) {
		// 1. Default date bounds. Year 0000 is kept (rather than LocalDate.MIN)
		// because it stays inside the range typical SQL date columns accept.
		LocalDate dateFrom = (from != null) ? from : LocalDate.parse("0000-01-01");
		LocalDate dateTo = (to != null) ? to : LocalDate.now();

		// 2. Sorting. fromString is already case-insensitive; an invalid value
		// is reported as a client error instead of leaking a 500.
		Sort.Direction direction;
		try {
			direction = Sort.Direction.fromString(dir);
		} catch (IllegalArgumentException e) {
			throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "dir must be ASC or DESC");
		}
		Sort sort = Sort.by(direction, "documentDate");

		// 3. Query
		return documentRepository.findConversation(
				senderId, receiverId, dateFrom, dateTo, sort);
	}
}
|
||||
@@ -0,0 +1,71 @@
|
||||
package org.raddatz.familienarchiv.controller;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.raddatz.familienarchiv.dto.GroupDTO;
|
||||
import org.raddatz.familienarchiv.model.UserGroup;
|
||||
import org.raddatz.familienarchiv.repository.UserGroupRepository;
|
||||
import org.raddatz.familienarchiv.security.Permission;
|
||||
import org.raddatz.familienarchiv.security.RequirePermission;
|
||||
import org.raddatz.familienarchiv.service.UserService;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.bind.annotation.DeleteMapping;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
import org.springframework.web.bind.annotation.PatchMapping;
|
||||
import org.springframework.web.bind.annotation.PathVariable;
|
||||
import org.springframework.web.bind.annotation.PostMapping;
|
||||
import org.springframework.web.bind.annotation.RequestBody;
|
||||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
import org.springframework.web.server.ResponseStatusException;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
@RestController
|
||||
@RequestMapping("/groups")
|
||||
@RequirePermission(Permission.ADMIN_PERMISSION)
|
||||
@RequiredArgsConstructor
|
||||
public class GroupController {
|
||||
private final UserGroupRepository groupRepository;
|
||||
private final UserService userService;
|
||||
|
||||
@PostMapping("")
|
||||
public ResponseEntity<UserGroup> createGroup(@RequestBody GroupDTO dto) {
|
||||
UserGroup group = new UserGroup();
|
||||
group.setName(dto.getName());
|
||||
group.setPermissions(dto.getPermissions()); // Assuming entity has Set<String> or Set<Enum>
|
||||
return ResponseEntity.ok(groupRepository.save(group));
|
||||
}
|
||||
|
||||
@PatchMapping("/{id}")
|
||||
public ResponseEntity<UserGroup> updateGroup(@PathVariable UUID id, @RequestBody GroupDTO dto) {
|
||||
UserGroup group = groupRepository.findById(id)
|
||||
.orElseThrow(() -> new ResponseStatusException(HttpStatus.NOT_FOUND));
|
||||
|
||||
if (dto.getName() != null)
|
||||
group.setName(dto.getName());
|
||||
if (dto.getPermissions() != null)
|
||||
group.setPermissions(dto.getPermissions());
|
||||
|
||||
return ResponseEntity.ok(groupRepository.save(group));
|
||||
}
|
||||
|
||||
@DeleteMapping("/{id}")
|
||||
public ResponseEntity<?> deleteGroup(@PathVariable UUID id) {
|
||||
try {
|
||||
// Optional: Check if users are assigned before deleting
|
||||
groupRepository.deleteById(id);
|
||||
return ResponseEntity.ok().build();
|
||||
} catch (Exception e) {
|
||||
return ResponseEntity.badRequest().body("Gruppe konnte nicht gelöscht werden.");
|
||||
}
|
||||
}
|
||||
|
||||
@GetMapping("")
|
||||
public ResponseEntity<List<UserGroup>> getAllGroups() {
|
||||
return ResponseEntity.ok(userService.getAllGroups());
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,48 @@
|
||||
package org.raddatz.familienarchiv.controller;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.model.Person;
|
||||
import org.raddatz.familienarchiv.repository.DocumentRepository;
|
||||
import org.raddatz.familienarchiv.repository.PersonRepository;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
import org.springframework.web.bind.annotation.PathVariable;
|
||||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RequestParam;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
import org.springframework.web.server.ResponseStatusException;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
@RestController
|
||||
@RequestMapping("/api/persons")
|
||||
@RequiredArgsConstructor
|
||||
public class PersonController {
|
||||
|
||||
private final PersonRepository personRepository;
|
||||
private final DocumentRepository documentRepository;
|
||||
|
||||
@GetMapping
|
||||
public ResponseEntity<List<Person>> getPersons(@RequestParam(required = false) String q) {
|
||||
if (q != null && !q.isBlank()) {
|
||||
return ResponseEntity.ok(personRepository.searchByName(q));
|
||||
}
|
||||
return ResponseEntity.ok(personRepository.findAllByOrderByLastNameAscFirstNameAsc());
|
||||
}
|
||||
|
||||
|
||||
@GetMapping("/{id}")
|
||||
public Person getPerson(@PathVariable UUID id) {
|
||||
return personRepository.findById(id)
|
||||
.orElseThrow(() -> new ResponseStatusException(HttpStatus.NOT_FOUND, "Person nicht gefunden"));
|
||||
}
|
||||
|
||||
@GetMapping("/{id}/documents")
|
||||
public List<Document> getPersonDocuments(@PathVariable UUID id) {
|
||||
return documentRepository.findBySenderId(id);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,69 @@
|
||||
package org.raddatz.familienarchiv.controller;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.model.Tag;
|
||||
import org.raddatz.familienarchiv.repository.DocumentRepository;
|
||||
import org.raddatz.familienarchiv.repository.TagRepository;
|
||||
import org.raddatz.familienarchiv.security.Permission;
|
||||
import org.raddatz.familienarchiv.security.RequirePermission;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.security.access.prepost.PreAuthorize;
|
||||
import org.springframework.web.bind.annotation.DeleteMapping;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
import org.springframework.web.bind.annotation.PathVariable;
|
||||
import org.springframework.web.bind.annotation.PutMapping;
|
||||
import org.springframework.web.bind.annotation.RequestBody;
|
||||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RequestParam;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
|
||||
import jakarta.transaction.Transactional;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
@RestController
|
||||
@RequestMapping("/api/tags")
|
||||
@RequiredArgsConstructor
|
||||
public class TagController {
|
||||
private final TagRepository tagRepository;
|
||||
private final DocumentRepository documentRepository;
|
||||
|
||||
// Rename Tag
|
||||
@PutMapping("/{id}")
|
||||
@RequirePermission(Permission.ADMIN_TAG)
|
||||
public ResponseEntity<Tag> updateTag(@PathVariable UUID id, @RequestBody Map<String, String> payload) {
|
||||
Tag tag = tagRepository.findById(id).orElseThrow();
|
||||
tag.setName(payload.get("name"));
|
||||
return ResponseEntity.ok(tagRepository.save(tag));
|
||||
}
|
||||
|
||||
// Delete Tag
|
||||
@DeleteMapping("/{id}")
|
||||
@RequirePermission(Permission.ADMIN_TAG)
|
||||
@Transactional
|
||||
public ResponseEntity<Void> deleteTag(@PathVariable UUID id) {
|
||||
Tag tag = tagRepository.findById(id).orElseThrow();
|
||||
|
||||
// Remove tag from all documents first to prevent FK constraint errors
|
||||
List<Document> documents = documentRepository.findByTags_Id(id);
|
||||
for (Document doc : documents) {
|
||||
doc.getTags().remove(tag);
|
||||
documentRepository.save(doc);
|
||||
}
|
||||
|
||||
tagRepository.delete(tag);
|
||||
return ResponseEntity.ok().build();
|
||||
}
|
||||
|
||||
@GetMapping
|
||||
public List<String> searchTags(@RequestParam(defaultValue = "") String query) {
|
||||
return tagRepository.findByNameContainingIgnoreCase(query)
|
||||
.stream()
|
||||
.map(Tag::getName)
|
||||
.toList();
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,75 @@
|
||||
package org.raddatz.familienarchiv.controller;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.raddatz.familienarchiv.dto.CreateUserRequest;
|
||||
import org.raddatz.familienarchiv.model.AppUser;
|
||||
import org.raddatz.familienarchiv.security.Permission;
|
||||
import org.raddatz.familienarchiv.security.RequirePermission;
|
||||
import org.raddatz.familienarchiv.service.UserService;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.security.core.Authentication;
|
||||
import org.springframework.web.bind.annotation.DeleteMapping;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
import org.springframework.web.bind.annotation.PathVariable;
|
||||
import org.springframework.web.bind.annotation.PostMapping;
|
||||
import org.springframework.web.bind.annotation.RequestBody;
|
||||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
|
||||
@RestController
|
||||
@RequestMapping("/api/")
|
||||
@AllArgsConstructor
|
||||
public class UserController {
|
||||
private UserService userService;
|
||||
|
||||
@GetMapping("users/me")
|
||||
public ResponseEntity<AppUser> getCurrentUser(Authentication authentication) {
|
||||
if (authentication == null || !authentication.isAuthenticated()) {
|
||||
return ResponseEntity.status(HttpStatus.UNAUTHORIZED).build();
|
||||
}
|
||||
|
||||
// Fetch full user object from DB to get latest permissions/groups
|
||||
AppUser user = userService.findByUsername(authentication.getName());
|
||||
|
||||
// Security: Remove password before sending
|
||||
user.setPassword(null);
|
||||
|
||||
return ResponseEntity.ok(user);
|
||||
}
|
||||
|
||||
@GetMapping("/users")
|
||||
@RequirePermission(Permission.ADMIN_USER)
|
||||
public ResponseEntity<List<AppUser>> getAllUsers() {
|
||||
List<AppUser> users = userService.getAllUsers();
|
||||
return ResponseEntity.ok(users);
|
||||
}
|
||||
|
||||
@PostMapping("/users")
|
||||
@RequirePermission(Permission.ADMIN_USER)
|
||||
public ResponseEntity<?> createUser(@RequestBody CreateUserRequest request) {
|
||||
try {
|
||||
AppUser createdUser = userService.createUserOrUpdate(request);
|
||||
return ResponseEntity.ok(createdUser);
|
||||
} catch (IllegalArgumentException e) {
|
||||
return ResponseEntity.badRequest().body(e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@DeleteMapping("/users/{id}")
|
||||
@RequirePermission(Permission.ADMIN_USER)
|
||||
public ResponseEntity<?> deleteUser(@PathVariable UUID id) {
|
||||
try {
|
||||
userService.deleteUser(id);
|
||||
return ResponseEntity.ok().build();
|
||||
} catch (IllegalArgumentException e) {
|
||||
return ResponseEntity.badRequest().body(e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,15 @@
|
||||
package org.raddatz.familienarchiv.dto;
|
||||
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
/**
 * Payload for creating (or updating) a user via UserController.
 * Consumed by UserService.createUserOrUpdate and AppUser.updateFromRequest.
 */
@Data
public class CreateUserRequest {
    private String username;
    private String email;
    // Initial password in plain text; encoded with the PasswordEncoder
    // before being stored (see AppUser.updateFromRequest)
    private String initialPassword;
    // Which groups the user should be assigned to
    private List<UUID> groupIds;
}
|
||||
@@ -0,0 +1,20 @@
|
||||
package org.raddatz.familienarchiv.dto;
|
||||
|
||||
import java.time.LocalDate;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
/**
 * Form-data payload for updating a document's metadata
 * (bound via @ModelAttribute in DocumentController.updateDocument).
 */
@Data
public class DocumentUpdateDTO {
    private String title;
    private LocalDate documentDate;
    private String location;
    private String documentLocation;
    private String transcription;
    private String summary;
    private UUID senderId;
    private List<UUID> receiverIds;
    // Tags as a single string — presumably a delimited list that the
    // service splits; TODO confirm the expected separator in DocumentService
    private String tags;
}
|
||||
@@ -0,0 +1,11 @@
|
||||
package org.raddatz.familienarchiv.dto;
|
||||
|
||||
import java.util.Set;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
/**
 * Payload for creating/patching a user group (GroupController).
 * Null fields mean "leave unchanged" for PATCH requests.
 */
@Data
public class GroupDTO {
    private String name;
    private Set<String> permissions; // e.g., ["ADMIN", "READ_ALL"]
}
|
||||
@@ -0,0 +1,77 @@
|
||||
package org.raddatz.familienarchiv.model;
|
||||
|
||||
import jakarta.persistence.*;
|
||||
import lombok.*;
|
||||
|
||||
import org.hibernate.annotations.CreationTimestamp;
|
||||
import org.raddatz.familienarchiv.dto.CreateUserRequest;
|
||||
import org.springframework.security.crypto.password.PasswordEncoder;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
|
||||
@Entity
@Table(name = "users") // table name in Postgres
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class AppUser {

    @Id
    @GeneratedValue(strategy = GenerationType.UUID)
    private UUID id;

    @Column(unique = true, nullable = false)
    private String username;

    // Stored encrypted (BCrypt); WRITE_ONLY keeps the hash out of
    // serialized JSON responses while still accepting it on input
    @Column(nullable = false)
    @JsonProperty(access = JsonProperty.Access.WRITE_ONLY)
    private String password;

    private String email;

    // Allows locking a user without deleting the row
    @Builder.Default
    private boolean enabled = true;

    // A user can belong to several groups; EAGER so group permissions are
    // available outside an open persistence session (used by hasPermission)
    @ManyToMany(fetch = FetchType.EAGER)
    @JoinTable(name = "users_groups", joinColumns = @JoinColumn(name = "user_id"), inverseJoinColumns = @JoinColumn(name = "group_id"))
    @Builder.Default
    private Set<UserGroup> groups = new HashSet<>();

    @CreationTimestamp
    private LocalDateTime createdAt;

    /**
     * True if any of the user's groups grants the given permission string.
     */
    public boolean hasPermission(String permission) {
        if (groups == null || groups.isEmpty()) {
            return false;
        }
        return this.groups.stream().anyMatch(group -> group.getPermissions().contains(permission));

    }

    /**
     * Applies the non-blank fields of a create/update request to this user.
     * The initial password is encoded before storage; groups are only
     * replaced when a non-empty set is supplied.
     */
    public AppUser updateFromRequest(CreateUserRequest request, PasswordEncoder passwordEncoder, Set<UserGroup> groups) {
        if (request.getUsername() != null && !request.getUsername().isBlank()) {
            this.username = request.getUsername();
        }

        if (request.getEmail() != null && !request.getEmail().isBlank()) {
            this.email = request.getEmail();
        }

        if (request.getInitialPassword() != null && !request.getInitialPassword().isBlank()) {
            this.password = passwordEncoder.encode(request.getInitialPassword());
        }

        if (groups != null && !groups.isEmpty()) {
            this.groups = groups;
        }

        return this;
    }
}
|
||||
@@ -0,0 +1,78 @@
|
||||
package org.raddatz.familienarchiv.model;
|
||||
|
||||
import jakarta.persistence.*;
|
||||
import lombok.*;
|
||||
import org.hibernate.annotations.CreationTimestamp;
|
||||
import org.hibernate.annotations.UpdateTimestamp;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
|
||||
import java.time.LocalDate;
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
|
||||
@Entity
|
||||
@Table(name = "documents")
|
||||
@Data // Lombok: Generiert Getter, Setter, ToString, etc.
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
@Builder
|
||||
public class Document {
|
||||
|
||||
@Id
|
||||
@GeneratedValue(strategy = GenerationType.UUID)
|
||||
private UUID id;
|
||||
|
||||
@Column(nullable = false)
|
||||
private String title;
|
||||
|
||||
// Der Dateiname im S3 (oder Pfad)
|
||||
@Column(name = "file_path")
|
||||
private String filePath;
|
||||
|
||||
// Originaler Dateiname beim Upload (z.B. "Brief_Oma_1940.pdf")
|
||||
@Column(name = "original_filename", nullable = false)
|
||||
private String originalFilename;
|
||||
|
||||
// Der Status hilft uns beim Excel-Import Workflow
|
||||
@Enumerated(EnumType.STRING)
|
||||
@Column(nullable = false)
|
||||
private DocumentStatus status;
|
||||
|
||||
// Metadaten aus dem Brief
|
||||
@Column(name = "meta_date")
|
||||
private LocalDate documentDate; // Wann wurde der Brief geschrieben?
|
||||
|
||||
@Column(name = "meta_location")
|
||||
private String location;
|
||||
|
||||
@Column(name = "meta_document_location")
|
||||
private String documentLocation;
|
||||
|
||||
@Column(columnDefinition = "TEXT")
|
||||
private String transcription;
|
||||
|
||||
@Column(columnDefinition = "TEXT")
|
||||
private String summary;
|
||||
|
||||
@CreationTimestamp
|
||||
@Column(updatable = false)
|
||||
private LocalDateTime createdAt;
|
||||
|
||||
@UpdateTimestamp
|
||||
private LocalDateTime updatedAt;
|
||||
|
||||
@ManyToMany
|
||||
@JoinTable(name = "document_receivers", joinColumns = @JoinColumn(name = "document_id"), inverseJoinColumns = @JoinColumn(name = "person_id"))
|
||||
private Set<Person> receivers = new HashSet<>();
|
||||
|
||||
@ManyToOne
|
||||
@JoinColumn(name = "sender_id")
|
||||
private Person sender;
|
||||
|
||||
@ManyToMany(fetch = FetchType.EAGER)
|
||||
@JoinTable(name = "document_tags", joinColumns = @JoinColumn(name = "document_id"), inverseJoinColumns = @JoinColumn(name = "tag_id"))
|
||||
private Set<Tag> tags = new HashSet<>();
|
||||
}
|
||||
@@ -0,0 +1,9 @@
|
||||
package org.raddatz.familienarchiv.model;
|
||||
|
||||
/**
 * Workflow state of a document in the Excel-import pipeline.
 * Persisted as a string column on Document.
 */
public enum DocumentStatus {
    PLACEHOLDER, // created via Excel import, file not yet uploaded
    UPLOADED, // file present, metadata missing/unverified
    TRANSCRIBED, // text has been typed up
    REVIEWED, // fully processed
    ARCHIVED // deleted/archived
}
|
||||
@@ -0,0 +1,27 @@
|
||||
package org.raddatz.familienarchiv.model;
|
||||
|
||||
import jakarta.persistence.*;
|
||||
import lombok.*;
|
||||
|
||||
import java.util.UUID;
|
||||
@Entity
@Table(name = "persons")
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class Person {

    @Id
    @GeneratedValue(strategy = GenerationType.UUID)
    private UUID id;

    @Column(nullable = false)
    private String firstName;

    @Column(nullable = false)
    private String lastName;

    // Optional: alias used by the search (e.g. "Opa Hans");
    // matched in PersonRepository.searchByName
    private String alias;
}
|
||||
@@ -0,0 +1,20 @@
|
||||
package org.raddatz.familienarchiv.model;
|
||||
|
||||
import java.util.UUID;
|
||||
|
||||
import jakarta.persistence.*;
|
||||
import lombok.*;
|
||||
|
||||
/**
 * A keyword attached to documents (many-to-many via document_tags).
 * NOTE(review): no @Table annotation, so the table name follows the
 * provider's default naming strategy — confirm against the schema.
 */
@Entity
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class Tag {
    @Id
    @GeneratedValue(strategy = GenerationType.UUID)
    private UUID id;

    @Column(unique = true, nullable = false)
    private String name;
}
|
||||
@@ -0,0 +1,31 @@
|
||||
package org.raddatz.familienarchiv.model;
|
||||
|
||||
import jakarta.persistence.*;
|
||||
import lombok.*;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
|
||||
@Entity
@Table(name = "user_groups")
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class UserGroup {

    @Id
    @GeneratedValue(strategy = GenerationType.UUID)
    private UUID id;

    @Column(unique = true, nullable = false)
    private String name; // e.g. "Admins", "Familie Müller"

    // Permissions are stored as plain strings (e.g. "READ_ALL",
    // "WRITE_DOCUMENTS") in a separate collection table; EAGER so they
    // are available when checked in the security layer
    @ElementCollection(fetch = FetchType.EAGER)
    @CollectionTable(name = "group_permissions", joinColumns = @JoinColumn(name = "group_id"))
    @Column(name = "permission")
    @Builder.Default
    private Set<String> permissions = new HashSet<>();
}
|
||||
@@ -0,0 +1,14 @@
|
||||
package org.raddatz.familienarchiv.repository;
|
||||
|
||||
|
||||
import org.raddatz.familienarchiv.model.AppUser;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
|
||||
/**
 * CRUD repository for AppUser; username lookup is used by the
 * authentication layer (CustomUserDetailsService).
 */
@Repository
public interface AppUserRepository extends JpaRepository<AppUser, UUID> {
    Optional<AppUser> findByUsername(String username);
}
|
||||
@@ -0,0 +1,51 @@
|
||||
package org.raddatz.familienarchiv.repository;
|
||||
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.model.DocumentStatus;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
|
||||
import org.springframework.data.jpa.repository.Query;
|
||||
import org.springframework.data.repository.query.Param;
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
import java.time.LocalDate;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
|
||||
@Repository
public interface DocumentRepository extends JpaRepository<Document, UUID>, JpaSpecificationExecutor<Document> {

    // Finds a document by its original upload file name.
    // Important for matching rows during Excel import & file upload.
    Optional<Document> findByOriginalFilename(String originalFilename);

    // All documents in a given workflow status,
    // e.g. to list open "PLACEHOLDER" entries.
    List<Document> findByStatus(DocumentStatus status);

    // Efficiently checks whether a file name already exists (true/false).
    boolean existsByOriginalFilename(String originalFilename);

    List<Document> findBySenderId(UUID senderId);

    List<Document> findByTags_Id(UUID tagId);

    // Letters exchanged between two persons within a date range.
    @Query("SELECT DISTINCT d FROM Document d " +
            "JOIN d.receivers r " +
            "WHERE " +
            // Logic: (sender A to receiver B) OR (sender B to receiver A)
            "((d.sender.id = :person1 AND r.id = :person2) " +
            " OR " +
            " (d.sender.id = :person2 AND r.id = :person1)) " +
            // AND the date is in range
            "AND d.documentDate BETWEEN :from AND :to")
    List<Document> findConversation(
            @Param("person1") UUID person1,
            @Param("person2") UUID person2,
            @Param("from") LocalDate from,
            @Param("to") LocalDate to,
            Sort sort);

}
|
||||
@@ -0,0 +1,89 @@
|
||||
package org.raddatz.familienarchiv.repository;
|
||||
|
||||
import jakarta.persistence.criteria.*;
|
||||
import java.time.LocalDate;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.model.Tag;
|
||||
import org.springframework.data.jpa.domain.Specification;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
public class DocumentSpecifications {
|
||||
|
||||
// Filtert nach Text (in Titel, Dateiname oder Transkription)
|
||||
public static Specification<Document> hasText(String text) {
|
||||
return (root, query, cb) -> {
|
||||
if (!StringUtils.hasText(text))
|
||||
return null;
|
||||
String likePattern = "%" + text.toLowerCase() + "%";
|
||||
|
||||
return cb.or(
|
||||
cb.like(cb.lower(root.get("title")), likePattern),
|
||||
cb.like(cb.lower(root.get("originalFilename")), likePattern),
|
||||
cb.like(cb.lower(root.get("transcription")), likePattern),
|
||||
cb.like(cb.lower(root.get("location")), likePattern));
|
||||
};
|
||||
}
|
||||
|
||||
// Filtert nach Sender (für Schriftwechsel wichtig)
|
||||
public static Specification<Document> hasSender(UUID personId) {
|
||||
return (root, query, cb) -> personId == null ? null : cb.equal(root.get("sender").get("id"), personId);
|
||||
}
|
||||
|
||||
// Filtert nach Empfänger (nutzt Join)
|
||||
public static Specification<Document> hasReceiver(UUID personId) {
|
||||
return (root, query, cb) -> {
|
||||
if (personId == null)
|
||||
return null;
|
||||
return cb.equal(root.join("receivers").get("id"), personId);
|
||||
};
|
||||
}
|
||||
|
||||
// Filtert nach Zeitraum
|
||||
public static Specification<Document> isBetween(LocalDate start, LocalDate end) {
|
||||
return (root, query, cb) -> {
|
||||
if (start == null && end == null)
|
||||
return null;
|
||||
if (start != null && end != null)
|
||||
return cb.between(root.get("documentDate"), start, end);
|
||||
if (start != null)
|
||||
return cb.greaterThanOrEqualTo(root.get("documentDate"), start);
|
||||
return cb.lessThanOrEqualTo(root.get("documentDate"), end);
|
||||
};
|
||||
}
|
||||
|
||||
// Filtert nach Schlagworten (UND-Verknüpfung)
|
||||
public static Specification<Document> hasTags(List<String> tags) {
|
||||
return (root, query, cb) -> {
|
||||
System.out.println(tags);
|
||||
if (tags == null || tags.isEmpty())
|
||||
return null;
|
||||
|
||||
List<Predicate> predicates = new ArrayList<>();
|
||||
|
||||
for (String tagName : tags) {
|
||||
if (!StringUtils.hasText(tagName)) continue;
|
||||
|
||||
// Subquery erstellen: "Gibt es für dieses Dokument (root.id) einen Tag mit dem Namen X?"
|
||||
// Dies stellt sicher, dass ALLE Tags vorhanden sein müssen (AND Logik).
|
||||
Subquery<Long> subquery = query.subquery(Long.class);
|
||||
Root<Document> subRoot = subquery.from(Document.class);
|
||||
Join<Document, Tag> subTags = subRoot.join("tags");
|
||||
|
||||
subquery.select(subRoot.get("id"))
|
||||
.where(
|
||||
cb.equal(subRoot.get("id"), root.get("id")), // Korrelation zum Haupt-Query
|
||||
cb.equal(cb.lower(subTags.get("name")), tagName.trim().toLowerCase())
|
||||
);
|
||||
|
||||
predicates.add(cb.exists(subquery));
|
||||
}
|
||||
|
||||
return cb.and(predicates.toArray(new Predicate[0]));
|
||||
};
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,25 @@
|
||||
package org.raddatz.familienarchiv.repository;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.raddatz.familienarchiv.model.Person;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import org.springframework.data.jpa.repository.Query;
|
||||
import org.springframework.data.repository.query.Param;
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
@Repository
public interface PersonRepository extends JpaRepository<Person, UUID> {

    // Searches the query string in "first last", "last first" or the alias
    // (all case-insensitive), sorted by last name then first name.
    // NOTE(review): LOWER(p.alias) on a null alias yields null, so persons
    // without an alias are still matched via the name clauses.
    @Query("SELECT p FROM Person p WHERE " +
            "LOWER(CONCAT(p.firstName,' ',p.lastName)) LIKE LOWER(CONCAT('%', :query, '%')) OR " +
            "LOWER(CONCAT(p.lastName, ' ', p.firstName)) LIKE LOWER(CONCAT('%', :query, '%')) OR " +
            "LOWER(p.alias) LIKE LOWER(CONCAT('%', :query, '%')) " +
            "ORDER BY p.lastName ASC, p.firstName ASC")
    List<Person> searchByName(@Param("query") String query);

    // Helper: load everything sorted (used when the query is empty)
    List<Person> findAllByOrderByLastNameAscFirstNameAsc();
}
|
||||
@@ -0,0 +1,13 @@
|
||||
package org.raddatz.familienarchiv.repository;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.raddatz.familienarchiv.model.Tag;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
|
||||
/**
 * CRUD repository for Tag. Exact-name lookup supports deduplication;
 * substring lookup backs the tag autocomplete in TagController.
 */
public interface TagRepository extends JpaRepository<Tag, UUID> {
    Optional<Tag> findByNameIgnoreCase(String name);
    List<Tag> findByNameContainingIgnoreCase(String name);
}
|
||||
@@ -0,0 +1,13 @@
|
||||
package org.raddatz.familienarchiv.repository;
|
||||
|
||||
import org.raddatz.familienarchiv.model.UserGroup;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
|
||||
/**
 * CRUD repository for UserGroup; name lookup used when assigning
 * users to groups by name.
 */
@Repository
public interface UserGroupRepository extends JpaRepository<UserGroup, UUID> {
    Optional<UserGroup> findByName(String name);
}
|
||||
@@ -0,0 +1,10 @@
|
||||
package org.raddatz.familienarchiv.security;
|
||||
|
||||
/**
 * Application permissions, checked by PermissionAspect against the
 * authenticated user's granted authorities (compared by enum name).
 */
public enum Permission {
    // Every authenticated user has read rights
    WRITE_ALL,
    ADMIN,
    ADMIN_USER,
    ADMIN_TAG,
    ADMIN_PERMISSION
}
|
||||
@@ -0,0 +1,61 @@
|
||||
package org.raddatz.familienarchiv.security;
|
||||
|
||||
import org.aspectj.lang.ProceedingJoinPoint;
|
||||
import org.aspectj.lang.annotation.Around;
|
||||
import org.aspectj.lang.annotation.Aspect;
|
||||
import org.aspectj.lang.reflect.MethodSignature;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.security.core.Authentication;
|
||||
import org.springframework.security.core.context.SecurityContextHolder;
|
||||
import org.springframework.stereotype.Component;
|
||||
import org.springframework.web.server.ResponseStatusException;
|
||||
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
@Aspect
|
||||
@Component
|
||||
public class PermissionAspect {
|
||||
|
||||
// Triggers if the Method OR the Class has the annotation
|
||||
@Around("@annotation(org.raddatz.familienarchiv.security.RequirePermission) || @within(org.raddatz.familienarchiv.security.RequirePermission)")
|
||||
public Object checkPermission(ProceedingJoinPoint joinPoint) throws Throwable {
|
||||
|
||||
// 1. Find the annotation (Priority: Method > Class)
|
||||
RequirePermission permission = getAnnotation(joinPoint);
|
||||
|
||||
if (permission != null) {
|
||||
validateUserAccess(permission.value());
|
||||
}
|
||||
|
||||
return joinPoint.proceed();
|
||||
}
|
||||
|
||||
private RequirePermission getAnnotation(ProceedingJoinPoint joinPoint) {
|
||||
MethodSignature signature = (MethodSignature) joinPoint.getSignature();
|
||||
Method method = signature.getMethod();
|
||||
|
||||
// Check Method Level
|
||||
RequirePermission methodAnnotation = method.getAnnotation(RequirePermission.class);
|
||||
if (methodAnnotation != null) {
|
||||
return methodAnnotation;
|
||||
}
|
||||
|
||||
// Check Class Level
|
||||
return joinPoint.getTarget().getClass().getAnnotation(RequirePermission.class);
|
||||
}
|
||||
|
||||
private void validateUserAccess(Permission requiredPerm) {
|
||||
Authentication auth = SecurityContextHolder.getContext().getAuthentication();
|
||||
|
||||
if (auth == null || !auth.isAuthenticated()) {
|
||||
throw new ResponseStatusException(HttpStatus.UNAUTHORIZED, "Nicht authentifiziert");
|
||||
}
|
||||
|
||||
boolean hasPermission = auth.getAuthorities().stream()
|
||||
.anyMatch(a -> a.getAuthority().equals(requiredPerm.name()));
|
||||
|
||||
if (!hasPermission) {
|
||||
throw new ResponseStatusException(HttpStatus.FORBIDDEN, "Fehlende Berechtigung: " + requiredPerm);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
package org.raddatz.familienarchiv.security;
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
@Target({ElementType.METHOD, ElementType.TYPE})
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
public @interface RequirePermission {
|
||||
Permission value(); // e.g. "ADMIN" or "WRITE_ALL"
|
||||
}
|
||||
@@ -0,0 +1,42 @@
|
||||
package org.raddatz.familienarchiv.service;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import org.raddatz.familienarchiv.model.AppUser;
|
||||
import org.raddatz.familienarchiv.repository.AppUserRepository;
|
||||
import org.springframework.security.core.authority.SimpleGrantedAuthority;
|
||||
import org.springframework.security.core.userdetails.User;
|
||||
import org.springframework.security.core.userdetails.UserDetails;
|
||||
import org.springframework.security.core.userdetails.UserDetailsService;
|
||||
import org.springframework.security.core.userdetails.UsernameNotFoundException;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
public class CustomUserDetailsService implements UserDetailsService {
|
||||
|
||||
private final AppUserRepository userRepository;
|
||||
|
||||
@Override
|
||||
public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
|
||||
AppUser appUser = userRepository.findByUsername(username)
|
||||
.orElseThrow(() -> new UsernameNotFoundException("User nicht gefunden: " + username));
|
||||
|
||||
// Wir sammeln alle Permissions aus allen Gruppen des Users
|
||||
var authorities = appUser.getGroups().stream()
|
||||
.flatMap(group -> group.getPermissions().stream())
|
||||
.map(SimpleGrantedAuthority::new)
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
// Rückgabe des Standard Spring Security User Objekts
|
||||
return new User(
|
||||
appUser.getUsername(),
|
||||
appUser.getPassword(),
|
||||
appUser.isEnabled(),
|
||||
true, true, true,
|
||||
authorities
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,240 @@
|
||||
package org.raddatz.familienarchiv.service;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
import org.raddatz.familienarchiv.dto.DocumentUpdateDTO;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.model.DocumentStatus;
|
||||
import org.raddatz.familienarchiv.model.Person;
|
||||
import org.raddatz.familienarchiv.model.Tag;
|
||||
import org.raddatz.familienarchiv.repository.DocumentRepository;
|
||||
import org.raddatz.familienarchiv.repository.PersonRepository;
|
||||
import org.raddatz.familienarchiv.repository.TagRepository;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.jpa.domain.Specification;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
import org.springframework.web.multipart.MultipartFile;
|
||||
import org.springframework.web.server.ResponseStatusException;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.time.LocalDate;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
import static org.raddatz.familienarchiv.repository.DocumentSpecifications.*;
|
||||
|
||||
@Service
|
||||
@RequiredArgsConstructor // Lombok: Erzeugt Constructor für 'final' Felder (Dependency Injection)
|
||||
@Slf4j // Lombok: Logging
|
||||
public class DocumentService {
|
||||
|
||||
private final DocumentRepository documentRepository;
|
||||
private final PersonRepository personRepository;
|
||||
private final FileService fileService;
|
||||
private final TagRepository tagRepository;
|
||||
|
||||
/**
|
||||
* Lädt eine Datei hoch.
|
||||
* - Prüft, ob ein Eintrag (aus Excel) schon existiert.
|
||||
* - Wenn JA: Aktualisiert Status und verknüpft Datei.
|
||||
* - Wenn NEIN: Erstellt neuen Eintrag (wartet auf Metadaten).
|
||||
*/
|
||||
@Transactional
|
||||
public Document storeDocument(MultipartFile file) throws IOException {
|
||||
String originalFilename = file.getOriginalFilename();
|
||||
|
||||
// 1. Check for existing record
|
||||
Optional<Document> existingDoc = documentRepository.findByOriginalFilename(originalFilename);
|
||||
Document document;
|
||||
|
||||
if (existingDoc.isPresent()) {
|
||||
document = existingDoc.get();
|
||||
} else {
|
||||
document = Document.builder()
|
||||
.originalFilename(originalFilename)
|
||||
.title(originalFilename)
|
||||
.status(DocumentStatus.UPLOADED)
|
||||
.build();
|
||||
}
|
||||
|
||||
// 2. Delegate Storage to FileService
|
||||
String s3Key = fileService.uploadFile(file, originalFilename);
|
||||
|
||||
// 3. Update Database
|
||||
document.setFilePath(s3Key);
|
||||
if (document.getStatus() == DocumentStatus.PLACEHOLDER) {
|
||||
document.setStatus(DocumentStatus.UPLOADED);
|
||||
}
|
||||
|
||||
return documentRepository.save(document);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public Document updateDocument(UUID id, DocumentUpdateDTO dto, MultipartFile newFile) throws IOException {
|
||||
Document doc = documentRepository.findById(id)
|
||||
.orElseThrow(() -> new ResponseStatusException(HttpStatus.NOT_FOUND, "Dokument nicht gefunden"));
|
||||
|
||||
// 1. Einfache Felder Update
|
||||
doc.setTitle(dto.getTitle());
|
||||
doc.setDocumentDate(dto.getDocumentDate());
|
||||
doc.setLocation(dto.getLocation());
|
||||
doc.setTranscription(dto.getTranscription());
|
||||
doc.setSummary(dto.getSummary());
|
||||
doc.setDocumentLocation(dto.getDocumentLocation());
|
||||
|
||||
List<String> tags = new ArrayList<>();
|
||||
if (dto.getTags() != null && !dto.getTags().isBlank()) {
|
||||
// Split by comma, strip whitespace
|
||||
tags = Arrays.stream(dto.getTags().split(","))
|
||||
.map(String::trim)
|
||||
.filter(s -> !s.isEmpty())
|
||||
.toList();
|
||||
}
|
||||
updateDocumentTags(doc.getId(), tags);
|
||||
|
||||
// 2. Sender verknüpfen
|
||||
if (dto.getSenderId() != null) {
|
||||
Person sender = personRepository.findById(dto.getSenderId()).orElse(null);
|
||||
doc.setSender(sender);
|
||||
} else {
|
||||
doc.setSender(null);
|
||||
}
|
||||
|
||||
// 3. Empfänger verknüpfen
|
||||
if (dto.getReceiverIds() != null && !dto.getReceiverIds().isEmpty()) {
|
||||
List<Person> receivers = personRepository.findAllById(dto.getReceiverIds());
|
||||
|
||||
doc.setReceivers(new HashSet<>(receivers));
|
||||
} else {
|
||||
doc.getReceivers().clear(); // Alle entfernen
|
||||
}
|
||||
|
||||
// 4. Datei austauschen (nur wenn eine neue ausgewählt wurde)
|
||||
if (newFile != null && !newFile.isEmpty()) {
|
||||
// Alte Datei könnte man hier theoretisch löschen (optional)
|
||||
|
||||
// Neue Datei hochladen
|
||||
String s3Key = fileService.uploadFile(newFile, newFile.getOriginalFilename());
|
||||
|
||||
doc.setFilePath(s3Key);
|
||||
doc.setOriginalFilename(newFile.getOriginalFilename());
|
||||
doc.setStatus(DocumentStatus.UPLOADED);
|
||||
}
|
||||
|
||||
return documentRepository.save(doc);
|
||||
}
|
||||
|
||||
public Document updateDocumentTags(UUID docId, List<String> tagNames) {
|
||||
Document doc = documentRepository.findById(docId).orElseThrow();
|
||||
|
||||
Set<Tag> newTags = new HashSet<>();
|
||||
|
||||
for (String name : tagNames) {
|
||||
// Clean the string
|
||||
String cleanName = name.trim();
|
||||
if (cleanName.isEmpty())
|
||||
continue;
|
||||
|
||||
// Find existing or Create new
|
||||
Tag tag = tagRepository.findByNameIgnoreCase(cleanName)
|
||||
.orElseGet(() -> tagRepository.save(Tag.builder().name(cleanName).build()));
|
||||
|
||||
newTags.add(tag);
|
||||
}
|
||||
|
||||
doc.setTags(newTags);
|
||||
return documentRepository.save(doc);
|
||||
}
|
||||
|
||||
/**
|
||||
* Hilfsmethode: Erstellt Platzhalter (wird später vom Excel-Service genutzt)
|
||||
*/
|
||||
@Transactional
|
||||
public Document createPlaceholder(String filenameFromExcel) {
|
||||
if (documentRepository.existsByOriginalFilename(filenameFromExcel)) {
|
||||
log.info("Platzhalter für {} existiert schon, überspringe.", filenameFromExcel);
|
||||
return documentRepository.findByOriginalFilename(filenameFromExcel).get();
|
||||
}
|
||||
|
||||
Document doc = Document.builder()
|
||||
.originalFilename(filenameFromExcel)
|
||||
.title(filenameFromExcel)
|
||||
.status(DocumentStatus.PLACEHOLDER)
|
||||
.build();
|
||||
|
||||
return documentRepository.save(doc);
|
||||
}
|
||||
|
||||
/**
|
||||
* Wird vom Excel-Service aufgerufen für jede Zeile.
|
||||
*/
|
||||
@Transactional
|
||||
public void updateOrCreateFromExcel(String filename, LocalDate date, String location, String transcription,
|
||||
boolean overwrite) {
|
||||
Optional<Document> existingOpt = documentRepository.findByOriginalFilename(filename);
|
||||
|
||||
Document doc;
|
||||
if (existingOpt.isPresent()) {
|
||||
doc = existingOpt.get();
|
||||
log.info("Excel-Import: Aktualisiere Metadaten für {}", filename);
|
||||
|
||||
// Logik: Nur überschreiben, wenn Feld leer ist ODER overwrite=true gesetzt ist
|
||||
if (doc.getDocumentDate() == null || overwrite) {
|
||||
doc.setDocumentDate(date);
|
||||
}
|
||||
if (doc.getLocation() == null || overwrite) {
|
||||
doc.setLocation(location);
|
||||
}
|
||||
if (doc.getTranscription() == null || overwrite) {
|
||||
doc.setTranscription(transcription);
|
||||
}
|
||||
} else {
|
||||
log.info("Excel-Import: Erstelle Platzhalter für {}", filename);
|
||||
doc = Document.builder()
|
||||
.originalFilename(filename)
|
||||
.title(filename) // Vorläufiger Titel
|
||||
.status(DocumentStatus.PLACEHOLDER)
|
||||
.documentDate(date)
|
||||
.location(location)
|
||||
.transcription(transcription)
|
||||
.build();
|
||||
}
|
||||
documentRepository.save(doc);
|
||||
}
|
||||
|
||||
// 1. Allgemeine Suche (für das Suchfeld im Frontend)
|
||||
public List<Document> searchDocuments(String text, LocalDate from, LocalDate to, UUID sender, UUID reciever, List<String> tags) {
|
||||
log.info("Tags", tags);
|
||||
Specification<Document> spec = Specification.where(hasText(text))
|
||||
.and(isBetween(from, to))
|
||||
.and(hasSender(sender))
|
||||
.and(hasReceiver(reciever))
|
||||
.and(hasTags(tags));
|
||||
|
||||
// Immer sortiert nach Datum
|
||||
return documentRepository.findAll(spec, Sort.by(Sort.Direction.ASC, "documentDate"));
|
||||
}
|
||||
|
||||
// 2. SPEZIALITÄT: Der Schriftwechsel
|
||||
// Findet alle Briefe ZWISCHEN zwei Personen (egal wer Sender/Empfänger war)
|
||||
public List<Document> getConversation(UUID personA, UUID personB) {
|
||||
|
||||
// Fall 1: A schreibt an B
|
||||
Specification<Document> aToB = Specification.where(hasSender(personA)).and(hasReceiver(personB));
|
||||
|
||||
// Fall 2: B schreibt an A
|
||||
Specification<Document> bToA = Specification.where(hasSender(personB)).and(hasReceiver(personA));
|
||||
|
||||
// Wir wollen (A->B) ODER (B->A)
|
||||
Specification<Document> conversation = aToB.or(bToA);
|
||||
|
||||
return documentRepository.findAll(conversation, Sort.by(Sort.Direction.ASC, "documentDate"));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,81 @@
|
||||
package org.raddatz.familienarchiv.service;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.poi.ss.usermodel.*;
|
||||
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.web.multipart.MultipartFile;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.time.LocalDate;
|
||||
import java.time.ZoneId;
|
||||
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@Slf4j
|
||||
public class ExcelService {
|
||||
|
||||
private final DocumentService documentService;
|
||||
|
||||
// Wir lesen die Konfiguration ein (mit Defaults, falls nichts in properties steht)
|
||||
@Value("${app.import.excel.col.filename:0}")
|
||||
private int colFilename;
|
||||
|
||||
@Value("${app.import.excel.col.date:1}")
|
||||
private int colDate;
|
||||
|
||||
@Value("${app.import.excel.col.location:2}")
|
||||
private int colLocation;
|
||||
|
||||
@Value("${app.import.excel.col.transcription:3}")
|
||||
private int colTranscription;
|
||||
|
||||
public void importExcel(MultipartFile file, boolean overwrite) {
|
||||
try (InputStream is = file.getInputStream();
|
||||
Workbook workbook = new XSSFWorkbook(is)) {
|
||||
|
||||
Sheet sheet = workbook.getSheetAt(0);
|
||||
log.info("Starte Excel Import mit Mapping: File={}, Date={}, Loc={}, Text={}",
|
||||
colFilename, colDate, colLocation, colTranscription);
|
||||
|
||||
for (int i = 1; i <= sheet.getLastRowNum(); i++) {
|
||||
Row row = sheet.getRow(i);
|
||||
if (row == null) continue;
|
||||
|
||||
// Wir nutzen jetzt die dynamischen Index-Variablen
|
||||
String filename = getCellValueAsString(row.getCell(colFilename));
|
||||
if (filename == null || filename.isBlank()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
LocalDate date = null;
|
||||
Cell dateCell = row.getCell(colDate);
|
||||
if (dateCell != null && dateCell.getCellType() == CellType.NUMERIC && DateUtil.isCellDateFormatted(dateCell)) {
|
||||
date = dateCell.getDateCellValue().toInstant()
|
||||
.atZone(ZoneId.systemDefault()).toLocalDate();
|
||||
}
|
||||
|
||||
String location = getCellValueAsString(row.getCell(colLocation));
|
||||
String transcription = getCellValueAsString(row.getCell(colTranscription));
|
||||
|
||||
documentService.updateOrCreateFromExcel(filename, date, location, transcription, overwrite);
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("Fehler beim Excel Import", e);
|
||||
throw new RuntimeException("Excel konnte nicht verarbeitet werden: " + e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
private String getCellValueAsString(Cell cell) {
|
||||
if (cell == null) return null;
|
||||
return switch (cell.getCellType()) {
|
||||
case STRING -> cell.getStringCellValue();
|
||||
case NUMERIC -> String.valueOf((int) cell.getNumericCellValue());
|
||||
case BOOLEAN -> String.valueOf(cell.getBooleanCellValue());
|
||||
default -> "";
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,101 @@
|
||||
package org.raddatz.familienarchiv.service;
|
||||
|
||||
import software.amazon.awssdk.core.ResponseInputStream;
|
||||
import software.amazon.awssdk.core.sync.RequestBody;
|
||||
import software.amazon.awssdk.services.s3.S3Client;
|
||||
import software.amazon.awssdk.services.s3.model.*;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.web.multipart.MultipartFile;
|
||||
import org.springframework.core.io.InputStreamResource;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.UUID;
|
||||
|
||||
@Service
|
||||
public class FileService {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(FileService.class);
|
||||
|
||||
private final S3Client s3Client;
|
||||
private final String bucketName;
|
||||
|
||||
public FileService(S3Client s3Client, @Value("${app.s3.bucket}") String bucketName) {
|
||||
this.s3Client = s3Client;
|
||||
this.bucketName = bucketName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Uploads a file to S3/MinIO and returns the generated object key.
|
||||
*/
|
||||
public String uploadFile(MultipartFile file, String originalFilename) throws IOException {
|
||||
// Generate secure unique path: "documents/UUID_filename"
|
||||
String s3Key = "documents/" + UUID.randomUUID() + "_" + originalFilename;
|
||||
|
||||
try {
|
||||
PutObjectRequest putObjectRequest = PutObjectRequest.builder()
|
||||
.bucket(bucketName)
|
||||
.key(s3Key)
|
||||
.contentType(file.getContentType())
|
||||
.build();
|
||||
|
||||
s3Client.putObject(putObjectRequest,
|
||||
RequestBody.fromInputStream(file.getInputStream(), file.getSize()));
|
||||
|
||||
log.info("Uploaded file to S3: {}", s3Key);
|
||||
return s3Key;
|
||||
} catch (S3Exception e) {
|
||||
log.error("S3 Upload Error", e);
|
||||
throw new IOException("Failed to upload file to storage", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Downloads a file stream from S3/MinIO.
|
||||
* Returns a wrapper containing the stream and content type.
|
||||
*/
|
||||
public S3FileDownload downloadFile(String s3Key) {
|
||||
try {
|
||||
GetObjectRequest getObjectRequest = GetObjectRequest.builder()
|
||||
.bucket(bucketName)
|
||||
.key(s3Key)
|
||||
.build();
|
||||
|
||||
ResponseInputStream<GetObjectResponse> s3Object = s3Client.getObject(getObjectRequest);
|
||||
|
||||
// 1. Versuche Content-Type von S3 zu bekommen
|
||||
String contentType = s3Object.response().contentType();
|
||||
|
||||
// 2. FIX: Wenn S3 "octet-stream" sagt (oder null ist), raten wir anhand der Endung
|
||||
if (contentType == null || contentType.isEmpty() || contentType.equals("application/octet-stream")) {
|
||||
String keyLower = s3Key.toLowerCase();
|
||||
if (keyLower.endsWith(".pdf")) {
|
||||
contentType = "application/pdf";
|
||||
} else if (keyLower.endsWith(".jpg") || keyLower.endsWith(".jpeg")) {
|
||||
contentType = "image/jpeg";
|
||||
} else if (keyLower.endsWith(".png")) {
|
||||
contentType = "image/png";
|
||||
} else {
|
||||
contentType = "application/octet-stream"; // Fallback
|
||||
}
|
||||
}
|
||||
|
||||
return new S3FileDownload(new InputStreamResource(s3Object), contentType);
|
||||
|
||||
} catch (NoSuchKeyException e) {
|
||||
throw new StorageFileNotFoundException("File not found in storage: " + s3Key);
|
||||
} catch (S3Exception e) {
|
||||
throw new RuntimeException("Storage Error: " + e.getMessage());
|
||||
}
|
||||
}
|
||||
// Helper Record to carry the stream and metadata back to the controller
|
||||
public record S3FileDownload(InputStreamResource resource, String contentType) {}
|
||||
|
||||
// Custom Exception
|
||||
public static class StorageFileNotFoundException extends RuntimeException {
|
||||
public StorageFileNotFoundException(String message) { super(message); }
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,170 @@
|
||||
package org.raddatz.familienarchiv.service;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.poi.ss.usermodel.*;
|
||||
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
|
||||
import org.raddatz.familienarchiv.model.Document;
|
||||
import org.raddatz.familienarchiv.model.DocumentStatus;
|
||||
import org.raddatz.familienarchiv.repository.DocumentRepository;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
import software.amazon.awssdk.core.sync.RequestBody;
|
||||
import software.amazon.awssdk.services.s3.S3Client;
|
||||
import software.amazon.awssdk.services.s3.model.PutObjectRequest;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.time.LocalDate;
|
||||
import java.time.ZoneId;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@Slf4j
|
||||
public class MassImportService {
|
||||
|
||||
private final DocumentRepository documentRepository;
|
||||
private final S3Client s3Client;
|
||||
|
||||
@Value("${app.s3.bucket}")
|
||||
private String bucketName;
|
||||
|
||||
// Konfiguration der Spalten (wie im ExcelService)
|
||||
@Value("${app.import.excel.col.filename:0}") private int colFilename;
|
||||
@Value("${app.import.excel.col.date:1}") private int colDate;
|
||||
@Value("${app.import.excel.col.location:2}") private int colLocation;
|
||||
@Value("${app.import.excel.col.transcription:3}") private int colTranscription;
|
||||
|
||||
private static final String IMPORT_DIR = "/import";
|
||||
|
||||
public String runImport() {
|
||||
try {
|
||||
// 1. Excel finden
|
||||
File excelFile = findExcelFile();
|
||||
log.info("Starte Massenimport aus: {}", excelFile.getAbsolutePath());
|
||||
|
||||
// 2. Excel verarbeiten
|
||||
int processed = processExcel(excelFile);
|
||||
|
||||
return "Import abgeschlossen. " + processed + " Dokumente verarbeitet.";
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("Massenimport fehlgeschlagen", e);
|
||||
return "Fehler: " + e.getMessage();
|
||||
}
|
||||
}
|
||||
|
||||
private File findExcelFile() throws IOException {
|
||||
try (Stream<Path> files = Files.list(Paths.get(IMPORT_DIR))) {
|
||||
return files.filter(p -> p.toString().endsWith(".xlsx"))
|
||||
.findFirst()
|
||||
.orElseThrow(() -> new RuntimeException("Keine .xlsx Datei in " + IMPORT_DIR + " gefunden!"))
|
||||
.toFile();
|
||||
}
|
||||
}
|
||||
|
||||
private int processExcel(File excelFile) throws IOException {
|
||||
int count = 0;
|
||||
try (FileInputStream fis = new FileInputStream(excelFile);
|
||||
Workbook workbook = new XSSFWorkbook(fis)) {
|
||||
|
||||
Sheet sheet = workbook.getSheetAt(0);
|
||||
|
||||
// Wir nehmen an: Spalte "FilePath" im Excel ist RELATIV zum Import-Ordner
|
||||
// ODER: Wir suchen die Datei rekursiv, wenn nur der Name angegeben ist.
|
||||
|
||||
for (int i = 1; i <= sheet.getLastRowNum(); i++) {
|
||||
Row row = sheet.getRow(i);
|
||||
if (row == null) continue;
|
||||
|
||||
String filename = getCellValue(row.getCell(colFilename));
|
||||
if (filename == null || filename.isBlank()) continue;
|
||||
|
||||
// Datei auf der Festplatte suchen
|
||||
Optional<File> fileOnDisk = findFileRecursive(filename);
|
||||
|
||||
if (fileOnDisk.isPresent()) {
|
||||
importSingleDocument(row, fileOnDisk.get(), filename);
|
||||
count++;
|
||||
} else {
|
||||
log.warn("Datei aus Excel nicht gefunden: {}", filename);
|
||||
}
|
||||
}
|
||||
}
|
||||
return count;
|
||||
}
|
||||
|
||||
@Transactional
|
||||
protected void importSingleDocument(Row row, File file, String originalFilename) {
|
||||
// Metadaten lesen
|
||||
LocalDate date = null;
|
||||
Cell dateCell = row.getCell(colDate);
|
||||
if (dateCell != null && dateCell.getCellType() == CellType.NUMERIC) {
|
||||
date = dateCell.getDateCellValue().toInstant().atZone(ZoneId.systemDefault()).toLocalDate();
|
||||
}
|
||||
String location = getCellValue(row.getCell(colLocation));
|
||||
String transcription = getCellValue(row.getCell(colTranscription));
|
||||
|
||||
// Prüfen ob schon da
|
||||
Optional<Document> existing = documentRepository.findByOriginalFilename(originalFilename);
|
||||
if (existing.isPresent() && existing.get().getStatus() != DocumentStatus.PLACEHOLDER) {
|
||||
log.info("Dokument {} existiert bereits, überspringe.", originalFilename);
|
||||
return;
|
||||
}
|
||||
|
||||
// Upload zu S3
|
||||
String s3Key = "documents/" + UUID.randomUUID() + "_" + file.getName();
|
||||
try {
|
||||
s3Client.putObject(PutObjectRequest.builder()
|
||||
.bucket(bucketName)
|
||||
.key(s3Key)
|
||||
.build(),
|
||||
RequestBody.fromFile(file));
|
||||
} catch (Exception e) {
|
||||
log.error("S3 Upload Fehler für " + file.getName(), e);
|
||||
return; // Abbruch für dieses Dokument
|
||||
}
|
||||
|
||||
// DB Speichern
|
||||
Document doc = existing.orElse(Document.builder()
|
||||
.originalFilename(originalFilename)
|
||||
.title(originalFilename)
|
||||
.build());
|
||||
|
||||
doc.setFilePath(s3Key);
|
||||
doc.setStatus(DocumentStatus.UPLOADED); // Jetzt ist es da!
|
||||
doc.setDocumentDate(date);
|
||||
doc.setLocation(location);
|
||||
doc.setTranscription(transcription);
|
||||
|
||||
documentRepository.save(doc);
|
||||
log.info("Importiert: {}", originalFilename);
|
||||
}
|
||||
|
||||
// Sucht Datei im gesamten /import Ordner (rekursiv)
|
||||
private Optional<File> findFileRecursive(String filename) {
|
||||
try (Stream<Path> walk = Files.walk(Paths.get(IMPORT_DIR))) {
|
||||
return walk.filter(p -> !Files.isDirectory(p))
|
||||
.filter(p -> p.getFileName().toString().equals(filename))
|
||||
.map(Path::toFile)
|
||||
.findFirst();
|
||||
} catch (IOException e) {
|
||||
return Optional.empty();
|
||||
}
|
||||
}
|
||||
|
||||
private String getCellValue(Cell cell) {
|
||||
if (cell == null) return null;
|
||||
if (cell.getCellType() == CellType.STRING) return cell.getStringCellValue();
|
||||
if (cell.getCellType() == CellType.NUMERIC) return String.valueOf((int)cell.getNumericCellValue());
|
||||
return "";
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,82 @@
|
||||
package org.raddatz.familienarchiv.service;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
import org.raddatz.familienarchiv.dto.CreateUserRequest;
|
||||
import org.raddatz.familienarchiv.model.AppUser;
|
||||
import org.raddatz.familienarchiv.model.UserGroup;
|
||||
import org.raddatz.familienarchiv.repository.AppUserRepository;
|
||||
import org.raddatz.familienarchiv.repository.UserGroupRepository;
|
||||
import org.springframework.security.crypto.password.PasswordEncoder;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@Slf4j
|
||||
public class UserService {
|
||||
|
||||
private final AppUserRepository userRepository;
|
||||
private final UserGroupRepository groupRepository;
|
||||
private final PasswordEncoder passwordEncoder;
|
||||
|
||||
@Transactional
|
||||
public AppUser createUserOrUpdate(CreateUserRequest request) {
|
||||
log.info("Versuche neuen User anzulegen: {}", request.getUsername());
|
||||
|
||||
Set<UserGroup> groups = new HashSet<>();
|
||||
if (request.getGroupIds() != null && !request.getGroupIds().isEmpty()) {
|
||||
List<UserGroup> foundGroups = groupRepository.findAllById(request.getGroupIds());
|
||||
groups.addAll(foundGroups);
|
||||
}
|
||||
log.info("GroupsIds {}", groups.toString());
|
||||
log.info("Groupds in DB {}", groupRepository.findAll().toString());
|
||||
|
||||
Optional<AppUser> dbUser = userRepository.findByUsername(request.getUsername());
|
||||
AppUser user;
|
||||
|
||||
if (dbUser.isPresent()) {
|
||||
log.info("Found user in DB. Will update.");
|
||||
user = dbUser.get().updateFromRequest(request, passwordEncoder, groups);
|
||||
} else {
|
||||
log.info("Creating new user.");
|
||||
user = AppUser.builder()
|
||||
.username(request.getUsername())
|
||||
.email(request.getEmail())
|
||||
.password(passwordEncoder.encode(request.getInitialPassword()))
|
||||
.groups(groups)
|
||||
.enabled(true)
|
||||
.build();
|
||||
}
|
||||
log.info("Saving new user {}", user.toString());
|
||||
return userRepository.save(user);
|
||||
}
|
||||
@Transactional
|
||||
public void deleteUser(UUID userId) {
|
||||
log.info("Delete user {}", userId);
|
||||
|
||||
AppUser user = userRepository.findById(userId)
|
||||
.orElseThrow(() -> new IllegalArgumentException(String.format("No User found for id %", userId)));
|
||||
userRepository.delete(user);
|
||||
}
|
||||
|
||||
public AppUser findByUsername(String username) {
|
||||
return userRepository.findByUsername(username).orElseThrow(
|
||||
() -> new IllegalArgumentException(String.format("No User found for userrname %", username)));
|
||||
}
|
||||
|
||||
public List<AppUser> getAllUsers() {
|
||||
return userRepository.findAll();
|
||||
}
|
||||
|
||||
public List<UserGroup> getAllGroups() {
|
||||
return groupRepository.findAll();
|
||||
}
|
||||
}
|
||||
28
workspaces/backend/src/main/resources/application.properties
Normal file
28
workspaces/backend/src/main/resources/application.properties
Normal file
@@ -0,0 +1,28 @@
|
||||
spring.application.name=Familienarchiv
|
||||
# --- Datenbank Konfiguration ---
|
||||
spring.datasource.url=${SPRING_DATASOURCE_URL}
|
||||
spring.datasource.username=${SPRING_DATASOURCE_USERNAME}
|
||||
spring.datasource.password=${SPRING_DATASOURCE_PASSWORD}
|
||||
spring.datasource.driver-class-name=org.postgresql.Driver
|
||||
|
||||
# JPA / Hibernate
|
||||
spring.jpa.hibernate.ddl-auto=create
|
||||
spring.jpa.properties.hibernate.dialect=org.hibernate.dialect.PostgreSQLDialect
|
||||
spring.jpa.show-sql=false
|
||||
# --- MinIO (S3) Konfiguration ---
|
||||
# Eigene Properties (werden wir in einer Config-Klasse auslesen)
|
||||
app.s3.endpoint=${S3_ENDPOINT}
|
||||
app.s3.access-key=${S3_ACCESS_KEY}
|
||||
app.s3.secret-key=${S3_SECRET_KEY}
|
||||
app.s3.bucket=${S3_BUCKET_NAME}
|
||||
app.s3.region=${S3_REGION}
|
||||
|
||||
# Upload Limits erhöhen (für große Scans)
|
||||
spring.servlet.multipart.max-file-size=50MB
|
||||
spring.servlet.multipart.max-request-size=50MB
|
||||
# --- Excel Import Mapping ---
|
||||
# Spaltenindex (0 = Spalte A, 1 = Spalte B, usw.)
|
||||
app.import.excel.col.filename=0
|
||||
app.import.excel.col.date=1
|
||||
app.import.excel.col.location=2
|
||||
app.import.excel.col.transcription=3
|
||||
@@ -0,0 +1,13 @@
|
||||
package org.raddatz.familienarchiv;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
|
||||
@SpringBootTest
|
||||
class FamilienarchivApplicationTests {
|
||||
|
||||
@Test
|
||||
void contextLoads() {
|
||||
}
|
||||
|
||||
}
|
||||
28
workspaces/backend/target/classes/application.properties
Normal file
28
workspaces/backend/target/classes/application.properties
Normal file
@@ -0,0 +1,28 @@
|
||||
spring.application.name=Familienarchiv
|
||||
# --- Datenbank Konfiguration ---
|
||||
spring.datasource.url=${SPRING_DATASOURCE_URL}
|
||||
spring.datasource.username=${SPRING_DATASOURCE_USERNAME}
|
||||
spring.datasource.password=${SPRING_DATASOURCE_PASSWORD}
|
||||
spring.datasource.driver-class-name=org.postgresql.Driver
|
||||
|
||||
# JPA / Hibernate
|
||||
spring.jpa.hibernate.ddl-auto=create
|
||||
spring.jpa.properties.hibernate.dialect=org.hibernate.dialect.PostgreSQLDialect
|
||||
spring.jpa.show-sql=false
|
||||
# --- MinIO (S3) Konfiguration ---
|
||||
# Eigene Properties (werden wir in einer Config-Klasse auslesen)
|
||||
app.s3.endpoint=${S3_ENDPOINT}
|
||||
app.s3.access-key=${S3_ACCESS_KEY}
|
||||
app.s3.secret-key=${S3_SECRET_KEY}
|
||||
app.s3.bucket=${S3_BUCKET_NAME}
|
||||
app.s3.region=${S3_REGION}
|
||||
|
||||
# Upload Limits erhöhen (für große Scans)
|
||||
spring.servlet.multipart.max-file-size=50MB
|
||||
spring.servlet.multipart.max-request-size=50MB
|
||||
# --- Excel Import Mapping ---
|
||||
# Spaltenindex (0 = Spalte A, 1 = Spalte B, usw.)
|
||||
app.import.excel.col.filename=0
|
||||
app.import.excel.col.date=1
|
||||
app.import.excel.col.location=2
|
||||
app.import.excel.col.transcription=3
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
26
workspaces/frontend/.gitignore
vendored
Normal file
26
workspaces/frontend/.gitignore
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
node_modules
|
||||
|
||||
# Output
|
||||
.output
|
||||
.vercel
|
||||
.netlify
|
||||
.wrangler
|
||||
/.svelte-kit
|
||||
/build
|
||||
|
||||
# OS
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
|
||||
# Env
|
||||
.env
|
||||
.env.*
|
||||
!.env.example
|
||||
!.env.test
|
||||
|
||||
# Vite
|
||||
vite.config.js.timestamp-*
|
||||
vite.config.ts.timestamp-*
|
||||
|
||||
# Paraglide
|
||||
src/lib/paraglide
|
||||
1
workspaces/frontend/.npmrc
Normal file
1
workspaces/frontend/.npmrc
Normal file
@@ -0,0 +1 @@
|
||||
engine-strict=true
|
||||
9
workspaces/frontend/.prettierignore
Normal file
9
workspaces/frontend/.prettierignore
Normal file
@@ -0,0 +1,9 @@
|
||||
# Package Managers
|
||||
package-lock.json
|
||||
pnpm-lock.yaml
|
||||
yarn.lock
|
||||
bun.lock
|
||||
bun.lockb
|
||||
|
||||
# Miscellaneous
|
||||
/static/
|
||||
18
workspaces/frontend/.prettierrc
Normal file
18
workspaces/frontend/.prettierrc
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"useTabs": true,
|
||||
"singleQuote": true,
|
||||
"trailingComma": "none",
|
||||
"printWidth": 100,
|
||||
"plugins": [
|
||||
"prettier-plugin-tailwindcss"
|
||||
],
|
||||
"overrides": [
|
||||
{
|
||||
"files": "*.svelte",
|
||||
"options": {
|
||||
"parser": "svelte"
|
||||
}
|
||||
}
|
||||
],
|
||||
"tailwindStylesheet": "./src/routes/layout.css"
|
||||
}
|
||||
5
workspaces/frontend/.vscode/settings.json
vendored
Normal file
5
workspaces/frontend/.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"files.associations": {
|
||||
"*.css": "tailwindcss"
|
||||
}
|
||||
}
|
||||
38
workspaces/frontend/README.md
Normal file
38
workspaces/frontend/README.md
Normal file
@@ -0,0 +1,38 @@
|
||||
# sv
|
||||
|
||||
Everything you need to build a Svelte project, powered by [`sv`](https://github.com/sveltejs/cli).
|
||||
|
||||
## Creating a project
|
||||
|
||||
If you're seeing this, you've probably already done this step. Congrats!
|
||||
|
||||
```sh
|
||||
# create a new project in the current directory
|
||||
npx sv create
|
||||
|
||||
# create a new project in my-app
|
||||
npx sv create my-app
|
||||
```
|
||||
|
||||
## Developing
|
||||
|
||||
Once you've created a project and installed dependencies with `npm install` (or `pnpm install` or `yarn`), start a development server:
|
||||
|
||||
```sh
|
||||
npm run dev
|
||||
|
||||
# or start the server and open the app in a new browser tab
|
||||
npm run dev -- --open
|
||||
```
|
||||
|
||||
## Building
|
||||
|
||||
To create a production version of your app:
|
||||
|
||||
```sh
|
||||
npm run build
|
||||
```
|
||||
|
||||
You can preview the production build with `npm run preview`.
|
||||
|
||||
> To deploy your app, you may need to install an [adapter](https://svelte.dev/docs/kit/adapters) for your target environment.
|
||||
43
workspaces/frontend/eslint.config.js
Normal file
43
workspaces/frontend/eslint.config.js
Normal file
@@ -0,0 +1,43 @@
|
||||
import prettier from 'eslint-config-prettier';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import { includeIgnoreFile } from '@eslint/compat';
|
||||
import js from '@eslint/js';
|
||||
import svelte from 'eslint-plugin-svelte';
|
||||
import { defineConfig } from 'eslint/config';
|
||||
import globals from 'globals';
|
||||
import ts from 'typescript-eslint';
|
||||
import svelteConfig from './svelte.config.js';
|
||||
|
||||
const gitignorePath = fileURLToPath(new URL('./.gitignore', import.meta.url));
|
||||
|
||||
export default defineConfig(
|
||||
includeIgnoreFile(gitignorePath),
|
||||
js.configs.recommended,
|
||||
...ts.configs.recommended,
|
||||
...svelte.configs.recommended,
|
||||
prettier,
|
||||
...svelte.configs.prettier,
|
||||
{
|
||||
languageOptions: {
|
||||
globals: { ...globals.browser, ...globals.node }
|
||||
},
|
||||
rules: { // typescript-eslint strongly recommend that you do not use the no-undef lint rule on TypeScript projects.
|
||||
// see: https://typescript-eslint.io/troubleshooting/faqs/eslint/#i-get-errors-from-the-no-undef-rule-about-global-variables-not-being-defined-even-though-there-are-no-typescript-errors
|
||||
"no-undef": 'off' }
|
||||
},
|
||||
{
|
||||
files: [
|
||||
'**/*.svelte',
|
||||
'**/*.svelte.ts',
|
||||
'**/*.svelte.js'
|
||||
],
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
projectService: true,
|
||||
extraFileExtensions: ['.svelte'],
|
||||
parser: ts.parser,
|
||||
svelteConfig
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
4
workspaces/frontend/messages/de.json
Normal file
4
workspaces/frontend/messages/de.json
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"$schema": "https://inlang.com/schema/inlang-message-format",
|
||||
"hello_world": "Hello, {name} from de!"
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user